lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 99b5d6ed0a4042942db840cb8502db0b8f3aca38 | 0 | Wisienkas/libgdx,Senth/libgdx,ttencate/libgdx,sarkanyi/libgdx,xranby/libgdx,SidneyXu/libgdx,josephknight/libgdx,djom20/libgdx,SidneyXu/libgdx,junkdog/libgdx,azakhary/libgdx,BlueRiverInteractive/libgdx,snovak/libgdx,noelsison2/libgdx,basherone/libgdxcn,katiepino/libgdx,libgdx/libgdx,stinsonga/libgdx,nrallakis/libgdx,del-sol/libgdx,petugez/libgdx,JFixby/libgdx,xranby/libgdx,PedroRomanoBarbosa/libgdx,xpenatan/libgdx-LWJGL3,fiesensee/libgdx,zommuter/libgdx,stickyd/libgdx,gouessej/libgdx,Dzamir/libgdx,nrallakis/libgdx,nelsonsilva/libgdx,ya7lelkom/libgdx,zhimaijoy/libgdx,kotcrab/libgdx,Gliby/libgdx,Gliby/libgdx,309746069/libgdx,josephknight/libgdx,nudelchef/libgdx,srwonka/libGdx,ya7lelkom/libgdx,ThiagoGarciaAlves/libgdx,yangweigbh/libgdx,davebaol/libgdx,kzganesan/libgdx,ztv/libgdx,flaiker/libgdx,FredGithub/libgdx,tommycli/libgdx,srwonka/libGdx,firefly2442/libgdx,tommycli/libgdx,jsjolund/libgdx,junkdog/libgdx,saltares/libgdx,samskivert/libgdx,tommycli/libgdx,billgame/libgdx,saqsun/libgdx,jasonwee/libgdx,alex-dorokhov/libgdx,Wisienkas/libgdx,haedri/libgdx-1,Xhanim/libgdx,snovak/libgdx,yangweigbh/libgdx,codepoke/libgdx,xpenatan/libgdx-LWJGL3,sinistersnare/libgdx,ya7lelkom/libgdx,noelsison2/libgdx,revo09/libgdx,hyvas/libgdx,alex-dorokhov/libgdx,sarkanyi/libgdx,ninoalma/libgdx,bladecoder/libgdx,petugez/libgdx,kzganesan/libgdx,collinsmith/libgdx,309746069/libgdx,nrallakis/libgdx,samskivert/libgdx,luischavez/libgdx,tell10glu/libgdx,Thotep/libgdx,andyvand/libgdx,katiepino/libgdx,flaiker/libgdx,sarkanyi/libgdx,toa5/libgdx,realitix/libgdx,codepoke/libgdx,del-sol/libgdx,jberberick/libgdx,1yvT0s/libgdx,js78/libgdx,azakhary/libgdx,antag99/libgdx,junkdog/libgdx,davebaol/libgdx,nelsonsilva/libgdx,toloudis/libgdx,codepoke/libgdx,cypherdare/libgdx,Deftwun/libgdx,UnluckyNinja/libgdx,MadcowD/libgdx,toa5/libgdx,1yvT0s/libgdx,saqsun/libgdx,TheAks999/libgdx,Thotep/libgdx,djom20/libgdx,jsjolund/libgdx,luischavez/libgdx,sjosegarc
ia/libgdx,js78/libgdx,xranby/libgdx,davebaol/libgdx,1yvT0s/libgdx,antag99/libgdx,firefly2442/libgdx,azakhary/libgdx,mumer92/libgdx,collinsmith/libgdx,realitix/libgdx,snovak/libgdx,KrisLee/libgdx,MathieuDuponchelle/gdx,czyzby/libgdx,nrallakis/libgdx,BlueRiverInteractive/libgdx,titovmaxim/libgdx,ttencate/libgdx,ttencate/libgdx,UnluckyNinja/libgdx,nooone/libgdx,MathieuDuponchelle/gdx,nelsonsilva/libgdx,BlueRiverInteractive/libgdx,Gliby/libgdx,Heart2009/libgdx,Arcnor/libgdx,MathieuDuponchelle/gdx,fwolff/libgdx,czyzby/libgdx,FyiurAmron/libgdx,FredGithub/libgdx,MovingBlocks/libgdx,luischavez/libgdx,bsmr-java/libgdx,nooone/libgdx,bladecoder/libgdx,nave966/libgdx,yangweigbh/libgdx,Zonglin-Li6565/libgdx,djom20/libgdx,KrisLee/libgdx,1yvT0s/libgdx,luischavez/libgdx,djom20/libgdx,PedroRomanoBarbosa/libgdx,alex-dorokhov/libgdx,haedri/libgdx-1,Wisienkas/libgdx,ninoalma/libgdx,KrisLee/libgdx,stinsonga/libgdx,xranby/libgdx,alireza-hosseini/libgdx,Xhanim/libgdx,alex-dorokhov/libgdx,kotcrab/libgdx,xranby/libgdx,FyiurAmron/libgdx,jberberick/libgdx,petugez/libgdx,lordjone/libgdx,cypherdare/libgdx,jasonwee/libgdx,zhimaijoy/libgdx,shiweihappy/libgdx,Arcnor/libgdx,gf11speed/libgdx,TheAks999/libgdx,alireza-hosseini/libgdx,titovmaxim/libgdx,zommuter/libgdx,libgdx/libgdx,bsmr-java/libgdx,bgroenks96/libgdx,andyvand/libgdx,jsjolund/libgdx,FredGithub/libgdx,ztv/libgdx,josephknight/libgdx,yangweigbh/libgdx,youprofit/libgdx,309746069/libgdx,zommuter/libgdx,kagehak/libgdx,bgroenks96/libgdx,shiweihappy/libgdx,JFixby/libgdx,nudelchef/libgdx,PedroRomanoBarbosa/libgdx,xoppa/libgdx,sinistersnare/libgdx,jasonwee/libgdx,309746069/libgdx,xoppa/libgdx,alireza-hosseini/libgdx,sinistersnare/libgdx,MovingBlocks/libgdx,del-sol/libgdx,lordjone/libgdx,nave966/libgdx,Dzamir/libgdx,MovingBlocks/libgdx,MikkelTAndersen/libgdx,petugez/libgdx,srwonka/libGdx,xpenatan/libgdx-LWJGL3,zommuter/libgdx,js78/libgdx,del-sol/libgdx,haedri/libgdx-1,davebaol/libgdx,bsmr-java/libgdx,hyvas/libgdx,nudelchef/libgdx,Gliby/libgdx,sarka
nyi/libgdx,thepullman/libgdx,SidneyXu/libgdx,petugez/libgdx,mumer92/libgdx,saltares/libgdx,yangweigbh/libgdx,copystudy/libgdx,MovingBlocks/libgdx,anserran/libgdx,BlueRiverInteractive/libgdx,jberberick/libgdx,jberberick/libgdx,mumer92/libgdx,fiesensee/libgdx,GreenLightning/libgdx,codepoke/libgdx,EsikAntony/libgdx,ztv/libgdx,firefly2442/libgdx,tommycli/libgdx,josephknight/libgdx,ryoenji/libgdx,curtiszimmerman/libgdx,zommuter/libgdx,antag99/libgdx,andyvand/libgdx,sjosegarcia/libgdx,azakhary/libgdx,ricardorigodon/libgdx,shiweihappy/libgdx,titovmaxim/libgdx,collinsmith/libgdx,tommyettinger/libgdx,katiepino/libgdx,fiesensee/libgdx,Badazdz/libgdx,nudelchef/libgdx,JDReutt/libgdx,Senth/libgdx,xoppa/libgdx,nelsonsilva/libgdx,copystudy/libgdx,gdos/libgdx,kotcrab/libgdx,fwolff/libgdx,zhimaijoy/libgdx,gouessej/libgdx,Xhanim/libgdx,JFixby/libgdx,ninoalma/libgdx,tommyettinger/libgdx,djom20/libgdx,revo09/libgdx,kotcrab/libgdx,youprofit/libgdx,designcrumble/libgdx,Deftwun/libgdx,realitix/libgdx,Gliby/libgdx,bsmr-java/libgdx,jasonwee/libgdx,Xhanim/libgdx,KrisLee/libgdx,samskivert/libgdx,nave966/libgdx,309746069/libgdx,lordjone/libgdx,jasonwee/libgdx,xpenatan/libgdx-LWJGL3,flaiker/libgdx,JDReutt/libgdx,jasonwee/libgdx,bladecoder/libgdx,copystudy/libgdx,xpenatan/libgdx-LWJGL3,MadcowD/libgdx,bsmr-java/libgdx,andyvand/libgdx,lordjone/libgdx,anserran/libgdx,collinsmith/libgdx,luischavez/libgdx,jsjolund/libgdx,Zonglin-Li6565/libgdx,JFixby/libgdx,309746069/libgdx,1yvT0s/libgdx,ttencate/libgdx,josephknight/libgdx,ya7lelkom/libgdx,titovmaxim/libgdx,ryoenji/libgdx,curtiszimmerman/libgdx,jasonwee/libgdx,GreenLightning/libgdx,titovmaxim/libgdx,gf11speed/libgdx,MathieuDuponchelle/gdx,FyiurAmron/libgdx,TheAks999/libgdx,sinistersnare/libgdx,toloudis/libgdx,UnluckyNinja/libgdx,nave966/libgdx,MikkelTAndersen/libgdx,ninoalma/libgdx,sarkanyi/libgdx,Zomby2D/libgdx,xoppa/libgdx,fwolff/libgdx,noelsison2/libgdx,jasonwee/libgdx,antag99/libgdx,nudelchef/libgdx,lordjone/libgdx,revo09/libgdx,sarkanyi/libgdx,ba
sherone/libgdxcn,cypherdare/libgdx,ThiagoGarciaAlves/libgdx,noelsison2/libgdx,antag99/libgdx,GreenLightning/libgdx,MovingBlocks/libgdx,gouessej/libgdx,tell10glu/libgdx,copystudy/libgdx,billgame/libgdx,djom20/libgdx,ryoenji/libgdx,nrallakis/libgdx,ricardorigodon/libgdx,MetSystem/libgdx,MathieuDuponchelle/gdx,gdos/libgdx,BlueRiverInteractive/libgdx,bgroenks96/libgdx,ya7lelkom/libgdx,1yvT0s/libgdx,collinsmith/libgdx,tommycli/libgdx,samskivert/libgdx,thepullman/libgdx,junkdog/libgdx,Zonglin-Li6565/libgdx,saltares/libgdx,EsikAntony/libgdx,andyvand/libgdx,flaiker/libgdx,Arcnor/libgdx,antag99/libgdx,alireza-hosseini/libgdx,EsikAntony/libgdx,revo09/libgdx,fiesensee/libgdx,czyzby/libgdx,toa5/libgdx,zhimaijoy/libgdx,thepullman/libgdx,Wisienkas/libgdx,MovingBlocks/libgdx,Senth/libgdx,stinsonga/libgdx,kzganesan/libgdx,thepullman/libgdx,revo09/libgdx,billgame/libgdx,kzganesan/libgdx,noelsison2/libgdx,ThiagoGarciaAlves/libgdx,nave966/libgdx,xoppa/libgdx,lordjone/libgdx,Badazdz/libgdx,srwonka/libGdx,zhimaijoy/libgdx,nooone/libgdx,fwolff/libgdx,del-sol/libgdx,andyvand/libgdx,bsmr-java/libgdx,MathieuDuponchelle/gdx,MadcowD/libgdx,snovak/libgdx,curtiszimmerman/libgdx,codepoke/libgdx,1yvT0s/libgdx,thepullman/libgdx,srwonka/libGdx,davebaol/libgdx,ninoalma/libgdx,Thotep/libgdx,djom20/libgdx,samskivert/libgdx,tommycli/libgdx,ninoalma/libgdx,youprofit/libgdx,NathanSweet/libgdx,JFixby/libgdx,MadcowD/libgdx,petugez/libgdx,Deftwun/libgdx,noelsison2/libgdx,Heart2009/libgdx,MetSystem/libgdx,NathanSweet/libgdx,firefly2442/libgdx,ztv/libgdx,shiweihappy/libgdx,EsikAntony/libgdx,katiepino/libgdx,jsjolund/libgdx,sjosegarcia/libgdx,UnluckyNinja/libgdx,NathanSweet/libgdx,SidneyXu/libgdx,ThiagoGarciaAlves/libgdx,MetSystem/libgdx,samskivert/libgdx,Dzamir/libgdx,Badazdz/libgdx,JDReutt/libgdx,toa5/libgdx,youprofit/libgdx,fiesensee/libgdx,bgroenks96/libgdx,MikkelTAndersen/libgdx,nrallakis/libgdx,nelsonsilva/libgdx,gdos/libgdx,ThiagoGarciaAlves/libgdx,kotcrab/libgdx,gf11speed/libgdx,sjosegarcia/libgdx,Blue
RiverInteractive/libgdx,gdos/libgdx,cypherdare/libgdx,firefly2442/libgdx,MathieuDuponchelle/gdx,js78/libgdx,czyzby/libgdx,firefly2442/libgdx,FredGithub/libgdx,PedroRomanoBarbosa/libgdx,GreenLightning/libgdx,Thotep/libgdx,Zonglin-Li6565/libgdx,TheAks999/libgdx,sarkanyi/libgdx,titovmaxim/libgdx,Zomby2D/libgdx,js78/libgdx,bgroenks96/libgdx,kagehak/libgdx,gf11speed/libgdx,JFixby/libgdx,zommuter/libgdx,saltares/libgdx,fwolff/libgdx,curtiszimmerman/libgdx,MathieuDuponchelle/gdx,MikkelTAndersen/libgdx,gdos/libgdx,xoppa/libgdx,srwonka/libGdx,NathanSweet/libgdx,Badazdz/libgdx,haedri/libgdx-1,gdos/libgdx,basherone/libgdxcn,Heart2009/libgdx,Heart2009/libgdx,Dzamir/libgdx,gouessej/libgdx,xranby/libgdx,stickyd/libgdx,ThiagoGarciaAlves/libgdx,FredGithub/libgdx,MovingBlocks/libgdx,katiepino/libgdx,basherone/libgdxcn,snovak/libgdx,Zonglin-Li6565/libgdx,Dzamir/libgdx,Deftwun/libgdx,xpenatan/libgdx-LWJGL3,bladecoder/libgdx,ttencate/libgdx,stickyd/libgdx,MetSystem/libgdx,ryoenji/libgdx,kzganesan/libgdx,zhimaijoy/libgdx,copystudy/libgdx,Arcnor/libgdx,codepoke/libgdx,jberberick/libgdx,designcrumble/libgdx,curtiszimmerman/libgdx,tell10glu/libgdx,ricardorigodon/libgdx,Thotep/libgdx,Heart2009/libgdx,KrisLee/libgdx,realitix/libgdx,designcrumble/libgdx,nooone/libgdx,Heart2009/libgdx,junkdog/libgdx,sinistersnare/libgdx,Senth/libgdx,nave966/libgdx,fwolff/libgdx,ThiagoGarciaAlves/libgdx,JDReutt/libgdx,srwonka/libGdx,shiweihappy/libgdx,alex-dorokhov/libgdx,srwonka/libGdx,katiepino/libgdx,ricardorigodon/libgdx,jsjolund/libgdx,shiweihappy/libgdx,kagehak/libgdx,bgroenks96/libgdx,luischavez/libgdx,kotcrab/libgdx,Senth/libgdx,toa5/libgdx,TheAks999/libgdx,EsikAntony/libgdx,stickyd/libgdx,JDReutt/libgdx,anserran/libgdx,KrisLee/libgdx,gouessej/libgdx,Arcnor/libgdx,saqsun/libgdx,KrisLee/libgdx,JDReutt/libgdx,billgame/libgdx,junkdog/libgdx,hyvas/libgdx,noelsison2/libgdx,shiweihappy/libgdx,bsmr-java/libgdx,basherone/libgdxcn,KrisLee/libgdx,1yvT0s/libgdx,EsikAntony/libgdx,MadcowD/libgdx,yangweigbh/libgdx,sa
qsun/libgdx,hyvas/libgdx,Xhanim/libgdx,nudelchef/libgdx,tommycli/libgdx,gouessej/libgdx,ninoalma/libgdx,ttencate/libgdx,xoppa/libgdx,youprofit/libgdx,youprofit/libgdx,MikkelTAndersen/libgdx,toloudis/libgdx,zhimaijoy/libgdx,gdos/libgdx,billgame/libgdx,josephknight/libgdx,junkdog/libgdx,bgroenks96/libgdx,snovak/libgdx,haedri/libgdx-1,anserran/libgdx,antag99/libgdx,Badazdz/libgdx,ryoenji/libgdx,luischavez/libgdx,ricardorigodon/libgdx,samskivert/libgdx,tell10glu/libgdx,ricardorigodon/libgdx,fiesensee/libgdx,Wisienkas/libgdx,FredGithub/libgdx,del-sol/libgdx,toa5/libgdx,Wisienkas/libgdx,anserran/libgdx,zommuter/libgdx,petugez/libgdx,djom20/libgdx,ryoenji/libgdx,mumer92/libgdx,GreenLightning/libgdx,katiepino/libgdx,codepoke/libgdx,xpenatan/libgdx-LWJGL3,Heart2009/libgdx,MadcowD/libgdx,stinsonga/libgdx,Badazdz/libgdx,js78/libgdx,alex-dorokhov/libgdx,tommyettinger/libgdx,basherone/libgdxcn,mumer92/libgdx,ttencate/libgdx,nrallakis/libgdx,xpenatan/libgdx-LWJGL3,Zomby2D/libgdx,jberberick/libgdx,junkdog/libgdx,toa5/libgdx,designcrumble/libgdx,saqsun/libgdx,gdos/libgdx,Senth/libgdx,petugez/libgdx,PedroRomanoBarbosa/libgdx,MetSystem/libgdx,fiesensee/libgdx,sjosegarcia/libgdx,alireza-hosseini/libgdx,designcrumble/libgdx,NathanSweet/libgdx,Thotep/libgdx,katiepino/libgdx,gf11speed/libgdx,collinsmith/libgdx,copystudy/libgdx,alex-dorokhov/libgdx,realitix/libgdx,bladecoder/libgdx,xoppa/libgdx,anserran/libgdx,Zomby2D/libgdx,TheAks999/libgdx,stinsonga/libgdx,PedroRomanoBarbosa/libgdx,FredGithub/libgdx,mumer92/libgdx,fwolff/libgdx,jberberick/libgdx,titovmaxim/libgdx,youprofit/libgdx,FredGithub/libgdx,czyzby/libgdx,lordjone/libgdx,tommyettinger/libgdx,kagehak/libgdx,js78/libgdx,Dzamir/libgdx,Gliby/libgdx,TheAks999/libgdx,FyiurAmron/libgdx,gouessej/libgdx,EsikAntony/libgdx,MovingBlocks/libgdx,haedri/libgdx-1,gf11speed/libgdx,Zonglin-Li6565/libgdx,BlueRiverInteractive/libgdx,Badazdz/libgdx,tommycli/libgdx,libgdx/libgdx,Thotep/libgdx,luischavez/libgdx,samskivert/libgdx,ttencate/libgdx,libgdx/l
ibgdx,Xhanim/libgdx,realitix/libgdx,Wisienkas/libgdx,FyiurAmron/libgdx,titovmaxim/libgdx,nooone/libgdx,Zomby2D/libgdx,SidneyXu/libgdx,sjosegarcia/libgdx,sjosegarcia/libgdx,thepullman/libgdx,MetSystem/libgdx,collinsmith/libgdx,thepullman/libgdx,saltares/libgdx,ninoalma/libgdx,gouessej/libgdx,flaiker/libgdx,yangweigbh/libgdx,JFixby/libgdx,anserran/libgdx,del-sol/libgdx,ztv/libgdx,stickyd/libgdx,SidneyXu/libgdx,TheAks999/libgdx,bsmr-java/libgdx,ztv/libgdx,stickyd/libgdx,mumer92/libgdx,sinistersnare/libgdx,nrallakis/libgdx,revo09/libgdx,flaiker/libgdx,alex-dorokhov/libgdx,youprofit/libgdx,FyiurAmron/libgdx,zhimaijoy/libgdx,billgame/libgdx,Zonglin-Li6565/libgdx,Deftwun/libgdx,andyvand/libgdx,Xhanim/libgdx,cypherdare/libgdx,firefly2442/libgdx,andyvand/libgdx,jsjolund/libgdx,saltares/libgdx,Dzamir/libgdx,PedroRomanoBarbosa/libgdx,Xhanim/libgdx,ricardorigodon/libgdx,del-sol/libgdx,toloudis/libgdx,ztv/libgdx,kzganesan/libgdx,MadcowD/libgdx,nooone/libgdx,jsjolund/libgdx,realitix/libgdx,hyvas/libgdx,copystudy/libgdx,sjosegarcia/libgdx,UnluckyNinja/libgdx,realitix/libgdx,kagehak/libgdx,Wisienkas/libgdx,GreenLightning/libgdx,ThiagoGarciaAlves/libgdx,copystudy/libgdx,js78/libgdx,czyzby/libgdx,saltares/libgdx,josephknight/libgdx,davebaol/libgdx,fwolff/libgdx,snovak/libgdx,yangweigbh/libgdx,libgdx/libgdx,hyvas/libgdx,billgame/libgdx,nudelchef/libgdx,MadcowD/libgdx,kagehak/libgdx,MetSystem/libgdx,Deftwun/libgdx,kagehak/libgdx,ryoenji/libgdx,kagehak/libgdx,sarkanyi/libgdx,kotcrab/libgdx,saltares/libgdx,tell10glu/libgdx,saqsun/libgdx,shiweihappy/libgdx,GreenLightning/libgdx,MetSystem/libgdx,stickyd/libgdx,collinsmith/libgdx,designcrumble/libgdx,designcrumble/libgdx,nelsonsilva/libgdx,toloudis/libgdx,alireza-hosseini/libgdx,UnluckyNinja/libgdx,saqsun/libgdx,noelsison2/libgdx,lordjone/libgdx,zommuter/libgdx,kotcrab/libgdx,Gliby/libgdx,ya7lelkom/libgdx,flaiker/libgdx,mumer92/libgdx,thepullman/libgdx,jberberick/libgdx,gf11speed/libgdx,tell10glu/libgdx,tell10glu/libgdx,hyvas/libgdx,Senth/l
ibgdx,ya7lelkom/libgdx,EsikAntony/libgdx,tommyettinger/libgdx,alireza-hosseini/libgdx,BlueRiverInteractive/libgdx,tell10glu/libgdx,MikkelTAndersen/libgdx,haedri/libgdx-1,azakhary/libgdx,nave966/libgdx,saqsun/libgdx,designcrumble/libgdx,ricardorigodon/libgdx,ztv/libgdx,azakhary/libgdx,nudelchef/libgdx,UnluckyNinja/libgdx,GreenLightning/libgdx,alireza-hosseini/libgdx,billgame/libgdx,MikkelTAndersen/libgdx,gf11speed/libgdx,UnluckyNinja/libgdx,309746069/libgdx,ya7lelkom/libgdx,stickyd/libgdx,MathieuDuponchelle/gdx,flaiker/libgdx,Badazdz/libgdx,FyiurAmron/libgdx,Deftwun/libgdx,curtiszimmerman/libgdx,curtiszimmerman/libgdx,hyvas/libgdx,haedri/libgdx-1,nave966/libgdx,Zonglin-Li6565/libgdx,SidneyXu/libgdx,czyzby/libgdx,toloudis/libgdx,firefly2442/libgdx,toloudis/libgdx,anserran/libgdx,JDReutt/libgdx,snovak/libgdx,Thotep/libgdx,xranby/libgdx,kzganesan/libgdx,309746069/libgdx,Dzamir/libgdx,czyzby/libgdx,JFixby/libgdx,fiesensee/libgdx,toloudis/libgdx,SidneyXu/libgdx,Arcnor/libgdx,antag99/libgdx,bgroenks96/libgdx,MikkelTAndersen/libgdx,PedroRomanoBarbosa/libgdx,revo09/libgdx,Gliby/libgdx,Senth/libgdx,xranby/libgdx,Heart2009/libgdx,revo09/libgdx,curtiszimmerman/libgdx,FyiurAmron/libgdx,Deftwun/libgdx,JDReutt/libgdx,codepoke/libgdx,josephknight/libgdx,toa5/libgdx | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.android;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.os.Debug;
import android.os.Handler;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Audio;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.backends.android.surfaceview.FillResolutionStrategy;
import com.badlogic.gdx.backends.android.surfaceview.GLSurfaceViewCupcake;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.GL11;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.GdxNativesLoader;
/** An implementation of the {@link Application} interface for Android. Create an {@link Activity} that derives from this class. In
* the {@link Activity#onCreate(Bundle)} method call the {@link #initialize(ApplicationListener, boolean)} method specifying the
* configuration for the GLSurfaceView.
*
* @author mzechner */
public class AndroidApplication extends Activity implements Application {
	static {
		// Load the libgdx native libraries once, before any backend class that relies on them is used.
		GdxNativesLoader.load();
	}

	// Backend implementations of the cross-platform libgdx modules, published globally via Gdx.* in initialize().
	protected AndroidGraphics graphics;
	protected AndroidInput input;
	protected AndroidAudio audio;
	protected AndroidFiles files;
	// The user-supplied game logic driven through the Activity lifecycle callbacks.
	protected ApplicationListener listener;
	// Handler created in initialize() on the calling (UI) thread; used by exit() to post finish().
	protected Handler handler;
	// True until the first onResume() has run; the first resume is skipped so the listener
	// does not receive resume() before its create() has happened on the render thread.
	protected boolean firstResume = true;
	// Runnables posted via postRunnable(), to be run on the render thread.
	// NOTE(review): executedRunnables is never touched in this file — presumably drained by
	// AndroidGraphics when executing the posted runnables; confirm before removing.
	protected final Array<Runnable> runnables = new Array<Runnable>();
	protected final Array<Runnable> executedRunnables = new Array<Runnable>();
	// Optional full wake lock keeping the screen on; only created when config.useWakelock is set.
	protected WakeLock wakeLock = null;
	// Current log verbosity; one of the LOG_* constants declared on Application.
	protected int logLevel = LOG_INFO;

	/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
	 * input, render via OpenGL and so on. If useGL20IfAvailable is set the AndroidApplication will try to create an OpenGL ES 2.0
	 * context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
	 * used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
	 * {@link Graphics#isGL20Available()} method. Uses a default {@link AndroidApplicationConfiguration}.
	 *
	 * @param listener the {@link ApplicationListener} implementing the program logic
	 * @param useGL2IfAvailable whether to use OpenGL ES 2.0 if its available. */
	public void initialize (ApplicationListener listener, boolean useGL2IfAvailable) {
		AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
		config.useGL20 = useGL2IfAvailable;
		initialize(listener, config);
	}

	/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
	 * input, render via OpenGL and so on. If config.useGL20 is set the AndroidApplication will try to create an OpenGL ES 2.0
	 * context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
	 * used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
	 * {@link Graphics#isGL20Available()} method. You can configure other aspects of the application with the rest of the fields in
	 * the {@link AndroidApplicationConfiguration} instance.
	 *
	 * @param listener the {@link ApplicationListener} implementing the program logic
	 * @param config the {@link AndroidApplicationConfiguration}, defining various settings of the application (use accelerometer,
	 *           etc.). */
	public void initialize (ApplicationListener listener, AndroidApplicationConfiguration config) {
		// Fall back to a strategy that fills the whole screen when none was configured.
		graphics = new AndroidGraphics(this, config, config.resolutionStrategy == null ? new FillResolutionStrategy()
			: config.resolutionStrategy);
		input = new AndroidInput(this, graphics.view, config);
		audio = new AndroidAudio(this);
		files = new AndroidFiles(this.getAssets(), this.getFilesDir().getAbsolutePath());
		this.listener = listener;
		this.handler = new Handler();

		// Publish the backend modules through the global Gdx facade.
		Gdx.app = this;
		Gdx.input = this.getInput();
		Gdx.audio = this.getAudio();
		Gdx.files = this.getFiles();
		Gdx.graphics = this.getGraphics();

		try {
			// Throws if content has already been set on this Activity; in that case we log
			// and keep whatever window decoration is already in place.
			requestWindowFeature(Window.FEATURE_NO_TITLE);
		} catch (Exception ex) {
			log("AndroidApplication", "Content already displayed, cannot request FEATURE_NO_TITLE", ex);
		}
		// Go fullscreen: set the fullscreen flag and clear the flag that would force a status bar.
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
		getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
		setContentView(graphics.getView(), createLayoutParams());
		createWakeLock(config);
	}

	/** Builds the layout params used for the GL view: fill the parent and center the view.
	 * NOTE(review): FILL_PARENT is the pre-API-8 name of MATCH_PARENT — kept here, presumably
	 * for compatibility with old Android versions; confirm the project's minimum API level. */
	protected FrameLayout.LayoutParams createLayoutParams () {
		FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(android.view.ViewGroup.LayoutParams.FILL_PARENT,
			android.view.ViewGroup.LayoutParams.FILL_PARENT);
		layoutParams.gravity = Gravity.CENTER;
		return layoutParams;
	}

	/** Creates (but does not acquire) a full wake lock when the configuration asks for one.
	 * The lock is acquired in {@link #onResume()} and released in {@link #onPause()}. */
	protected void createWakeLock (AndroidApplicationConfiguration config) {
		if (config.useWakelock) {
			PowerManager powerManager = (PowerManager)getSystemService(Context.POWER_SERVICE);
			wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK, "libgdx wakelock");
		}
	}

	/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
	 * input, render via OpenGL and so on. If useGL20IfAvailable is set the AndroidApplication will try to create an OpenGL ES 2.0
	 * context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
	 * used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
	 * {@link Graphics#isGL20Available()} method. Uses a default {@link AndroidApplicationConfiguration}.
	 * <p/>
	 * Note: you have to add the returned view to your layout!
	 *
	 * @param listener the {@link ApplicationListener} implementing the program logic
	 * @param useGL2IfAvailable whether to use OpenGL ES 2.0 if its available.
	 * @return the GLSurfaceView of the application */
	public View initializeForView (ApplicationListener listener, boolean useGL2IfAvailable) {
		AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
		config.useGL20 = useGL2IfAvailable;
		return initializeForView(listener, config);
	}

	/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
	 * input, render via OpenGL and so on. If config.useGL20 is set the AndroidApplication will try to create an OpenGL ES 2.0
	 * context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
	 * used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
	 * {@link Graphics#isGL20Available()} method. You can configure other aspects of the application with the rest of the fields in
	 * the {@link AndroidApplicationConfiguration} instance.
	 * <p/>
	 * Note: you have to add the returned view to your layout!
	 *
	 * @param listener the {@link ApplicationListener} implementing the program logic
	 * @param config the {@link AndroidApplicationConfiguration}, defining various settings of the application (use accelerometer,
	 *           etc.).
	 * @return the GLSurfaceView of the application */
	public View initializeForView (ApplicationListener listener, AndroidApplicationConfiguration config) {
		// Same setup as initialize(listener, config), except the caller owns the view:
		// no window flags are touched and setContentView() is not called.
		graphics = new AndroidGraphics(this, config, config.resolutionStrategy == null ? new FillResolutionStrategy()
			: config.resolutionStrategy);
		input = new AndroidInput(this, graphics.view, config);
		audio = new AndroidAudio(this);
		files = new AndroidFiles(this.getAssets(), this.getFilesDir().getAbsolutePath());
		this.listener = listener;
		this.handler = new Handler();

		// Publish the backend modules through the global Gdx facade.
		Gdx.app = this;
		Gdx.input = this.getInput();
		Gdx.audio = this.getAudio();
		Gdx.files = this.getFiles();
		Gdx.graphics = this.getGraphics();
		createWakeLock(config);
		return graphics.getView();
	}

	@Override
	protected void onPause () {
		if (wakeLock != null) wakeLock.release();

		// Temporarily force continuous rendering so the render thread actually processes the
		// pause (and possible destroy) even when non-continuous rendering is active; the
		// previous mode is restored further below.
		boolean isContinuous = graphics.isContinuousRendering();
		graphics.setContinuousRendering(true);
		graphics.pause();

		input.unregisterSensorListeners();
		// erase pointer ids. this sucks donkeyballs...
		int[] realId = input.realId;
		for (int i = 0; i < realId.length; i++)
			realId[i] = -1;

		if (isFinishing()) {
			// The Activity is going away for good: drop managed GL caches and let the
			// listener dispose its resources.
			graphics.clearManagedCaches();
			graphics.destroy();
		}
		graphics.setContinuousRendering(isContinuous);

		// Forward the pause to the GL view so the GL thread/context is handled correctly.
		if (graphics != null && graphics.view != null) {
			if (graphics.view instanceof GLSurfaceViewCupcake) ((GLSurfaceViewCupcake)graphics.view).onPause();
			if (graphics.view instanceof android.opengl.GLSurfaceView) ((android.opengl.GLSurfaceView)graphics.view).onPause();
		}

		super.onPause();
	}

	@Override
	protected void onResume () {
		if (wakeLock != null) wakeLock.acquire();

		// Re-publish the globals: another AndroidApplication may have overwritten them
		// while this Activity was paused.
		Gdx.app = this;
		Gdx.input = this.getInput();
		Gdx.audio = this.getAudio();
		Gdx.files = this.getFiles();
		Gdx.graphics = this.getGraphics();

		((AndroidInput)getInput()).registerSensorListeners();

		// Forward the resume to the GL view so rendering restarts.
		if (graphics != null && graphics.view != null) {
			if (graphics.view instanceof GLSurfaceViewCupcake) ((GLSurfaceViewCupcake)graphics.view).onResume();
			if (graphics.view instanceof android.opengl.GLSurfaceView) ((android.opengl.GLSurfaceView)graphics.view).onResume();
		}

		// Skip the very first resume: the listener has not been created yet at that point.
		if (!firstResume) {
			graphics.resume();
		}
		else
			firstResume = false;

		super.onResume();
	}

	@Override
	protected void onDestroy () {
		// Listener/graphics teardown already happened in onPause() when isFinishing() was true.
		super.onDestroy();
	}

	/** {@inheritDoc} */
	@Override
	public Audio getAudio () {
		return audio;
	}

	/** {@inheritDoc} */
	@Override
	public Files getFiles () {
		return files;
	}

	/** {@inheritDoc} */
	@Override
	public Graphics getGraphics () {
		return graphics;
	}

	/** {@inheritDoc} */
	@Override
	public Input getInput () {
		return input;
	}

	/** {@inheritDoc} */
	@Override
	public ApplicationType getType () {
		return ApplicationType.Android;
	}

	/** {@inheritDoc} */
	@Override
	public int getVersion () {
		// NOTE(review): Build.VERSION.SDK is the API level as a String (deprecated in favor
		// of SDK_INT); parseInt is safe for it on stock Android releases.
		return Integer.parseInt(android.os.Build.VERSION.SDK);
	}

	@Override
	public long getJavaHeap () {
		// Bytes currently in use on the Java heap.
		return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
	}

	@Override
	public long getNativeHeap () {
		// Bytes currently allocated on the native heap.
		return Debug.getNativeHeapAllocatedSize();
	}

	@Override
	public Preferences getPreferences (String name) {
		// Backed by Android SharedPreferences, private to this application.
		return new AndroidPreferences(getSharedPreferences(name, Context.MODE_PRIVATE));
	}

	@Override
	public void postRunnable (Runnable runnable) {
		synchronized (runnables) {
			runnables.add(runnable);
			// Wake the render loop so the runnable is executed even in non-continuous mode.
			Gdx.graphics.requestRendering();
		}
	}

	@Override
	public void onConfigurationChanged (Configuration config) {
		super.onConfigurationChanged(config);
		// Track hardware keyboard availability for AndroidInput.
		boolean keyboardAvailable = false;
		if (config.hardKeyboardHidden == Configuration.HARDKEYBOARDHIDDEN_NO) keyboardAvailable = true;
		input.keyboardAvailable = keyboardAvailable;
	}

	@Override
	public void exit () {
		// finish() must run on the UI thread; exit() may be called from the render thread.
		handler.post(new Runnable() {
			@Override
			public void run () {
				AndroidApplication.this.finish();
			}
		});
	}

	@Override
	public void debug (String tag, String message) {
		if (logLevel >= LOG_DEBUG) {
			Log.d(tag, message);
		}
	}

	@Override
	public void debug (String tag, String message, Throwable exception) {
		if (logLevel >= LOG_DEBUG) {
			Log.d(tag, message, exception);
		}
	}

	@Override
	public void log (String tag, String message) {
		if (logLevel >= LOG_INFO) Log.i(tag, message);
	}

	@Override
	public void log (String tag, String message, Exception exception) {
		if (logLevel >= LOG_INFO) Log.i(tag, message, exception);
	}

	@Override
	public void error (String tag, String message) {
		if (logLevel >= LOG_ERROR) Log.e(tag, message);
	}

	@Override
	public void error (String tag, String message, Throwable exception) {
		if (logLevel >= LOG_ERROR) Log.e(tag, message, exception);
	}

	@Override
	public void setLogLevel (int logLevel) {
		this.logLevel = logLevel;
	}
}
| backends/gdx-backend-android/src/com/badlogic/gdx/backends/android/AndroidApplication.java | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.android;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.os.Debug;
import android.os.Handler;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Audio;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.backends.android.surfaceview.FillResolutionStrategy;
import com.badlogic.gdx.backends.android.surfaceview.GLSurfaceViewCupcake;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.GL11;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.GdxNativesLoader;
/** An implementation of the {@link Application} interface for Android. Create an {@link Activity} that derives from this class. In
* the {@link Activity#onCreate(Bundle)} method call the {@link #initialize(ApplicationListener, boolean)} method specifying the
* configuration for the GLSurfaceView.
*
* @author mzechner */
public class AndroidApplication extends Activity implements Application {
static {
GdxNativesLoader.load();
}
protected AndroidGraphics graphics;
protected AndroidInput input;
protected AndroidAudio audio;
protected AndroidFiles files;
protected ApplicationListener listener;
protected Handler handler;
protected boolean firstResume = true;
protected final Array<Runnable> runnables = new Array<Runnable>();
protected final Array<Runnable> executedRunnables = new Array<Runnable>();
protected WakeLock wakeLock = null;
protected int logLevel = LOG_INFO;
/** Convenience overload that builds a default {@link AndroidApplicationConfiguration}, applies the requested OpenGL ES 2.0
 * preference and delegates to {@link #initialize(ApplicationListener, AndroidApplicationConfiguration)}. Must be called from
 * {@link Activity#onCreate(Bundle)}. When OpenGL ES 2.0 is enabled, use {@link Graphics#getGL20()} instead of the {@link GL10}
 * and {@link GL11} interfaces; query {@link Graphics#isGL20Available()} to check whether enabling it succeeded.
 *
 * @param listener the {@link ApplicationListener} implementing the program logic
 * @param useGL2IfAvailable whether to use OpenGL ES 2.0 if its available. */
public void initialize (ApplicationListener listener, boolean useGL2IfAvailable) {
	final AndroidApplicationConfiguration defaultConfig = new AndroidApplicationConfiguration();
	defaultConfig.useGL20 = useGL2IfAvailable;
	initialize(listener, defaultConfig);
}
/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
* input, render via OpenGL and so on. If config.useGL20 is set the AndroidApplication will try to create an OpenGL ES 2.0
* context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
* used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
* {@link Graphics#isGL20Available()} method. You can configure other aspects of the application with the rest of the fields in
* the {@link AndroidApplicationConfiguration} instance.
*
* @param listener the {@link ApplicationListener} implementing the program logic
* @param config the {@link AndroidApplicationConfiguration}, defining various settings of the application (use accelerometer,
* etc.). */
public void initialize (ApplicationListener listener, AndroidApplicationConfiguration config) {
graphics = new AndroidGraphics(this, config, config.resolutionStrategy == null ? new FillResolutionStrategy()
: config.resolutionStrategy);
input = new AndroidInput(this, graphics.view, config);
audio = new AndroidAudio(this);
files = new AndroidFiles(this.getAssets(), this.getFilesDir().getAbsolutePath());
this.listener = listener;
this.handler = new Handler();
Gdx.app = this;
Gdx.input = this.getInput();
Gdx.audio = this.getAudio();
Gdx.files = this.getFiles();
Gdx.graphics = this.getGraphics();
try {
requestWindowFeature(Window.FEATURE_NO_TITLE);
} catch (Exception ex) {
log("AndroidApplication", "Content already displayed, cannot request FEATURE_NO_TITLE", ex);
}
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
setContentView(graphics.getView(), createLayoutParams());
createWakeLock(config);
}
protected FrameLayout.LayoutParams createLayoutParams () {
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(android.view.ViewGroup.LayoutParams.FILL_PARENT,
android.view.ViewGroup.LayoutParams.FILL_PARENT);
layoutParams.gravity = Gravity.CENTER;
return layoutParams;
}
protected void createWakeLock (AndroidApplicationConfiguration config) {
if (config.useWakelock) {
PowerManager powerManager = (PowerManager)getSystemService(Context.POWER_SERVICE);
wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK, "libgdx wakelock");
}
}
/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
* input, render via OpenGL and so on. If useGL20IfAvailable is set the AndroidApplication will try to create an OpenGL ES 2.0
* context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
* used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
* {@link Graphics#isGL20Available()} method. Uses a default {@link AndroidApplicationConfiguration}.
* <p/>
* Note: you have to add the returned view to your layout!
*
* @param listener the {@link ApplicationListener} implementing the program logic
* @param useGL2IfAvailable whether to use OpenGL ES 2.0 if its available.
* @return the GLSurfaceView of the application */
public View initializeForView (ApplicationListener listener, boolean useGL2IfAvailable) {
AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
config.useGL20 = useGL2IfAvailable;
return initializeForView(listener, config);
}
/** This method has to be called in the {@link Activity#onCreate(Bundle)} method. It sets up all the things necessary to get
* input, render via OpenGL and so on. If config.useGL20 is set the AndroidApplication will try to create an OpenGL ES 2.0
* context which can then be used via {@link Graphics#getGL20()}. The {@link GL10} and {@link GL11} interfaces should not be
* used when OpenGL ES 2.0 is enabled. To query whether enabling OpenGL ES 2.0 was successful use the
* {@link Graphics#isGL20Available()} method. You can configure other aspects of the application with the rest of the fields in
* the {@link AndroidApplicationConfiguration} instance.
* <p/>
* Note: you have to add the returned view to your layout!
*
* @param listener the {@link ApplicationListener} implementing the program logic
* @param config the {@link AndroidApplicationConfiguration}, defining various settings of the application (use accelerometer,
* etc.).
* @return the GLSurfaceView of the application */
public View initializeForView (ApplicationListener listener, AndroidApplicationConfiguration config) {
graphics = new AndroidGraphics(this, config, config.resolutionStrategy == null ? new FillResolutionStrategy()
: config.resolutionStrategy);
input = new AndroidInput(this, graphics.view, config);
audio = new AndroidAudio(this);
files = new AndroidFiles(this.getAssets(), this.getFilesDir().getAbsolutePath());
this.listener = listener;
this.handler = new Handler();
Gdx.app = this;
Gdx.input = this.getInput();
Gdx.audio = this.getAudio();
Gdx.files = this.getFiles();
Gdx.graphics = this.getGraphics();
createWakeLock(config);
return graphics.getView();
}
@Override
protected void onPause () {
if (wakeLock != null) wakeLock.release();
boolean isContinuous = graphics.isContinuousRendering();
graphics.setContinuousRendering(true);
graphics.pause();
input.unregisterSensorListeners();
// erase pointer ids. this sucks donkeyballs...
int[] realId = input.realId;
for (int i = 0; i < realId.length; i++)
realId[i] = -1;
if (isFinishing()) {
graphics.clearManagedCaches();
graphics.destroy();
}
graphics.setContinuousRendering(isContinuous);
if (graphics != null && graphics.view != null) {
if (graphics.view instanceof GLSurfaceViewCupcake) ((GLSurfaceViewCupcake)graphics.view).onPause();
if (graphics.view instanceof android.opengl.GLSurfaceView) ((android.opengl.GLSurfaceView)graphics.view).onPause();
}
super.onPause();
}
@Override
protected void onResume () {
if (wakeLock != null) wakeLock.acquire();
Gdx.app = this;
Gdx.input = this.getInput();
Gdx.audio = this.getAudio();
Gdx.files = this.getFiles();
Gdx.graphics = this.getGraphics();
((AndroidInput)getInput()).registerSensorListeners();
if (graphics != null && graphics.view != null) {
if (graphics.view instanceof GLSurfaceViewCupcake) ((GLSurfaceViewCupcake)graphics.view).onResume();
if (graphics.view instanceof android.opengl.GLSurfaceView) ((android.opengl.GLSurfaceView)graphics.view).onResume();
}
if (!firstResume) {
graphics.resume();
}
else
firstResume = false;
super.onResume();
}
@Override
protected void onDestroy () {
super.onDestroy();
}
/** {@inheritDoc} */
@Override
public Audio getAudio () {
return audio;
}
/** {@inheritDoc} */
@Override
public Files getFiles () {
return files;
}
/** {@inheritDoc} */
@Override
public Graphics getGraphics () {
return graphics;
}
/** {@inheritDoc} */
@Override
public Input getInput () {
return input;
}
/** {@inheritDoc} */
@Override
public ApplicationType getType () {
return ApplicationType.Android;
}
/** {@inheritDoc} */
@Override
public int getVersion () {
return Integer.parseInt(android.os.Build.VERSION.SDK);
}
@Override
public long getJavaHeap () {
return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
}
@Override
public long getNativeHeap () {
return Debug.getNativeHeapAllocatedSize();
}
@Override
public Preferences getPreferences (String name) {
return new AndroidPreferences(getSharedPreferences(name, Context.MODE_PRIVATE));
}
@Override
public void postRunnable (Runnable runnable) {
synchronized (runnables) {
runnables.add(runnable);
Gdx.graphics.requestRendering();
}
}
@Override
public void onConfigurationChanged (Configuration config) {
super.onConfigurationChanged(config);
boolean keyboardAvailable = false;
if (config.hardKeyboardHidden == Configuration.HARDKEYBOARDHIDDEN_NO) keyboardAvailable = true;
input.keyboardAvailable = keyboardAvailable;
}
@Override
public void exit () {
handler.post(new Runnable() {
@Override
public void run () {
AndroidApplication.this.finish();
}
});
}
@Override
public void debug (String tag, String message) {
if (logLevel >= LOG_DEBUG) {
System.out.println(tag + ": " + message);
}
}
@Override
public void debug (String tag, String message, Throwable exception) {
if (logLevel >= LOG_DEBUG) {
System.out.println(tag + ": " + message);
exception.printStackTrace(System.out);
}
}
@Override
public void log (String tag, String message) {
if (logLevel >= LOG_INFO) Log.i(tag, message);
}
@Override
public void log (String tag, String message, Exception exception) {
if (logLevel >= LOG_INFO) Log.i(tag, message, exception);
}
@Override
public void error (String tag, String message) {
if (logLevel >= LOG_ERROR) Log.e(tag, message);
}
@Override
public void error (String tag, String message, Throwable exception) {
if (logLevel >= LOG_ERROR) Log.e(tag, message, exception);
}
@Override
public void setLogLevel (int logLevel) {
this.logLevel = logLevel;
}
}
| [fixed] issue 782, AndroidApplication#debug logged via System.out. yeah, wtf?
| backends/gdx-backend-android/src/com/badlogic/gdx/backends/android/AndroidApplication.java | [fixed] issue 782, AndroidApplication#debug logged via System.out. yeah, wtf? |
|
Java | apache-2.0 | d85c86a5e59487b8fc9b9dd080937d8be25c6560 | 0 | cesar021/av1_v1.2 | import java.util.HashMap;
import java.util.Map;
import br.edu.infnet.Cor;
import br.edu.infnet.Loja;
import br.edu.infnet.Montadora;
import br.edu.infnet.Tipo;
import br.edu.infnet.TipoVeiculo;
import br.edu.infnet.Veiculo;
import br.edu.infnet.Cambio;
public class Main {
public static void main(String[] args) {
System.out.println("******************************************************");
System.out.println("* Projeto de uma loja de veculos de uma grande rede *");
System.out.println("******************************************************");
System.out.println("");
Loja loja = new Loja("GADS2014N", "Rua So Jos, 90 - Centro, Rio de Janeiro - RJ, 20010-020");
System.out.println("Loja : " + loja.getNome());
System.out.println("Endereo: " + loja.getEndereco());
System.out.println("Quantidade de veculo no estoque: " + loja.getEstoqueVeiculos().length);
Veiculo veiculo = new Veiculo(TipoVeiculo.CARRO, "000001", 70000.00);
Map<String, String> especificacao = new HashMap<String, String>();
especificacao.put("MONTADORA", "BMW");
especificacao.put("MODELO", "X1");
especificacao.put("TIPO", Tipo.SUV.name());
especificacao.put("COR", Cor.PRETO.name());
especificacao.put("MOTORIZACAO", "2.0");
especificacao.put("CAMBIO", Cambio.AUTOMATICO.name());
veiculo.setEspecificacao(especificacao);
loja.adicionarVeiculo(veiculo);
System.out.println("Quantidade de veculo no estoque: " + loja.getEstoqueVeiculos().length);
veiculo = new Veiculo(TipoVeiculo.MOTO, "000002", 27000.00);
especificacao = new HashMap<String, String>();
especificacao.put("MONTADORA", "YAMAHA");
especificacao.put("MODELO", "XJ6");
especificacao.put("TIPO", Tipo.ESPORTIVO.name());
especificacao.put("COR", Cor.PRETO.name());
especificacao.put("CILINDRADA", "600");
especificacao.put("CAPACIDADETANK", "17.3");
veiculo.setEspecificacao(especificacao);
loja.adicionarVeiculo(veiculo);
System.out.println("");
// Listando todos os veculos do estoque
loja.listarEstoquedeVeiculos(null);
// Listando apenas os carros do estoque
loja.listarEstoquedeVeiculos(TipoVeiculo.CARRO);
// Listando apenas as motos do estoque
loja.listarEstoquedeVeiculos(TipoVeiculo.MOTO);
// Pesquisando veiculos com 1 atributo
System.out.println("*****************************************");
System.out.println("Pesquisando veiculos com MONTADORA YAMAHA");
System.out.println("*****************************************\n");
System.out.println("TIPO\tCHASSI");
for (Veiculo pesqVeiculo : loja.pesquisarVeiculo("MONTADORA=YAMAHA;")) {
System.out.println(pesqVeiculo.getTipoVeiculo() + "\t" + pesqVeiculo.getChassi());
}
// Pesquisando veiculos com 2 atributo existente
System.out.println("*********************************************************");
System.out.println("Pesquisando veiculos com MONTADORA YAMAHA e CHASSI 000002");
System.out.println("*********************************************************\n");
System.out.println("TIPO\tCHASSI");
for (Veiculo pesqVeiculo : loja.pesquisarVeiculo("MONTADORA=YAMAHA;CHASSI=000002;")) {
System.out.println(pesqVeiculo.getTipoVeiculo() + "\t" + pesqVeiculo.getChassi());
}
// Pesquisando veiculos com 1 atributo no existente
System.out.println("*********************************************************");
System.out.println("Pesquisando veiculos com MONTADORA YAMAHA e CHASSI 000003");
System.out.println("*********************************************************\n");
System.out.println("TIPO\tCHASSI");
for (Veiculo pesqVeiculo : loja.pesquisarVeiculo("MONTADORA=YAMAHA;CHASSI=000003;")) {
System.out.println(pesqVeiculo.getTipoVeiculo() + "\t" + pesqVeiculo.getChassi());
}
loja.salvarEstoque("estoque.txt");
}
}
| Main.java | import java.util.HashMap;
import java.util.Map;
import br.edu.infnet.Loja;
import br.edu.infnet.Tipo;
import br.edu.infnet.TipoVeiculo;
import br.edu.infnet.Veiculo;
import br.edu.infnet.enums.Cambio;
import br.edu.infnet.enums.Cor;
public class Main {
public static void main(String[] args) {
System.out.println("******************************************************");
System.out.println("* Projeto de uma loja de veculos de uma grande rede *");
System.out.println("******************************************************");
System.out.println("");
Loja loja = new Loja("GADS2014N", "Rua So Jos, 90 - Centro, Rio de Janeiro - RJ, 20010-020");
System.out.println("Loja : " + loja.getNome());
System.out.println("Endereo: " + loja.getEndereco());
System.out.println("Quantidade de veculo no estoque: " + loja.getEstoqueVeiculos().length);
Veiculo veiculo = new Veiculo(TipoVeiculo.CARRO, "000001", 70000.00);
Map<String, String> especificacao = new HashMap<String, String>();
especificacao.put("MONTADORA", "BMW");
especificacao.put("MODELO", "X1");
especificacao.put("TIPO", Tipo.SUV.name());
especificacao.put("COR", Cor.PRETO.name());
especificacao.put("MOTORIZACAO", "2.0");
especificacao.put("CAMBIO", Cambio.AUTOMATICO.name());
veiculo.setEspecificacao(especificacao);
loja.adicionarVeiculo(veiculo);
System.out.println("Quantidade de veculo no estoque: " + loja.getEstoqueVeiculos().length);
veiculo = new Veiculo(TipoVeiculo.MOTO, "000002", 27000.00);
especificacao = new HashMap<String, String>();
especificacao.put("MONTADORA", "YAMAHA");
especificacao.put("MODELO", "XJ6");
especificacao.put("TIPO", Tipo.ESPORTIVO.name());
especificacao.put("COR", Cor.PRETO.name());
especificacao.put("CILINDRADA", "600");
especificacao.put("CAPACIDADETANK", "17.3");
veiculo.setEspecificacao(especificacao);
loja.adicionarVeiculo(veiculo);
System.out.println("");
// Listando todos os veculos do estoque
loja.listarEstoquedeVeiculos(null);
// Listando apenas os carros do estoque
loja.listarEstoquedeVeiculos(TipoVeiculo.CARRO);
// Listando apenas as motos do estoque
loja.listarEstoquedeVeiculos(TipoVeiculo.MOTO);
}
}
| Adicionado salvarEstoque | Main.java | Adicionado salvarEstoque |
|
Java | apache-2.0 | 1f3ff300050394727ecdd22ea8aead1c43445c07 | 0 | ekux44/LampShade,ekux44/HueMore | package com.kuxhausen.huemore;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.support.v4.app.NotificationCompat;
import android.util.Pair;
import com.kuxhausen.huemore.automation.FireReceiver;
import com.kuxhausen.huemore.net.Connection;
import com.kuxhausen.huemore.net.DeviceManager;
import com.kuxhausen.huemore.net.DeviceManager.OnStateChangedListener;
import com.kuxhausen.huemore.net.MoodPlayer;
import com.kuxhausen.huemore.persistence.DatabaseDefinitions.InternalArguments;
import com.kuxhausen.huemore.persistence.FutureEncodingException;
import com.kuxhausen.huemore.persistence.HueUrlEncoder;
import com.kuxhausen.huemore.persistence.InvalidEncodingException;
import com.kuxhausen.huemore.persistence.Utils;
import com.kuxhausen.huemore.state.Group;
import com.kuxhausen.huemore.state.Mood;
import com.kuxhausen.huemore.timing.AlarmReciever;
public class MoodExecuterService extends Service implements OnActiveMoodsChangedListener, OnStateChangedListener{
/**
* Class used for the client Binder. Because we know this service always
* runs in the same process as its clients, we don't need to deal with IPC.
*/
public class LocalBinder extends Binder {
MoodExecuterService getService() {
// Return this instance of LocalService so clients can call public methods
return MoodExecuterService.this;
}
}
// Binder given to clients
private final IBinder mBinder = new LocalBinder();
private final static int notificationId = 1337;
private boolean mBound;
private WakeLock mWakelock;
private DeviceManager mDeviceManager;
private MoodPlayer mMoodPlayer;
@Override
public void onCreate() {
super.onCreate();
//acquire wakelock needed till everything initialized
PowerManager pm = (PowerManager) this.getSystemService(Context.POWER_SERVICE);
mWakelock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, this.getClass().getName());
mWakelock.acquire();
//Initialize DeviceManager and Mood Player
mDeviceManager = new DeviceManager(this);
mDeviceManager.registerStateListener(this);
mMoodPlayer = new MoodPlayer(this,mDeviceManager);
mMoodPlayer.addOnActiveMoodsChangedListener(this);
}
@Override
/**
* Called after onCreate when service attaching to Activity(s)
*/
public IBinder onBind(Intent intent) {
mBound = true;
return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
super.onUnbind(intent);
mBound = false;
return true; // ensures onRebind is called
}
@Override
public void onRebind(Intent intent) {
super.onRebind(intent);
mBound = true;
}
@Override
/**
* Called after onCreate when service (re)started independently
*/
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent != null) {
//remove any possible launched wakelocks
AlarmReciever.completeWakefulIntent(intent);
FireReceiver.completeWakefulIntent(intent);
String encodedMood = intent.getStringExtra(InternalArguments.ENCODED_MOOD);
String groupName = intent.getStringExtra(InternalArguments.GROUP_NAME);
String moodName = intent.getStringExtra(InternalArguments.MOOD_NAME);
Integer maxBri = intent.getIntExtra(InternalArguments.MAX_BRIGHTNESS, -1);
if(maxBri==-1)
maxBri = null;
if (encodedMood != null) {
try{
Pair<Integer[], Pair<Mood, Integer>> moodPairs = HueUrlEncoder.decode(encodedMood);
if(moodPairs.second.first!=null){
Group g = Group.loadFromLegacyData(moodPairs.first,groupName,this);
moodName = (moodName == null) ? "Unknown Mood" : moodName;
mMoodPlayer.playMood(g, moodPairs.second.first, moodName, moodPairs.second.second);
}
} catch (InvalidEncodingException e) {
Intent i = new Intent(this,DecodeErrorActivity.class);
i.putExtra(InternalArguments.DECODER_ERROR_UPGRADE, false);
startActivity(i);
} catch (FutureEncodingException e) {
Intent i = new Intent(this,DecodeErrorActivity.class);
i.putExtra(InternalArguments.DECODER_ERROR_UPGRADE, true);
startActivity(i);
}
} else if(moodName!=null && groupName!=null){
Group g = Group.loadFromDatabase(groupName, this);
Mood m = Utils.getMoodFromDatabase(moodName, this);
mMoodPlayer.playMood(g, m, moodName, maxBri);
}
}
calculateWakeNeeds();
return super.onStartCommand(intent, flags, startId);
}
/**
* don't call till after onCreate and near the end of onStartCommand so device doesn't sleep before launching mood events queued
*/
public void calculateWakeNeeds(){
boolean shouldStayAwake = false;
if(mMoodPlayer.hasImminentPendingWork())
shouldStayAwake = true;
for(Connection c : mDeviceManager.getConnections()){
if(c.hasPendingWork())
shouldStayAwake = true;
}
if(shouldStayAwake){
if(mWakelock == null){
//acquire wakelock till done doing work
PowerManager pm = (PowerManager) this.getSystemService(Context.POWER_SERVICE);
mWakelock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, this.getClass().getName());
mWakelock.acquire();
}
} else {
if(!mBound){
//not bound, so service may sleep after releasing wakelock
//save ongoing moods
mMoodPlayer.saveOngoingAndScheduleResores();
}
if(mWakelock!=null){
mWakelock.release();
mWakelock = null;
}
if(!mBound && mMoodPlayer.getPlayingMoods().isEmpty()){
//with no ongoing moods and not bound, go ahead and completely shut down
//this.stopSelf();
//doesn't work because it gets triggered before a service finishes binding and can't be undone.
}
}
}
public MoodPlayer getMoodPlayer(){
return mMoodPlayer;
}
public DeviceManager getDeviceManager(){
return mDeviceManager;
}
@Override
public void onStateChanged() {
calculateWakeNeeds();
}
public void onActiveMoodsChanged(){
Log.e("ccc","onActiveMoodsChanged");
if(mMoodPlayer.getPlayingMoods().isEmpty()){
this.stopForeground(true);
} else{
// Creates an explicit intent for an Activity in your app
Intent resultIntent = new Intent(this, MainFragment.class);
PendingIntent resultPendingIntent = PendingIntent.getActivity(this, 0, resultIntent, PendingIntent.FLAG_UPDATE_CURRENT);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(
this)
.setSmallIcon(R.drawable.lampshade_notification)
.setContentTitle(
this.getResources().getString(R.string.app_name))
.setContentText(mMoodPlayer.getPlayingMoods().get(0).toString())
.setContentIntent(resultPendingIntent);
this.startForeground(notificationId, mBuilder.build());
}
calculateWakeNeeds();
}
@Override
public void onDestroy() {
mMoodPlayer.onDestroy();
mDeviceManager.onDestroy();
if(mWakelock!=null)
mWakelock.release();
super.onDestroy();
}
}
| app/src/com/kuxhausen/huemore/MoodExecuterService.java | package com.kuxhausen.huemore;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.util.Pair;
import com.kuxhausen.huemore.automation.FireReceiver;
import com.kuxhausen.huemore.net.Connection;
import com.kuxhausen.huemore.net.DeviceManager;
import com.kuxhausen.huemore.net.DeviceManager.OnStateChangedListener;
import com.kuxhausen.huemore.net.MoodPlayer;
import com.kuxhausen.huemore.persistence.DatabaseDefinitions.InternalArguments;
import com.kuxhausen.huemore.persistence.FutureEncodingException;
import com.kuxhausen.huemore.persistence.HueUrlEncoder;
import com.kuxhausen.huemore.persistence.InvalidEncodingException;
import com.kuxhausen.huemore.persistence.Utils;
import com.kuxhausen.huemore.state.Group;
import com.kuxhausen.huemore.state.Mood;
import com.kuxhausen.huemore.timing.AlarmReciever;
public class MoodExecuterService extends Service implements OnActiveMoodsChangedListener, OnStateChangedListener{
/**
* Class used for the client Binder. Because we know this service always
* runs in the same process as its clients, we don't need to deal with IPC.
*/
public class LocalBinder extends Binder {
MoodExecuterService getService() {
// Return this instance of LocalService so clients can call public methods
return MoodExecuterService.this;
}
}
// Binder given to clients
private final IBinder mBinder = new LocalBinder();
private final static int notificationId = 1337;
private boolean mBound;
private WakeLock mWakelock;
private DeviceManager mDeviceManager;
private MoodPlayer mMoodPlayer;
@Override
public void onCreate() {
super.onCreate();
//acquire wakelock needed till everything initialized
PowerManager pm = (PowerManager) this.getSystemService(Context.POWER_SERVICE);
mWakelock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, this.getClass().getName());
mWakelock.acquire();
//Initialize DeviceManager and Mood Player
mDeviceManager = new DeviceManager(this);
mDeviceManager.registerStateListener(this);
mMoodPlayer = new MoodPlayer(this,mDeviceManager);
mMoodPlayer.addOnActiveMoodsChangedListener(this);
}
@Override
/**
* Called after onCreate when service attaching to Activity(s)
*/
public IBinder onBind(Intent intent) {
mBound = true;
return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
super.onUnbind(intent);
mBound = false;
return true; // ensures onRebind is called
}
@Override
public void onRebind(Intent intent) {
super.onRebind(intent);
mBound = true;
}
@Override
/**
* Called after onCreate when service (re)started independently
*/
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent != null) {
//remove any possible launched wakelocks
AlarmReciever.completeWakefulIntent(intent);
FireReceiver.completeWakefulIntent(intent);
String encodedMood = intent.getStringExtra(InternalArguments.ENCODED_MOOD);
String groupName = intent.getStringExtra(InternalArguments.GROUP_NAME);
String moodName = intent.getStringExtra(InternalArguments.MOOD_NAME);
Integer maxBri = intent.getIntExtra(InternalArguments.MAX_BRIGHTNESS, -1);
if(maxBri==-1)
maxBri = null;
if (encodedMood != null) {
try{
Pair<Integer[], Pair<Mood, Integer>> moodPairs = HueUrlEncoder.decode(encodedMood);
if(moodPairs.second.first!=null){
Group g = Group.loadFromLegacyData(moodPairs.first,groupName,this);
moodName = (moodName == null) ? "Unknown Mood" : moodName;
mMoodPlayer.playMood(g, moodPairs.second.first, moodName, moodPairs.second.second);
}
} catch (InvalidEncodingException e) {
Intent i = new Intent(this,DecodeErrorActivity.class);
i.putExtra(InternalArguments.DECODER_ERROR_UPGRADE, false);
startActivity(i);
} catch (FutureEncodingException e) {
Intent i = new Intent(this,DecodeErrorActivity.class);
i.putExtra(InternalArguments.DECODER_ERROR_UPGRADE, true);
startActivity(i);
}
} else if(moodName!=null && groupName!=null){
Group g = Group.loadFromDatabase(groupName, this);
Mood m = Utils.getMoodFromDatabase(moodName, this);
mMoodPlayer.playMood(g, m, moodName, maxBri);
}
}
calculateWakeNeeds();
return super.onStartCommand(intent, flags, startId);
}
/**
* don't call till after onCreate and near the end of onStartCommand so device doesn't sleep before launching mood events queued
*/
public void calculateWakeNeeds(){
boolean shouldStayAwake = false;
if(mMoodPlayer.hasImminentPendingWork())
shouldStayAwake = true;
Log.e("ccc","shoudlStayAwakeMood "+shouldStayAwake);
for(Connection c : mDeviceManager.getConnections()){
if(c.hasPendingWork())
shouldStayAwake = true;
}
Log.e("ccc","shoudlStayAwakeM&D "+shouldStayAwake);
if(shouldStayAwake){
if(mWakelock == null){
//acquire wakelock till done doing work
PowerManager pm = (PowerManager) this.getSystemService(Context.POWER_SERVICE);
mWakelock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, this.getClass().getName());
mWakelock.acquire();
}
} else {
if(!mBound){
//not bound, so service may die after releasing wakelock
//save ongoing moods
mMoodPlayer.saveOngoingAndScheduleResores();
this.stopSelf();
}
if(mWakelock!=null){
mWakelock.release();
mWakelock = null;
}
//if unbound, sleep
}
}
public MoodPlayer getMoodPlayer(){
return mMoodPlayer;
}
public DeviceManager getDeviceManager(){
return mDeviceManager;
}
@Override
public void onStateChanged() {
calculateWakeNeeds();
}
public void onActiveMoodsChanged(){
Log.e("ccc","onActiveMoodsChanged");
if(mMoodPlayer.getPlayingMoods().isEmpty()){
this.stopForeground(true);
} else{
// Creates an explicit intent for an Activity in your app
Intent resultIntent = new Intent(this, MainFragment.class);
PendingIntent resultPendingIntent = PendingIntent.getActivity(this, 0, resultIntent, PendingIntent.FLAG_UPDATE_CURRENT);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(
this)
.setSmallIcon(R.drawable.lampshade_notification)
.setContentTitle(
this.getResources().getString(R.string.app_name))
.setContentText(mMoodPlayer.getPlayingMoods().get(0).toString())
.setContentIntent(resultPendingIntent);
this.startForeground(notificationId, mBuilder.build());
}
calculateWakeNeeds();
}
@Override
public void onDestroy() {
mMoodPlayer.onDestroy();
mDeviceManager.onDestroy();
if(mWakelock!=null)
mWakelock.release();
super.onDestroy();
}
}
| fixed service lifespan bug
| app/src/com/kuxhausen/huemore/MoodExecuterService.java | fixed service lifespan bug |
|
Java | apache-2.0 | f532f04301dcba6bb7138ee8d1d2d676f02c6096 | 0 | phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida | package ca.corefacility.bioinformatics.irida.service.export;
import ca.corefacility.bioinformatics.irida.exceptions.NcbiXmlParseException;
import ca.corefacility.bioinformatics.irida.exceptions.UploadException;
import ca.corefacility.bioinformatics.irida.model.NcbiExportSubmission;
import ca.corefacility.bioinformatics.irida.model.enums.ExportUploadState;
import ca.corefacility.bioinformatics.irida.model.export.NcbiBioSampleFiles;
import ca.corefacility.bioinformatics.irida.model.sample.MetadataTemplateField;
import ca.corefacility.bioinformatics.irida.model.sample.Sample;
import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin;
import ca.corefacility.bioinformatics.irida.model.sample.metadata.MetadataEntry;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile;
import ca.corefacility.bioinformatics.irida.service.EmailController;
import ca.corefacility.bioinformatics.irida.service.sample.MetadataTemplateService;
import ca.corefacility.bioinformatics.irida.service.sample.SampleService;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Class which handles uploading a {@link NcbiExportSubmission} to NCBI
*/
@Service
public class ExportUploadService {
private static final Logger logger = LoggerFactory.getLogger(ExportUploadService.class);
private static final String NCBI_TEMPLATE = "ncbi";
public static final String NCBI_ACCESSION_METADATA_LABEL = "NCBI SRA Accession";
private NcbiExportSubmissionService exportSubmissionService;
private SampleService sampleService;
private MetadataTemplateService metadataTemplateService;
private TemplateEngine templateEngine;
private EmailController emailController;
@Value("${ncbi.upload.host}")
private String ftpHost;
@Value("${ncbi.upload.port}")
private int ftpPort;
@Value("${ncbi.upload.user}")
private String ftpUser;
@Value("${ncbi.upload.password}")
private String ftpPassword;
@Value("${ncbi.upload.baseDirectory}")
private String baseDirectory;
@Value("${ncbi.upload.controlKeepAliveTimeoutSeconds}")
private int controlKeepAliveTimeout;
@Value("${ncbi.upload.controlKeepAliveReplyTimeoutMilliseconds}")
private int controlKeepAliveReplyTimeout;
@Value("${ncbi.upload.ftp.passive}")
private boolean passiveMode;
@Value("${irida.administrative.notifications.email}")
private String notificationAdminEmail;
private static final int MAX_RETRIES = 3;
private static final long WAIT_BETWEEN_RETRIES = 5000L;
// set of statuses that should be watched and update
private static Set<ExportUploadState> updateableStates = ImmutableSet.of(ExportUploadState.UPLOADED,
ExportUploadState.SUBMITTED, ExportUploadState.CREATED, ExportUploadState.QUEUED,
ExportUploadState.PROCESSING, ExportUploadState.WAITING);
@Autowired
public ExportUploadService(NcbiExportSubmissionService exportSubmissionService, SampleService sampleService,
MetadataTemplateService metadataTemplateService,
@Qualifier("exportUploadTemplateEngine") TemplateEngine templateEngine, EmailController emailController) {
this.exportSubmissionService = exportSubmissionService;
this.sampleService = sampleService;
this.metadataTemplateService = metadataTemplateService;
this.templateEngine = templateEngine;
this.emailController = emailController;
}
/**
* Manually configure connection details for this service
*
* @param ftpHost
* The hostname to connect to
* @param ftpPort
* the ftp port to connect to
* @param ftpUser
* the username to authenticate with
* @param ftpPassword
* the password to authenticate with
* @param baseDirectory
* the base directory to upload new runs into
*/
public void setConnectionDetails(String ftpHost, int ftpPort, String ftpUser, String ftpPassword,
String baseDirectory) {
this.ftpHost = ftpHost;
this.ftpPort = ftpPort;
this.ftpUser = ftpUser;
this.ftpPassword = ftpPassword;
this.baseDirectory = baseDirectory;
}
/**
* Check for new {@link NcbiExportSubmission}s to be uploaded and begin
* their upload
*/
public synchronized void launchUpload() {
logger.trace("Getting new exports");
List<NcbiExportSubmission> submissionsWithState = exportSubmissionService
.getSubmissionsWithState(ExportUploadState.NEW);
for (NcbiExportSubmission submission : submissionsWithState) {
try {
logger.trace("Updating submission " + submission.getId());
submission.setUploadState(ExportUploadState.UPLOADING);
submission = exportSubmissionService.update(submission);
String xmlContent = createXml(submission);
submission = uploadSubmission(submission, xmlContent);
submission.setUploadState(ExportUploadState.UPLOADED);
submission = exportSubmissionService.update(submission);
} catch (Exception e) {
logger.error("Upload failed", e);
submission.setUploadState(ExportUploadState.UPLOAD_ERROR);
submission = exportSubmissionService.update(submission);
emailController.sendNCBIUploadExceptionEmail(notificationAdminEmail, e, submission.getId());
}
}
}
/**
* Check local database for submissions which may have updates on the NCBI
* server and update them as necessary.
*/
public synchronized void updateRunningUploads() {
logger.trace("Getting running exports");
List<NcbiExportSubmission> submissionsWithState = exportSubmissionService
.getSubmissionsWithState(updateableStates);
FTPClient client = null;
try {
for (NcbiExportSubmission submission : submissionsWithState) {
// connect to FTP site
client = getFtpClient();
try {
logger.trace("Getting report for submission " + submission.getId());
InputStream xmlStream = getLatestXMLStream(client, submission);
if (xmlStream != null) {
NcbiExportSubmission updateSubmissionForXml = updateSubmissionForXml(submission, xmlStream);
exportSubmissionService.update(updateSubmissionForXml);
xmlStream.close();
//If we're done processing, add the accessions
if (updateSubmissionForXml.getUploadState().equals(ExportUploadState.PROCESSED_OK)) {
addSampleAccessions(submission);
}
}
} catch (NcbiXmlParseException e) {
logger.error("Error getting response", e);
submission.setUploadState(ExportUploadState.UPLOAD_ERROR);
submission = exportSubmissionService.update(submission);
emailController.sendNCBIUploadExceptionEmail(notificationAdminEmail, e, submission.getId());
} catch (IOException e) {
logger.error("Error closing XML stream", e);
}
}
disconnectFtpCient(client);
} catch (Exception e) {
logger.error("Couldn't connect to FTP site", e);
} finally {
disconnectFtpCient(client);
}
}
/**
* Create the XML for an {@link NcbiExportSubmission}
*
* @param submission
* the {@link NcbiExportSubmission} to create submission xml for
* @return String content of the xml
*/
public String createXml(NcbiExportSubmission submission) {
logger.debug("Creating export xml for submission " + submission.getId());
final Context ctx = new Context();
ctx.setVariable("submission", submission);
String xmlContent = templateEngine.process(NCBI_TEMPLATE, ctx);
return xmlContent;
}
/**
* Upload an {@link NcbiExportSubmission}'s files and submission xml to the
* configured ftp site
*
* @param submission
* The {@link NcbiExportSubmission} to upload
* @param xml
* The submission xml to upload
* @return true/false if upload was successful
* @throws UploadException
* if the upload failed
*/
public NcbiExportSubmission uploadSubmission(NcbiExportSubmission submission, String xml) throws UploadException {
FTPClient client = null;
try {
client = getFtpClient();
// create submission directory name
String directoryName = submission.getId().toString() + "-" + new Date().getTime();
// cd to submission base directory
if (!client.changeWorkingDirectory(baseDirectory)) {
throw new UploadException("Couldn't change to base directory " + baseDirectory + " : "
+ client.getReplyString());
}
// create new submission directory
if (!client.makeDirectory(directoryName)) {
throw new UploadException("Couldn't create new upload directory " + directoryName + " : "
+ client.getReplyString());
}
// cd to submission directory
if (!client.changeWorkingDirectory(directoryName)) {
throw new UploadException("Couldn't change to upload directory " + directoryName + " : "
+ client.getReplyString());
}
// set the directory saved
String directoryPath = baseDirectory + "/" + directoryName;
submission.setDirectoryPath(directoryPath);
// upload submission.xml file
uploadString(client, "submission.xml", xml);
// upload biosample files
for (NcbiBioSampleFiles bsFile : submission.getBioSampleFiles()) {
// upload single end files
for (SingleEndSequenceFile file : bsFile.getFiles()) {
// Just using file IDs as the basename for uploaded files to
// avoid accidentally sending sensitive sample names to NCBI
String filename = file.getSequenceFile().getId() + ".fastq";
uploadPath(client, filename, file.getSequenceFile().getFile());
}
// upload paired end files
for (SequenceFilePair pair : bsFile.getPairs()) {
// upload forward
SequenceFile file = pair.getForwardSequenceFile();
// Just using file IDs as the basename for uploaded files to
// avoid accidentally sending sensitive sample names to NCBI
String filename = file.getId() + ".fastq";
uploadPath(client, filename, file.getFile());
// upload reverse
file = pair.getReverseSequenceFile();
filename = file.getId() + ".fastq";
uploadPath(client, filename, file.getFile());
}
}
// create submit.ready file
uploadString(client, "submit.ready", "");
} catch (IOException e) {
logger.error("Error in upload", e);
throw new UploadException("Could not upload run", e);
} finally {
disconnectFtpCient(client);
}
return submission;
}
/**
* Get the latest result.#.xml file for the given submission
*
* @param client
* {@link FTPClient} to use for the connection
* @param submission
* {@link NcbiExportSubmission} to get results for
* @return {@link InputStream} for the newest file if found. null if no file
* was found
* @throws NcbiXmlParseException
* if the file couldn't be found
*/
private InputStream getLatestXMLStream(FTPClient client, NcbiExportSubmission submission)
throws NcbiXmlParseException {
InputStream retrieveFileStream = null;
try {
String directoryPath = submission.getDirectoryPath();
// cd to submission base directory
if (!client.changeWorkingDirectory(directoryPath)) {
throw new NcbiXmlParseException("Couldn't change to base directory " + baseDirectory + " : "
+ client.getReplyString());
}
Pattern regex = Pattern.compile("report.(\\d+).xml");
String latestFile = null;
int highestNumber = 0;
// search for the highest number in the report.#.xml files
FTPFile[] listFiles = client.listFiles();
for (FTPFile file : listFiles) {
String fileName = file.getName();
Matcher matcher = regex.matcher(fileName);
if (matcher.matches()) {
int reportNumber = Integer.parseInt(matcher.group(1));
if (reportNumber > highestNumber) {
highestNumber = reportNumber;
latestFile = fileName;
}
}
}
if (latestFile != null) {
logger.trace("newest file is " + latestFile);
retrieveFileStream = client.retrieveFileStream(latestFile);
}
} catch (IOException e) {
throw new NcbiXmlParseException("Couldn't get response xml", e);
}
return retrieveFileStream;
}
/**
* Get the updates from the result.#.xml file for the given submission and
* update the object. XML will look like the following:
*
* <pre>
* <?xml version='1.0' encoding='utf-8'?>
* <SubmissionStatus submission_id="SUB1234" status="processed-ok">
* <Action action_id="SUB1234-submission12345" target_db="SRA" status="processed-ok" notify_submitter="true">
* <Response status="processed-ok">
* <Object target_db="SRA" object_id="RUN:4567" spuid_namespace="NML" spuid="submission12345" accession="SRR6789" status="updated">
* <Meta>
* <SRAStudy>SRP012345</SRAStudy>
* </Meta>
* </Object>
* </Response>
* </Action>
* </SubmissionStatus>
* </pre>
*
* @param submission
* {@link NcbiExportSubmission} to update
* @param xml
* {@link InputStream} of xml
* @return Updated {@link NcbiExportSubmission}
* @throws NcbiXmlParseException
* if the xml couldn't be parsed
*/
private NcbiExportSubmission updateSubmissionForXml(NcbiExportSubmission submission, InputStream xml)
throws NcbiXmlParseException {
try {
// read the incoming xml file
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse(xml);
XPath xPath = XPathFactory.newInstance().newXPath();
// get the submission status and set it in the submission
String submissionStatusString = xPath.compile("SubmissionStatus/@status").evaluate(doc);
if (submissionStatusString == null) {
throw new NcbiXmlParseException("result file should have 1 SubmissionStatus element with a status");
}
ExportUploadState submissionStatus = ExportUploadState.fromString(submissionStatusString);
submission.setUploadState(submissionStatus);
logger.trace("Root export state is " + submissionStatus);
// get all the sample files objects by name
Map<String, NcbiBioSampleFiles> sampleMap = getSampleNameMap(submission);
// get the actions
NodeList actions = (NodeList) xPath.compile("SubmissionStatus/Action")
.evaluate(doc, XPathConstants.NODESET);
for (int i = 0; i < actions.getLength(); i++) {
if (actions.item(i).getNodeType() == Node.ELEMENT_NODE) {
Element action = (Element) actions.item(i);
// get the status and action id
String status = action.getAttribute("status");
String actionId = action.getAttribute("action_id");
// action id is of the form SUBMISSIONID-sampleid
String sampleId = actionId.substring(actionId.indexOf("-") + 1);
// get the sample for this action
NcbiBioSampleFiles ncbiBioSampleFiles = sampleMap.get(sampleId);
ExportUploadState sampleStatus = ExportUploadState.fromString(status);
ncbiBioSampleFiles.setSubmissionStatus(sampleStatus);
logger.trace("Sample export state for sample " + ncbiBioSampleFiles.getId() + " is " + sampleStatus);
String accession = xPath.compile("Response/Object/@accession").evaluate(action);
if (accession != null && !accession.isEmpty()) {
logger.trace("Found accession " + accession);
ncbiBioSampleFiles.setAccession(accession);
}
}
}
} catch (XPathExpressionException | ParserConfigurationException | SAXException | IOException e) {
logger.error("Couldn't parse response XML", e);
throw new NcbiXmlParseException("Error parsing NCBI response", e);
}
return submission;
}
/**
* Get a Map of {@link NcbiBioSampleFiles} for a
* {@link NcbiExportSubmission} indexed by the submitted sample ids
*
* @param submission
* Submission to get the {@link NcbiBioSampleFiles} for
* @return A Map of String => {@link NcbiBioSampleFiles}
*/
private Map<String, NcbiBioSampleFiles> getSampleNameMap(NcbiExportSubmission submission) {
Map<String, NcbiBioSampleFiles> map = new HashMap<>();
for (NcbiBioSampleFiles sample : submission.getBioSampleFiles()) {
map.put(sample.getId().toLowerCase(), sample);
}
return map;
}
/**
* Connect an {@link FTPClient} with the configured connection details
*
* @return a connected {@link FTPClient}
* @throws IOException if a connection error occurred
*/
private FTPClient getFtpClient() throws IOException {
FTPClient client = new FTPClient();
// login to host
logger.trace("Logging in to " + ftpHost + " as " + ftpUser);
try {
client.connect(ftpHost, ftpPort);
} catch (ConnectException ex) {
logger.error("Couldn't connect to server " + ftpHost + ":" + ftpPort);
throw ex;
}
if (!client.login(ftpUser, ftpPassword)) {
throw new IOException("Couldn't log in as " + ftpUser + client.getReplyString());
}
if (passiveMode) {
logger.trace("Entering FTP passive mode");
client.enterLocalPassiveMode();
} else {
logger.trace("Entering FTP active mode");
client.enterLocalActiveMode();
}
logger.trace(client.getStatus());
if (controlKeepAliveTimeout < 0 || controlKeepAliveReplyTimeout < 0) {
throw new IllegalArgumentException("Error: controlKeepAliveTimeout [" + controlKeepAliveTimeout
+ "] or controlKeepAliveReplyTimeout [" + controlKeepAliveReplyTimeout + "] < 0");
} else {
logger.trace("Using controlKeepAliveTimeout=" + controlKeepAliveTimeout + ", controlKeepAliveReplyTimeout="
+ controlKeepAliveReplyTimeout);
client.setControlKeepAliveTimeout(controlKeepAliveTimeout);
}
return client;
}
/**
* Disconnect an {@link FTPClient} if it's connected. Just doing this to
* avoid the old try-catch-in-finally mess.
*
* @param client
* An {@link FTPClient} to shut down if it's connected
*/
private void disconnectFtpCient(FTPClient client) {
if (client != null && client.isConnected()) {
try {
client.disconnect();
} catch (IOException e) {
logger.error("Couldn't disconnect FTP Client", e);
}
}
}
/**
* Upload a string to remote ftp client
*
* @param client
* {@link FTPClient} to use for upload
* @param filename
* name of file to create
* @param content
* content of file to create
* @throws UploadException
* if file could not be uploaded
*/
private void uploadString(FTPClient client, String filename, String content) throws UploadException, IOException {
int tries = 0;
boolean done = false;
client.setFileType(FTP.ASCII_FILE_TYPE);
do {
tries++;
try (ByteArrayInputStream stringStream = new ByteArrayInputStream(content.getBytes())) {
logger.trace("Uploading string to file [" + filename + "], data_connection_mode ["
+ client.getDataConnectionMode() + "]");
client.storeFile(filename, stringStream);
logger.trace("Finished uploading string to file [" + filename + "]" + ", response ["
+ client.getReplyString() + "]");
done = true;
} catch (Exception e) {
String reply = client.getReplyString();
if (tries >= MAX_RETRIES) {
throw new UploadException("Could not upload file " + filename + " : " + reply, e);
}
logger.error("Error uploading file: " + reply, e);
try {
Thread.sleep(WAIT_BETWEEN_RETRIES);
} catch (InterruptedException e1) {
throw new UploadException("Sleep failed", e1);
}
}
} while (!done);
}
/**
* Upload a file {@link Path} to a remote ftp client
*
* @param client {@link FTPClient} to upload with
* @param filename name of file to create
* @param path {@link Path} to upload
* @throws UploadException if file could not be uploaded
*/
private void uploadPath(FTPClient client, String filename, Path path) throws UploadException, IOException {
int tries = 0;
boolean done = false;
client.setFileType(FTP.BINARY_FILE_TYPE);
do {
tries++;
try (InputStream stream = Files.newInputStream(path)) {
logger.trace("Uploading path [" + path + "], filename [" + filename + "], data_connection_mode ["
+ client.getDataConnectionMode() + "]");
client.storeFile(filename, stream);
logger.trace("Finished uploading path [" + path + "], filename [" + filename + "], response ["
+ client.getReplyString() + "]");
done = true;
} catch (Exception e) {
String reply = client.getReplyString();
if (tries >= MAX_RETRIES) {
throw new UploadException("Could not upload file " + filename + " : " + reply, e);
}
logger.error("Error uploading file: " + reply, e);
try {
Thread.sleep(WAIT_BETWEEN_RETRIES);
} catch (InterruptedException e1) {
throw new UploadException("Sleep failed", e1);
}
}
} while (!done);
}
/**
* Add the accession numbers for uploaded NCBI data to the associated {@link Sample}
*
* @param submission an {@link NcbiExportSubmission} with associated accessions
*/
private void addSampleAccessions(NcbiExportSubmission submission) {
//get all samplefile objects for the submission
for (NcbiBioSampleFiles file : submission.getBioSampleFiles()) {
//read the accession
String accession = file.getAccession();
//if an accession exists
if (!Strings.isNullOrEmpty(accession)) {
//build a metadata map entry
Map<String, MetadataEntry> metadata = new HashMap<>();
metadata.put(NCBI_ACCESSION_METADATA_LABEL, new MetadataEntry(accession, "text"));
Map<MetadataTemplateField, MetadataEntry> metadataMap = metadataTemplateService
.getMetadataMap(metadata);
//get all the sequencing objects involved
Set<SequencingObject> objects = new HashSet<>();
if (!file.getPairs().isEmpty()) {
objects.addAll(file.getPairs());
} else if (!file.getFiles().isEmpty()) {
objects.addAll(file.getFiles());
}
// get all the samples for those sequencing objects
Set<Sample> samples = new HashSet<>();
for (SequencingObject object : objects) {
SampleSequencingObjectJoin join = sampleService.getSampleForSequencingObject(object);
samples.add(join.getSubject());
}
//update the samples with the accession
for (Sample s : samples) {
s.mergeMetadata(metadataMap);
sampleService.update(s);
}
}
}
}
}
| src/main/java/ca/corefacility/bioinformatics/irida/service/export/ExportUploadService.java | package ca.corefacility.bioinformatics.irida.service.export;
import ca.corefacility.bioinformatics.irida.exceptions.NcbiXmlParseException;
import ca.corefacility.bioinformatics.irida.exceptions.UploadException;
import ca.corefacility.bioinformatics.irida.model.NcbiExportSubmission;
import ca.corefacility.bioinformatics.irida.model.enums.ExportUploadState;
import ca.corefacility.bioinformatics.irida.model.export.NcbiBioSampleFiles;
import ca.corefacility.bioinformatics.irida.model.sample.MetadataTemplateField;
import ca.corefacility.bioinformatics.irida.model.sample.Sample;
import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin;
import ca.corefacility.bioinformatics.irida.model.sample.metadata.MetadataEntry;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile;
import ca.corefacility.bioinformatics.irida.service.EmailController;
import ca.corefacility.bioinformatics.irida.service.sample.MetadataTemplateService;
import ca.corefacility.bioinformatics.irida.service.sample.SampleService;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Class which handles uploading a {@link NcbiExportSubmission} to NCBI
*/
@Service
public class ExportUploadService {
private static final Logger logger = LoggerFactory.getLogger(ExportUploadService.class);
private static final String NCBI_TEMPLATE = "ncbi";
public static final String NCBI_ACCESSION_METADATA_LABEL = "NCBI SRA Accession";
private NcbiExportSubmissionService exportSubmissionService;
private SampleService sampleService;
private MetadataTemplateService metadataTemplateService;
private TemplateEngine templateEngine;
private EmailController emailController;
@Value("${ncbi.upload.host}")
private String ftpHost;
@Value("${ncbi.upload.port}")
private int ftpPort;
@Value("${ncbi.upload.user}")
private String ftpUser;
@Value("${ncbi.upload.password}")
private String ftpPassword;
@Value("${ncbi.upload.baseDirectory}")
private String baseDirectory;
@Value("${ncbi.upload.controlKeepAliveTimeoutSeconds}")
private int controlKeepAliveTimeout;
@Value("${ncbi.upload.controlKeepAliveReplyTimeoutMilliseconds}")
private int controlKeepAliveReplyTimeout;
@Value("${ncbi.upload.ftp.passive}")
private boolean passiveMode;
@Value("${irida.administrative.notifications.email}")
private String notificationAdminEmail;
private static final int MAX_RETRIES = 3;
private static final long WAIT_BETWEEN_RETRIES = 5000L;
// set of statuses that should be watched and update
private static Set<ExportUploadState> updateableStates = ImmutableSet.of(ExportUploadState.UPLOADED,
ExportUploadState.SUBMITTED, ExportUploadState.CREATED, ExportUploadState.QUEUED,
ExportUploadState.PROCESSING, ExportUploadState.WAITING);
@Autowired
public ExportUploadService(NcbiExportSubmissionService exportSubmissionService, SampleService sampleService,
MetadataTemplateService metadataTemplateService,
@Qualifier("exportUploadTemplateEngine") TemplateEngine templateEngine, EmailController emailController) {
this.exportSubmissionService = exportSubmissionService;
this.sampleService = sampleService;
this.metadataTemplateService = metadataTemplateService;
this.templateEngine = templateEngine;
this.emailController = emailController;
}
/**
* Manually configure connection details for this service
*
* @param ftpHost
* The hostname to connect to
* @param ftpPort
* the ftp port to connect to
* @param ftpUser
* the username to authenticate with
* @param ftpPassword
* the password to authenticate with
* @param baseDirectory
* the base directory to upload new runs into
*/
public void setConnectionDetails(String ftpHost, int ftpPort, String ftpUser, String ftpPassword,
String baseDirectory) {
this.ftpHost = ftpHost;
this.ftpPort = ftpPort;
this.ftpUser = ftpUser;
this.ftpPassword = ftpPassword;
this.baseDirectory = baseDirectory;
}
/**
* Check for new {@link NcbiExportSubmission}s to be uploaded and begin
* their upload
*/
public synchronized void launchUpload() {
logger.trace("Getting new exports");
List<NcbiExportSubmission> submissionsWithState = exportSubmissionService
.getSubmissionsWithState(ExportUploadState.NEW);
for (NcbiExportSubmission submission : submissionsWithState) {
try {
logger.trace("Updating submission " + submission.getId());
submission.setUploadState(ExportUploadState.UPLOADING);
submission = exportSubmissionService.update(submission);
String xmlContent = createXml(submission);
submission = uploadSubmission(submission, xmlContent);
submission.setUploadState(ExportUploadState.UPLOADED);
submission = exportSubmissionService.update(submission);
} catch (Exception e) {
logger.error("Upload failed", e);
submission.setUploadState(ExportUploadState.UPLOAD_ERROR);
submission = exportSubmissionService.update(submission);
emailController.sendNCBIUploadExceptionEmail(notificationAdminEmail, e, submission.getId());
}
}
}
/**
* Check local database for submissions which may have updates on the NCBI
* server and update them as necessary.
*/
public synchronized void updateRunningUploads() {
logger.trace("Getting running exports");
List<NcbiExportSubmission> submissionsWithState = exportSubmissionService
.getSubmissionsWithState(updateableStates);
FTPClient client = null;
try {
for (NcbiExportSubmission submission : submissionsWithState) {
// connect to FTP site
client = getFtpClient();
try {
logger.trace("Getting report for submission " + submission.getId());
InputStream xmlStream = getLatestXMLStream(client, submission);
if (xmlStream != null) {
NcbiExportSubmission updateSubmissionForXml = updateSubmissionForXml(submission, xmlStream);
exportSubmissionService.update(updateSubmissionForXml);
xmlStream.close();
//If we're done processing, add the accessions
if (updateSubmissionForXml.getUploadState().equals(ExportUploadState.PROCESSED_OK)) {
addSampleAccessions(submission);
}
}
} catch (NcbiXmlParseException e) {
logger.error("Error getting response", e);
submission.setUploadState(ExportUploadState.UPLOAD_ERROR);
submission = exportSubmissionService.update(submission);
emailController.sendNCBIUploadExceptionEmail(notificationAdminEmail, e, submission.getId());
} catch (IOException e) {
logger.error("Error closing XML stream", e);
}
}
disconnectFtpCient(client);
} catch (Exception e) {
logger.error("Couldn't connect to FTP site", e);
} finally {
disconnectFtpCient(client);
}
}
/**
* Create the XML for an {@link NcbiExportSubmission}
*
* @param submission
* the {@link NcbiExportSubmission} to create submission xml for
* @return String content of the xml
*/
public String createXml(NcbiExportSubmission submission) {
logger.debug("Creating export xml for submission " + submission.getId());
final Context ctx = new Context();
ctx.setVariable("submission", submission);
String xmlContent = templateEngine.process(NCBI_TEMPLATE, ctx);
return xmlContent;
}
/**
* Upload an {@link NcbiExportSubmission}'s files and submission xml to the
* configured ftp site
*
* @param submission
* The {@link NcbiExportSubmission} to upload
* @param xml
* The submission xml to upload
* @return true/false if upload was successful
* @throws UploadException
* if the upload failed
*/
public NcbiExportSubmission uploadSubmission(NcbiExportSubmission submission, String xml) throws UploadException {
FTPClient client = null;
try {
client = getFtpClient();
// create submission directory name
String directoryName = submission.getId().toString() + "-" + new Date().getTime();
// cd to submission base directory
if (!client.changeWorkingDirectory(baseDirectory)) {
throw new UploadException("Couldn't change to base directory " + baseDirectory + " : "
+ client.getReplyString());
}
// create new submission directory
if (!client.makeDirectory(directoryName)) {
throw new UploadException("Couldn't create new upload directory " + directoryName + " : "
+ client.getReplyString());
}
// cd to submission directory
if (!client.changeWorkingDirectory(directoryName)) {
throw new UploadException("Couldn't change to upload directory " + directoryName + " : "
+ client.getReplyString());
}
// set the directory saved
String directoryPath = baseDirectory + "/" + directoryName;
submission.setDirectoryPath(directoryPath);
// upload submission.xml file
uploadString(client, "submission.xml", xml);
// upload biosample files
for (NcbiBioSampleFiles bsFile : submission.getBioSampleFiles()) {
// upload single end files
for (SingleEndSequenceFile file : bsFile.getFiles()) {
// Just using file IDs as the basename for uploaded files to
// avoid accidentally sending sensitive sample names to NCBI
String filename = file.getSequenceFile().getId() + ".fastq";
uploadPath(client, filename, file.getSequenceFile().getFile());
}
// upload paired end files
for (SequenceFilePair pair : bsFile.getPairs()) {
// upload forward
SequenceFile file = pair.getForwardSequenceFile();
// Just using file IDs as the basename for uploaded files to
// avoid accidentally sending sensitive sample names to NCBI
String filename = file.getId() + ".fastq";
uploadPath(client, filename, file.getFile());
// upload reverse
file = pair.getReverseSequenceFile();
filename = file.getId() + ".fastq";
uploadPath(client, filename, file.getFile());
}
}
// create submit.ready file
uploadString(client, "submit.ready", "");
} catch (IOException e) {
logger.error("Error in upload", e);
throw new UploadException("Could not upload run", e);
} finally {
disconnectFtpCient(client);
}
return submission;
}
/**
* Get the latest result.#.xml file for the given submission
*
* @param client
* {@link FTPClient} to use for the connection
* @param submission
* {@link NcbiExportSubmission} to get results for
* @return {@link InputStream} for the newest file if found. null if no file
* was found
* @throws NcbiXmlParseException
* if the file couldn't be found
*/
private InputStream getLatestXMLStream(FTPClient client, NcbiExportSubmission submission)
throws NcbiXmlParseException {
InputStream retrieveFileStream = null;
try {
String directoryPath = submission.getDirectoryPath();
// cd to submission base directory
if (!client.changeWorkingDirectory(directoryPath)) {
throw new NcbiXmlParseException("Couldn't change to base directory " + baseDirectory + " : "
+ client.getReplyString());
}
Pattern regex = Pattern.compile("report.(\\d+).xml");
String latestFile = null;
int highestNumber = 0;
// search for the highest number in the report.#.xml files
FTPFile[] listFiles = client.listFiles();
for (FTPFile file : listFiles) {
String fileName = file.getName();
Matcher matcher = regex.matcher(fileName);
if (matcher.matches()) {
int reportNumber = Integer.parseInt(matcher.group(1));
if (reportNumber > highestNumber) {
highestNumber = reportNumber;
latestFile = fileName;
}
}
}
if (latestFile != null) {
logger.trace("newest file is " + latestFile);
retrieveFileStream = client.retrieveFileStream(latestFile);
}
} catch (IOException e) {
throw new NcbiXmlParseException("Couldn't get response xml", e);
}
return retrieveFileStream;
}
/**
* Get the updates from the result.#.xml file for the given submission and
* update the object. XML will look like the following:
*
* <pre>
* <?xml version='1.0' encoding='utf-8'?>
* <SubmissionStatus submission_id="SUB1234" status="processed-ok">
* <Action action_id="SUB1234-submission12345" target_db="SRA" status="processed-ok" notify_submitter="true">
* <Response status="processed-ok">
* <Object target_db="SRA" object_id="RUN:4567" spuid_namespace="NML" spuid="submission12345" accession="SRR6789" status="updated">
* <Meta>
* <SRAStudy>SRP012345</SRAStudy>
* </Meta>
* </Object>
* </Response>
* </Action>
* </SubmissionStatus>
* </pre>
*
* @param submission
* {@link NcbiExportSubmission} to update
* @param xml
* {@link InputStream} of xml
* @return Updated {@link NcbiExportSubmission}
* @throws NcbiXmlParseException
* if the xml couldn't be parsed
*/
private NcbiExportSubmission updateSubmissionForXml(NcbiExportSubmission submission, InputStream xml)
throws NcbiXmlParseException {
try {
// read the incoming xml file
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse(xml);
XPath xPath = XPathFactory.newInstance().newXPath();
// get the submission status and set it in the submission
String submissionStatusString = xPath.compile("SubmissionStatus/@status").evaluate(doc);
if (submissionStatusString == null) {
throw new NcbiXmlParseException("result file should have 1 SubmissionStatus element with a status");
}
ExportUploadState submissionStatus = ExportUploadState.fromString(submissionStatusString);
submission.setUploadState(submissionStatus);
logger.trace("Root export state is " + submissionStatus);
// get all the sample files objects by name
Map<String, NcbiBioSampleFiles> sampleMap = getSampleNameMap(submission);
// get the actions
NodeList actions = (NodeList) xPath.compile("SubmissionStatus/Action")
.evaluate(doc, XPathConstants.NODESET);
for (int i = 0; i < actions.getLength(); i++) {
if (actions.item(i).getNodeType() == Node.ELEMENT_NODE) {
Element action = (Element) actions.item(i);
// get the status and action id
String status = action.getAttribute("status");
String actionId = action.getAttribute("action_id");
// action id is of the form SUBMISSIONID-sampleid
String sampleId = actionId.substring(actionId.indexOf("-") + 1);
// get the sample for this action
NcbiBioSampleFiles ncbiBioSampleFiles = sampleMap.get(sampleId);
ExportUploadState sampleStatus = ExportUploadState.fromString(status);
ncbiBioSampleFiles.setSubmissionStatus(sampleStatus);
logger.trace("Sample export state for sample " + ncbiBioSampleFiles.getId() + " is " + sampleStatus);
String accession = xPath.compile("Response/Object/@accession").evaluate(action);
if (accession != null && !accession.isEmpty()) {
logger.trace("Found accession " + accession);
ncbiBioSampleFiles.setAccession(accession);
}
}
}
} catch (XPathExpressionException | ParserConfigurationException | SAXException | IOException e) {
logger.error("Couldn't parse response XML", e);
throw new NcbiXmlParseException("Error parsing NCBI response", e);
}
return submission;
}
/**
* Get a Map of {@link NcbiBioSampleFiles} for a
* {@link NcbiExportSubmission} indexed by the submitted sample ids
*
* @param submission
* Submission to get the {@link NcbiBioSampleFiles} for
* @return A Map of String => {@link NcbiBioSampleFiles}
*/
private Map<String, NcbiBioSampleFiles> getSampleNameMap(NcbiExportSubmission submission) {
Map<String, NcbiBioSampleFiles> map = new HashMap<>();
for (NcbiBioSampleFiles sample : submission.getBioSampleFiles()) {
map.put(sample.getId().toLowerCase(), sample);
}
return map;
}
/**
* Connect an {@link FTPClient} with the configured connection details
*
* @return a connected {@link FTPClient}
* @throws IOException if a connection error occurred
*/
private FTPClient getFtpClient() throws IOException {
FTPClient client = new FTPClient();
// login to host
logger.trace("Logging in to " + ftpHost + " as " + ftpUser);
try {
client.connect(ftpHost, ftpPort);
} catch (ConnectException ex) {
logger.error("Couldn't connect to server " + ftpHost + ":" + ftpPort);
throw ex;
}
if (!client.login(ftpUser, ftpPassword)) {
throw new IOException("Couldn't log in as " + ftpUser + client.getReplyString());
}
if (passiveMode) {
logger.trace("Entering FTP passive mode");
client.enterLocalPassiveMode();
} else {
logger.trace("Entering FTP active mode");
client.enterLocalActiveMode();
}
logger.trace(client.getStatus());
if (controlKeepAliveTimeout < 0 || controlKeepAliveReplyTimeout < 0) {
throw new IllegalArgumentException("Error: controlKeepAliveTimeout [" + controlKeepAliveTimeout
+ "] or controlKeepAliveReplyTimeout [" + controlKeepAliveReplyTimeout + "] < 0");
} else {
logger.trace("Using controlKeepAliveTimeout=" + controlKeepAliveTimeout + ", controlKeepAliveReplyTimeout="
+ controlKeepAliveReplyTimeout);
client.setControlKeepAliveTimeout(controlKeepAliveTimeout);
}
return client;
}
/**
* Disconnect an {@link FTPClient} if it's connected. Just doing this to
* avoid the old try-catch-in-finally mess.
*
* @param client
* An {@link FTPClient} to shut down if it's connected
*/
private void disconnectFtpCient(FTPClient client) {
if (client != null && client.isConnected()) {
try {
client.disconnect();
} catch (IOException e) {
logger.error("Couldn't disconnect FTP Client", e);
}
}
}
/**
* Upload a string to remote ftp client
*
* @param client
* {@link FTPClient} to use for upload
* @param filename
* name of file to create
* @param content
* content of file to create
* @throws UploadException
* if file could not be uploaded
*/
private void uploadString(FTPClient client, String filename, String content) throws UploadException, IOException {
int tries = 0;
boolean done = false;
client.setFileType(FTP.ASCII_FILE_TYPE);
do {
tries++;
try (ByteArrayInputStream stringStream = new ByteArrayInputStream(content.getBytes())) {
logger.trace("Uploading string to file [" + filename + "]");
client.storeFile(filename, stringStream);
logger.trace("Finished uploading string to file [" + filename + "]"
+ ", response [" + client.getReplyString() + "]");
done = true;
} catch (Exception e) {
String reply = client.getReplyString();
if (tries >= MAX_RETRIES) {
throw new UploadException("Could not upload file " + filename + " : " + reply, e);
}
logger.error("Error uploading file: " + reply, e);
try {
Thread.sleep(WAIT_BETWEEN_RETRIES);
} catch (InterruptedException e1) {
throw new UploadException("Sleep failed", e1);
}
}
} while (!done);
}
/**
* Upload a file {@link Path} to a remote ftp client
*
* @param client {@link FTPClient} to upload with
* @param filename name of file to create
* @param path {@link Path} to upload
* @throws UploadException if file could not be uploaded
*/
private void uploadPath(FTPClient client, String filename, Path path) throws UploadException, IOException {
int tries = 0;
boolean done = false;
client.setFileType(FTP.BINARY_FILE_TYPE);
do {
tries++;
try (InputStream stream = Files.newInputStream(path)) {
logger.trace("Uploading path [" + path + "], filename [" + filename + "]");
client.storeFile(filename, stream);
logger.trace("Finished uploading path [" + path + "], filename [" + filename + "], response ["
+ client.getReplyString() + "]");
done = true;
} catch (Exception e) {
String reply = client.getReplyString();
if (tries >= MAX_RETRIES) {
throw new UploadException("Could not upload file " + filename + " : " + reply, e);
}
logger.error("Error uploading file: " + reply, e);
try {
Thread.sleep(WAIT_BETWEEN_RETRIES);
} catch (InterruptedException e1) {
throw new UploadException("Sleep failed", e1);
}
}
} while (!done);
}
/**
* Add the accession numbers for uploaded NCBI data to the associated {@link Sample}
*
* @param submission an {@link NcbiExportSubmission} with associated accessions
*/
private void addSampleAccessions(NcbiExportSubmission submission) {
//get all samplefile objects for the submission
for (NcbiBioSampleFiles file : submission.getBioSampleFiles()) {
//read the accession
String accession = file.getAccession();
//if an accession exists
if (!Strings.isNullOrEmpty(accession)) {
//build a metadata map entry
Map<String, MetadataEntry> metadata = new HashMap<>();
metadata.put(NCBI_ACCESSION_METADATA_LABEL, new MetadataEntry(accession, "text"));
Map<MetadataTemplateField, MetadataEntry> metadataMap = metadataTemplateService
.getMetadataMap(metadata);
//get all the sequencing objects involved
Set<SequencingObject> objects = new HashSet<>();
if (!file.getPairs().isEmpty()) {
objects.addAll(file.getPairs());
} else if (!file.getFiles().isEmpty()) {
objects.addAll(file.getFiles());
}
// get all the samples for those sequencing objects
Set<Sample> samples = new HashSet<>();
for (SequencingObject object : objects) {
SampleSequencingObjectJoin join = sampleService.getSampleForSequencingObject(object);
samples.add(join.getSubject());
}
//update the samples with the accession
for (Sample s : samples) {
s.mergeMetadata(metadataMap);
sampleService.update(s);
}
}
}
}
}
| Added printing of data connection mode
| src/main/java/ca/corefacility/bioinformatics/irida/service/export/ExportUploadService.java | Added printing of data connection mode |
|
Java | apache-2.0 | e8ffbe18fecc8961dfb486eccda12e373069a166 | 0 | fhoeben/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures,teunisnl/hsac-fitnesse-fixtures,GDasai/hsac-fitnesse-fixtures,ilseh/hsac-fitnesse-fixtures,ilseh/hsac-fitnesse-fixtures,teunisnl/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures,ilseh/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures,teunisnl/hsac-fitnesse-fixtures,teunisnl/hsac-fitnesse-fixtures,GDasai/hsac-fitnesse-fixtures,ilseh/hsac-fitnesse-fixtures,GDasai/hsac-fitnesse-fixtures,GDasai/hsac-fitnesse-fixtures | package nl.hsac.fitnesse.fixture.slim.web;
import nl.hsac.fitnesse.fixture.slim.SlimFixture;
import nl.hsac.fitnesse.fixture.util.SeleniumHelper;
import org.openqa.selenium.*;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.FluentWait;
import org.openqa.selenium.support.ui.WebDriverWait;
import java.io.File;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class BrowserTest extends SlimFixture {
private static final String FILES_DIR = new File("FitNesseRoot/files/").getAbsolutePath();
private SeleniumHelper seleniumHelper = getEnvironment().getSeleniumHelper();
private int secondsBeforeTimeout = 10;
private String screenshotBase = FILES_DIR + "/screenshots/";
private String screenshotHeight = "200";
public boolean open(String address) {
String url = getUrl(address);
getSeleniumHelper().navigate().to(url);
return true;
}
public String pageTitle() {
return getSeleniumHelper().getPageTitle();
}
/**
* Replaces content at place by value.
* @param value value to set.
* @param place element to set value on.
* @return true, if element was found.
*/
public boolean enterAs(String value, String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
element.clear();
sendValue(element, value);
result = true;
}
return result;
}
/**
* Adds content to place.
* @param value value to add.
* @param place element to add value to.
* @return true, if element was found.
*/
public boolean enterFor(String value, String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
sendValue(element, value);
result = true;
}
return result;
}
/**
* Sends Fitnesse cell content to element.
* @param element element to call sendKeys() on.
* @param value cell content.
*/
protected void sendValue(WebElement element, String value) {
String keys = cleanupValue(value);
element.sendKeys(keys);
}
public boolean selectAs(String value, String place) {
return selectFor(value, place);
}
public boolean selectFor(String value, String place) {
// choose option for select, if possible
boolean result = clickSelectOption(place, value);
if (!result) {
// try to click the first element with right value
result = click(value);
}
return result;
}
public boolean enterForHidden(String value, String idOrName) {
return getSeleniumHelper().setHiddenInputValue(idOrName, value);
}
private boolean clickSelectOption(String selectPlace, String optionValue) {
boolean result = false;
WebElement element = getElement(selectPlace);
if (element != null) {
if (isSelect(element)) {
String attrToUse = "id";
String attrValue = element.getAttribute(attrToUse);
if (attrValue == null || attrValue.isEmpty()) {
attrToUse = "name";
attrValue = element.getAttribute(attrToUse);
}
if (attrValue != null && !attrValue.isEmpty()) {
String xpathToOptions = "//select[@" + attrToUse + "='%s']//option";
result = clickOption(attrValue, xpathToOptions + "[text()='%s']", optionValue);
if (!result) {
result = clickOption(attrValue, xpathToOptions + "[contains(text(), '%s')]", optionValue);
}
}
}
}
return result;
}
private boolean clickOption(String selectId, String optionXPath, String optionValue) {
boolean result = false;
By optionWithText = getSeleniumHelper().byXpath(optionXPath, selectId, optionValue);
WebElement option = getSeleniumHelper().findElement(true, optionWithText);
if (option != null) {
result = clickElement(option);
}
return result;
}
public boolean click(String place) {
// if other element hides the element (in Chrome) an exception is thrown
// we retry clicking the element a few times before giving up.
boolean result = false;
boolean retry = true;
for (int i = 0;
!result && retry;
i++) {
try {
if (i > 0) {
waitSeconds(1);
}
result = clickImpl(place);
} catch (WebDriverException e) {
String msg = e.getMessage();
if (!msg.contains("Other element would receive the click")
|| i == secondsBeforeTimeout()) {
retry = false;
}
}
}
return result;
}
protected boolean clickImpl(String place) {
WebElement element = getElement(place);
return clickElement(element);
}
protected boolean clickElement(WebElement element) {
boolean result = false;
if (element != null) {
scrollTo(element);
if (element.isDisplayed() && element.isEnabled()) {
element.click();
result = true;
}
}
return result;
}
public boolean clickAndWaitForPage(String place, final String pageName) {
boolean result = click(place);
if (result) {
result = waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver webDriver) {
boolean ok = false;
try {
ok = pageTitle().equals(pageName);
} catch (StaleElementReferenceException e) {
// element detached from DOM
ok = false;
}
return ok;
}
});
}
return result;
}
public boolean clickAndWaitForTagWithText(String place, final String tagName, final String expectedText) {
boolean result = click(place);
if (result) {
result = waitForTagWithText(tagName, expectedText);
}
return result;
}
public boolean waitForTagWithText(final String tagName, final String expectedText) {
boolean result;
result = waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver webDriver) {
boolean ok = false;
List<WebElement> elements = webDriver.findElements(By.tagName(tagName));
if (elements != null) {
for (WebElement element : elements) {
try {
String actual = element.getText();
if (expectedText == null) {
ok = actual == null;
} else {
if (actual == null) {
actual = element.getAttribute("value");
}
ok = expectedText.equals(actual);
}
} catch (StaleElementReferenceException e) {
// element detached from DOM
ok = false;
}
if (ok) {
// no need to continue to check other elements
break;
}
}
}
return ok;
}
});
return result;
}
public String valueOf(String place) {
return valueFor(place);
}
public String valueFor(String place) {
String result = null;
WebElement element = getElement(place);
if (element != null) {
if (isSelect(element)) {
String id = element.getAttribute("id");
By selectedOption = getSeleniumHelper().byXpath("//select[@id='%s']//option[@selected]", id);
WebElement option = getSeleniumHelper().findElement(true, selectedOption);
if (option != null) {
scrollTo(option);
result = option.getText();
}
} else {
result = element.getAttribute("value");
if (result == null) {
scrollTo(element);
result = element.getText();
}
}
}
return result;
}
private boolean isSelect(WebElement element) {
return "select".equalsIgnoreCase(element.getTagName());
}
public boolean clear(String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
element.clear();
result = true;
}
return result;
}
protected WebElement getElement(String place) {
return getSeleniumHelper().getElement(place);
}
/**
* Scrolls browser window so top of place becomes visible.
* @param place element to scroll to.
*/
public void scrollTo(String place) {
WebElement element = getElement(place);
if (place != null) {
scrollTo(element);
}
}
/**
* Scrolls browser window so top of element becomes visible.
* @param element element to scroll to.
*/
protected void scrollTo(WebElement element) {
getSeleniumHelper().executeJavascript("arguments[0].scrollIntoView(true);", element);
}
/**
* @param timeout number of seconds before waitUntil() throws TimeOutException.
*/
public void secondsBeforeTimeout(int timeout) {
secondsBeforeTimeout = timeout;
}
/**
* @return number of seconds waitUntil() will wait at most.
*/
public int secondsBeforeTimeout() {
return secondsBeforeTimeout;
}
/**
* Clears HTML5's localStorage.
*/
public void clearLocalStorage() {
getSeleniumHelper().executeJavascript("localStorage.clear();");
}
/**
* @param directory sets base directory where screenshots will be stored.
*/
public void screenshotBaseDirectory(String directory) {
if (directory.equals("")
|| directory.endsWith("/")
|| directory.endsWith("\\")) {
screenshotBase = directory;
} else {
screenshotBase = directory + "/";
}
}
/**
* @param height height to use to display screenshot images
*/
public void screenshotShowHeight(String height) {
screenshotHeight = height;
}
/**
* Takes screenshot from current page
* @param basename filename (below screenshot base directory).
* @return location of screenshot.
*/
public String takeScreenshot(String basename) {
String name = screenshotBase + basename;
String screenshotFile = getSeleniumHelper().takeScreenshot(name);
if (screenshotFile == null) {
throw new RuntimeException("Unable to take screenshot: does the webdriver support it?");
} else {
if (screenshotFile.startsWith(FILES_DIR)) {
// make href to screenshot
String relativeFile = screenshotFile.substring(FILES_DIR.length());
relativeFile = relativeFile.replace('\\', '/');
String wikiUrl = "/files" + relativeFile;
if ("".equals(screenshotHeight)) {
wikiUrl = String.format("<a href=\"%s\">%s</a>",
wikiUrl, screenshotFile);
} else {
wikiUrl = String.format("<a href=\"%1$s\"><img src=\"%1$s\" title=\"%2$s\" height=\"%3$s\"></a>",
wikiUrl, screenshotFile, screenshotHeight);
}
screenshotFile = wikiUrl;
}
}
return screenshotFile;
}
/**
* Implementations should wait until the condition evaluates to a value that is neither null nor
* false. Because of this contract, the return type must not be Void.
* @param <T> the return type of the method, which must not be Void
* @param condition condition to evaluate to determine whether waiting can be stopped.
* @throws org.openqa.selenium.TimeoutException if condition was not met before secondsBeforeTimeout.
* @return result of condition.
*/
protected <T> T waitUntil(ExpectedCondition<T> condition) {
try {
FluentWait<WebDriver> wait = waitDriver().withTimeout(secondsBeforeTimeout(), TimeUnit.SECONDS);
return wait.until(condition);
} catch (TimeoutException e) {
// take a screenshot of what was on screen
takeScreenshot("timeouts/" + getClass().getSimpleName() + "/timeout");
throw e;
}
}
private WebDriverWait waitDriver() {
return getSeleniumHelper().waitDriver();
}
/**
* @return helper to use.
*/
protected final SeleniumHelper getSeleniumHelper() {
return seleniumHelper;
}
/**
* Sets SeleniumHelper to use, for testing purposes.
* @param helper helper to use.
*/
void setSeleniumHelper(SeleniumHelper helper) {
seleniumHelper = helper;
}
}
| src/main/java/nl/hsac/fitnesse/fixture/slim/web/BrowserTest.java | package nl.hsac.fitnesse.fixture.slim.web;
import nl.hsac.fitnesse.fixture.slim.SlimFixture;
import nl.hsac.fitnesse.fixture.util.SeleniumHelper;
import org.openqa.selenium.*;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.FluentWait;
import org.openqa.selenium.support.ui.WebDriverWait;
import java.io.File;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class BrowserTest extends SlimFixture {
private static final String FILES_DIR = new File("FitNesseRoot/files/").getAbsolutePath();
private SeleniumHelper seleniumHelper = getEnvironment().getSeleniumHelper();
private int secondsBeforeTimeout = 10;
private String screenshotBase = FILES_DIR + "/screenshots/";
private String screenshotHeight = "200";
public boolean open(String address) {
String url = getUrl(address);
getSeleniumHelper().navigate().to(url);
return true;
}
public String pageTitle() {
return getSeleniumHelper().getPageTitle();
}
/**
* Replaces content at place by value.
* @param value value to set.
* @param place element to set value on.
* @return true, if element was found.
*/
public boolean enterAs(String value, String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
element.clear();
sendValue(element, value);
result = true;
}
return result;
}
/**
* Adds content to place.
* @param value value to add.
* @param place element to add value to.
* @return true, if element was found.
*/
public boolean enterFor(String value, String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
sendValue(element, value);
result = true;
}
return result;
}
/**
* Sends Fitnesse cell content to element.
* @param element element to call sendKeys() on.
* @param value cell content.
*/
protected void sendValue(WebElement element, String value) {
String keys = cleanupValue(value);
element.sendKeys(keys);
}
public boolean selectAs(String value, String place) {
return selectFor(value, place);
}
public boolean selectFor(String value, String place) {
// choose option for select, if possible
boolean result = clickSelectOption(place, value);
if (!result) {
// try to click the first element with right value
result = click(value);
}
return result;
}
public boolean enterForHidden(String value, String idOrName) {
return getSeleniumHelper().setHiddenInputValue(idOrName, value);
}
private boolean clickSelectOption(String selectPlace, String optionValue) {
boolean result = false;
WebElement element = getElement(selectPlace);
if (element != null) {
if (isSelect(element)) {
String attrToUse = "id";
String attrValue = element.getAttribute(attrToUse);
if (attrValue == null || attrValue.isEmpty()) {
attrToUse = "name";
attrValue = element.getAttribute(attrToUse);
}
if (attrValue != null && !attrValue.isEmpty()) {
String xpathToOptions = "//select[@" + attrToUse + "='%s']//option";
result = clickOption(attrValue, xpathToOptions + "[text()='%s']", optionValue);
if (!result) {
result = clickOption(attrValue, xpathToOptions + "[contains(text(), '%s')]", optionValue);
}
}
}
}
return result;
}
private boolean clickOption(String selectId, String optionXPath, String optionValue) {
boolean result = false;
By optionWithText = getSeleniumHelper().byXpath(optionXPath, selectId, optionValue);
WebElement option = getSeleniumHelper().findElement(true, optionWithText);
if (option != null) {
option.click();
result = true;
}
return result;
}
public boolean click(String place) {
// if other element hides the element (in Chrome) an exception is thrown
// we retry clicking the element a few times before giving up.
boolean result = false;
boolean retry = true;
for (int i = 0;
!result && retry;
i++) {
try {
if (i > 0) {
waitSeconds(1);
}
result = clickImpl(place);
} catch (WebDriverException e) {
String msg = e.getMessage();
if (!msg.contains("Other element would receive the click")
|| i == secondsBeforeTimeout()) {
retry = false;
}
}
}
return result;
}
protected boolean clickImpl(String place) {
WebElement element = getElement(place);
return clickElement(element);
}
protected boolean clickElement(WebElement element) {
boolean result = false;
if (element != null) {
if (element.isDisplayed() && element.isEnabled()) {
element.click();
result = true;
}
}
return result;
}
public boolean clickAndWaitForPage(String place, final String pageName) {
boolean result = click(place);
if (result) {
result = waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver webDriver) {
boolean ok = false;
try {
ok = pageTitle().equals(pageName);
} catch (StaleElementReferenceException e) {
// element detached from DOM
ok = false;
}
return ok;
}
});
}
return result;
}
public boolean clickAndWaitForTagWithText(String place, final String tagName, final String expectedText) {
boolean result = click(place);
if (result) {
result = waitForTagWithText(tagName, expectedText);
}
return result;
}
public boolean waitForTagWithText(final String tagName, final String expectedText) {
boolean result;
result = waitUntil(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver webDriver) {
boolean ok = false;
List<WebElement> elements = webDriver.findElements(By.tagName(tagName));
if (elements != null) {
for (WebElement element : elements) {
try {
String actual = element.getText();
if (expectedText == null) {
ok = actual == null;
} else {
if (actual == null) {
actual = element.getAttribute("value");
}
ok = expectedText.equals(actual);
}
} catch (StaleElementReferenceException e) {
// element detached from DOM
ok = false;
}
if (ok) {
// no need to continue to check other elements
break;
}
}
}
return ok;
}
});
return result;
}
public String valueOf(String place) {
return valueFor(place);
}
public String valueFor(String place) {
String result = null;
WebElement element = getElement(place);
if (element != null) {
if (isSelect(element)) {
String id = element.getAttribute("id");
By selectedOption = getSeleniumHelper().byXpath("//select[@id='%s']//option[@selected]", id);
WebElement option = getSeleniumHelper().findElement(true, selectedOption);
if (option != null) {
result = option.getText();
}
} else {
result = element.getAttribute("value");
if (result == null) {
result = element.getText();
}
}
}
return result;
}
private boolean isSelect(WebElement element) {
return "select".equalsIgnoreCase(element.getTagName());
}
public boolean clear(String place) {
boolean result = false;
WebElement element = getElement(place);
if (element != null) {
element.clear();
result = true;
}
return result;
}
protected WebElement getElement(String place) {
return getSeleniumHelper().getElement(place);
}
/**
* Scrolls browser window (and waits a bit) so top of place becomes visible.
* @param place element to scroll to.
*/
public void scrollTo(String place) {
WebElement element = getElement(place);
if (place != null) {
scrollTo(element, 250);
}
}
/**
* Scrolls browser window so top of element becomes visible.
* @param element element to scroll to.
*/
protected void scrollTo(WebElement element) {
getSeleniumHelper().executeJavascript("arguments[0].scrollIntoView(true);", element);
}
/**
* Scrolls browser window so top of element becomes visible.
* After scroll command some millisecond wait will allow scroll to complete.
* @param element element to scroll to.
* @param msToWaitAfterScroll number of milliseconds to wait.
*/
protected void scrollTo(WebElement element, int msToWaitAfterScroll) {
scrollTo(element);
waitMilliSeconds(msToWaitAfterScroll);
}
/**
* @param timeout number of seconds before waitUntil() throws TimeOutException.
*/
public void secondsBeforeTimeout(int timeout) {
secondsBeforeTimeout = timeout;
}
/**
* @return number of seconds waitUntil() will wait at most.
*/
public int secondsBeforeTimeout() {
return secondsBeforeTimeout;
}
/**
* Clears HTML5's localStorage.
*/
public void clearLocalStorage() {
getSeleniumHelper().executeJavascript("localStorage.clear();");
}
/**
* @param directory sets base directory where screenshots will be stored.
*/
public void screenshotBaseDirectory(String directory) {
if (directory.equals("")
|| directory.endsWith("/")
|| directory.endsWith("\\")) {
screenshotBase = directory;
} else {
screenshotBase = directory + "/";
}
}
/**
* @param height height to use to display screenshot images
*/
public void screenshotShowHeight(String height) {
screenshotHeight = height;
}
/**
* Takes screenshot from current page
* @param basename filename (below screenshot base directory).
* @return location of screenshot.
*/
public String takeScreenshot(String basename) {
String name = screenshotBase + basename;
String screenshotFile = getSeleniumHelper().takeScreenshot(name);
if (screenshotFile == null) {
throw new RuntimeException("Unable to take screenshot: does the webdriver support it?");
} else {
if (screenshotFile.startsWith(FILES_DIR)) {
// make href to screenshot
String relativeFile = screenshotFile.substring(FILES_DIR.length());
relativeFile = relativeFile.replace('\\', '/');
String wikiUrl = "/files" + relativeFile;
if ("".equals(screenshotHeight)) {
wikiUrl = String.format("<a href=\"%s\">%s</a>",
wikiUrl, screenshotFile);
} else {
wikiUrl = String.format("<a href=\"%1$s\"><img src=\"%1$s\" title=\"%2$s\" height=\"%3$s\"></a>",
wikiUrl, screenshotFile, screenshotHeight);
}
screenshotFile = wikiUrl;
}
}
return screenshotFile;
}
/**
* Implementations should wait until the condition evaluates to a value that is neither null nor
* false. Because of this contract, the return type must not be Void.
* @param <T> the return type of the method, which must not be Void
* @param condition condition to evaluate to determine whether waiting can be stopped.
* @throws org.openqa.selenium.TimeoutException if condition was not met before secondsBeforeTimeout.
* @return result of condition.
*/
protected <T> T waitUntil(ExpectedCondition<T> condition) {
try {
FluentWait<WebDriver> wait = waitDriver().withTimeout(secondsBeforeTimeout(), TimeUnit.SECONDS);
return wait.until(condition);
} catch (TimeoutException e) {
// take a screenshot of what was on screen
takeScreenshot("timeouts/" + getClass().getSimpleName() + "/timeout");
throw e;
}
}
private WebDriverWait waitDriver() {
return getSeleniumHelper().waitDriver();
}
/**
* @return helper to use.
*/
protected final SeleniumHelper getSeleniumHelper() {
return seleniumHelper;
}
/**
* Sets SeleniumHelper to use, for testing purposes.
* @param helper helper to use.
*/
void setSeleniumHelper(SeleniumHelper helper) {
seleniumHelper = helper;
}
}
| more scrolling to ensure elements can be clicked, or text obtained.
There does not seem to be a need to wait after scroll
| src/main/java/nl/hsac/fitnesse/fixture/slim/web/BrowserTest.java | more scrolling to ensure elements can be clicked, or text obtained. There does not seem to be a need to wait after scroll |
|
Java | apache-2.0 | 807576d82a6430d79bd84b354635f76e5de99e76 | 0 | jguerinet/MyMartlet-Android,jguerinet/MyMartlet,jguerinet/MyMartlet,jguerinet/MyMartlet,jguerinet/MyMartlet | /*
* Copyright 2014-2016 Appvelopers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.appvelopers.mcgillmobile.ui;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.ColorInt;
import android.support.annotation.NonNull;
import android.support.v4.app.FragmentManager;
import android.support.v4.util.Pair;
import android.support.v4.view.MenuItemCompat;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.SearchView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.guerinet.formgenerator.FormGenerator;
import com.guerinet.formgenerator.TextViewFormItem;
import com.guerinet.utils.Utils;
import com.guerinet.utils.dialog.DialogUtils;
import junit.framework.Assert;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.BindColor;
import butterknife.ButterKnife;
import butterknife.OnClick;
import ca.appvelopers.mcgillmobile.R;
import ca.appvelopers.mcgillmobile.model.Place;
import ca.appvelopers.mcgillmobile.model.PlaceType;
import ca.appvelopers.mcgillmobile.ui.dialog.list.PlaceTypeListAdapter;
import ca.appvelopers.mcgillmobile.util.manager.HomepageManager;
import ca.appvelopers.mcgillmobile.util.manager.PlacesManager;
import timber.log.Timber;
/**
* Displays a campus map
* @author Ryan Singzon
* @author Julien Guerinet
* @author Quang Dao
* @since 1.0.0
*/
public class MapActivity extends DrawerActivity implements OnMapReadyCallback,
GoogleMap.OnMarkerClickListener {
private static final int LOCATION_REQUEST = 101;
/**
* Info container used to show the current place's detail
*/
@Bind(R.id.info_container)
protected LinearLayout infoContainer;
/**
* {@link FormGenerator} container for the filter
*/
@Bind(R.id.container)
protected LinearLayout container;
/**
* Current place's title
*/
@Bind(R.id.place_title)
protected TextView title;
/**
* Current place's address
*/
@Bind(R.id.place_address)
protected TextView address;
/**
* Button to get directions to a place
*/
@Bind(R.id.directions)
protected Button directions;
/**
* Button to add or remove a place from the user's favorites
*/
@Bind(R.id.map_favorite)
protected Button favorite;
/**
* Primary color for the {@link TextView} drawables
*/
@BindColor(R.color.red)
protected @ColorInt int primaryColor;
/**
* {@link PlacesManager} instance
*/
@Inject
protected PlacesManager placesManager;
/**
* Fragment containing the map
*/
private GoogleMap map;
/**
* Total list of places with their associated markers
*/
private List<Pair<Place, Marker>> places;
/**
* Currently shown map places with their associated markers
*/
private List<Pair<Place, Marker>> shownPlaces;
/**
* Currently shown place with its associated marker
*/
private Pair<Place, Marker> place;
/**
* Currently selected category
*/
private PlaceType type;
/**
* Current search String
*/
private String searchString;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_map);
        ButterKnife.bind(this);
        analytics.sendScreen("Map");
        //Set up the initial information: no places loaded yet, empty search, and a
        // "show everything" place type (the false flag presumably selects the ALL
        // category — confirm against PlaceType's constructor)
        places = new ArrayList<>();
        shownPlaces = new ArrayList<>();
        searchString = "";
        type = new PlaceType(false);
        FormGenerator fg = FormGenerator.bind(this, container);
        //Set up the place filter row: tapping it opens a single-choice dialog of
        // place types; choosing one updates the row label and re-filters the map
        final TextViewFormItem typeView = fg.text(type.getString(this, languageManager.get()));
        typeView.leftIcon(R.drawable.ic_location)
                .rightIcon(R.drawable.ic_chevron_right, R.color.grey)
                .onClick(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        DialogUtils.list(MapActivity.this, R.string.map_filter,
                                new PlaceTypeListAdapter(MapActivity.this, type) {
                                    @Override
                                    public void onPlaceTypeSelected(PlaceType type) {
                                        MapActivity.this.type = type;
                                        //Update the text
                                        typeView.view().setText(
                                                MapActivity.this.type.getString(MapActivity.this,
                                                        languageManager.get()));
                                        //Update the filtered places
                                        filterByCategory();
                                    }
                                });
                    }
                });
        //Tint the drawables for the two buttons
        Utils.setTint(directions, 0, primaryColor);
        Utils.setTint(favorite, 0, primaryColor);
        FragmentManager manager = getSupportFragmentManager();
        //Get the MapFragment
        SupportMapFragment fragment = (SupportMapFragment) manager.findFragmentById(R.id.map);
        //If it's null, initialize it and put it in its view
        if (fragment == null) {
            fragment = SupportMapFragment.newInstance();
            manager.beginTransaction()
                    .replace(R.id.map, fragment)
                    .addToBackStack(null)
                    .commit();
        }
        //Asynchronously load the map; onMapReady() is the callback
        fragment.getMapAsync(this);
    }
    /**
     * Builds the toolbar search widget. The query text is pushed into
     * {@code searchString} on every change/submit/close and the shown places are
     * re-filtered each time.
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.search, menu);
        //Get the SearchView
        MenuItem item = menu.findItem(R.id.action_search);
        Assert.assertNotNull(getSupportActionBar());
        final SearchView searchView = new SearchView(getSupportActionBar().getThemedContext());
        final int textViewID = searchView.getContext().getResources()
                .getIdentifier("android:id/search_src_text", null, null);
        final AutoCompleteTextView searchTextView =
                (AutoCompleteTextView) searchView.findViewById(textViewID);
        try {
            //Set the cursor to the same color as the text.
            //NOTE(review): this pokes the private field "mCursorDrawableRes" via
            // reflection, which can silently stop working on newer Android versions;
            // the failure is deliberately non-fatal and only logged below
            Field cursorDrawable = TextView.class.getDeclaredField("mCursorDrawableRes");
            cursorDrawable.setAccessible(true);
            cursorDrawable.set(searchTextView, 0);
        } catch (Exception e) {
            Timber.e(e, "Cannot change color of cursor");
        }
        //Set up the query listener
        MenuItemCompat.setActionView(item, searchView);
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                searchString = query;
                filterBySearchString();
                return false;
            }
            @Override
            public boolean onQueryTextChange(String newText) {
                searchString = newText;
                filterBySearchString();
                return false;
            }
        });
        //Reset the search view: closing the widget clears the filter entirely
        searchView.setOnCloseListener(new SearchView.OnCloseListener() {
            @Override
            public boolean onClose() {
                searchString = "";
                filterBySearchString();
                return false;
            }
        });
        return true;
    }
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[],
@NonNull int[] grantResults) {
switch (requestCode) {
case LOCATION_REQUEST:
//Check if the permission has been granted
if (grantResults.length > 0 &&
grantResults[0] == PackageManager.PERMISSION_GRANTED) {
//Show the user on the map if that is the case
if (map != null) {
//noinspection MissingPermission
map.setMyLocationEnabled(true);
}
}
break;
default:
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
    /**
     * @return The identifier of this drawer section (the map page), used by the
     *         parent {@code DrawerActivity} to highlight the current entry
     */
    @Override
    protected @HomepageManager.Homepage
    int getCurrentPage() {
        return HomepageManager.MAP;
    }
/**
* Opens Google Maps with directions to the chosen place
*/
@OnClick(R.id.directions)
protected void directions() {
//Open Google Maps
if (place != null) {
Intent intent = new Intent(Intent.ACTION_VIEW,
Uri.parse("http://maps.google.com/maps?f=d &daddr=" +
place.second.getPosition().latitude + "," +
place.second.getPosition().longitude));
startActivity(intent);
}
}
/**
* Adds or remove a place from the user's favorites
*/
@OnClick(R.id.map_favorite)
protected void favorites() {
if (place != null) {
String message;
//Check if it was in the favorites
if (placesManager.isFavorite(place.first)) {
placesManager.removeFavorite(place.first);
//Set the toast message
message = getString(R.string.map_favorites_removed, place.first.getName());
//Change the text to "Add Favorites"
favorite.setText(R.string.map_favorites_add);
//If we are in the favorites category, we need to hide this pin
if (type.getId() == PlaceType.FAVORITES) {
place.second.setVisible(false);
}
} else {
placesManager.addFavorite(place.first);
//Set the toast message
message = getString(R.string.map_favorites_added, place.first.getName());
//Change the text to "Remove Favorites"
favorite.setText(getString(R.string.map_favorites_remove));
}
//Alert the user
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
}
/**
* Shows or hides the given place
*
* @param place The place
* @param visible True if the place should be visible, false otherwise
*/
private void showPlace(Pair<Place, Marker> place, boolean visible) {
place.second.setVisible(visible);
if (visible) {
shownPlaces.add(place);
}
}
/**
* Filters the current places by the selected category
*/
private void filterByCategory() {
//Reset the current places
shownPlaces.clear();
//Go through the places
for (Pair<Place, Marker> place : places) {
switch (type.getId()) {
//Show all of the places
case PlaceType.ALL:
showPlace(place, true);
break;
//Show only the favorite places
case PlaceType.FAVORITES:
showPlace(place, placesManager.isFavorite(place.first));
break;
//Show the places for the current category
default:
showPlace(place, place.first.isOfType(type));
break;
}
}
//Filter also by the search String if there is one
filterBySearchString();
}
/**
* Filters the current places by the entered search String
*/
private void filterBySearchString() {
//If there is no search String, just show everything
if (searchString.isEmpty()) {
for (Pair<Place, Marker> place : shownPlaces) {
place.second.setVisible(true);
}
return;
}
//Keep track of the shown place if there's only one
Marker shownPlace = null;
boolean onePlace = false;
for (Pair<Place, Marker> mapPlace : shownPlaces) {
boolean visible = mapPlace.first.getName().toLowerCase()
.contains(searchString.toLowerCase());
mapPlace.second.setVisible(visible);
if (visible) {
//If onePlace is already set, then set it back to false
// since there will be more than 2
if (onePlace) {
onePlace = false;
}
if (shownPlace == null) {
//If there's no shown place, set it
shownPlace = mapPlace.second;
onePlace = true;
}
}
}
//If you're showing only one place, focus on that place
if (onePlace && map != null) {
map.animateCamera(CameraUpdateFactory.newLatLng(shownPlace.getPosition()));
}
}
    /**
     * Called when the map is loaded: centers the camera on campus, enables the
     * my-location layer (or requests the permission), drops one marker per known
     * place, and applies the current filters.
     */
    @Override
    public void onMapReady(GoogleMap googleMap) {
        map = googleMap;
        //Set the camera's center position to the McGill campus
        CameraPosition cameraPosition = new CameraPosition.Builder()
                .target(new LatLng(45.504435, -73.576006))
                .zoom(14)
                .bearing(-54)
                .tilt(0)
                .build();
        map.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
        //Show the user's location if we have the permission to
        if (Utils.requestPermission(this, Manifest.permission.ACCESS_FINE_LOCATION,
                LOCATION_REQUEST)) {
            //noinspection MissingPermission
            map.setMyLocationEnabled(true);
        }
        //If we don't, it will be requested
        // (handled asynchronously in onRequestPermissionsResult)
        //Go through all of the places
        for (Place place : placesManager.getPlaces()) {
            //Create a MapPlace for this
            Marker marker = map.addMarker(new MarkerOptions()
                    .position(place.getCoordinates())
                    .draggable(false)
                    .visible(true));
            //Add it to the list
            places.add(new Pair<>(place, marker));
        }
        //Filter
        filterByCategory();
        map.setOnMarkerClickListener(this);
    }
    /**
     * Handles a marker tap: resets the previously selected marker to red, selects
     * the tapped place, turns its marker blue, and fills the info panel (title,
     * address, favorite-button label). Always returns false so the default marker
     * behavior still runs — presumably to keep the camera centering; confirm intent.
     */
    @Override
    public boolean onMarkerClick(Marker marker) {
        //If there was a marker that was selected before set it back to red
        if (place != null) {
            place.second.setIcon(BitmapDescriptorFactory
                    .defaultMarker(BitmapDescriptorFactory.HUE_RED));
        }
        //Pull up the info container
        infoContainer.setVisibility(View.VISIBLE);
        //Find the concerned place
        place = null;
        for (Pair<Place, Marker> mapPlace : places) {
            if (mapPlace.second.equals(marker)) {
                place = mapPlace;
                break;
            }
        }
        //Defensive: an unknown marker should never happen, but fail gracefully
        if (place == null) {
            Timber.e("Tapped place marker was not found");
            return false;
        }
        //Set it to blue
        place.second.setIcon(BitmapDescriptorFactory
                .defaultMarker(BitmapDescriptorFactory.HUE_AZURE));
        //Set up the info
        title.setText(place.first.getName());
        address.setText(place.first.getAddress());
        //Set up the favorite text
        favorite.setText(placesManager.isFavorite(place.first) ?
                R.string.map_favorites_remove : R.string.map_favorites_add);
        return false;
    }
} | app/src/main/java/ca/appvelopers/mcgillmobile/ui/MapActivity.java | /*
* Copyright 2014-2016 Appvelopers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.appvelopers.mcgillmobile.ui;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.ColorInt;
import android.support.annotation.NonNull;
import android.support.v4.app.FragmentManager;
import android.support.v4.util.Pair;
import android.support.v4.view.MenuItemCompat;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.SearchView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.guerinet.formgenerator.FormGenerator;
import com.guerinet.formgenerator.TextViewFormItem;
import com.guerinet.utils.Utils;
import com.guerinet.utils.dialog.DialogUtils;
import junit.framework.Assert;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.BindColor;
import butterknife.ButterKnife;
import butterknife.OnClick;
import ca.appvelopers.mcgillmobile.R;
import ca.appvelopers.mcgillmobile.model.Place;
import ca.appvelopers.mcgillmobile.model.PlaceType;
import ca.appvelopers.mcgillmobile.ui.dialog.list.PlaceTypeListAdapter;
import ca.appvelopers.mcgillmobile.util.manager.HomepageManager;
import ca.appvelopers.mcgillmobile.util.manager.PlacesManager;
import timber.log.Timber;
/**
* Displays a campus map
* @author Ryan Singzon
* @author Julien Guerinet
* @author Quang Dao
* @since 1.0.0
*/
public class MapActivity extends DrawerActivity implements OnMapReadyCallback,
GoogleMap.OnMarkerClickListener {
private static final int LOCATION_REQUEST = 101;
/**
* Info container used to show the current place's detail
*/
@Bind(R.id.info_container)
protected LinearLayout infoContainer;
/**
* {@link FormGenerator} container for the filter
*/
@Bind(R.id.container)
protected LinearLayout container;
/**
* Current place's title
*/
@Bind(R.id.place_title)
protected TextView title;
/**
* Current place's address
*/
@Bind(R.id.place_address)
protected TextView address;
/**
* Button to get directions to a place
*/
@Bind(R.id.directions)
protected Button directions;
/**
* Button to add or remove a place from the user's favorites
*/
@Bind(R.id.map_favorite)
protected Button favorite;
/**
* Primary color for the {@link TextView} drawables
*/
@BindColor(R.color.red)
protected @ColorInt int primaryColor;
/**
* {@link PlacesManager} instance
*/
@Inject
protected PlacesManager placesManager;
/**
* Fragment containing the map
*/
private GoogleMap map;
/**
* Total list of places with their associated markers
*/
private List<Pair<Place, Marker>> places;
/**
* Currently shown map places with their associated markers
*/
private List<Pair<Place, Marker>> shownPlaces;
/**
* Currently shown place with its associated marker
*/
private Pair<Place, Marker> place;
/**
* Currently selected category
*/
private PlaceType mType;
/**
* Current search String
*/
private String mSearchString;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_map);
        ButterKnife.bind(this);
        analytics.sendScreen("Map");
        //Set up the initial information: no places, empty search, default type
        places = new ArrayList<>();
        shownPlaces = new ArrayList<>();
        mSearchString = "";
        mType = new PlaceType(false);
        FormGenerator fg = FormGenerator.bind(this, container);
        //Set up the place filter row: tapping opens a type-chooser dialog that
        // updates the row label and re-filters the markers
        final TextViewFormItem typeView = fg.text(mType.getString(this, languageManager.get()));
        typeView.leftIcon(R.drawable.ic_location)
                .rightIcon(R.drawable.ic_chevron_right, R.color.grey)
                .onClick(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        DialogUtils.list(MapActivity.this, R.string.map_filter,
                                new PlaceTypeListAdapter(MapActivity.this, mType) {
                                    @Override
                                    public void onPlaceTypeSelected(PlaceType type) {
                                        mType = type;
                                        //Update the text
                                        typeView.view().setText(mType.getString(MapActivity.this,
                                                languageManager.get()));
                                        //Update the filtered places
                                        filterByCategory();
                                    }
                                });
                    }
                });
        //Tint the drawables for the two buttons
        Utils.setTint(directions, 0, primaryColor);
        Utils.setTint(favorite, 0, primaryColor);
        FragmentManager manager = getSupportFragmentManager();
        //Get the MapFragment
        SupportMapFragment fragment = (SupportMapFragment) manager.findFragmentById(R.id.map);
        //If it's null, initialize it and put it in its view
        if (fragment == null) {
            fragment = SupportMapFragment.newInstance();
            manager.beginTransaction()
                    .replace(R.id.map, fragment)
                    .addToBackStack(null)
                    .commit();
        }
        //Asynchronously load the map; onMapReady() is the callback
        fragment.getMapAsync(this);
    }
    /**
     * Builds the toolbar search widget. The query text is pushed into
     * {@code mSearchString} on every change/submit/close, re-filtering each time.
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.search, menu);
        //Get the SearchView
        MenuItem item = menu.findItem(R.id.action_search);
        Assert.assertNotNull(getSupportActionBar());
        final SearchView searchView = new SearchView(getSupportActionBar().getThemedContext());
        final int textViewID = searchView.getContext().getResources()
                .getIdentifier("android:id/search_src_text", null, null);
        final AutoCompleteTextView searchTextView =
                (AutoCompleteTextView) searchView.findViewById(textViewID);
        try {
            //Set the cursor to the same color as the text.
            //NOTE(review): reflection on a private framework field; may break on
            // newer Android releases, hence the non-fatal catch below
            Field cursorDrawable = TextView.class.getDeclaredField("mCursorDrawableRes");
            cursorDrawable.setAccessible(true);
            cursorDrawable.set(searchTextView, 0);
        } catch (Exception e) {
            Timber.e(e, "Cannot change color of cursor");
        }
        //Set up the query listener
        MenuItemCompat.setActionView(item, searchView);
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                mSearchString = query;
                filterBySearchString();
                return false;
            }
            @Override
            public boolean onQueryTextChange(String newText) {
                mSearchString = newText;
                filterBySearchString();
                return false;
            }
        });
        //Reset the search view: closing the widget clears the filter entirely
        searchView.setOnCloseListener(new SearchView.OnCloseListener() {
            @Override
            public boolean onClose() {
                mSearchString = "";
                filterBySearchString();
                return false;
            }
        });
        return true;
    }
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[],
@NonNull int[] grantResults) {
switch (requestCode) {
case LOCATION_REQUEST:
//Check if the permission has been granted
if (grantResults.length > 0 &&
grantResults[0] == PackageManager.PERMISSION_GRANTED) {
//Show the user on the map if that is the case
if (map != null) {
//noinspection MissingPermission
map.setMyLocationEnabled(true);
}
}
break;
default:
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
    /**
     * @return The identifier of this drawer section (the map page), used by the
     *         parent {@code DrawerActivity} to highlight the current entry
     */
    @Override
    protected @HomepageManager.Homepage
    int getCurrentPage() {
        return HomepageManager.MAP;
    }
/**
* Opens Google Maps with directions to the chosen place
*/
@OnClick(R.id.directions)
protected void directions() {
//Open Google Maps
if (place != null) {
Intent intent = new Intent(Intent.ACTION_VIEW,
Uri.parse("http://maps.google.com/maps?f=d &daddr=" +
place.second.getPosition().latitude + "," +
place.second.getPosition().longitude));
startActivity(intent);
}
}
/**
* Adds or remove a place from the user's favorites
*/
@OnClick(R.id.map_favorite)
protected void favorites() {
if (place != null) {
String message;
//Check if it was in the favorites
if (placesManager.isFavorite(place.first)) {
placesManager.removeFavorite(place.first);
//Set the toast message
message = getString(R.string.map_favorites_removed, place.first.getName());
//Change the text to "Add Favorites"
favorite.setText(R.string.map_favorites_add);
//If we are in the favorites category, we need to hide this pin
if (mType.getId() == PlaceType.FAVORITES) {
place.second.setVisible(false);
}
} else {
placesManager.addFavorite(place.first);
//Set the toast message
message = getString(R.string.map_favorites_added, place.first.getName());
//Change the text to "Remove Favorites"
favorite.setText(getString(R.string.map_favorites_remove));
}
//Alert the user
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
}
/**
* Shows or hides the given place
*
* @param place The place
* @param visible True if the place should be visible, false otherwise
*/
private void showPlace(Pair<Place, Marker> place, boolean visible) {
place.second.setVisible(visible);
if (visible) {
shownPlaces.add(place);
}
}
/**
* Filters the current places by the selected category
*/
private void filterByCategory() {
//Reset the current places
shownPlaces.clear();
//Go through the places
for (Pair<Place, Marker> place : places) {
switch (mType.getId()) {
//Show all of the places
case PlaceType.ALL:
showPlace(place, true);
break;
//Show only the favorite places
case PlaceType.FAVORITES:
showPlace(place, placesManager.isFavorite(place.first));
break;
//Show the places for the current category
default:
showPlace(place, place.first.isOfType(mType));
break;
}
}
//Filter also by the search String if there is one
filterBySearchString();
}
/**
* Filters the current places by the entered search String
*/
private void filterBySearchString() {
//If there is no search String, just show everything
if (mSearchString.isEmpty()) {
for (Pair<Place, Marker> place : shownPlaces) {
place.second.setVisible(true);
}
return;
}
//Keep track of the shown place if there's only one
Marker shownPlace = null;
boolean onePlace = false;
for (Pair<Place, Marker> mapPlace : shownPlaces) {
boolean visible = mapPlace.first.getName().toLowerCase()
.contains(mSearchString.toLowerCase());
mapPlace.second.setVisible(visible);
if (visible) {
//If onePlace is already set, then set it back to false
// since there will be more than 2
if (onePlace) {
onePlace = false;
}
if (shownPlace == null) {
//If there's no shown place, set it
shownPlace = mapPlace.second;
onePlace = true;
}
}
}
//If you're showing only one place, focus on that place
if (onePlace && map != null) {
map.animateCamera(CameraUpdateFactory.newLatLng(shownPlace.getPosition()));
}
}
    /**
     * Called when the map is loaded: centers the camera on campus, enables the
     * my-location layer (or requests the permission), drops one marker per known
     * place, and applies the current filters.
     */
    @Override
    public void onMapReady(GoogleMap googleMap) {
        map = googleMap;
        //Set the camera's center position to the McGill campus
        CameraPosition cameraPosition = new CameraPosition.Builder()
                .target(new LatLng(45.504435, -73.576006))
                .zoom(14)
                .bearing(-54)
                .tilt(0)
                .build();
        map.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
        //Show the user's location if we have the permission to
        if (Utils.requestPermission(this, Manifest.permission.ACCESS_FINE_LOCATION,
                LOCATION_REQUEST)) {
            //noinspection MissingPermission
            map.setMyLocationEnabled(true);
        }
        //If we don't, it will be requested
        // (handled asynchronously in onRequestPermissionsResult)
        //Go through all of the places
        for (Place place : placesManager.getPlaces()) {
            //Create a MapPlace for this
            Marker marker = map.addMarker(new MarkerOptions()
                    .position(place.getCoordinates())
                    .draggable(false)
                    .visible(true));
            //Add it to the list
            places.add(new Pair<>(place, marker));
        }
        //Filter
        filterByCategory();
        map.setOnMarkerClickListener(this);
    }
    /**
     * Handles a marker tap: resets the previously selected marker to red, selects
     * the tapped place, turns its marker blue, and fills the info panel (title,
     * address, favorite-button label). Always returns false so the default marker
     * behavior still runs — presumably to keep the camera centering; confirm intent.
     */
    @Override
    public boolean onMarkerClick(Marker marker) {
        //If there was a marker that was selected before set it back to red
        if (place != null) {
            place.second.setIcon(BitmapDescriptorFactory
                    .defaultMarker(BitmapDescriptorFactory.HUE_RED));
        }
        //Pull up the info container
        infoContainer.setVisibility(View.VISIBLE);
        //Find the concerned place
        place = null;
        for (Pair<Place, Marker> mapPlace : places) {
            if (mapPlace.second.equals(marker)) {
                place = mapPlace;
                break;
            }
        }
        //Defensive: an unknown marker should never happen, but fail gracefully
        if (place == null) {
            Timber.e("Tapped place marker was not found");
            return false;
        }
        //Set it to blue
        place.second.setIcon(BitmapDescriptorFactory
                .defaultMarker(BitmapDescriptorFactory.HUE_AZURE));
        //Set up the info
        title.setText(place.first.getName());
        address.setText(place.first.getAddress());
        //Set up the favorite text
        favorite.setText(placesManager.isFavorite(place.first) ?
                R.string.map_favorites_remove : R.string.map_favorites_add);
        return false;
    }
} | Removed Hungarian notation from MapActivity.
| app/src/main/java/ca/appvelopers/mcgillmobile/ui/MapActivity.java | Removed Hungarian notation from MapActivity. |
|
Java | apache-2.0 | 7f6788af1a10688662034b33569b4bca89222605 | 0 | gfyoung/elasticsearch,uschindler/elasticsearch,nknize/elasticsearch,HonzaKral/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,vroyer/elassandra,gingerwizard/elasticsearch,gfyoung/elasticsearch,strapdata/elassandra,HonzaKral/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,HonzaKral/elasticsearch,coding0011/elasticsearch,nknize/elasticsearch,coding0011/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,strapdata/elassandra,gingerwizard/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,coding0011/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,scorpionvicky/elasticsearch,scorpionvicky/elasticsearch,strapdata/elassandra,gfyoung/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,strapdata/elassandra,gfyoung/elasticsearch,gingerwizard/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,vroyer/elassandra,strapdata/elassandra,uschindler/elasticsearch,HonzaKral/elasticsearch,vroyer/elassandra,coding0011/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,gfyoung/elasticsearch,GlenRSmith/elasticsearch | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.integration;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsIndices;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.license.core.License.OperationMode;
import org.elasticsearch.license.plugin.Licensing;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.Licensee;
import org.elasticsearch.license.plugin.core.LicensesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.MockNettyPlugin;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.graph.GraphLicensee;
import org.elasticsearch.xpack.monitoring.MonitoringLicensee;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.SecurityLicenseState;
import org.elasticsearch.xpack.security.SecurityLicensee;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.WatcherLicensee;
import org.junit.After;
import static java.util.Collections.emptyList;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
public class LicensingTests extends SecurityIntegTestCase {
public static final String ROLES =
SecuritySettingsSource.DEFAULT_ROLE + ":\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [manage]\n" +
" - names: '/.*/'\n" +
" privileges: [write]\n" +
" - names: 'test'\n" +
" privileges: [read]\n" +
" - names: 'test1'\n" +
" privileges: [read]\n" +
"\n" +
"role_a:\n" +
" indices:\n" +
" - names: 'a'\n" +
" privileges: [all]\n" +
"\n" +
"role_b:\n" +
" indices:\n" +
" - names: 'b'\n" +
" privileges: [all]\n";
public static final String USERS =
SecuritySettingsSource.CONFIG_STANDARD_USER +
"user_a:{plain}passwd\n" +
"user_b:{plain}passwd\n";
public static final String USERS_ROLES =
SecuritySettingsSource.CONFIG_STANDARD_USER_ROLES +
"role_a:user_a,user_b\n" +
"role_b:user_b\n";
    /** @return The role definitions (YAML) to install in the test cluster */
    @Override
    protected String configRoles() {
        return ROLES;
    }
    /** @return The user/password entries to install in the test cluster */
    @Override
    protected String configUsers() {
        return USERS;
    }
    /** @return The role-to-user mappings to install in the test cluster */
    @Override
    protected String configUsersRoles() {
        return USERS_ROLES;
    }
    /**
     * Enables HTTP on every node on top of the default settings, so the REST
     * authentication test below can talk to the cluster over HTTP.
     */
    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder().put(super.nodeSettings(nodeOrdinal))
                .put(NetworkModule.HTTP_ENABLED.getKey(), true)
                .build();
    }
    /**
     * Adds the mock Netty plugin to the default node plugins so the nodes expose
     * an HTTP transport in tests.
     */
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
        plugins.add(MockNettyPlugin.class); // for http
        return plugins;
    }
    /** @return The internal X-Pack plugin variant used to control licensing in tests */
    @Override
    protected Class<? extends XPackPlugin> xpackPluginClass() {
        return InternalXPackPlugin.class;
    }
    /** Restores a license that enables security after each test, so tests stay isolated. */
    @After
    public void resetLicensing() {
        enableLicensing();
    }
    /**
     * Verifies that with licensing disabled every read/stats API is rejected with a
     * security exception, and that re-enabling any license mode restores access.
     */
    public void testEnableDisableBehaviour() throws Exception {
        //Index two documents so the stats calls below have something to report
        IndexResponse indexResponse = index("test", "type", jsonBuilder()
                .startObject()
                .field("name", "value")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        indexResponse = index("test1", "type", jsonBuilder()
                .startObject()
                .field("name", "value1")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        refresh();
        Client client = internalCluster().transportClient();
        //With licensing disabled, every one of these calls must be rejected
        disableLicensing();
        assertElasticsearchSecurityException(() -> client.admin().indices().prepareStats().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareClusterStats().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareHealth().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareNodesStats().get());
        //Any license mode re-enables access
        enableLicensing(randomFrom(OperationMode.values()));
        IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats().get();
        assertNoFailures(indicesStatsResponse);
        ClusterStatsResponse clusterStatsNodeResponse = client.admin().cluster().prepareClusterStats().get();
        assertThat(clusterStatsNodeResponse, notNullValue());
        ClusterStatsIndices indices = clusterStatsNodeResponse.getIndicesStats();
        assertThat(indices, notNullValue());
        //At least the two indices created above must be reported
        assertThat(indices.getIndexCount(), greaterThanOrEqualTo(2));
        ClusterHealthResponse clusterIndexHealth = client.admin().cluster().prepareHealth().get();
        assertThat(clusterIndexHealth, notNullValue());
        NodesStatsResponse nodeStats = client.admin().cluster().prepareNodesStats().get();
        assertThat(nodeStats, notNullValue());
    }
    /**
     * Verifies that REST requests need no credentials under a basic license, but
     * return 401 once a license mode that enables auth (gold/trial/platinum/standard)
     * is installed.
     */
    public void testRestAuthenticationByLicenseType() throws Exception {
        try (Response response = getRestClient().performRequest("GET", "/")) {
            // the default of the licensing tests is basic
            assertThat(response.getStatusLine().getStatusCode(), is(200));
        }
        // generate a new license with a mode that enables auth
        OperationMode mode = randomFrom(OperationMode.GOLD, OperationMode.TRIAL, OperationMode.PLATINUM, OperationMode.STANDARD);
        enableLicensing(mode);
        try {
            //No credentials are sent, so this must now be rejected
            getRestClient().performRequest("GET", "/");
            fail("request should have failed");
        } catch(ResponseException e) {
            assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401));
        }
    }
    public void testTransportClientAuthenticationByLicenseType() throws Exception {
        Settings.Builder builder = Settings.builder()
                .put(internalCluster().transportClient().settings());
        // remove user info so the transport client connects without credentials
        builder.remove(Security.USER_SETTING.getKey());
        builder.remove(ThreadContext.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER);
        // basic has no auth: an unauthenticated transport client can connect
        try (TransportClient client = TransportClient.builder().settings(builder).addPlugin(XPackPlugin.class).build()) {
            client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress());
            assertGreenClusterState(client);
        }
        // enable a license that enables security
        OperationMode mode = randomFrom(OperationMode.GOLD, OperationMode.TRIAL, OperationMode.PLATINUM, OperationMode.STANDARD);
        enableLicensing(mode);
        try (TransportClient client = TransportClient.builder().settings(builder).addPlugin(XPackPlugin.class).build()) {
            client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress());
            client.admin().cluster().prepareHealth().get();
            fail("should not have been able to connect to a node!");
        } catch (NoNodeAvailableException e) {
            // expected: with security enabled, the node rejects the unauthenticated client
        }
    }
private static void assertElasticsearchSecurityException(ThrowingRunnable runnable) {
ElasticsearchSecurityException ee = expectThrows(ElasticsearchSecurityException.class, runnable);
assertThat(ee.getHeader("es.license.expired.feature"), hasItem(Security.NAME));
assertThat(ee.status(), is(RestStatus.FORBIDDEN));
}
public static void disableLicensing() {
disableLicensing(OperationMode.BASIC);
}
public static void disableLicensing(OperationMode operationMode) {
for (TestLicensesService service : internalCluster().getInstances(TestLicensesService.class)) {
service.disable(operationMode);
}
}
public static void enableLicensing() {
enableLicensing(OperationMode.BASIC);
}
public static void enableLicensing(OperationMode operationMode) {
for (TestLicensesService service : internalCluster().getInstances(TestLicensesService.class)) {
service.enable(operationMode);
}
}
    /**
     * Test variant of the {@link Licensing} plugin: binds {@link TestLicensesService}
     * in place of the production license service and registers no transport actions
     * or REST handlers, so license state is controlled purely in-process.
     */
    public static class InternalLicensing extends Licensing {
        @Override
        public Collection<Module> nodeModules() {
            return Collections.singletonList(b -> b.bind(LicensesService.class).to(TestLicensesService.class));
        }
        @Override
        public Collection<Object> createComponents(ClusterService clusterService, Clock clock,
                                                   SecurityLicenseState securityLicenseState) {
            // One licensee per x-pack feature, all driven by the same test license service.
            SecurityLicensee securityLicensee = new SecurityLicensee(settings, securityLicenseState);
            WatcherLicensee watcherLicensee = new WatcherLicensee(settings);
            MonitoringLicensee monitoringLicensee = new MonitoringLicensee(settings);
            GraphLicensee graphLicensee = new GraphLicensee(settings);
            TestLicensesService licensesService = new TestLicensesService(settings,
                Arrays.asList(securityLicensee, watcherLicensee, monitoringLicensee, graphLicensee));
            return Arrays.asList(securityLicensee, licensesService, watcherLicensee, monitoringLicensee,
                graphLicensee, securityLicenseState);
        }
        public InternalLicensing() {
            super(Settings.EMPTY);
        }
        @Override
        public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
            return emptyList();
        }
        @Override
        public List<Class<? extends RestHandler>> getRestHandlers() {
            return emptyList();
        }
    }
    /** XPackPlugin that swaps in {@link InternalLicensing}, making license state test-controllable. */
    public static class InternalXPackPlugin extends XPackPlugin {
        public InternalXPackPlugin(Settings settings) throws IOException {
            super(settings);
            licensing = new InternalLicensing();
        }
    }
public static class TestLicensesService extends LicensesService {
private final List<Licensee> licensees;
public TestLicensesService(Settings settings, List<Licensee> licensees) {
super(settings, null, null, Collections.emptyList());
this.licensees = licensees;
enable(OperationMode.BASIC);
}
void enable(OperationMode operationMode) {
for (Licensee licensee : licensees) {
licensee.onChange(new Licensee.Status(operationMode, LicenseState.ENABLED));
}
}
void disable(OperationMode operationMode) {
for (Licensee licensee : licensees) {
licensee.onChange(new Licensee.Status(operationMode, LicenseState.DISABLED));
}
}
@Override
protected void doStart() {}
@Override
protected void doStop() {}
}
}
| elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/LicensingTests.java | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.integration;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsIndices;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.license.core.License.OperationMode;
import org.elasticsearch.license.plugin.Licensing;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.Licensee;
import org.elasticsearch.license.plugin.core.LicensesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.MockNettyPlugin;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.graph.GraphLicensee;
import org.elasticsearch.xpack.monitoring.MonitoringLicensee;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.SecurityLicenseState;
import org.elasticsearch.xpack.security.SecurityLicensee;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.WatcherLicensee;
import org.junit.After;
import static java.util.Collections.emptyList;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
public class LicensingTests extends SecurityIntegTestCase {
public static final String ROLES =
SecuritySettingsSource.DEFAULT_ROLE + ":\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [manage]\n" +
" - names: '/.*/'\n" +
" privileges: [write]\n" +
" - names: 'test'\n" +
" privileges: [read]\n" +
" - names: 'test1'\n" +
" privileges: [read]\n" +
"\n" +
"role_a:\n" +
" indices:\n" +
" - names: 'a'\n" +
" privileges: [all]\n" +
"\n" +
"role_b:\n" +
" indices:\n" +
" - names: 'b'\n" +
" privileges: [all]\n";
public static final String USERS =
SecuritySettingsSource.CONFIG_STANDARD_USER +
"user_a:{plain}passwd\n" +
"user_b:{plain}passwd\n";
public static final String USERS_ROLES =
SecuritySettingsSource.CONFIG_STANDARD_USER_ROLES +
"role_a:user_a,user_b\n" +
"role_b:user_b\n";
    @Override
    protected String configRoles() {
        // Role definitions for the test cluster (see ROLES above).
        return ROLES;
    }
    @Override
    protected String configUsers() {
        // User credentials for the test cluster (see USERS above).
        return USERS;
    }
    @Override
    protected String configUsersRoles() {
        // User-to-role mapping for the test cluster (see USERS_ROLES above).
        return USERS_ROLES;
    }
    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        // Enable HTTP so testRestAuthenticationByLicenseType can reach the node over REST.
        return Settings.builder().put(super.nodeSettings(nodeOrdinal))
                .put(NetworkModule.HTTP_ENABLED.getKey(), true)
                .build();
    }
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
        plugins.add(MockNettyPlugin.class); // for http
        return plugins;
    }
    @Override
    protected Class<? extends XPackPlugin> xpackPluginClass() {
        // Substitute the plugin variant whose licensing can be toggled by tests.
        return InternalXPackPlugin.class;
    }
    @After
    public void resetLicensing() {
        // Reset to the default basic license so tests remain independent of each other.
        enableLicensing();
    }
    public void testEnableDisableBehaviour() throws Exception {
        // Index two documents so the stats APIs exercised below have data to report on.
        IndexResponse indexResponse = index("test", "type", jsonBuilder()
                .startObject()
                .field("name", "value")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        indexResponse = index("test1", "type", jsonBuilder()
                .startObject()
                .field("name", "value1")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        refresh();
        Client client = internalCluster().transportClient();
        // While the license is disabled, every stats/health request must be rejected.
        disableLicensing();
        assertElasticsearchSecurityException(() -> client.admin().indices().prepareStats().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareClusterStats().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareHealth().get());
        assertElasticsearchSecurityException(() -> client.admin().cluster().prepareNodesStats().get());
        // Re-enabling licensing under any operation mode makes the same requests succeed.
        enableLicensing(randomFrom(OperationMode.values()));
        IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats().get();
        assertNoFailures(indicesStatsResponse);
        ClusterStatsResponse clusterStatsNodeResponse = client.admin().cluster().prepareClusterStats().get();
        assertThat(clusterStatsNodeResponse, notNullValue());
        ClusterStatsIndices indices = clusterStatsNodeResponse.getIndicesStats();
        assertThat(indices, notNullValue());
        assertThat(indices.getIndexCount(), greaterThanOrEqualTo(2));
        ClusterHealthResponse clusterIndexHealth = client.admin().cluster().prepareHealth().get();
        assertThat(clusterIndexHealth, notNullValue());
        NodesStatsResponse nodeStats = client.admin().cluster().prepareNodesStats().get();
        assertThat(nodeStats, notNullValue());
    }
    public void testRestAuthenticationByLicenseType() throws Exception {
        try (Response response = getRestClient().performRequest("GET", "/")) {
            // the default of the licensing tests is basic, which requires no REST authentication
            assertThat(response.getStatusLine().getStatusCode(), is(200));
        }
        // generate a new license with a mode that enables auth
        OperationMode mode = randomFrom(OperationMode.GOLD, OperationMode.TRIAL, OperationMode.PLATINUM, OperationMode.STANDARD);
        enableLicensing(mode);
        try {
            getRestClient().performRequest("GET", "/");
            fail("request should have failed");
        } catch(ResponseException e) {
            // unauthenticated requests are now rejected with 401 Unauthorized
            assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401));
        }
    }
    public void testTransportClientAuthenticationByLicenseType() throws Exception {
        Settings.Builder builder = Settings.builder()
                .put(internalCluster().transportClient().settings());
        // remove user info so the transport client connects without credentials
        builder.remove(Security.USER_SETTING.getKey());
        builder.remove(ThreadContext.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER);
        // basic has no auth
        try (TransportClient client = TransportClient.builder().settings(builder).addPlugin(XPackPlugin.class).build()) {
            client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress());
            assertGreenClusterState(client);
        }
        // enable a license that enables security
        OperationMode mode = randomFrom(OperationMode.GOLD, OperationMode.TRIAL, OperationMode.PLATINUM, OperationMode.STANDARD);
        enableLicensing(mode);
        try (TransportClient client = TransportClient.builder().settings(builder).addPlugin(XPackPlugin.class).build()) {
            client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress());
            client.admin().cluster().prepareHealth().get();
            fail("should not have been able to connect to a node!");
        } catch (NoNodeAvailableException e) {
            // expected: with security enabled, the node rejects the unauthenticated client
        }
    }
    // Asserts that running the given action fails with a FORBIDDEN security exception
    // that flags the security feature as unavailable under the current license.
    private static void assertElasticsearchSecurityException(ThrowingRunnable runnable) {
        ElasticsearchSecurityException ee = expectThrows(ElasticsearchSecurityException.class, runnable);
        assertThat(ee.getHeader("es.license.expired.feature"), hasItem(Security.NAME));
        assertThat(ee.status(), is(RestStatus.FORBIDDEN));
    }
    public static void disableLicensing() {
        disableLicensing(OperationMode.BASIC);
    }
    // Pushes a DISABLED license status of the given mode to every node's licensees.
    public static void disableLicensing(OperationMode operationMode) {
        for (TestLicensesService service : internalCluster().getInstances(TestLicensesService.class)) {
            service.disable(operationMode);
        }
    }
    public static void enableLicensing() {
        enableLicensing(OperationMode.BASIC);
    }
    // Pushes an ENABLED license status of the given mode to every node's licensees.
    public static void enableLicensing(OperationMode operationMode) {
        for (TestLicensesService service : internalCluster().getInstances(TestLicensesService.class)) {
            service.enable(operationMode);
        }
    }
public static class InternalLicensing extends Licensing {
@Override
public Collection<Module> nodeModules() {
return Collections.singletonList(b -> b.bind(LicensesService.class).to(TestLicensesService.class));
}
@Override
public Collection<Object> createComponents(ClusterService clusterService, Clock clock,
SecurityLicenseState securityLicenseState) {
SecurityLicensee securityLicensee = new SecurityLicensee(settings, securityLicenseState);
WatcherLicensee watcherLicensee = new WatcherLicensee(settings);
MonitoringLicensee monitoringLicensee = new MonitoringLicensee(settings);
GraphLicensee graphLicensee = new GraphLicensee(settings);
TestLicensesService licensesService = new TestLicensesService(settings,
Arrays.asList(securityLicensee, watcherLicensee, monitoringLicensee, graphLicensee));
return Arrays.asList(securityLicensee, licensesService, watcherLicensee, monitoringLicensee, graphLicensee, securityLicenseState);
}
public InternalLicensing() {
super(Settings.EMPTY);
}
@Override
public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
return emptyList();
}
@Override
public List<Class<? extends RestHandler>> getRestHandlers() {
return emptyList();
}
}
    /** XPackPlugin that swaps in {@link InternalLicensing}, making license state test-controllable. */
    public static class InternalXPackPlugin extends XPackPlugin {
        public InternalXPackPlugin(Settings settings) throws IOException {
            super(settings);
            licensing = new InternalLicensing();
        }
    }
public static class TestLicensesService extends LicensesService {
private final List<Licensee> licensees;
public TestLicensesService(Settings settings, List<Licensee> licensees) {
super(settings, null, null, Collections.emptyList());
this.licensees = licensees;
enable(OperationMode.BASIC);
}
void enable(OperationMode operationMode) {
for (Licensee licensee : licensees) {
licensee.onChange(new Licensee.Status(operationMode, LicenseState.ENABLED));
}
}
void disable(OperationMode operationMode) {
for (Licensee licensee : licensees) {
licensee.onChange(new Licensee.Status(operationMode, LicenseState.DISABLED));
}
}
@Override
protected void doStart() {}
@Override
protected void doStop() {}
}
}
| Fix line length
Original commit: elastic/x-pack-elasticsearch@50e9ef0667108f4225922e62126a39bcb7b714de
| elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/LicensingTests.java | Fix line length |
|
Java | apache-2.0 | bb3d657db1d850d0390d25ecacfc11c528bbda03 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.ide.plugins.DynamicPluginsTestUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.roots.ContentEntry;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.IoTestUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.ex.temp.TempFileSystem;
import com.intellij.openapi.vfs.impl.jar.JarFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.ManagingFS;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;
import com.intellij.testFramework.LoggedErrorProcessor;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.VfsTestUtil;
import com.intellij.testFramework.fixtures.BareTestFixtureTestCase;
import com.intellij.testFramework.rules.TempDirectory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.Compressor;
import com.intellij.util.io.DataInputOutputUtil;
import com.intellij.util.io.SuperUserStatus;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.messages.MessageBusConnection;
import org.apache.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Rule;
import org.junit.Test;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import static com.intellij.openapi.util.io.IoTestUtil.assumeWindows;
import static com.intellij.openapi.util.io.IoTestUtil.setCaseSensitivity;
import static com.intellij.testFramework.EdtTestUtil.runInEdtAndGet;
import static com.intellij.testFramework.EdtTestUtil.runInEdtAndWait;
import static com.intellij.testFramework.UsefulTestCase.assertOneElement;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
public class PersistentFsTest extends BareTestFixtureTestCase {
@Rule public TempDirectory tempDirectory = new TempDirectory();
  @Test
  public void testAccessingFileByID() {
    File file = tempDirectory.newFile("test.txt");
    VirtualFile vFile = refreshAndFind(file);
    int id = ((VirtualFileWithId)vFile).getId();
    // A live file must be resolvable through its persistent record id...
    assertEquals(vFile, PersistentFS.getInstance().findFileById(id));
    VfsTestUtil.deleteFile(vFile);
    // ...and the id must stop resolving once the file is deleted.
    assertNull(PersistentFS.getInstance().findFileById(id));
  }
private static VirtualFile refreshAndFind(File file) {
return Objects.requireNonNull(LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file), file.getPath());
}
  @Test
  public void testFileContentHash() throws Exception {
    File file = tempDirectory.newFile("test.txt", "one".getBytes(StandardCharsets.UTF_8));
    VirtualFile vFile = refreshAndFind(file);
    PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
    byte[] hash = PersistentFSImpl.getContentHashIfStored(vFile);
    assertNull(hash); // content is not yet loaded, so no hash has been stored
    vFile.contentsToByteArray();
    hash = PersistentFSImpl.getContentHashIfStored(vFile);
    assertNotNull(hash); // loading the content stores the hash
    WriteAction.runAndWait(() -> VfsUtil.saveText(vFile, "two"));
    byte[] newHash = PersistentFSImpl.getContentHashIfStored(vFile);
    assertNotNull(newHash);
    assertFalse(Arrays.equals(hash, newHash)); // different contents should have different hashes
    WriteAction.runAndWait(() -> VfsUtil.saveText(vFile, "one"));
    newHash = PersistentFSImpl.getContentHashIfStored(vFile);
    assertArrayEquals(hash, newHash); // equal contents should have the equal hashes
    VfsTestUtil.deleteFile(vFile);
    assertNotNull(fs.contentsToByteArray(vFile)); // deleted files preserve content, and thus hash
    assertArrayEquals(hash, PersistentFSImpl.getContentHashIfStored(vFile));
  }
  @Test
  public void testFindRootShouldNotBeFooledByRelativePath() {
    File x = tempDirectory.newFile("x.jar");
    VirtualFile vx = refreshAndFind(x);
    JarFileSystem jfs = JarFileSystem.getInstance();
    VirtualFile root = jfs.getJarRootForLocalFile(vx);
    // A path containing "/../" must resolve to the same (canonical) root instance, not a duplicate.
    String path = vx.getPath() + "/../" + vx.getName() + JarFileSystem.JAR_SEPARATOR;
    assertSame(PersistentFS.getInstance().findRoot(path, jfs), root);
  }
  // The following three tests pass findRoot() jar names with varying numbers of dots;
  // regardless, the created root must always end up with a canonical path.
  @Test
  public void testFindRootMustCreateFileWithCanonicalPath() {
    checkMustCreateRootWithCanonicalPath("x.jar");
  }
  @Test
  public void testFindRootMustCreateFileWithStillCanonicalPath() {
    checkMustCreateRootWithCanonicalPath("x..jar");
  }
  @Test
  public void testFindRootMustCreateFileWithYetAnotherCanonicalPath() {
    checkMustCreateRootWithCanonicalPath("x...jar");
  }
private void checkMustCreateRootWithCanonicalPath(String jarName) {
File x = tempDirectory.newFile(jarName);
refreshAndFind(x);
JarFileSystem jfs = JarFileSystem.getInstance();
String path = x.getPath() + "/../" + x.getName() + JarFileSystem.JAR_SEPARATOR;
NewVirtualFile root = PersistentFS.getInstance().findRoot(path, jfs);
assertNotNull(path, root);
assertFalse(root.getPath(), root.getPath().contains("../"));
assertFalse(root.getPath(), root.getPath().contains("/.."));
}
  @Test
  public void testDeleteSubstRoots() {
    IoTestUtil.assumeWindows(); // subst drives exist only on Windows
    File substRoot = IoTestUtil.createSubst(tempDirectory.getRoot().getPath());
    VirtualFile subst;
    try {
      subst = refreshAndFind(substRoot);
      assertNotNull(substRoot.listFiles());
    }
    finally {
      // remove the subst drive before asserting so refresh below sees it gone
      IoTestUtil.deleteSubst(substRoot.getPath());
    }
    subst.refresh(false, true);
    // After the subst drive is gone, no VFS root may still point under it.
    VirtualFile[] roots = PersistentFS.getInstance().getRoots(LocalFileSystem.getInstance());
    for (VirtualFile root : roots) {
      String prefix = StringUtil.commonPrefix(root.getPath(), substRoot.getPath());
      assertTrue(prefix, prefix.isEmpty());
    }
  }
  @Test
  public void testLocalRoots() {
    VirtualFile tempRoot = VirtualFileManager.getInstance().findFileByUrl("temp:///");
    assertNotNull(tempRoot);
    // getLocalRoots() must report only genuine local-FS roots; the temp:// FS is excluded
    // even though it also claims to be "in local file system".
    for (VirtualFile root : PersistentFS.getInstance().getLocalRoots()) {
      assertTrue("root=" + root, root.isInLocalFileSystem());
      VirtualFileSystem fs = root.getFileSystem();
      assertTrue("fs=" + fs, fs instanceof LocalFileSystem);
      assertFalse("fs=" + fs, fs instanceof TempFileSystem);
    }
  }
@Test
public void testInvalidJarRootsIgnored() {
File file = tempDirectory.newFile("file.txt");
String url = "jar://" + FileUtil.toSystemIndependentName(file.getPath()) + "!/";
assertNull(VirtualFileManager.getInstance().findFileByUrl(url));
}
  @Test
  public void testBrokenJarRoots() throws IOException {
    File jarFile = tempDirectory.newFile("empty.jar"); // zero-length, i.e. not a valid jar
    VirtualFile local = refreshAndFind(jarFile);
    String rootUrl = "jar://" + local.getPath() + "!/";
    String entryUrl = rootUrl + JarFile.MANIFEST_NAME;
    // Count warnings mentioning the broken jar; exactly one is expected overall.
    int[] logCount = {0};
    LoggedErrorProcessor.setNewInstance(new LoggedErrorProcessor() {
      @Override
      public void processWarn(String message, Throwable t, @NotNull Logger logger) {
        super.processWarn(message, t, logger);
        if (message.contains(jarFile.getName())) logCount[0]++;
      }
    });
    try {
      // The broken jar still produces a valid but empty root.
      VirtualFile jarRoot = VirtualFileManager.getInstance().findFileByUrl(rootUrl);
      assertNotNull(jarRoot);
      assertTrue(jarRoot.isValid());
      assertArrayEquals(VirtualFile.EMPTY_ARRAY, jarRoot.getChildren());
      assertNull(VirtualFileManager.getInstance().findFileByUrl(entryUrl));
      // Turning the file into a real jar and refreshing must revive the same root.
      try (Compressor.Jar jar = new Compressor.Jar(jarFile)) {
        jar.addManifest(new Manifest());
      }
      local.refresh(false, false);
      assertTrue(jarRoot.isValid());
      assertEquals(1, jarRoot.getChildren().length);
      assertNotNull(VirtualFileManager.getInstance().findFileByUrl(entryUrl));
    }
    finally {
      LoggedErrorProcessor.restoreDefaultProcessor();
    }
    assertEquals(1, logCount[0]);
  }
  @Test
  public void testIterInDbChildrenWorksForRemovedDirsAfterRestart() throws IOException {
    // The test (re)creates .../subDir/subSubDir/Foo.txt hierarchy outside of a watched project and checks for removal events.
    // It starts the real testing "after a restart" - i.e. when launched for the second time using the same system directory.
    // In terms of the persistence, "subDir/" is partially loaded and "subSubDir/" is fully loaded.
    String testDirName = UsefulTestCase.TEMP_DIR_MARKER + getTestName(false);
    Path nestedTestDir = tempDirectory.getRootPath().getParent().resolve(testDirName + "/subDir/subSubDir");
    boolean secondRun = Files.exists(nestedTestDir.getParent().getParent());
    StringBuilder eventLog = new StringBuilder();
    if (secondRun) {
      // On the second run, record delete events (and their persisted children) relative to the test dir.
      MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable());
      connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
        @Override
        public void before(@NotNull List<? extends VFileEvent> events) {
          for (VFileEvent event : events) {
            if (event instanceof VFileDeleteEvent) {
              process(((VFileDeleteEvent)event).getFile());
            }
          }
        }

        private void process(VirtualFile file) {
          String path = file.getPath();
          eventLog.append(path, path.indexOf(testDirName) + testDirName.length() + 1, path.length()).append('\n');
          ((NewVirtualFile)file).iterInDbChildren().forEach(child -> process(child));
        }
      });
    }

    // Recreating the structure fires VFS removal events.
    VirtualFile vNestedTestDir = WriteAction.computeAndWait(() -> {
      VirtualFile dir = VfsUtil.createDirectoryIfMissing(nestedTestDir.toString());
      dir.createChildData(null, "Foo.txt");
      return dir;
    });
    // Making the directory "fully loaded" in terms of the persistence.
    vNestedTestDir.getChildren();
    // Removing .../subDir via java.io to have VFS events on the next launch.
    FileUtil.delete(nestedTestDir.getParent());

    assumeTrue("Not yet exists: " + nestedTestDir.getParent().getParent(), secondRun);
    // The whole removed subtree must be reported, including children known only from the DB.
    assertEquals("subDir\n" +
                 "subDir/subSubDir\n" +
                 "subDir/subSubDir/Foo.txt\n",
                 eventLog.toString());
  }
  @Test
  public void testModCountIncreases() throws IOException {
    VirtualFile vFile = tempDirectory.newVirtualFile("file.txt");
    ManagingFS managingFS = ManagingFS.getInstance();
    int inSessionModCount = managingFS.getModificationCount();
    int globalModCount = managingFS.getFilesystemModificationCount();
    int parentModCount = managingFS.getModificationCount(vFile.getParent());

    // A flag change bumps the file's and the global counters, but not the parent's.
    WriteAction.runAndWait(() -> vFile.setWritable(false));

    assertEquals(globalModCount + 1, managingFS.getModificationCount(vFile));
    assertEquals(globalModCount + 1, managingFS.getFilesystemModificationCount());
    assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
    assertEquals(inSessionModCount + 1, managingFS.getModificationCount());

    FSRecords.force();
    assertFalse(FSRecords.isDirty());
    ++globalModCount;

    int finalGlobalModCount = globalModCount;

    // Prevent background index flushing from interfering with the counters below.
    try (AccessToken ignore = HeavyProcessLatch.INSTANCE.processStarted("This test wants no indices flush")) {
      WriteAction.runAndWait(() -> {
        long timestamp = vFile.getTimeStamp();
        int finalInSessionModCount = managingFS.getModificationCount();
        vFile.setWritable(true);  // 1 change
        vFile.setBinaryContent("foo".getBytes(Charset.defaultCharset())); // content change + length change + maybe timestamp change

        // we check in write action to avoid observing background thread to index stuff
        int changesCount = timestamp == vFile.getTimeStamp() ? 3 : 4;
        assertEquals(finalGlobalModCount + changesCount, managingFS.getModificationCount(vFile));
        assertEquals(finalGlobalModCount + changesCount, managingFS.getFilesystemModificationCount());
        assertEquals(finalInSessionModCount + changesCount, managingFS.getModificationCount());
        assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
      });
    }
  }
  @Test
  public void testModCountNotIncreases() throws IOException {
    VirtualFile vFile = tempDirectory.newVirtualFile("file.txt");
    ManagingFS managingFS = ManagingFS.getInstance();
    int globalModCount = managingFS.getFilesystemModificationCount();
    int parentModCount = managingFS.getModificationCount(vFile.getParent());
    int inSessionModCount = managingFS.getModificationCount();

    FSRecords.force();
    assertFalse(FSRecords.isDirty());

    // Writing a custom file attribute must not bump per-file or filesystem counters,
    // only the in-session counter; it does mark the records as dirty.
    FileAttribute attribute = new FileAttribute("test.attribute", 1, true);
    WriteAction.runAndWait(() -> {
      try(DataOutputStream output = attribute.writeAttribute(vFile)) {
        DataInputOutputUtil.writeINT(output, 1);
      }
    });

    assertEquals(globalModCount, managingFS.getModificationCount(vFile));
    assertEquals(globalModCount, managingFS.getFilesystemModificationCount());
    assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
    assertEquals(inSessionModCount + 1, managingFS.getModificationCount());

    assertTrue(FSRecords.isDirty());
    FSRecords.force();
    assertFalse(FSRecords.isDirty());

    // Re-writing the same timestamp/length values is a no-op: no counter changes, records stay clean.
    int fileId = ((VirtualFileWithId)vFile).getId();
    FSRecords.setTimestamp(fileId, FSRecords.getTimestamp(fileId));
    FSRecords.setLength(fileId, FSRecords.getLength(fileId));

    assertEquals(globalModCount, managingFS.getModificationCount(vFile));
    assertEquals(globalModCount, managingFS.getFilesystemModificationCount());
    assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
    assertEquals(inSessionModCount + 1, managingFS.getModificationCount());
    assertFalse(FSRecords.isDirty());
  }
  // Applies the given VFS events in one batch and asserts that listeners observe exactly
  // the expected before/after notifications (one log line per notification batch).
  private static void checkEvents(String expectedEvents, VFileEvent... eventsToApply) {
    StringBuilder log = new StringBuilder();

    MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect();
    try {
      connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
        @Override
        public void before(@NotNull List<? extends VFileEvent> events) {
          log("Before:", events);
        }

        @Override
        public void after(@NotNull List<? extends VFileEvent> events) {
          log("After:", events);
        }

        private void log(String prefix, List<? extends VFileEvent> events) {
          log.append(prefix);
          for (VFileEvent e : events) {
            log.append(' ').append(e.getClass().getSimpleName()).append("->").append(PathUtil.getFileName(e.getPath()));
          }
          log.append('\n');
        }
      });

      WriteCommandAction.runWriteCommandAction(null, () -> PersistentFS.getInstance().processEvents(Arrays.asList(eventsToApply)));
    }
    finally {
      connection.disconnect();
    }

    assertEquals(expectedEvents, log.toString());
  }
@Test
public void testProcessEventsMustIgnoreDeleteDuplicates() {
VirtualFile file = tempDirectory.newVirtualFile("file.txt");
checkEvents("Before: VFileDeleteEvent->file.txt\n" +
"After: VFileDeleteEvent->file.txt\n",
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file, false));
}
// Independent events (create in parent + delete of a sibling) may be merged into one batch,
// while the duplicate delete is still filtered out.
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly() {
VirtualFile file = tempDirectory.newVirtualFile("file.txt");
checkEvents("Before: VFileCreateEvent->xx.created VFileDeleteEvent->file.txt\n" +
"After: VFileCreateEvent->xx.created VFileDeleteEvent->file.txt\n",
new VFileDeleteEvent(this, file, false),
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileDeleteEvent(this, file, false));
}
// Deleting a directory dominates deletes of its children: only the parent delete survives.
@Test
public void testProcessEventsMustBeAwareOfDeleteEventsDomination() {
VirtualFile file = tempDirectory.newVirtualFile("d/x.txt");
checkEvents("Before: VFileDeleteEvent->d\n" +
"After: VFileDeleteEvent->d\n",
new VFileDeleteEvent(this, file.getParent(), false),
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file, false));
}
// Two identical create events for the same child must collapse into a single create.
@Test
public void testProcessCreateEventsMustFilterOutDuplicates() {
VirtualFile file = tempDirectory.newVirtualFile("d/x.txt");
checkEvents("Before: VFileCreateEvent->xx.created\n" +
"After: VFileCreateEvent->xx.created\n",
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileCreateEvent(this, file.getParent(), "xx.created2", false, null, null, false, null) );
}
// Events on children of a directory being deleted must be applied in a batch BEFORE the
// dominating parent delete, which goes into its own, second batch.
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly2() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileCreateEvent->xx.created VFileCreateEvent->xx.created2 VFileDeleteEvent->test.txt\n" +
"After: VFileCreateEvent->xx.created VFileCreateEvent->xx.created2 VFileDeleteEvent->test.txt\n" +
"Before: VFileDeleteEvent->c\n" +
"After: VFileDeleteEvent->c\n",
new VFileDeleteEvent(this, file, false),
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileCreateEvent(this, file.getParent(), "xx.created2", false, null, null, false, null),
new VFileDeleteEvent(this, file.getParent(), false));
}
// A content change on the parent directory and a delete of its child conflict,
// so they must be split into two separate batches, in submission order.
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly3() {
VirtualFile vFile = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileContentChangeEvent->c\n" +
"After: VFileContentChangeEvent->c\n" +
"Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n",
new VFileContentChangeEvent(this, vFile.getParent(), 0, 0, false),
new VFileDeleteEvent(this, vFile, false));
}
// A child delete submitted before its parent's delete stays in the first batch; the parent
// delete forms the second batch and swallows the other child's delete entirely.
@Test
public void testProcessNestedDeletions() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile file2 = tempDirectory.newVirtualFile("a/b/c/test2.txt");
checkEvents("Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n" +
"Before: VFileDeleteEvent->c\n" +
"After: VFileDeleteEvent->c\n",
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file.getParent(), false),
new VFileDeleteEvent(this, file2, false));
}
// Content and property changes on the same file do not conflict with each other,
// so all three events must be delivered in a single batch.
@Test
public void testProcessContentChangedLikeReconcilableEventsMustResultInSingleBatch() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileContentChangeEvent->test.txt VFilePropertyChangeEvent->test.txt VFilePropertyChangeEvent->test.txt\n" +
"After: VFileContentChangeEvent->test.txt VFilePropertyChangeEvent->test.txt VFilePropertyChangeEvent->test.txt\n",
new VFileContentChangeEvent(this, file, 0, 1, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_WRITABLE, false, true, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_ENCODING, StandardCharsets.ISO_8859_1, StandardCharsets.UTF_8, false));
}
// A move into a directory that is subsequently deleted must not be merged with the delete:
// the move is applied first, then the delete of the target directory in its own batch.
@Test
public void testProcessCompositeMoveEvents() {
VirtualFile testTxt = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile newParent = tempDirectory.newVirtualDirectory("a/b/d");
checkEvents("Before: VFileMoveEvent->test.txt\n" +
"After: VFileMoveEvent->test.txt\n" +
"Before: VFileDeleteEvent->d\n" +
"After: VFileDeleteEvent->d\n",
new VFileMoveEvent(this, testTxt, newParent),
new VFileDeleteEvent(this, newParent, false));
}
// A copy followed by a delete of the copy's source must be processed as two batches,
// keeping the copy valid before the source disappears.
@Test
public void testProcessCompositeCopyEvents() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile newParent = tempDirectory.newVirtualDirectory("a/b/d");
checkEvents("Before: VFileCopyEvent->new.txt\n" +
"After: VFileCopyEvent->new.txt\n" +
"Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n",
new VFileCopyEvent(this, file, newParent, "new.txt"),
new VFileDeleteEvent(this, file, false));
}
// Renaming a file onto the name of a just-deleted sibling must run after the delete batch;
// note the "After" notification already reports the new name.
@Test
public void testProcessCompositeRenameEvents() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile file2 = tempDirectory.newVirtualFile("a/b/c/test2.txt");
checkEvents("Before: VFileDeleteEvent->test2.txt\n" +
"After: VFileDeleteEvent->test2.txt\n" +
"Before: VFilePropertyChangeEvent->test.txt\n" +
"After: VFilePropertyChangeEvent->test2.txt\n",
new VFileDeleteEvent(this, file2, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_NAME, file.getName(), file2.getName(), false));
}
// When a refresh discovers a newly appeared directory inside an open project, the VFS must
// eagerly load its whole subtree (all children down to the leaf file).
@Test
public void testCreateNewDirectoryEntailsLoadingAllChildren() throws Exception {
tempDirectory.newFile("d/d1/x.txt");
Path source = tempDirectory.getRootPath().resolve("d");
Path target = tempDirectory.getRootPath().resolve("target");
VirtualFile vTemp = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(tempDirectory.getRoot());
assertNotNull(vTemp);
vTemp.refresh(false, true);
assertEquals("d", assertOneElement(vTemp.getChildren()).getName());
// an open project is required for the "load children of created directories" behavior
Project project = ProjectManager.getInstance().loadAndOpenProject(tempDirectory.getRoot().getPath());
Disposer.register(getTestRootDisposable(), () -> ProjectManager.getInstance().closeAndDispose(project));
// atomic move makes the refresh see "target" as a brand-new directory
Files.move(source, target, StandardCopyOption.ATOMIC_MOVE);
vTemp.refresh(false, true);
assertChildrenAreLoaded(vTemp);
VirtualFile vTarget = assertOneElement(((VirtualDirectoryImpl)vTemp).getCachedChildren());
assertEquals("target", vTarget.getName());
assertChildrenAreLoaded(vTarget);
VirtualFile vd1 = assertOneElement(((VirtualDirectoryImpl)vTarget).getCachedChildren());
assertEquals("d1", vd1.getName());
assertChildrenAreLoaded(vd1);
VirtualFile vx = assertOneElement(((VirtualDirectoryImpl)vd1).getCachedChildren());
assertEquals("x.txt", vx.getName());
}
// Same as testCreateNewDirectoryEntailsLoadingAllChildren, but a subdirectory registered as an
// excluded folder must NOT have its children eagerly loaded by the refresh.
@Test
public void testCreateNewDirectoryEntailsLoadingAllChildrenExceptExcluded() throws Exception {
tempDirectory.newFile("d/d1/x.txt");
Path source = tempDirectory.getRootPath().resolve("d");
Path target = tempDirectory.getRootPath().resolve("target");
VirtualFile vTemp = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(tempDirectory.getRoot());
assertNotNull(vTemp);
vTemp.refresh(false, true);
assertEquals("d", assertOneElement(vTemp.getChildren()).getName());
Project project = ProjectManager.getInstance().loadAndOpenProject(tempDirectory.getRoot().getPath());
Disposer.register(getTestRootDisposable(), () -> ProjectManager.getInstance().closeAndDispose(project));
String imlPath = tempDirectory.getRootPath().resolve("temp.iml").toString();
String url = VfsUtilCore.pathToUrl(target.resolve("d1").toString());
// register "target/d1" as an excluded folder of a throwaway module before it appears on disk
WriteAction.runAndWait(() -> {
Module module = ModuleManager.getInstance(project).newModule(imlPath, ModuleTypeManager.getInstance().getDefaultModuleType().getId());
ModuleRootModificationUtil.updateModel(module, model -> {
ContentEntry contentEntry = model.addContentEntry(url);
contentEntry.addExcludeFolder(url);
});
});
Files.move(source, target, StandardCopyOption.ATOMIC_MOVE);
vTemp.refresh(false, true);
assertChildrenAreLoaded(vTemp);
VirtualFile vTarget = assertOneElement(((VirtualDirectoryImpl)vTemp).getCachedChildren());
assertEquals("target", vTarget.getName());
assertChildrenAreLoaded(vTarget);
VirtualFile vd1 = assertOneElement(((VirtualDirectoryImpl)vTarget).getCachedChildren());
assertEquals("d1", vd1.getName());
// the excluded directory itself is found, but its subtree stays unloaded
assertFalse(((VirtualDirectoryImpl)vd1).allChildrenLoaded());
assertEquals(Collections.emptyList(), ((VirtualDirectoryImpl)vd1).getCachedChildren());
}
/**
 * Asserts that {@code file}'s children are fully loaded, both in the in-memory directory
 * instance and according to the persistent FS.
 */
private static void assertChildrenAreLoaded(VirtualFile file) {
  String message = "children not loaded: " + file;
  assertTrue(message, ((VirtualDirectoryImpl)file).allChildrenLoaded());
  assertTrue(message, PersistentFS.getInstance().areChildrenLoaded(file));
}
// On a case-insensitive FS, renaming a file to an upper-cased name behind the VFS's back and then
// re-listing the dirty parent must yield a single child, not a case-differing duplicate.
@Test
public void testRenameInBackgroundDoesntLeadToDuplicateFilesError() throws IOException {
assumeFalse("Case-insensitive OS expected, can't run on " + SystemInfo.OS_NAME, SystemInfo.isFileSystemCaseSensitive);
File file = tempDirectory.newFile("rename.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualFile vfile = refreshAndFind(file);
VirtualDirectoryImpl vTemp = (VirtualDirectoryImpl)vfile.getParent();
assertFalse(vTemp.allChildrenLoaded());
VfsUtil.markDirty(true, false, vTemp);
// rename differs only by case, bypassing VFS events
Files.move(file.toPath(), file.toPath().resolveSibling(file.getName().toUpperCase()), StandardCopyOption.ATOMIC_MOVE);
VirtualFile[] newChildren = vTemp.getChildren();
assertOneElement(newChildren);
}
// After deleting a directory tree, every file in it must become invalid, must disappear from the
// PersistentFS id caches, and no invalid entries may remain in the id->dir cache.
@Test
public void testPersistentFsCacheDoesntContainInvalidFiles() {
File file = tempDirectory.newFile("subDir1/subDir2/subDir3/file.txt");
VirtualFileSystemEntry vFile = (VirtualFileSystemEntry)refreshAndFind(file);
VirtualFileSystemEntry vSubDir3 = vFile.getParent();
VirtualFileSystemEntry vSubDir2 = vSubDir3.getParent();
VirtualFileSystemEntry vSubDir1 = vSubDir2.getParent();
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
// hold strong references so the entries can't be GC'ed before we inspect them
VirtualFileSystemEntry[] hardReferenceHolder = {vFile, vSubDir3, vSubDir2, vSubDir1};
VfsTestUtil.deleteFile(vSubDir1);
for (VirtualFileSystemEntry f : hardReferenceHolder) {
assertFalse("file is valid but deleted " + f.getName(), f.isValid());
}
for (VirtualFileSystemEntry f : hardReferenceHolder) {
assertNull(fs.getCachedDir(f.getId()));
assertNull(fs.findFileById(f.getId()));
}
for (VirtualFileSystemEntry f : fs.getIdToDirCache().values()) {
assertTrue(f.isValid());
}
}
// Two threads calling listAll() on the same not-fully-loaded directory, while a new file was
// created behind the VFS's back, must never assign two different ids to the same file name.
@Test
public void testConcurrentListAllDoesntCauseDuplicateFileIds() throws Exception {
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
for (int i = 0; i < 10; i++) {
File file = tempDirectory.newFile("d" + i + "/file.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualDirectoryImpl vTemp = (VirtualDirectoryImpl)refreshAndFind(file).getParent();
assertFalse(vTemp.allChildrenLoaded());
// create a sibling directly on disk so listAll races with VFS discovery of it
Files.writeString(file.toPath().resolveSibling("new.txt"), "new");
Future<List<? extends ChildInfo>> f1 = ApplicationManager.getApplication().executeOnPooledThread(() -> fs.listAll(vTemp));
Future<List<? extends ChildInfo>> f2 = ApplicationManager.getApplication().executeOnPooledThread(() -> fs.listAll(vTemp));
List<? extends ChildInfo> children1 = f1.get();
List<? extends ChildInfo> children2 = f2.get();
int[] nameIds1 = children1.stream().mapToInt(n -> n.getNameId()).toArray();
int[] nameIds2 = children2.stream().mapToInt(n -> n.getNameId()).toArray();
// there can be one or two children, depending on whether the VFS refreshed in time or not.
// but in any case, there must not be duplicate ids (i.e. files with the same name but different getId())
for (int i1 = 0; i1 < nameIds1.length; i1++) {
int nameId1 = nameIds1[i1];
int i2 = ArrayUtil.find(nameIds2, nameId1);
if (i2 >= 0) {
int id1 = children1.get(i1).getId();
int id2 = children2.get(i2).getId();
assertEquals("Duplicate ids found. children1=" + children1 + "; children2=" + children2, id1, id2);
}
}
}
}
// Stresses concurrent case-only renames on disk against concurrent listAll() calls:
// the directory must always list exactly one child, never a case-differing duplicate.
@Test
public void testMustNotDuplicateIdsOnRenameWithCaseChanged() {
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
File file = tempDirectory.newFile("file.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualFile vDir = refreshAndFind(file.getParentFile());
VirtualFile vf = assertOneElement(vDir.getChildren());
assertEquals("file.txt", vf.getName());
List<Future<?>> futures = new ArrayList<>();
String oldName = file.getName();
// writers: each task upper-cases a different character of the original name
for (int i = 0; i < 100; i++) {
int u = i % oldName.length();
Future<?> f = ApplicationManager.getApplication().executeOnPooledThread(() -> {
String newName = oldName.substring(0, u) + Character.toUpperCase(oldName.charAt(u)) + oldName.substring(u + 1);
try {
Files.move(file.toPath(), file.toPath().resolveSibling(newName), StandardCopyOption.ATOMIC_MOVE);
}
// the source may have been renamed away by a concurrent task; that's expected
catch (NoSuchFileException ignored) { }
catch (IOException e) {
throw new UncheckedIOException(e);
}
});
futures.add(f);
}
// readers: repeatedly list the directory and assert a single child is seen
for (int i = 0; i < 10; i++) {
Future<?> f = ApplicationManager.getApplication().executeOnPooledThread(() -> {
for (int u=0; u<100; u++) {
List<? extends ChildInfo> infos = fs.listAll(vDir);
assertOneElement(infos);
}
});
futures.add(f);
}
runInEdtAndWait(() -> {
for (Future<?> future : futures) {
PlatformTestUtil.waitForFuture(future, 10_000);
}
});
}
/**
 * A {@link JarFileSystemImpl} registered under the "jar-wrapper" protocol that counts
 * {@link #getAttributes} invocations, so tests can verify whether file attributes
 * were re-read from the underlying jar or served from a cache.
 */
public static class TracingJarFileSystemTestWrapper extends JarFileSystemImpl {
  private final AtomicInteger myAttributeCallCount = new AtomicInteger();

  @Override
  public @NotNull String getProtocol() {
    return "jar-wrapper";
  }

  @Override
  public @Nullable FileAttributes getAttributes(@NotNull VirtualFile file) {
    myAttributeCallCount.incrementAndGet();
    return super.getAttributes(file);
  }

  /** Number of {@link #getAttributes} calls observed so far. */
  private int getAttributeCallCount() {
    return myAttributeCallCount.get();
  }
}
/**
 * Creates a zip archive {@code fileName} containing a single entry {@code entryName} with
 * {@code entryContent} inside {@code generationDir}, copies it over {@code outputDir}/{@code fileName},
 * refreshes the VFS for the copy, and returns the copy.
 */
private static File zipWithEntry(String fileName, File generationDir, File outputDir, String entryName, String entryContent) throws IOException {
  File generated = new File(generationDir, fileName);
  try (Compressor.Zip zip = new Compressor.Zip(generated)) {
    zip.addFile(entryName, entryContent.getBytes(StandardCharsets.UTF_8));
  }
  File published = new File(outputDir, fileName);
  // stream copy instead of `Files#copy(Path, Path)`: the latter cannot overwrite a file opened on Windows
  try (OutputStream out = Files.newOutputStream(published.toPath())) {
    Files.copy(generated.toPath(), out);
  }
  VfsUtil.markDirtyAndRefresh(false, true, true, published);
  return published;
}
// Registers the tracing jar FS wrapper dynamically and verifies that file length is read from the
// physical FS when the jar changes, but is served from cache on repeated getLength() calls.
@Test
public void testReadOnlyFsCachesLength() throws IOException {
String text = "<virtualFileSystem implementationClass=\"" + TracingJarFileSystemTestWrapper.class.getName() + "\" key=\"jar-wrapper\" physical=\"true\"/>";
Disposable disposable = runInEdtAndGet(() -> DynamicPluginsTestUtil.loadExtensionWithText(text, TracingJarFileSystemTestWrapper.class.getClassLoader()));
try {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
String jarName = "test.jar";
String entryName = "Some.java";
String[] contents = {"class Some {}", "class Some { void m() {} }", "class Some { void mmm() {} }"};
File zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[0]);
String url = "jar-wrapper://" + FileUtil.toSystemIndependentName(zipFile.getPath()) + "!/" + entryName;
VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[0], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
TracingJarFileSystemTestWrapper fs = (TracingJarFileSystemTestWrapper)file.getFileSystem();
// replace the jar content and refresh: the entry must reflect the new content
zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[1]);
VfsUtil.markDirtyAndRefresh(false, true, true, zipFile);
int attrCallCount = fs.getAttributeCallCount();
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[1], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[2]);
VfsUtil.markDirtyAndRefresh(false, true, true, zipFile);
assertNotEquals(attrCallCount, fs.getAttributeCallCount()); // we should read length from physical FS
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[2], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
attrCallCount = fs.getAttributeCallCount();
for (int i = 0; i < 3; i++) {
file.getLength();
assertEquals(attrCallCount, fs.getAttributeCallCount()); // ensure it's cached
}
}
finally {
runInEdtAndWait(() -> Disposer.dispose(disposable));
}
}
// Reading only part of (or nothing from) a jar entry's input stream must not trigger
// a re-read of the entry's attributes (i.e. no length recalculation).
@Test
public void testDoNotRecalculateLengthIfEndOfInputStreamIsNotReached() throws IOException {
String text = "<virtualFileSystem implementationClass=\"" + TracingJarFileSystemTestWrapper.class.getName() + "\" key=\"jar-wrapper\" physical=\"true\"/>";
Disposable disposable = runInEdtAndGet(() -> DynamicPluginsTestUtil.loadExtensionWithText(text, TracingJarFileSystemTestWrapper.class.getClassLoader()));
try {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
String jarName = "test.jar";
String entryName = "Some.java";
String content = "class Some {}";
File zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, content);
String url = "jar-wrapper://" + FileUtil.toSystemIndependentName(zipFile.getPath()) + "!/" + entryName;
VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
file.refresh(false, false);
TracingJarFileSystemTestWrapper fs = (TracingJarFileSystemTestWrapper)file.getFileSystem();
int attributeCallCount = fs.getAttributeCallCount();
try (InputStream stream = file.getInputStream()) {
// just read single byte
@SuppressWarnings("unused") int read = stream.read();
}
assertEquals(attributeCallCount, fs.getAttributeCallCount());
//noinspection EmptyTryBlock,unused
try (InputStream stream = file.getInputStream()) {
// just close
}
assertEquals(attributeCallCount, fs.getAttributeCallCount());
}
finally {
runInEdtAndWait(() -> Disposer.dispose(disposable));
}
}
// Deleting the jar file on disk and refreshing its jar root must produce delete events
// for both the jar root and the local jar file.
@Test
public void testDeleteJarRootInsideJarMustCauseDeleteLocalJarFile() throws IOException {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
File jarFile = zipWithEntry("test.jar", generationDir, testDir, "Some.java", "class Some {}");
VirtualFile vFile = VirtualFileManager.getInstance().refreshAndFindFileByUrl(VfsUtilCore.pathToUrl(jarFile.getPath()));
VirtualFile jarVFile = JarFileSystem.getInstance().getJarRootForLocalFile(vFile);
FileUtil.delete(jarFile);
List<VFileEvent> events = new ArrayList<>();
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
events.addAll(e);
}
});
jarVFile.refresh(false, false);
// sort by URL for a deterministic comparison order
events.sort(Comparator.comparing((VFileEvent e) ->e.getFile().getUrl()));
assertEvents(events, new VFileDeleteEvent(this, vFile, false), new VFileDeleteEvent(this, jarVFile, false));
}
// Replacing the jar on disk so that an entry disappears must produce a delete event for the entry
// and a content-change event for the local jar file.
@Test
public void testDeleteFileDeepInsideJarFileMustCauseContentChangeForLocalJar() throws IOException {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
File jarFile = zipWithEntry("test.jar", generationDir, testDir, "web.xml", "<web/>");
VirtualFile vFile = VirtualFileManager.getInstance().refreshAndFindFileByUrl(VfsUtilCore.pathToUrl(jarFile.getPath()));
VirtualFile jarVFile = JarFileSystem.getInstance().getJarRootForLocalFile(vFile);
VirtualFile webXml = jarVFile.findChild("web.xml");
// overwrite the jar with one that no longer contains web.xml
File newJarFile = zipWithEntry("test2.jar", generationDir, testDir, "x.java", "class X{}");
FileUtil.copy(newJarFile, jarFile);
List<VFileEvent> events = new ArrayList<>();
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
events.addAll(e);
}
});
((JarFileSystemImpl)JarFileSystem.getInstance()).markDirtyAndRefreshVirtualFileDeepInsideJarForTest(webXml);
assertEvents(events, new VFileDeleteEvent(this, webXml, false),
new VFileContentChangeEvent(this, vFile, 0, 0, false));
}
// Windows-only (requires elevated privileges for fsutil): in a directory toggled to case-sensitive,
// events for "file.txt" and "FILE.TXT" must target the correct, distinct virtual files.
@Test
public void testFileContentChangeEventsMustDifferentiateCaseSensitivityToggledFiles() throws IOException {
assumeWindows();
assumeTrue("'fsutil.exe' needs elevated privileges to work", SuperUserStatus.isSuperUser());
File dir = tempDirectory.newDirectory();
VirtualFile vDir = refreshAndFind(dir);
setCaseSensitivity(dir, true);
File file = new File(dir, "file.txt");
assertTrue(file.createNewFile());
File FILE = new File(dir, "FILE.TXT");
assertTrue(FILE.createNewFile());
VirtualFile vFile = refreshAndFind(file);
VirtualFile vFILE = refreshAndFind(FILE);
List<VFileEvent> events = new ArrayList<>();
// collect only events happening directly inside the test directory
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
for (VFileEvent event : e) {
VirtualFile evFile = event.getFile();
if (evFile.getParent().equals(vDir)) {
events.add(event);
}
}
}
});
// change only the lower-case file: only vFile must receive a content-change event
FileUtil.writeToFile(file, "content");
vFile.refresh(false, false);
vFILE.refresh(false, false);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFile, -1, -1, true));
events.clear();
// change only the upper-case file: only vFILE must receive a content-change event
FileUtil.writeToFile(FILE, "content");
vFile.refresh(false, false);
vFILE.refresh(false, false);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFILE,-1,-1, true));
events.clear();
// change both: both must be reported
FileUtil.writeToFile(file, "content2");
FileUtil.writeToFile(FILE, "content2");
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFile,-1,-1,true),
new VFileContentChangeEvent(this, vFILE,-1,-1,true));
events.clear();
// delete both: two distinct delete events
FileUtil.delete(file);
FileUtil.delete(FILE);
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileDeleteEvent(this, vFile,false),
new VFileDeleteEvent(this, vFILE,false));
events.clear();
// recreate both: two distinct create events
assertTrue(file.createNewFile());
assertTrue(FILE.createNewFile());
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileCreateEvent(this, vDir, vFile.getName(),false, null, null, true, null),
new VFileCreateEvent(this, vDir, vFILE.getName(),false, null, null, true, null));
}
/**
 * Asserts that {@code actual} contains exactly the {@code expected} events, ignoring order.
 * Content-change events are normalized first, because their equals() compares offsets and
 * timestamps that this check must ignore.
 */
private void assertEqualUnorderedEvents(List<? extends VFileEvent> actual, VFileEvent... expected) {
  Set<VFileEvent> actualSet = new HashSet<>(ContainerUtil.map(actual, this::ignoreCrazyVFileContentChangedEquals));
  Set<VFileEvent> expectedSet = new HashSet<>(ContainerUtil.map(expected, this::ignoreCrazyVFileContentChangedEquals));
  if (actualSet.equals(expectedSet)) {
    return;
  }
  String expectedString = UsefulTestCase.toString(Arrays.asList(expected));
  String actualString = UsefulTestCase.toString(actual);
  assertEquals(expectedString, actualString);
  fail("Warning! 'toString' does not reflect the difference.\nExpected: " + expectedString + "\nActual: " + actualString);
}
/**
 * Returns a canonical copy of a content-change event (zeroed offsets, -1 lengths/timestamps)
 * so that equals() comparisons ignore those fields; other event types pass through unchanged.
 */
private VFileEvent ignoreCrazyVFileContentChangedEquals(VFileEvent event) {
  if (!(event instanceof VFileContentChangeEvent)) {
    return event;
  }
  return new VFileContentChangeEvent(this, event.getFile(), 0, 0, -1, -1, -1, -1, true);
}
}
| platform/platform-tests/testSrc/com/intellij/openapi/vfs/newvfs/persistent/PersistentFsTest.java | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.ide.plugins.DynamicPluginsTestUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.roots.ContentEntry;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.IoTestUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.ex.temp.TempFileSystem;
import com.intellij.openapi.vfs.impl.jar.JarFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.ManagingFS;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;
import com.intellij.testFramework.LoggedErrorProcessor;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.VfsTestUtil;
import com.intellij.testFramework.fixtures.BareTestFixtureTestCase;
import com.intellij.testFramework.rules.TempDirectory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.Compressor;
import com.intellij.util.io.DataInputOutputUtil;
import com.intellij.util.io.SuperUserStatus;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.messages.MessageBusConnection;
import org.apache.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Rule;
import org.junit.Test;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import static com.intellij.openapi.util.io.IoTestUtil.assumeWindows;
import static com.intellij.openapi.util.io.IoTestUtil.setCaseSensitivity;
import static com.intellij.testFramework.EdtTestUtil.runInEdtAndGet;
import static com.intellij.testFramework.EdtTestUtil.runInEdtAndWait;
import static com.intellij.testFramework.UsefulTestCase.assertOneElement;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
public class PersistentFsTest extends BareTestFixtureTestCase {
@Rule public TempDirectory tempDirectory = new TempDirectory();
// A file must be retrievable by its persistent id, and the id must resolve to null after deletion.
@Test
public void testAccessingFileByID() {
File file = tempDirectory.newFile("test.txt");
VirtualFile vFile = refreshAndFind(file);
int id = ((VirtualFileWithId)vFile).getId();
assertEquals(vFile, PersistentFS.getInstance().findFileById(id));
VfsTestUtil.deleteFile(vFile);
assertNull(PersistentFS.getInstance().findFileById(id));
}
/** Refreshes {@code file} in the local FS and returns its virtual file, failing with the path if absent. */
private static VirtualFile refreshAndFind(File file) {
  VirtualFile found = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
  return Objects.requireNonNull(found, file.getPath());
}
// Content hashes are stored lazily on first content load, differ for different contents,
// are equal for equal contents, and survive file deletion together with the content.
@Test
public void testFileContentHash() throws Exception {
File file = tempDirectory.newFile("test.txt", "one".getBytes(StandardCharsets.UTF_8));
VirtualFile vFile = refreshAndFind(file);
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
byte[] hash = PersistentFSImpl.getContentHashIfStored(vFile);
assertNull(hash); // content is not yet loaded
vFile.contentsToByteArray();
hash = PersistentFSImpl.getContentHashIfStored(vFile);
assertNotNull(hash);
WriteAction.runAndWait(() -> VfsUtil.saveText(vFile, "two"));
byte[] newHash = PersistentFSImpl.getContentHashIfStored(vFile);
assertNotNull(newHash);
assertFalse(Arrays.equals(hash, newHash)); // different contents should have different hashes
WriteAction.runAndWait(() -> VfsUtil.saveText(vFile, "one"));
newHash = PersistentFSImpl.getContentHashIfStored(vFile);
assertArrayEquals(hash, newHash); // equal contents should have the equal hashes
VfsTestUtil.deleteFile(vFile);
assertNotNull(fs.contentsToByteArray(vFile)); // deleted files preserve content, and thus hash
assertArrayEquals(hash, PersistentFSImpl.getContentHashIfStored(vFile));
}
// A jar root looked up via a path containing "/../" must resolve to the same root instance
// as the canonical path — no duplicate roots for equivalent paths.
@Test
public void testFindRootShouldNotBeFooledByRelativePath() {
File x = tempDirectory.newFile("x.jar");
VirtualFile vx = refreshAndFind(x);
JarFileSystem jfs = JarFileSystem.getInstance();
VirtualFile root = jfs.getJarRootForLocalFile(vx);
String path = vx.getPath() + "/../" + vx.getName() + JarFileSystem.JAR_SEPARATOR;
assertSame(PersistentFS.getInstance().findRoot(path, jfs), root);
}
// The following three tests exercise canonicalization of findRoot paths for jar names
// with an increasing number of dots (a tricky case for ".."-collapsing logic).
@Test
public void testFindRootMustCreateFileWithCanonicalPath() {
checkMustCreateRootWithCanonicalPath("x.jar");
}
@Test
public void testFindRootMustCreateFileWithStillCanonicalPath() {
checkMustCreateRootWithCanonicalPath("x..jar");
}
@Test
public void testFindRootMustCreateFileWithYetAnotherCanonicalPath() {
checkMustCreateRootWithCanonicalPath("x...jar");
}
// Asserts that findRoot() on a path containing "/../" yields a root whose path is canonical
// (no residual ".." segments), for a jar named jarName.
private void checkMustCreateRootWithCanonicalPath(String jarName) {
File x = tempDirectory.newFile(jarName);
refreshAndFind(x);
JarFileSystem jfs = JarFileSystem.getInstance();
String path = x.getPath() + "/../" + x.getName() + JarFileSystem.JAR_SEPARATOR;
NewVirtualFile root = PersistentFS.getInstance().findRoot(path, jfs);
assertNotNull(path, root);
assertFalse(root.getPath(), root.getPath().contains("../"));
assertFalse(root.getPath(), root.getPath().contains("/.."));
}
// Windows-only: after a SUBST drive is removed and refreshed, no local FS root may remain
// that shares a path prefix with the vanished substituted root.
@Test
public void testDeleteSubstRoots() {
IoTestUtil.assumeWindows();
File substRoot = IoTestUtil.createSubst(tempDirectory.getRoot().getPath());
VirtualFile subst;
try {
subst = refreshAndFind(substRoot);
assertNotNull(substRoot.listFiles());
}
finally {
// always remove the subst mapping, even if lookup failed
IoTestUtil.deleteSubst(substRoot.getPath());
}
subst.refresh(false, true);
VirtualFile[] roots = PersistentFS.getInstance().getRoots(LocalFileSystem.getInstance());
for (VirtualFile root : roots) {
String prefix = StringUtil.commonPrefix(root.getPath(), substRoot.getPath());
assertTrue(prefix, prefix.isEmpty());
}
}
// getLocalRoots() must return only genuine local FS roots — the temp:/// FS must be excluded
// even though it reports itself as "local".
@Test
public void testLocalRoots() {
VirtualFile tempRoot = VirtualFileManager.getInstance().findFileByUrl("temp:///");
assertNotNull(tempRoot);
for (VirtualFile root : PersistentFS.getInstance().getLocalRoots()) {
assertTrue("root=" + root, root.isInLocalFileSystem());
VirtualFileSystem fs = root.getFileSystem();
assertTrue("fs=" + fs, fs instanceof LocalFileSystem);
assertFalse("fs=" + fs, fs instanceof TempFileSystem);
}
}
// A jar:// URL pointing at a non-jar file must not resolve to a root.
@Test
public void testInvalidJarRootsIgnored() {
File file = tempDirectory.newFile("file.txt");
String url = "jar://" + FileUtil.toSystemIndependentName(file.getPath()) + "!/";
assertNull(VirtualFileManager.getInstance().findFileByUrl(url));
}
// An empty (broken) jar must yield a valid, empty root plus exactly one warning mentioning the jar;
// once a manifest is written and the jar refreshed, the entry must become visible.
@Test
public void testBrokenJarRoots() throws IOException {
File jarFile = tempDirectory.newFile("empty.jar");
VirtualFile local = refreshAndFind(jarFile);
String rootUrl = "jar://" + local.getPath() + "!/";
String entryUrl = rootUrl + JarFile.MANIFEST_NAME;
int[] logCount = {0};
// count warnings that mention the broken jar; other warnings are ignored
LoggedErrorProcessor.setNewInstance(new LoggedErrorProcessor() {
@Override
public void processWarn(String message, Throwable t, @NotNull Logger logger) {
super.processWarn(message, t, logger);
// `message` may be null for throwable-only warnings — guard against NPE inside the hook
if (message != null && message.contains(jarFile.getName())) logCount[0]++;
}
});
try {
VirtualFile jarRoot = VirtualFileManager.getInstance().findFileByUrl(rootUrl);
assertNotNull(jarRoot);
assertTrue(jarRoot.isValid());
assertArrayEquals(VirtualFile.EMPTY_ARRAY, jarRoot.getChildren());
assertNull(VirtualFileManager.getInstance().findFileByUrl(entryUrl));
// turn the empty file into a real jar with a manifest
try (Compressor.Jar jar = new Compressor.Jar(jarFile)) {
jar.addManifest(new Manifest());
}
local.refresh(false, false);
assertTrue(jarRoot.isValid());
assertEquals(1, jarRoot.getChildren().length);
assertNotNull(VirtualFileManager.getInstance().findFileByUrl(entryUrl));
}
finally {
LoggedErrorProcessor.restoreDefaultProcessor();
}
assertEquals(1, logCount[0]);
}
// The test (re)creates .../subDir/subSubDir/Foo.txt hierarchy outside of a watched project and checks for removal events.
// It starts the real testing "after a restart" - i.e. when launched for the second time using the same system directory.
// In terms of the persistence, "subDir/" is partially loaded and "subSubDir/" is fully loaded.
@Test
public void testIterInDbChildrenWorksForRemovedDirsAfterRestart() throws IOException {
String testDirName = UsefulTestCase.TEMP_DIR_MARKER + getTestName(false);
Path nestedTestDir = tempDirectory.getRootPath().getParent().resolve(testDirName + "/subDir/subSubDir");
// the directory surviving from a previous launch marks the "second run"
boolean secondRun = Files.exists(nestedTestDir.getParent().getParent());
StringBuilder eventLog = new StringBuilder();
if (secondRun) {
MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable());
connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void before(@NotNull List<? extends VFileEvent> events) {
for (VFileEvent event : events) {
if (event instanceof VFileDeleteEvent) {
process(((VFileDeleteEvent)event).getFile());
}
}
}
// logs the path relative to the test dir, then recurses into children known to the persistence
private void process(VirtualFile file) {
String path = file.getPath();
eventLog.append(path, path.indexOf(testDirName) + testDirName.length() + 1, path.length()).append('\n');
((NewVirtualFile)file).iterInDbChildren().forEach(child -> process(child));
}
});
}
// Recreating the structure fires VFS removal events.
VirtualFile vNestedTestDir = WriteAction.computeAndWait(() -> {
VirtualFile dir = VfsUtil.createDirectoryIfMissing(nestedTestDir.toString());
dir.createChildData(null, "Foo.txt");
return dir;
});
// Making the directory "fully loaded" in terms of the persistence.
vNestedTestDir.getChildren();
// Removing .../subDir via java.io to have VFS events on the next launch.
FileUtil.delete(nestedTestDir.getParent());
assumeTrue("Not yet exists: " + nestedTestDir.getParent().getParent(), secondRun);
assertEquals("subDir\n" +
"subDir/subSubDir\n" +
"subDir/subSubDir/Foo.txt\n",
eventLog.toString());
}
/**
 * Structural/content changes must bump the per-file, per-filesystem, and per-session
 * modification counters, while leaving the parent directory's counter untouched.
 */
@Test
public void testModCountIncreases() throws IOException {
VirtualFile vFile = tempDirectory.newVirtualFile("file.txt");
ManagingFS managingFS = ManagingFS.getInstance();
int inSessionModCount = managingFS.getModificationCount();
int globalModCount = managingFS.getFilesystemModificationCount();
int parentModCount = managingFS.getModificationCount(vFile.getParent());
// A writable-flag flip is one counted change.
WriteAction.runAndWait(() -> vFile.setWritable(false));
assertEquals(globalModCount + 1, managingFS.getModificationCount(vFile));
assertEquals(globalModCount + 1, managingFS.getFilesystemModificationCount());
assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
assertEquals(inSessionModCount + 1, managingFS.getModificationCount());
FSRecords.force();
assertFalse(FSRecords.isDirty());
++globalModCount;
int finalGlobalModCount = globalModCount;
try (AccessToken ignore = HeavyProcessLatch.INSTANCE.processStarted("This test wants no indices flush")) {
WriteAction.runAndWait(() -> {
long timestamp = vFile.getTimeStamp();
int finalInSessionModCount = managingFS.getModificationCount();
vFile.setWritable(true); // 1 change
vFile.setBinaryContent("foo".getBytes(Charset.defaultCharset())); // content change + length change + maybe timestamp change
// we check in write action to avoid observing background thread to index stuff
int changesCount = timestamp == vFile.getTimeStamp() ? 3 : 4;
assertEquals(finalGlobalModCount + changesCount, managingFS.getModificationCount(vFile));
assertEquals(finalGlobalModCount + changesCount, managingFS.getFilesystemModificationCount());
assertEquals(finalInSessionModCount + changesCount, managingFS.getModificationCount());
assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
});
}
}
/**
 * Writing a file attribute or re-setting an unchanged timestamp/length must NOT bump the
 * per-file or per-filesystem counters (only the in-session counter moves for the attribute
 * write), and must not leave FSRecords dirty when values are unchanged.
 */
@Test
public void testModCountNotIncreases() throws IOException {
VirtualFile vFile = tempDirectory.newVirtualFile("file.txt");
ManagingFS managingFS = ManagingFS.getInstance();
int globalModCount = managingFS.getFilesystemModificationCount();
int parentModCount = managingFS.getModificationCount(vFile.getParent());
int inSessionModCount = managingFS.getModificationCount();
FSRecords.force();
assertFalse(FSRecords.isDirty());
FileAttribute attribute = new FileAttribute("test.attribute", 1, true);
// Writing a custom attribute dirties the records but is not a filesystem-level change.
WriteAction.runAndWait(() -> {
try(DataOutputStream output = attribute.writeAttribute(vFile)) {
DataInputOutputUtil.writeINT(output, 1);
}
});
assertEquals(globalModCount, managingFS.getModificationCount(vFile));
assertEquals(globalModCount, managingFS.getFilesystemModificationCount());
assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
assertEquals(inSessionModCount + 1, managingFS.getModificationCount());
assertTrue(FSRecords.isDirty());
FSRecords.force();
assertFalse(FSRecords.isDirty());
// Re-writing identical timestamp/length values must be a no-op for dirtiness and counters.
int fileId = ((VirtualFileWithId)vFile).getId();
FSRecords.setTimestamp(fileId, FSRecords.getTimestamp(fileId));
FSRecords.setLength(fileId, FSRecords.getLength(fileId));
assertEquals(globalModCount, managingFS.getModificationCount(vFile));
assertEquals(globalModCount, managingFS.getFilesystemModificationCount());
assertEquals(parentModCount, managingFS.getModificationCount(vFile.getParent()));
assertEquals(inSessionModCount + 1, managingFS.getModificationCount());
assertFalse(FSRecords.isDirty());
}
/**
 * Applies the given VFS events through {@link PersistentFS#processEvents} and asserts that the
 * before/after notifications delivered on the message bus match {@code expectedEvents}.
 * Each batch is logged as "Before:"/"After:" followed by "EventClass->fileName" pairs.
 */
private static void checkEvents(String expectedEvents, VFileEvent... eventsToApply) {
StringBuilder log = new StringBuilder();
MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect();
try {
connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void before(@NotNull List<? extends VFileEvent> events) {
log("Before:", events);
}
@Override
public void after(@NotNull List<? extends VFileEvent> events) {
log("After:", events);
}
private void log(String prefix, List<? extends VFileEvent> events) {
log.append(prefix);
for (VFileEvent e : events) {
log.append(' ').append(e.getClass().getSimpleName()).append("->").append(PathUtil.getFileName(e.getPath()));
}
log.append('\n');
}
});
WriteCommandAction.runWriteCommandAction(null, () -> PersistentFS.getInstance().processEvents(Arrays.asList(eventsToApply)));
}
finally {
// Always disconnect so this listener does not observe later tests' events.
connection.disconnect();
}
assertEquals(expectedEvents, log.toString());
}
/** Two identical delete events for the same file must be collapsed into a single notification. */
@Test
public void testProcessEventsMustIgnoreDeleteDuplicates() {
VirtualFile file = tempDirectory.newVirtualFile("file.txt");
checkEvents("Before: VFileDeleteEvent->file.txt\n" +
"After: VFileDeleteEvent->file.txt\n",
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file, false));
}
/** Independent create/delete events may be delivered in one batch; the duplicate delete is dropped. */
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly() {
VirtualFile file = tempDirectory.newVirtualFile("file.txt");
checkEvents("Before: VFileCreateEvent->xx.created VFileDeleteEvent->file.txt\n" +
"After: VFileCreateEvent->xx.created VFileDeleteEvent->file.txt\n",
new VFileDeleteEvent(this, file, false),
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileDeleteEvent(this, file, false));
}
/** Deleting a directory dominates deletions of files inside it: only the directory event survives. */
@Test
public void testProcessEventsMustBeAwareOfDeleteEventsDomination() {
VirtualFile file = tempDirectory.newVirtualFile("d/x.txt");
checkEvents("Before: VFileDeleteEvent->d\n" +
"After: VFileDeleteEvent->d\n",
new VFileDeleteEvent(this, file.getParent(), false),
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file, false));
}
/** Two create events for the same child name in the same parent must produce one notification. */
@Test
public void testProcessCreateEventsMustFilterOutDuplicates() {
VirtualFile file = tempDirectory.newVirtualFile("d/x.txt");
checkEvents("Before: VFileCreateEvent->xx.created\n" +
"After: VFileCreateEvent->xx.created\n",
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null) );
}
/**
 * Creations inside a directory that is itself deleted later must be delivered in a batch
 * BEFORE the parent-directory delete, which goes out in its own second batch.
 */
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly2() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileCreateEvent->xx.created VFileCreateEvent->xx.created2 VFileDeleteEvent->test.txt\n" +
"After: VFileCreateEvent->xx.created VFileCreateEvent->xx.created2 VFileDeleteEvent->test.txt\n" +
"Before: VFileDeleteEvent->c\n" +
"After: VFileDeleteEvent->c\n",
new VFileDeleteEvent(this, file, false),
new VFileCreateEvent(this, file.getParent(), "xx.created", false, null, null, false, null),
new VFileCreateEvent(this, file.getParent(), "xx.created2", false, null, null, false, null),
new VFileDeleteEvent(this, file.getParent(), false));
}
/**
 * A content change on the parent and a delete of its child are dependent:
 * they must be split into two separate batches, content change first.
 */
@Test
public void testProcessEventsMustGroupDependentEventsCorrectly3() {
VirtualFile vFile = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileContentChangeEvent->c\n" +
"After: VFileContentChangeEvent->c\n" +
"Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n",
new VFileContentChangeEvent(this, vFile.getParent(), 0, 0, false),
new VFileDeleteEvent(this, vFile, false));
}
/**
 * A file delete followed by its parent-directory delete must produce two batches;
 * the sibling's delete is subsumed by the directory delete.
 */
@Test
public void testProcessNestedDeletions() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile file2 = tempDirectory.newVirtualFile("a/b/c/test2.txt");
checkEvents("Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n" +
"Before: VFileDeleteEvent->c\n" +
"After: VFileDeleteEvent->c\n",
new VFileDeleteEvent(this, file, false),
new VFileDeleteEvent(this, file.getParent(), false),
new VFileDeleteEvent(this, file2, false));
}
/** Content-change and property-change events on the same file are reconcilable: one batch. */
@Test
public void testProcessContentChangedLikeReconcilableEventsMustResultInSingleBatch() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
checkEvents("Before: VFileContentChangeEvent->test.txt VFilePropertyChangeEvent->test.txt VFilePropertyChangeEvent->test.txt\n" +
"After: VFileContentChangeEvent->test.txt VFilePropertyChangeEvent->test.txt VFilePropertyChangeEvent->test.txt\n",
new VFileContentChangeEvent(this, file, 0, 1, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_WRITABLE, false, true, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_ENCODING, StandardCharsets.ISO_8859_1, StandardCharsets.UTF_8, false));
}
/** A move into a directory that is then deleted must be delivered before the delete, in separate batches. */
@Test
public void testProcessCompositeMoveEvents() {
VirtualFile testTxt = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile newParent = tempDirectory.newVirtualDirectory("a/b/d");
checkEvents("Before: VFileMoveEvent->test.txt\n" +
"After: VFileMoveEvent->test.txt\n" +
"Before: VFileDeleteEvent->d\n" +
"After: VFileDeleteEvent->d\n",
new VFileMoveEvent(this, testTxt, newParent),
new VFileDeleteEvent(this, newParent, false));
}
/** A copy of a file followed by deleting the original must be two batches, copy first. */
@Test
public void testProcessCompositeCopyEvents() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile newParent = tempDirectory.newVirtualDirectory("a/b/d");
checkEvents("Before: VFileCopyEvent->new.txt\n" +
"After: VFileCopyEvent->new.txt\n" +
"Before: VFileDeleteEvent->test.txt\n" +
"After: VFileDeleteEvent->test.txt\n",
new VFileCopyEvent(this, file, newParent, "new.txt"),
new VFileDeleteEvent(this, file, false));
}
/**
 * Renaming a file onto the name of a just-deleted sibling must come in a second batch;
 * note the "after" notification reports the file under its new name.
 */
@Test
public void testProcessCompositeRenameEvents() {
VirtualFile file = tempDirectory.newVirtualFile("a/b/c/test.txt");
VirtualFile file2 = tempDirectory.newVirtualFile("a/b/c/test2.txt");
checkEvents("Before: VFileDeleteEvent->test2.txt\n" +
"After: VFileDeleteEvent->test2.txt\n" +
"Before: VFilePropertyChangeEvent->test.txt\n" +
"After: VFilePropertyChangeEvent->test2.txt\n",
new VFileDeleteEvent(this, file2, false),
new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_NAME, file.getName(), file2.getName(), false));
}
/**
 * When a directory appears inside an open project, refresh must eagerly load its whole
 * subtree into the VFS (all children cached at every level).
 */
@Test
public void testCreateNewDirectoryEntailsLoadingAllChildren() throws Exception {
tempDirectory.newFile("d/d1/x.txt");
Path source = tempDirectory.getRootPath().resolve("d");
Path target = tempDirectory.getRootPath().resolve("target");
VirtualFile vTemp = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(tempDirectory.getRoot());
assertNotNull(vTemp);
vTemp.refresh(false, true);
assertEquals("d", assertOneElement(vTemp.getChildren()).getName());
// Open a project over the temp root so the refresh treats the tree as project content.
Project project = ProjectManager.getInstance().loadAndOpenProject(tempDirectory.getRoot().getPath());
Disposer.register(getTestRootDisposable(), () -> ProjectManager.getInstance().closeAndDispose(project));
// Atomic rename on disk makes "target" appear as a brand-new directory to the VFS.
Files.move(source, target, StandardCopyOption.ATOMIC_MOVE);
vTemp.refresh(false, true);
assertChildrenAreLoaded(vTemp);
VirtualFile vTarget = assertOneElement(((VirtualDirectoryImpl)vTemp).getCachedChildren());
assertEquals("target", vTarget.getName());
assertChildrenAreLoaded(vTarget);
VirtualFile vd1 = assertOneElement(((VirtualDirectoryImpl)vTarget).getCachedChildren());
assertEquals("d1", vd1.getName());
assertChildrenAreLoaded(vd1);
VirtualFile vx = assertOneElement(((VirtualDirectoryImpl)vd1).getCachedChildren());
assertEquals("x.txt", vx.getName());
}
/**
 * Same as {@code testCreateNewDirectoryEntailsLoadingAllChildren}, but a subdirectory marked
 * as excluded in the module's content roots must NOT have its children eagerly loaded.
 */
@Test
public void testCreateNewDirectoryEntailsLoadingAllChildrenExceptExcluded() throws Exception {
tempDirectory.newFile("d/d1/x.txt");
Path source = tempDirectory.getRootPath().resolve("d");
Path target = tempDirectory.getRootPath().resolve("target");
VirtualFile vTemp = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(tempDirectory.getRoot());
assertNotNull(vTemp);
vTemp.refresh(false, true);
assertEquals("d", assertOneElement(vTemp.getChildren()).getName());
Project project = ProjectManager.getInstance().loadAndOpenProject(tempDirectory.getRoot().getPath());
Disposer.register(getTestRootDisposable(), () -> ProjectManager.getInstance().closeAndDispose(project));
String imlPath = tempDirectory.getRootPath().resolve("temp.iml").toString();
String url = VfsUtilCore.pathToUrl(target.resolve("d1").toString());
// Register target/d1 as an excluded folder BEFORE it appears on disk.
WriteAction.runAndWait(() -> {
Module module = ModuleManager.getInstance(project).newModule(imlPath, ModuleTypeManager.getInstance().getDefaultModuleType().getId());
ModuleRootModificationUtil.updateModel(module, model -> {
ContentEntry contentEntry = model.addContentEntry(url);
contentEntry.addExcludeFolder(url);
});
});
Files.move(source, target, StandardCopyOption.ATOMIC_MOVE);
vTemp.refresh(false, true);
assertChildrenAreLoaded(vTemp);
VirtualFile vTarget = assertOneElement(((VirtualDirectoryImpl)vTemp).getCachedChildren());
assertEquals("target", vTarget.getName());
assertChildrenAreLoaded(vTarget);
VirtualFile vd1 = assertOneElement(((VirtualDirectoryImpl)vTarget).getCachedChildren());
assertEquals("d1", vd1.getName());
// The excluded directory itself is visible, but its subtree stays unloaded.
assertFalse(((VirtualDirectoryImpl)vd1).allChildrenLoaded());
assertEquals(Collections.emptyList(), ((VirtualDirectoryImpl)vd1).getCachedChildren());
}
/** Asserts that the directory's children are fully loaded both in memory and in the persistence. */
private static void assertChildrenAreLoaded(VirtualFile file) {
String message = "children not loaded: " + file;
VirtualDirectoryImpl directory = (VirtualDirectoryImpl)file;
assertTrue(message, directory.allChildrenLoaded());
assertTrue(message, PersistentFS.getInstance().areChildrenLoaded(file));
}
/**
 * On a case-insensitive FS, renaming a file on disk to a different case behind the VFS's back
 * must not make the parent directory report both the old and the new name (duplicate children).
 */
@Test
public void testRenameInBackgroundDoesntLeadToDuplicateFilesError() throws IOException {
assumeFalse("Case-insensitive OS expected, can't run on " + SystemInfo.OS_NAME, SystemInfo.isFileSystemCaseSensitive);
File file = tempDirectory.newFile("rename.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualFile vfile = refreshAndFind(file);
VirtualDirectoryImpl vTemp = (VirtualDirectoryImpl)vfile.getParent();
assertFalse(vTemp.allChildrenLoaded());
VfsUtil.markDirty(true, false, vTemp);
// Rename on disk without notifying the VFS, then ask the dirty parent for its children.
Files.move(file.toPath(), file.toPath().resolveSibling(file.getName().toUpperCase()), StandardCopyOption.ATOMIC_MOVE);
VirtualFile[] newChildren = vTemp.getChildren();
assertOneElement(newChildren);
}
/**
 * After deleting a directory tree, no entry of that tree may remain reachable through the
 * PersistentFS caches (by id lookup or the dir cache), and all cached dirs stay valid.
 */
@Test
public void testPersistentFsCacheDoesntContainInvalidFiles() {
File file = tempDirectory.newFile("subDir1/subDir2/subDir3/file.txt");
VirtualFileSystemEntry vFile = (VirtualFileSystemEntry)refreshAndFind(file);
VirtualFileSystemEntry vSubDir3 = vFile.getParent();
VirtualFileSystemEntry vSubDir2 = vSubDir3.getParent();
VirtualFileSystemEntry vSubDir1 = vSubDir2.getParent();
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
// Hold strong references so GC can't hide stale cache entries from the assertions below.
VirtualFileSystemEntry[] hardReferenceHolder = {vFile, vSubDir3, vSubDir2, vSubDir1};
VfsTestUtil.deleteFile(vSubDir1);
for (VirtualFileSystemEntry f : hardReferenceHolder) {
assertFalse("file is valid but deleted " + f.getName(), f.isValid());
}
for (VirtualFileSystemEntry f : hardReferenceHolder) {
assertNull(fs.getCachedDir(f.getId()));
assertNull(fs.findFileById(f.getId()));
}
for (VirtualFileSystemEntry f : fs.getIdToDirCache().values()) {
assertTrue(f.isValid());
}
}
/**
 * Two concurrent {@code listAll} calls over a partially-loaded directory (with a file added
 * on disk behind the VFS's back) must never assign different ids to the same child name.
 */
@Test
public void testConcurrentListAllDoesntCauseDuplicateFileIds() throws Exception {
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
// Repeat to give the race a chance to manifest.
for (int i = 0; i < 10; i++) {
File file = tempDirectory.newFile("d" + i + "/file.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualDirectoryImpl vTemp = (VirtualDirectoryImpl)refreshAndFind(file).getParent();
assertFalse(vTemp.allChildrenLoaded());
Files.writeString(file.toPath().resolveSibling("new.txt"), "new");
Future<List<? extends ChildInfo>> f1 = ApplicationManager.getApplication().executeOnPooledThread(() -> fs.listAll(vTemp));
Future<List<? extends ChildInfo>> f2 = ApplicationManager.getApplication().executeOnPooledThread(() -> fs.listAll(vTemp));
List<? extends ChildInfo> children1 = f1.get();
List<? extends ChildInfo> children2 = f2.get();
int[] nameIds1 = children1.stream().mapToInt(n -> n.getNameId()).toArray();
int[] nameIds2 = children2.stream().mapToInt(n -> n.getNameId()).toArray();
// there can be one or two children, depending on whether the VFS refreshed in time or not.
// but in any case, there must not be duplicate ids (i.e. files with the same name but different getId())
for (int i1 = 0; i1 < nameIds1.length; i1++) {
int nameId1 = nameIds1[i1];
int i2 = ArrayUtil.find(nameIds2, nameId1);
if (i2 >= 0) {
int id1 = children1.get(i1).getId();
int id2 = children2.get(i2).getId();
assertEquals("Duplicate ids found. children1=" + children1 + "; children2=" + children2, id1, id2);
}
}
}
}
/**
 * Stress test: while background threads keep renaming the file by flipping the case of one
 * letter, concurrent {@code listAll} calls must always see exactly one child (never a
 * duplicate under the old and new case).
 */
@Test
public void testMustNotDuplicateIdsOnRenameWithCaseChanged() {
PersistentFSImpl fs = (PersistentFSImpl)PersistentFS.getInstance();
File file = tempDirectory.newFile("file.txt", "x".getBytes(StandardCharsets.UTF_8));
VirtualFile vDir = refreshAndFind(file.getParentFile());
VirtualFile vf = assertOneElement(vDir.getChildren());
assertEquals("file.txt", vf.getName());
List<Future<?>> futures = new ArrayList<>();
String oldName = file.getName();
// Renamer tasks: each uppercases a different character of the name.
for (int i = 0; i < 100; i++) {
int u = i % oldName.length();
Future<?> f = ApplicationManager.getApplication().executeOnPooledThread(() -> {
String newName = oldName.substring(0, u) + Character.toUpperCase(oldName.charAt(u)) + oldName.substring(u + 1);
try {
Files.move(file.toPath(), file.toPath().resolveSibling(newName), StandardCopyOption.ATOMIC_MOVE);
}
catch (NoSuchFileException ignored) { }
catch (IOException e) {
throw new UncheckedIOException(e);
}
});
futures.add(f);
}
// Reader tasks: repeatedly list the directory and insist on a single child.
for (int i = 0; i < 10; i++) {
Future<?> f = ApplicationManager.getApplication().executeOnPooledThread(() -> {
for (int u=0; u<100; u++) {
List<? extends ChildInfo> infos = fs.listAll(vDir);
assertOneElement(infos);
}
});
futures.add(f);
}
runInEdtAndWait(() -> {
for (Future<?> future : futures) {
PlatformTestUtil.waitForFuture(future, 10_000);
}
});
}
/**
 * Jar file system wrapper (registered under the "jar-wrapper" protocol) that counts
 * {@link #getAttributes} calls, letting tests verify whether attributes were re-read
 * from the physical file system or served from a cache.
 */
public static class TracingJarFileSystemTestWrapper extends JarFileSystemImpl {
private final AtomicInteger myAttributeCallCount = new AtomicInteger();
@Override
public @Nullable FileAttributes getAttributes(@NotNull VirtualFile file) {
myAttributeCallCount.incrementAndGet();
return super.getAttributes(file);
}
// Accessible from the enclosing test class despite being private (same top-level class).
private int getAttributeCallCount() {
return myAttributeCallCount.get();
}
@Override
public @NotNull String getProtocol() {
return "jar-wrapper";
}
}
/**
 * Builds a zip named {@code fileName} containing a single entry, then copies it into
 * {@code outputDir} (stream copy, so an already-open target can be overwritten on Windows)
 * and refreshes the VFS on the result.
 *
 * @return the copy placed in {@code outputDir}
 */
private static File zipWithEntry(String fileName, File generationDir, File outputDir, String entryName, String entryContent) throws IOException {
File zipFile = new File(generationDir, fileName);
try (Compressor.Zip zip = new Compressor.Zip(zipFile)) {
zip.addFile(entryName, entryContent.getBytes(StandardCharsets.UTF_8));
}
File outputFile = new File(outputDir, fileName);
try (OutputStream out = Files.newOutputStream(outputFile.toPath())) {
Files.copy(zipFile.toPath(), out); // unlike `Files#copy(Path, Path)`, allows to overwrite an opened file on Windows
}
VfsUtil.markDirtyAndRefresh(false, true, true, outputFile);
return outputFile;
}
/**
 * Registers {@link TracingJarFileSystemTestWrapper} as a dynamic extension and verifies that
 * after a jar changes, the entry's length is re-read from the physical FS once, then cached
 * (repeated {@code getLength()} calls must not trigger more attribute reads).
 */
@Test
public void testReadOnlyFsCachesLength() throws IOException {
String text = "<virtualFileSystem implementationClass=\"" + TracingJarFileSystemTestWrapper.class.getName() + "\" key=\"jar-wrapper\" physical=\"true\"/>";
Disposable disposable = runInEdtAndGet(() -> DynamicPluginsTestUtil.loadExtensionWithText(text, TracingJarFileSystemTestWrapper.class.getClassLoader()));
try {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
String jarName = "test.jar";
String entryName = "Some.java";
// Three successive versions of the jar entry, each with a different length.
String[] contents = {"class Some {}", "class Some { void m() {} }", "class Some { void mmm() {} }"};
File zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[0]);
String url = "jar-wrapper://" + FileUtil.toSystemIndependentName(zipFile.getPath()) + "!/" + entryName;
VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[0], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
TracingJarFileSystemTestWrapper fs = (TracingJarFileSystemTestWrapper)file.getFileSystem();
zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[1]);
VfsUtil.markDirtyAndRefresh(false, true, true, zipFile);
int attrCallCount = fs.getAttributeCallCount();
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[1], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, contents[2]);
VfsUtil.markDirtyAndRefresh(false, true, true, zipFile);
assertNotEquals(attrCallCount, fs.getAttributeCallCount()); // we should read length from physical FS
file.refresh(false, false);
assertTrue(file.isValid());
assertEquals(contents[2], new String(file.contentsToByteArray(), StandardCharsets.UTF_8));
attrCallCount = fs.getAttributeCallCount();
for (int i = 0; i < 3; i++) {
file.getLength();
assertEquals(attrCallCount, fs.getAttributeCallCount()); // ensure it's cached
}
}
finally {
// Unload the dynamic extension regardless of the outcome.
runInEdtAndWait(() -> Disposer.dispose(disposable));
}
}
/**
 * Reading from a jar entry's input stream without reaching EOF (single byte, or open+close)
 * must not force a length recalculation, i.e. no extra {@code getAttributes} calls.
 */
@Test
public void testDoNotRecalculateLengthIfEndOfInputStreamIsNotReached() throws IOException {
String text = "<virtualFileSystem implementationClass=\"" + TracingJarFileSystemTestWrapper.class.getName() + "\" key=\"jar-wrapper\" physical=\"true\"/>";
Disposable disposable = runInEdtAndGet(() -> DynamicPluginsTestUtil.loadExtensionWithText(text, TracingJarFileSystemTestWrapper.class.getClassLoader()));
try {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
String jarName = "test.jar";
String entryName = "Some.java";
String content = "class Some {}";
File zipFile = zipWithEntry(jarName, generationDir, testDir, entryName, content);
String url = "jar-wrapper://" + FileUtil.toSystemIndependentName(zipFile.getPath()) + "!/" + entryName;
VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
file.refresh(false, false);
TracingJarFileSystemTestWrapper fs = (TracingJarFileSystemTestWrapper)file.getFileSystem();
int attributeCallCount = fs.getAttributeCallCount();
try (InputStream stream = file.getInputStream()) {
// just read single byte
@SuppressWarnings("unused") int read = stream.read();
}
assertEquals(attributeCallCount, fs.getAttributeCallCount());
//noinspection EmptyTryBlock,unused
try (InputStream stream = file.getInputStream()) {
// just close
}
assertEquals(attributeCallCount, fs.getAttributeCallCount());
}
finally {
// Unload the dynamic extension regardless of the outcome.
runInEdtAndWait(() -> Disposer.dispose(disposable));
}
}
/**
 * Deleting the jar file on disk and refreshing its jar root must produce delete events
 * for BOTH the jar root (jar:// entry) and the local jar file (file:// entry).
 */
@Test
public void testDeleteJarRootInsideJarMustCauseDeleteLocalJarFile() throws IOException {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
File jarFile = zipWithEntry("test.jar", generationDir, testDir, "Some.java", "class Some {}");
VirtualFile vFile = VirtualFileManager.getInstance().refreshAndFindFileByUrl(VfsUtilCore.pathToUrl(jarFile.getPath()));
VirtualFile jarVFile = JarFileSystem.getInstance().getJarRootForLocalFile(vFile);
FileUtil.delete(jarFile);
List<VFileEvent> events = new ArrayList<>();
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
events.addAll(e);
}
});
jarVFile.refresh(false, false);
assertEquals(2, events.size());
for (VFileEvent event : events) {
assertTrue(event.toString(), event instanceof VFileDeleteEvent);
}
// Sort by URL so the file:// event (local jar) precedes the jar:// event (jar root).
events.sort(Comparator.comparing((VFileEvent e) ->e.getFile().getUrl()));
assertEquals(vFile.getUrl(), events.get(0).getFile().getUrl());
assertEquals(jarVFile.getUrl(), events.get(1).getFile().getUrl());
}
/**
 * Replacing a jar's contents on disk and refreshing a file deep inside the jar must emit a
 * delete event for the vanished entry plus a content-change event for the local jar file.
 */
@Test
public void testDeleteFileDeepInsideJarFileMustCauseContentChangeForLocalJar() throws IOException {
File generationDir = tempDirectory.newDirectory("gen");
File testDir = tempDirectory.newDirectory("test");
File jarFile = zipWithEntry("test.jar", generationDir, testDir, "web.xml", "<web/>");
VirtualFile vFile = VirtualFileManager.getInstance().refreshAndFindFileByUrl(VfsUtilCore.pathToUrl(jarFile.getPath()));
VirtualFile jarVFile = JarFileSystem.getInstance().getJarRootForLocalFile(vFile);
VirtualFile webXml = jarVFile.findChild("web.xml");
// Overwrite the jar on disk with a different one; web.xml no longer exists inside it.
File newJarFile = zipWithEntry("test2.jar", generationDir, testDir, "x.java", "class X{}");
FileUtil.copy(newJarFile, jarFile);
List<VFileEvent> events = new ArrayList<>();
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
events.addAll(e);
}
});
((JarFileSystemImpl)JarFileSystem.getInstance()).markDirtyAndRefreshVirtualFileDeepInsideJarForTest(webXml);
assertEquals(2, events.size());
VFileEvent event0 = events.get(0);
assertTrue(event0.toString(), event0 instanceof VFileDeleteEvent);
assertEquals(webXml.getUrl(), event0.getFile().getUrl());
VFileEvent event1 = events.get(1);
assertTrue(event1.toString(), event1 instanceof VFileContentChangeEvent);
assertEquals(vFile.getUrl(), event1.getFile().getUrl());
}
/**
 * Windows-only (requires elevated 'fsutil.exe' privileges): in a directory toggled to
 * case-sensitive, "file.txt" and "FILE.TXT" are distinct files, and VFS refresh must report
 * content-change/delete/create events for exactly the right one of the two.
 */
@Test
public void testFileContentChangeEventsMustDifferentiateCaseSensitivityToggledFiles() throws IOException {
assumeWindows();
assumeTrue("'fsutil.exe' needs elevated privileges to work", SuperUserStatus.isSuperUser());
File dir = tempDirectory.newDirectory();
VirtualFile vDir = refreshAndFind(dir);
setCaseSensitivity(dir, true);
File file = new File(dir, "file.txt");
assertTrue(file.createNewFile());
File FILE = new File(dir, "FILE.TXT");
assertTrue(FILE.createNewFile());
VirtualFile vFile = refreshAndFind(file);
VirtualFile vFILE = refreshAndFind(FILE);
// Collect only events for children of the test directory.
List<VFileEvent> events = new ArrayList<>();
ApplicationManager.getApplication().getMessageBus().connect(getTestRootDisposable()).subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener() {
@Override
public void after(@NotNull List<? extends VFileEvent> e) {
for (VFileEvent event : e) {
VirtualFile evFile = event.getFile();
if (evFile.getParent().equals(vDir)) {
events.add(event);
}
}
}
});
// Changing only the lower-case file must fire for vFile alone.
FileUtil.writeToFile(file, "content");
vFile.refresh(false, false);
vFILE.refresh(false, false);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFile, -1, -1, true));
events.clear();
// Changing only the upper-case file must fire for vFILE alone.
FileUtil.writeToFile(FILE, "content");
vFile.refresh(false, false);
vFILE.refresh(false, false);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFILE,-1,-1, true));
events.clear();
// Changing both must fire for both.
FileUtil.writeToFile(file, "content2");
FileUtil.writeToFile(FILE, "content2");
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileContentChangeEvent(this, vFile,-1,-1,true),
new VFileContentChangeEvent(this, vFILE,-1,-1,true));
events.clear();
// Deleting both must fire two distinct delete events.
FileUtil.delete(file);
FileUtil.delete(FILE);
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileDeleteEvent(this, vFile,false),
new VFileDeleteEvent(this, vFILE,false));
events.clear();
// Recreating both must fire two distinct create events.
assertTrue(file.createNewFile());
assertTrue(FILE.createNewFile());
vDir.refresh(false, true);
assertEqualUnorderedEvents(events,
new VFileCreateEvent(this, vDir, vFile.getName(),false, null, null, true, null),
new VFileCreateEvent(this, vDir, vFILE.getName(),false, null, null, true, null));
}
/**
 * Asserts that {@code actual} contains exactly the {@code expected} events, ignoring order.
 * Content-change events are normalized first (see {@link #ignoreCrazyVFileContentChangedEquals})
 * so their offset/timestamp fields don't affect equality.  If the sets differ but the string
 * renderings coincide, fails with an explicit warning about an uninformative toString.
 */
private void assertEqualUnorderedEvents(List<? extends VFileEvent> actual, VFileEvent... expected) {
// Method references instead of redundant `e -> f(e)` lambdas.
Set<VFileEvent> act = new HashSet<>(ContainerUtil.map(actual, this::ignoreCrazyVFileContentChangedEquals));
Set<VFileEvent> exp = new HashSet<>(ContainerUtil.map(expected, this::ignoreCrazyVFileContentChangedEquals));
if (!act.equals(exp)) {
String expectedString = UsefulTestCase.toString(Arrays.asList(expected));
String actualString = UsefulTestCase.toString(actual);
// Fails with a readable diff when the renderings differ...
assertEquals(expectedString, actualString);
// ...otherwise flag that toString hid the real difference.
fail("Warning! 'toString' does not reflect the difference.\nExpected: " + expectedString + "\nActual: " + actualString);
}
}
/**
 * Normalizes a content-change event by replacing its offsets, lengths and timestamps with
 * fixed values, so equals() effectively compares only the affected file.
 * Other event types pass through unchanged.
 */
private VFileEvent ignoreCrazyVFileContentChangedEquals(VFileEvent event) {
if (!(event instanceof VFileContentChangeEvent)) {
return event;
}
return new VFileContentChangeEvent(this, event.getFile(), 0, 0, -1, -1, -1, -1, true);
}
}
| extract util method assertEvents
GitOrigin-RevId: 8ddb9cadd0bff58dd309dcd08cb66b6e90fa14e8 | platform/platform-tests/testSrc/com/intellij/openapi/vfs/newvfs/persistent/PersistentFsTest.java | extract util method assertEvents |
|
Java | apache-2.0 | 90a49e938883ac63f6e0716c113bf0b8f14eadb9 | 0 | internetarchive/tnh,aaronbinns/tnh,aaronbinns/tnh,internetarchive/tnh | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.*;
import java.util.*;
import org.apache.lucene.search.*;
import org.apache.lucene.analysis.*;
import org.apache.lucene.queryParser.*;
import org.apache.lucene.document.*;
import org.apache.lucene.index.*;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.util.Version;
public class Search
{
private static final DefaultQueryTranslator TRANSLATOR = new DefaultQueryTranslator( );
private Map<String,Searcher> searchers;
private FieldCache siteCache;
/**
 * Creates a Search over a single searcher, registered as the default (empty-string key).
 *
 * @param searcher the default searcher; must not be {@code null}
 * @throws IllegalArgumentException if {@code searcher} is {@code null}
 */
public Search( Searcher searcher )
{
  if ( searcher == null ) throw new IllegalArgumentException( "searcher cannot be null" );

  this.init( Collections.singletonMap( "", searcher ) );
}
/**
 * Creates a Search over a map of named searchers.  The map must provide a default
 * searcher under the empty-string key.
 *
 * @param searchers name-to-searcher map; must not be {@code null} and must map "" to a searcher
 * @throws IllegalArgumentException if the map is {@code null} or lacks the "" entry
 */
public Search( Map<String,Searcher> searchers )
{
  if ( searchers == null ) throw new IllegalArgumentException( "searchers cannot be null" );
  if ( searchers.get( "" ) == null ) throw new IllegalArgumentException( "Searchers map does not contain a searcher for key \"\"" );

  this.init( searchers );
}
/** Shared constructor initialization: stores the searcher map and installs the default site cache. */
private void init( Map<String,Searcher> searcherMap )
{
  this.searchers = searcherMap;
  this.siteCache = new FieldCacheLucene( "site" );
}
/** Returns the cache used to look up a document's "site" field for per-site hit collapsing. */
public FieldCache getSiteCache( )
{
return this.siteCache;
}
/**
 * Replaces the site cache used for per-site hit collapsing.
 *
 * @param siteCache the cache to use; must not be {@code null}
 * @throws IllegalArgumentException if {@code siteCache} is {@code null}
 */
public void setSiteCache( FieldCache siteCache )
{
if ( siteCache == null ) throw new IllegalArgumentException( "siteCache cannot be null" );
this.siteCache = siteCache;
}
/** Returns {@code true} if a searcher is registered under the given index name. */
public boolean hasIndex( String name )
{
return this.searchers.containsKey( name );
}
/**
 * Searches the default index (the searcher registered under the "" key).
 *
 * @param query       raw query string, translated via the default query translator
 * @param maxHits     maximum number of hits to return (must be &gt; 0)
 * @param hitsPerSite maximum hits kept per site; 0 disables collapsing
 */
public Result search( String query, int maxHits, int hitsPerSite )
throws Exception
{
return this.search( this.searchers.get(""), query, maxHits, hitsPerSite );
}
public Result search( String indexName, String query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher searcher = this.searchers.get( indexName );
if ( searcher == null ) throw new IllegalArgumentException( "Index not found: " + indexName );
return this.search( searcher, query, maxHits, hitsPerSite );
}
public Result search( String indexNames[], String query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher s = buildMultiSearcher( indexNames );
return this.search( s, query, maxHits, hitsPerSite );
}
public Result search( String indexNames[], Query query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher s = buildMultiSearcher( indexNames );
return this.search( s, query, maxHits, hitsPerSite );
}
public Result search( Searcher searcher, String query, int maxHits, int hitsPerSite )
throws Exception
{
Query q = TRANSLATOR.translate( query );
return this.search( searcher, q, maxHits, hitsPerSite );
}
public Result search( Searcher searcher, Query query, int maxHits, int hitsPerSite )
throws Exception
{
if ( searcher == null ) throw new IllegalArgumentException( "searcher cannot be null" );
if ( query == null ) throw new IllegalArgumentException( "query cannot be null" );
if ( maxHits <= 0 ) throw new IllegalArgumentException( "maxHits must be > 0" );
if ( hitsPerSite < 0 ) throw new IllegalArgumentException( "hitsPerSite must be >= 0" );
CollapsingCollector collector = new CollapsingCollector( this.siteCache, maxHits, hitsPerSite );
searcher.search( query, collector );
Result result = new Result( );
result.searcher = searcher;
result.numRawHits= collector.getNumUncollapsedHits( );
result.hits = collector.getHits( );
return result;
}
public MultiSearcher buildMultiSearcher( String indexNames[] )
throws IOException
{
if ( indexNames == null || indexNames.length == 0 ) throw new IllegalArgumentException( "At least one indexName must be specified" );
Searchable[] searchables = new Searchable[indexNames.length];
for ( int i = 0 ; i < indexNames.length ; i++ )
{
searchables[i] = this.searchers.get( indexNames[i] );
if ( searchables[i] == null ) throw new IllegalArgumentException( "Index not found: " + indexNames[i] );
}
MultiSearcher searcher = new MultiSearcher( searchables );
return searcher;
}
public static class Result
{
public Searcher searcher;
public int numRawHits;
public Hit[] hits;
}
}
| src/java/Search.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.Iterator;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.PriorityQueue;
import org.apache.lucene.search.*;
import org.apache.lucene.analysis.*;
import org.apache.lucene.queryParser.*;
import org.apache.lucene.document.*;
import org.apache.lucene.index.*;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.util.Version;
public class Search
{
private static final DefaultQueryTranslator TRANSLATOR = new DefaultQueryTranslator( );
private Map<String,Searcher> searchers;
private FieldCache siteCache;
public Search( Searcher searcher )
{
if ( searcher == null ) throw new IllegalArgumentException( "searcher cannot be null" );
Map<String,Searcher> searchers = Collections.singletonMap( "", searcher );
this.init( searchers );
}
public Search( Map<String,Searcher> searchers )
{
if ( searchers == null ) throw new IllegalArgumentException( "searchers cannot be null" );
if ( searchers.get( "" ) == null )
{
throw new IllegalArgumentException( "Searchers map does not contain a searcher for key \"\"" );
}
this.init( searchers );
}
private void init( Map<String,Searcher> searchers )
{
this.searchers = searchers;
this.siteCache = new FieldCacheLucene( "site" );
}
public FieldCache getSiteCache( )
{
return this.siteCache;
}
public void setSiteCache( FieldCache siteCache )
{
if ( siteCache == null ) throw new IllegalArgumentException( "siteCache cannot be null" );
this.siteCache = siteCache;
}
public Result search( String query, int maxHits, int hitsPerSite )
throws Exception
{
return this.search( this.searchers.get(""), query, maxHits, hitsPerSite );
}
public Result search( String indexName, String query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher searcher = this.searchers.get( indexName );
if ( searcher == null ) throw new IllegalArgumentException( "Index not found: " + indexName );
return this.search( searcher, query, maxHits, hitsPerSite );
}
public Result search( String indexNames[], String query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher s = buildMultiSearcher( indexNames );
return this.search( s, query, maxHits, hitsPerSite );
}
public Result search( String indexNames[], Query query, int maxHits, int hitsPerSite )
throws Exception
{
Searcher s = buildMultiSearcher( indexNames );
return this.search( s, query, maxHits, hitsPerSite );
}
public Result search( Searcher searcher, String query, int maxHits, int hitsPerSite )
throws Exception
{
Query q = TRANSLATOR.translate( query );
return this.search( searcher, q, maxHits, hitsPerSite );
}
public Result search( Searcher searcher, Query query, int maxHits, int hitsPerSite )
throws Exception
{
if ( searcher == null ) throw new IllegalArgumentException( "searcher cannot be null" );
if ( query == null ) throw new IllegalArgumentException( "query cannot be null" );
if ( maxHits <= 0 ) throw new IllegalArgumentException( "maxHits must be > 0" );
if ( hitsPerSite < 0 ) throw new IllegalArgumentException( "hitsPerSite must be >= 0" );
CollapsingCollector collector = new CollapsingCollector( this.siteCache, maxHits, hitsPerSite );
searcher.search( query, collector );
Result result = new Result( );
result.searcher = searcher;
result.numRawHits= collector.getNumUncollapsedHits( );
result.hits = collector.getHits( );
return result;
}
public MultiSearcher buildMultiSearcher( String indexNames[] )
throws IOException
{
if ( indexNames == null || indexNames.length == 0 ) throw new IllegalArgumentException( "At least one indexName must be specified" );
Searchable[] searchables = new Searchable[indexNames.length];
for ( int i = 0 ; i < indexNames.length ; i++ )
{
searchables[i] = this.searchers.get( indexNames[i] );
if ( searchables[i] == null ) throw new IllegalArgumentException( "Index not found: " + indexNames[i] );
}
MultiSearcher searcher = new MultiSearcher( searchables );
return searcher;
}
public static class Result
{
public Searcher searcher;
public int numRawHits;
public Hit[] hits;
}
}
| Added hasIndex() method.
| src/java/Search.java | Added hasIndex() method. |
|
Java | bsd-3-clause | 9898a975d0b0173d0fb41606798d007f4236b682 | 0 | Clashsoft/Dyvil,Clashsoft/Dyvil | package dyvil.tools.compiler.config;
import dyvil.collection.List;
import dyvil.collection.mutable.ArrayList;
import dyvil.io.FileUtils;
import dyvil.tools.compiler.DyvilCompiler;
import dyvil.tools.compiler.ast.structure.Package;
import dyvil.tools.compiler.lang.I18n;
import dyvil.tools.compiler.library.Library;
import dyvil.tools.compiler.sources.FileFinder;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.regex.Pattern;
public class CompilerConfig
{
private DyvilCompiler compiler;
private String baseDirectory;
private String jarName;
private String jarVendor;
private String jarVersion;
private String jarNameFormat = "%1$s-%2$s.jar";
private File logFile;
private File sourceDir;
private File outputDir;
public final List<Library> libraries = new ArrayList<>();
public final List<String> includedFiles = new ArrayList<>();
public final List<Pattern> exclusionPatterns = new ArrayList<>();
private String mainType;
public final List<String> mainArgs = new ArrayList<>();
private boolean debug;
private boolean ansiColors;
private int constantFolding = 2;
private int maxConstantDepth = 10;
public CompilerConfig(DyvilCompiler compiler)
{
this.compiler = compiler;
this.libraries.add(Library.dyvilLibrary);
this.libraries.add(Library.javaLibrary);
}
public void setBaseDirectory(String baseDirectory)
{
this.baseDirectory = baseDirectory;
}
public void setConfigFile(File configFile)
{
this.baseDirectory = configFile.getParent();
}
public void setJarName(String jarName)
{
this.jarName = jarName;
}
public String getJarVendor()
{
return this.jarVendor;
}
public void setJarVendor(String jarVendor)
{
this.jarVendor = jarVendor;
}
public String getJarVersion()
{
return this.jarVersion;
}
public void setJarVersion(String jarVersion)
{
this.jarVersion = jarVersion;
}
public String getJarNameFormat()
{
return this.jarNameFormat;
}
public void setJarNameFormat(String jarNameFormat)
{
this.jarNameFormat = jarNameFormat;
}
public String getMainType()
{
return this.mainType;
}
public void setMainType(String mainType)
{
this.mainType = mainType;
}
public File getOutputDir()
{
return this.outputDir;
}
public void setOutputDir(String outputDir)
{
this.outputDir = this.resolveFile(outputDir);
}
public File getSourceDir()
{
return this.sourceDir;
}
public void setSourceDir(String sourceDir)
{
this.sourceDir = this.resolveFile(sourceDir);
}
public File getLogFile()
{
return this.logFile;
}
public void setLogFile(String logFile)
{
this.logFile = this.resolveFile(logFile);
}
public void addLibraryFile(String file)
{
try
{
this.libraries.add(Library.load(this.resolveFile(file)));
}
catch (FileNotFoundException ex)
{
this.compiler.error(I18n.get("library.not_found", file), ex);
}
}
public void addLibrary(Library library)
{
this.libraries.add(library);
}
public void includeFile(String fileName)
{
this.includedFiles.add(fileName);
}
public void excludeFile(String pattern)
{
this.exclusionPatterns.add(FileUtils.antToRegex(pattern));
}
public boolean isDebug()
{
return this.debug;
}
public void setDebug(boolean debug)
{
this.debug = debug;
}
public boolean useAnsiColors()
{
return this.ansiColors;
}
public void setAnsiColors(boolean ansiColors)
{
this.ansiColors = ansiColors;
}
public int getConstantFolding()
{
return this.constantFolding;
}
public void setConstantFolding(int constantFolding)
{
this.constantFolding = constantFolding;
}
public int getMaxConstantDepth()
{
return this.maxConstantDepth;
}
public void setMaxConstantDepth(int maxConstantDepth)
{
this.maxConstantDepth = maxConstantDepth;
}
private File resolveFile(String fileName)
{
if (fileName.length() == 0)
{
return new File(this.baseDirectory);
}
if (fileName.charAt(0) == File.separatorChar)
{
return new File(fileName);
}
return new File(this.baseDirectory, fileName);
}
public boolean isExcluded(String name)
{
for (Pattern p : this.exclusionPatterns)
{
if (p.matcher(name).find())
{
return false;
}
}
return true;
}
public void findUnits(FileFinder fileFinder)
{
if (!this.includedFiles.isEmpty())
{
for (String included : this.includedFiles)
{
File source = new File(this.sourceDir, included);
File output = new File(this.outputDir, included);
Package pack = packageFromFile(included, source.isDirectory());
fileFinder.process(this.compiler, source, output, pack);
}
return;
}
fileFinder.process(this.compiler, this.sourceDir, this.outputDir, Package.rootPackage);
}
private static Package packageFromFile(String file, boolean isDirectory)
{
int index = 0;
Package pack = Package.rootPackage;
do
{
int nextIndex = file.indexOf('/', index + 1);
if (nextIndex < 0)
{
return isDirectory ? pack.resolvePackage(file.substring(index)) : pack;
}
pack = pack.createSubPackage(file.substring(index, nextIndex));
index = nextIndex + 1;
}
while (index < file.length());
return pack;
}
public String getJarName()
{
return String.format(this.jarNameFormat, this.jarName, this.jarVersion);
}
public String[] getMainArgs()
{
return this.mainArgs.toArray(String.class);
}
@Override
public String toString()
{
return "CompilerConfig [jarName=" + this.getJarName() +
", sourceDir=" + this.sourceDir +
", outputDir=" + this.outputDir +
", libraryFiles=" + this.libraries +
", includedFiles=" + this.includedFiles +
", exclusionPatterns=" + this.exclusionPatterns +
", mainType=" + this.mainType +
", mainArgs=" + this.mainArgs + "]";
}
}
| src/compiler/dyvil/tools/compiler/config/CompilerConfig.java | package dyvil.tools.compiler.config;
import dyvil.collection.List;
import dyvil.collection.mutable.ArrayList;
import dyvil.tools.compiler.DyvilCompiler;
import dyvil.tools.compiler.ast.structure.Package;
import dyvil.tools.compiler.lang.I18n;
import dyvil.tools.compiler.library.Library;
import dyvil.tools.compiler.sources.FileFinder;
import java.io.File;
import java.io.FileNotFoundException;
public class CompilerConfig
{
private DyvilCompiler compiler;
private String baseDirectory;
private String jarName;
private String jarVendor;
private String jarVersion;
private String jarNameFormat = "%1$s-%2$s.jar";
private File logFile;
private File sourceDir;
private File outputDir;
public final List<Library> libraries = new ArrayList<>();
public final List<String> includedFiles = new ArrayList<>();
public final List<String> excludedFiles = new ArrayList<>();
private String mainType;
public final List<String> mainArgs = new ArrayList<>();
private boolean debug;
private boolean ansiColors;
private int constantFolding = 2;
private int maxConstantDepth = 10;
public CompilerConfig(DyvilCompiler compiler)
{
this.compiler = compiler;
this.libraries.add(Library.dyvilLibrary);
this.libraries.add(Library.javaLibrary);
}
public void setBaseDirectory(String baseDirectory)
{
this.baseDirectory = baseDirectory;
}
public void setConfigFile(File configFile)
{
this.baseDirectory = configFile.getParent();
}
public void setJarName(String jarName)
{
this.jarName = jarName;
}
public String getJarVendor()
{
return this.jarVendor;
}
public void setJarVendor(String jarVendor)
{
this.jarVendor = jarVendor;
}
public String getJarVersion()
{
return this.jarVersion;
}
public void setJarVersion(String jarVersion)
{
this.jarVersion = jarVersion;
}
public String getJarNameFormat()
{
return this.jarNameFormat;
}
public void setJarNameFormat(String jarNameFormat)
{
this.jarNameFormat = jarNameFormat;
}
public String getMainType()
{
return this.mainType;
}
public void setMainType(String mainType)
{
this.mainType = mainType;
}
public File getOutputDir()
{
return this.outputDir;
}
public void setOutputDir(String outputDir)
{
this.outputDir = this.resolveFile(outputDir);
}
public File getSourceDir()
{
return this.sourceDir;
}
public void setSourceDir(String sourceDir)
{
this.sourceDir = this.resolveFile(sourceDir);
}
public File getLogFile()
{
return this.logFile;
}
public void setLogFile(String logFile)
{
this.logFile = this.resolveFile(logFile);
}
public void addLibraryFile(String file)
{
try
{
this.libraries.add(Library.load(this.resolveFile(file)));
}
catch (FileNotFoundException ex)
{
this.compiler.error(I18n.get("library.not_found", file), ex);
}
}
public void addLibrary(Library library)
{
this.libraries.add(library);
}
public void includeFile(String fileName)
{
this.includedFiles.add(fileName);
}
public void excludeFile(String fileName)
{
this.excludedFiles.add(fileName);
}
public boolean isDebug()
{
return this.debug;
}
public void setDebug(boolean debug)
{
this.debug = debug;
}
public boolean useAnsiColors()
{
return this.ansiColors;
}
public void setAnsiColors(boolean ansiColors)
{
this.ansiColors = ansiColors;
}
public int getConstantFolding()
{
return this.constantFolding;
}
public void setConstantFolding(int constantFolding)
{
this.constantFolding = constantFolding;
}
public int getMaxConstantDepth()
{
return this.maxConstantDepth;
}
public void setMaxConstantDepth(int maxConstantDepth)
{
this.maxConstantDepth = maxConstantDepth;
}
private File resolveFile(String fileName)
{
if (fileName.length() == 0)
{
return new File(this.baseDirectory);
}
if (fileName.charAt(0) == File.separatorChar)
{
return new File(fileName);
}
return new File(this.baseDirectory, fileName);
}
public boolean isExcluded(String name)
{
for (String s : this.excludedFiles)
{
if (name.endsWith(s))
{
return false;
}
}
return true;
}
public void findUnits(FileFinder fileFinder)
{
if (!this.includedFiles.isEmpty())
{
for (String included : this.includedFiles)
{
File source = new File(this.sourceDir, included);
File output = new File(this.outputDir, included);
Package pack = packageFromFile(included, source.isDirectory());
fileFinder.process(this.compiler, source, output, pack);
}
return;
}
fileFinder.process(this.compiler, this.sourceDir, this.outputDir, Package.rootPackage);
}
private static Package packageFromFile(String file, boolean isDirectory)
{
int index = 0;
Package pack = Package.rootPackage;
do
{
int nextIndex = file.indexOf('/', index + 1);
if (nextIndex < 0)
{
return isDirectory ? pack.resolvePackage(file.substring(index)) : pack;
}
pack = pack.createSubPackage(file.substring(index, nextIndex));
index = nextIndex + 1;
}
while (index < file.length());
return pack;
}
public String getJarName()
{
return String.format(this.jarNameFormat, this.jarName, this.jarVersion);
}
public String[] getMainArgs()
{
return this.mainArgs.toArray(String.class);
}
@Override
public String toString()
{
return "CompilerConfig [jarName=" + this.getJarName() +
", sourceDir=" + this.sourceDir +
", outputDir=" + this.outputDir +
", libraryFiles=" + this.libraries +
", includedFiles=" + this.includedFiles +
", excludedFiles=" + this.excludedFiles +
", mainType=" + this.mainType +
", mainArgs=" + this.mainArgs + "]";
}
}
| Update Compiler Exclusion Patterns
comp: The `exclude` config entries can now use Ant-style filename
patterns.
| src/compiler/dyvil/tools/compiler/config/CompilerConfig.java | Update Compiler Exclusion Patterns |
|
Java | mit | 787b6f7a76848d794b79ca0ab7611a24959ea039 | 0 | dakaraphi/eclipse-plugin-commander | package dakara.eclipse.plugin.kavi.picklist;
import java.util.function.BiFunction;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import dakara.eclipse.plugin.command.settings.PersistedWorkingSet;
/*
* TODO - copy to clipboard commands
* - export/import history/preferences as JSON. maybe just export/import from clipboard as first option. Import would add to existing, not replace.
* - option to export hidden or non active items as well.
* - toggle favorite
* - show table headers, needed for resizing
* - sort on other columns
* - list unique by column
* - some way to view unique and expand items in the flat list
* - show hidden / non active items.
* - create alias: prepend alias name to command name or replace name entirely.
*/
public class InternalCommandContextProviderFactory {
public static InternalCommandContextProvider makeProvider(KaviPickListDialog kaviPickList) {
InternalCommandContextProvider provider = new InternalCommandContextProvider();
addDefaultInternalCommands(provider, kaviPickList);
return provider;
}
private static void addDefaultInternalCommands(InternalCommandContextProvider provider, KaviPickListDialog kaviPickList) {
provider.addCommand("list: toggle view selected", (currentProvider) -> {
currentProvider.toggleViewOnlySelected();
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
});
// TODO - align column output. Include all 'searchable' columns
provider.addCommand("list: selected to clipboard", (currentProvider) -> {
Clipboard clipboard = new Clipboard(kaviPickList.getShell().getDisplay());
StringBuilder builder = new StringBuilder();
BiFunction<Object, Integer, String> columnContentFn = currentProvider.getKaviListColumns().getColumnOptions().get(1).getColumnContentFn();
currentProvider.getSelectedEntriesImplied().stream().forEach(item -> builder.append(columnContentFn.apply(item.dataItem, 0) + "\n"));
clipboard.setContents(new Object[] { builder.toString() }, new Transfer[] { TextTransfer.getInstance() });
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
clipboard.dispose();
});
provider.addCommand("working", "list: toggle sort name", (currentProvider) -> {
kaviPickList.togglePreviousProvider().sortDefault().refreshFromContentProvider();
});
}
public static void addWorkingSetCommands(InternalCommandContextProvider contextProvider, KaviPickListDialog kaviPickList, PersistedWorkingSet historyStore) {
contextProvider.addCommand("working", "working: remove", (provider) -> {
provider.getSelectedEntriesImplied().stream().map(item -> item.dataItem).forEach(item -> historyStore.removeHistory(item));
provider.clearSelections();
provider.clearCursor();
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
historyStore.save();
});
contextProvider.addCommand("working: set favorite", (provider) -> {
provider.getSelectedEntriesImplied().stream().map(item -> item.dataItem).forEach(item -> historyStore.setHistoryPermanent(item, true));
provider.clearSelections();
provider.clearCursor();
kaviPickList.setCurrentProvider("working").refreshFromContentProvider();
historyStore.save();
});
}
public static void installProvider(InternalCommandContextProvider contextProvider, KaviPickListDialog<? extends Object> kaviPickList) {
kaviPickList.setListContentProvider("context", contextProvider.makeProviderFunction()).setRestoreFilterTextOnProviderChange(true)
.setResolvedContextAction(( command, provider) -> {
command.commandAction.accept(provider);
})
.addColumn("name", item -> item.name).widthPercent(100);
}
}
| dakara.eclipse.plugin.command/src/dakara/eclipse/plugin/kavi/picklist/InternalCommandContextProviderFactory.java | package dakara.eclipse.plugin.kavi.picklist;
import java.util.function.BiFunction;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import dakara.eclipse.plugin.command.settings.PersistedWorkingSet;
/*
* TODO - copy to clipboard commands
* - export/import
* - toggle favorite
* - show table headers, needed for resizing
*/
public class InternalCommandContextProviderFactory {
public static InternalCommandContextProvider makeProvider(KaviPickListDialog kaviPickList) {
InternalCommandContextProvider provider = new InternalCommandContextProvider();
addDefaultInternalCommands(provider, kaviPickList);
return provider;
}
private static void addDefaultInternalCommands(InternalCommandContextProvider provider, KaviPickListDialog kaviPickList) {
provider.addCommand("list: toggle view selected", (currentProvider) -> {
currentProvider.toggleViewOnlySelected();
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
});
// TODO - align column output. Include all 'searchable' columns
provider.addCommand("list: selected to clipboard", (currentProvider) -> {
Clipboard clipboard = new Clipboard(kaviPickList.getShell().getDisplay());
StringBuilder builder = new StringBuilder();
BiFunction<Object, Integer, String> columnContentFn = currentProvider.getKaviListColumns().getColumnOptions().get(1).getColumnContentFn();
currentProvider.getSelectedEntriesImplied().stream().forEach(item -> builder.append(columnContentFn.apply(item.dataItem, 0) + "\n"));
clipboard.setContents(new Object[] { builder.toString() }, new Transfer[] { TextTransfer.getInstance() });
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
clipboard.dispose();
});
provider.addCommand("working", "list: toggle sort name", (currentProvider) -> {
kaviPickList.togglePreviousProvider().sortDefault().refreshFromContentProvider();
});
}
public static void addWorkingSetCommands(InternalCommandContextProvider contextProvider, KaviPickListDialog kaviPickList, PersistedWorkingSet historyStore) {
contextProvider.addCommand("working", "working: remove", (provider) -> {
provider.getSelectedEntriesImplied().stream().map(item -> item.dataItem).forEach(item -> historyStore.removeHistory(item));
provider.clearSelections();
provider.clearCursor();
kaviPickList.togglePreviousProvider().refreshFromContentProvider();
historyStore.save();
});
contextProvider.addCommand("working: set favorite", (provider) -> {
provider.getSelectedEntriesImplied().stream().map(item -> item.dataItem).forEach(item -> historyStore.setHistoryPermanent(item, true));
provider.clearSelections();
provider.clearCursor();
kaviPickList.setCurrentProvider("working").refreshFromContentProvider();
historyStore.save();
});
}
public static void installProvider(InternalCommandContextProvider contextProvider, KaviPickListDialog<? extends Object> kaviPickList) {
kaviPickList.setListContentProvider("context", contextProvider.makeProviderFunction()).setRestoreFilterTextOnProviderChange(true)
.setResolvedContextAction(( command, provider) -> {
command.commandAction.accept(provider);
})
.addColumn("name", item -> item.name).widthPercent(100);
}
}
| update comments | dakara.eclipse.plugin.command/src/dakara/eclipse/plugin/kavi/picklist/InternalCommandContextProviderFactory.java | update comments |
|
Java | mit | 98444adec6a94d68198b3a8b9a279dc4b8c62f86 | 0 | Mihail-K/Murmur | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Mihail K
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.cloudchaser.murmur.types;
/**
*
* @author Mihail K
* @since 0.1
**/
public abstract class MurmurObject {
private final MurmurType type;
public MurmurObject(MurmurType type) {
this.type = type;
}
public MurmurType getType() {
return type;
}
/**
* Converts this Murmur object into a Java object.
*
* @return A Java object.
*/
public Object toJavaObject() {
return this;
}
public MurmurObject getMember(String name) {
// Default implementation.
return MurmurVoid.VOID;
}
/**
* Checks if this Murmur object can convert to a Java type.
*
* @param type The Java type being converted to.
* @return <code>true</code> is this object converts to the given type.
*/
public boolean isCompatible(Class<?> type) {
return false;
}
/**
* Converts this Murmur object into a Java type.
*
* @param type The Java type being converted to.
* @return The converted Java object.
*/
public Object getAsJavaType(Class<?> type) {
throw new UnsupportedOperationException();
}
/**
* Converts this Object to an integer, if possible.
*
* @return This Object as an integer.
**/
public abstract MurmurInteger asInteger();
/**
* Converts this Object to a decimal, if possible.
*
* @return This Object as a decimal.
**/
public abstract MurmurDecimal asDecimal();
/**
* Converts this Object to a string, if possible.
*
* @return This Object as a string.
**/
public abstract MurmurString asString();
public abstract MurmurObject opPositive();
public abstract MurmurObject opNegative();
public abstract MurmurObject opIncrement();
public abstract MurmurObject opDecrement();
public abstract MurmurObject opPlus(MurmurObject other);
public abstract MurmurObject opMinus(MurmurObject other);
public abstract MurmurObject opMultiply(MurmurObject other);
public abstract MurmurObject opDivide(MurmurObject other);
public abstract MurmurObject opModulo(MurmurObject other);
public abstract MurmurObject opShiftLeft(MurmurObject other);
public abstract MurmurObject opShiftRight(MurmurObject other);
public abstract MurmurObject opLessThan(MurmurObject other);
public abstract MurmurObject opGreaterThan(MurmurObject other);
public abstract MurmurObject opLessOrEqual(MurmurObject other);
public abstract MurmurObject opGreaterOrEqual(MurmurObject other);
public abstract MurmurObject opEquals(MurmurObject other);
public abstract MurmurObject opNotEquals(MurmurObject other);
public abstract MurmurObject opBitNot();
public abstract MurmurObject opBitAnd(MurmurObject other);
public abstract MurmurObject opBitXor(MurmurObject other);
public abstract MurmurObject opBitOr(MurmurObject other);
public abstract MurmurObject opLogicalNot();
public abstract MurmurObject opLogicalAnd(MurmurObject other);
public abstract MurmurObject opLogicalOr(MurmurObject other);
public abstract MurmurObject opIndex(MurmurObject other);
public abstract MurmurObject opConcat(MurmurObject other);
}
| src/io/cloudchaser/murmur/types/MurmurObject.java | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Mihail K
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.cloudchaser.murmur.types;
/**
*
* @author Mihail K
* @since 0.1
**/
/**
 * Base class for every runtime value in the Murmur language.
 * <p>
 * Each value carries a fixed {@link MurmurType} tag and must implement the
 * full set of conversion and operator hooks declared below; a subclass that
 * does not support a given operation signals that in its own implementation.
 */
public abstract class MurmurObject {

    // Runtime type tag for this value; assigned once at construction.
    private final MurmurType type;

    /**
     * Creates a Murmur value with the given runtime type tag.
     *
     * @param type The runtime type of this value.
     */
    public MurmurObject(MurmurType type) {
        this.type = type;
    }

    /**
     * Returns the runtime type tag assigned at construction.
     *
     * @return The Murmur runtime type of this value.
     */
    public MurmurType getType() {
        return type;
    }

    /**
     * Converts this Murmur object into a Java object.
     * <p>
     * The base implementation returns the object itself; wrapper types
     * are expected to override this to unwrap their native value.
     *
     * @return A Java object.
     */
    public Object toJavaObject() {
        return this;
    }

    /**
     * Resolves a named member of this object.
     * <p>
     * The base implementation has no members and always answers
     * {@code MurmurVoid.VOID}; types with fields or methods override this.
     *
     * @param name The member name to look up.
     * @return The member value, or {@code MurmurVoid.VOID} when absent.
     */
    public MurmurObject getMember(String name) {
        // Default implementation.
        return MurmurVoid.VOID;
    }

    /**
     * Converts this Object to an integer, if possible.
     *
     * @return This Object as an integer.
     **/
    public abstract MurmurInteger asInteger();

    /**
     * Converts this Object to a decimal, if possible.
     *
     * @return This Object as a decimal.
     **/
    public abstract MurmurDecimal asDecimal();

    /**
     * Converts this Object to a string, if possible.
     *
     * @return This Object as a string.
     **/
    public abstract MurmurString asString();

    /* Unary operators. */

    /** Unary plus ({@code +x}). */
    public abstract MurmurObject opPositive();
    /** Unary minus ({@code -x}). */
    public abstract MurmurObject opNegative();
    /** Increment operator. */
    public abstract MurmurObject opIncrement();
    /** Decrement operator. */
    public abstract MurmurObject opDecrement();

    /* Binary arithmetic operators. */

    /** Addition ({@code x + other}). */
    public abstract MurmurObject opPlus(MurmurObject other);
    /** Subtraction ({@code x - other}). */
    public abstract MurmurObject opMinus(MurmurObject other);
    /** Multiplication ({@code x * other}). */
    public abstract MurmurObject opMultiply(MurmurObject other);
    /** Division ({@code x / other}). */
    public abstract MurmurObject opDivide(MurmurObject other);
    /** Modulo ({@code x % other}). */
    public abstract MurmurObject opModulo(MurmurObject other);

    /* Shift operators. */

    /** Left shift ({@code x << other}). */
    public abstract MurmurObject opShiftLeft(MurmurObject other);
    /** Right shift ({@code x >> other}). */
    public abstract MurmurObject opShiftRight(MurmurObject other);

    /* Comparison operators. */

    /** Less-than comparison. */
    public abstract MurmurObject opLessThan(MurmurObject other);
    /** Greater-than comparison. */
    public abstract MurmurObject opGreaterThan(MurmurObject other);
    /** Less-than-or-equal comparison. */
    public abstract MurmurObject opLessOrEqual(MurmurObject other);
    /** Greater-than-or-equal comparison. */
    public abstract MurmurObject opGreaterOrEqual(MurmurObject other);
    /** Equality comparison. */
    public abstract MurmurObject opEquals(MurmurObject other);
    /** Inequality comparison. */
    public abstract MurmurObject opNotEquals(MurmurObject other);

    /* Bitwise operators. */

    /** Bitwise complement ({@code ~x}). */
    public abstract MurmurObject opBitNot();
    /** Bitwise and ({@code x & other}). */
    public abstract MurmurObject opBitAnd(MurmurObject other);
    /** Bitwise exclusive or ({@code x ^ other}). */
    public abstract MurmurObject opBitXor(MurmurObject other);
    /** Bitwise or ({@code x | other}). */
    public abstract MurmurObject opBitOr(MurmurObject other);

    /* Logical operators. */

    /** Logical negation ({@code !x}). */
    public abstract MurmurObject opLogicalNot();
    /** Logical and ({@code x && other}). */
    public abstract MurmurObject opLogicalAnd(MurmurObject other);
    /** Logical or ({@code x || other}). */
    public abstract MurmurObject opLogicalOr(MurmurObject other);

    /* Miscellaneous operators. */

    /** Index operator ({@code x[other]}). */
    public abstract MurmurObject opIndex(MurmurObject other);
    /** Concatenation operator. */
    public abstract MurmurObject opConcat(MurmurObject other);
}
| Added conversion functions to murmur object. | src/io/cloudchaser/murmur/types/MurmurObject.java | Added conversion functions to murmur object. |
|
Java | mit | 747f301f0445ec86b17a0e489d6188a22f2b95c5 | 0 | jesusaurus/jenkins-logstash-plugin,jenkinsci/logstash-plugin,hawky-4s-/logstash-plugin,jenkinsci/logstash-plugin,jesusaurus/jenkins-logstash-plugin,hawky-4s-/logstash-plugin | /*
* The MIT License
*
* Copyright 2013 Hewlett-Packard Development Company, L.P.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.logstash;
import hudson.Extension;
import hudson.Launcher;
import hudson.console.LineTransformationOutputStream;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Run;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildWrapperDescriptor;
import hudson.util.FormValidation;
import java.io.IOException;
import java.io.OutputStream;
import java.util.regex.Pattern;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import redis.clients.jedis.Jedis;
/**
* Build wrapper that decorates the build's logger to insert a
* {@link LogstashNote} on each output line.
*
* @author K Jonathan Harker
*/
/**
 * Build wrapper that decorates the build's logger so every console line is
 * both written to the original log and pushed to a redis list for logstash
 * to consume.
 * <p>
 * Redis connectivity is strictly best-effort: any failure to connect or to
 * push simply disables the logstash output for the rest of the build and
 * never fails the build itself.
 *
 * @author K Jonathan Harker
 */
public class LogstashBuildWrapper extends BuildWrapper {

    /**
     * Encapsulate configuration data from the optionalBlock.
     * Fields are bound from the job-configuration form by Stapler.
     */
    public static class RedisBlock {
        public String host;       // redis server hostname
        public String port;       // redis server port (parsed later as int)
        public String numb;       // redis database number (0 = default db)
        public String pass;       // optional redis AUTH password
        public String dataType;
        public String key;        // redis list key that lines are RPUSHed to
        public String type;       // logstash "@type"/"logsource" value, fixed to "jenkins"

        @DataBoundConstructor
        public RedisBlock(String host, String port, String numb,
                String pass, String dataType, String key) {
            this.host = host;
            this.port = port;
            this.numb = numb;
            this.pass = pass;
            this.dataType = dataType;
            this.key = key;
            this.type = new String("jenkins");
        }
    }

    public RedisBlock redis;
    // Cleared as a kill switch when the initial redis connection fails.
    public boolean useRedis;

    // Build metadata captured in setUp() and attached to every event.
    private String jobName;
    private String buildHost;
    private int buildNum;
    private String rootJobName;
    private int rootBuildNum;

    /**
     * Create a new {@link LogstashBuildWrapper}.
     */
    @DataBoundConstructor
    public LogstashBuildWrapper(RedisBlock redis) {
        this.redis = redis;
        this.useRedis = true;
    }

    /**
     * Captures job/build/node identifiers for the event payload.
     * {@inheritDoc}
     */
    @Override
    public Environment setUp(AbstractBuild build, Launcher launcher,
            BuildListener listener) throws IOException, InterruptedException {
        this.jobName = build.getProject().getDisplayName();
        this.buildHost = build.getBuiltOn().getDisplayName();
        this.buildNum = ((Run)build).number;
        this.rootJobName = build.getProject().getRootProject().getDisplayName();
        this.rootBuildNum = ((Run)build.getRootBuild()).number;
        // No teardown work needed; return an empty environment.
        return new Environment() {
        };
    }

    /**
     * Wraps the build log stream with the redis-publishing decorator.
     * {@inheritDoc}
     */
    @Override
    public OutputStream decorateLogger(AbstractBuild build, OutputStream logger) {
        return new LogstashOutputStream(logger);
    }

    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }

    /** Form validation: the redis host field must not be blank. */
    public FormValidation doCheckHost(@QueryParameter final String value) {
        return (value.trim().length() == 0) ? FormValidation.error("Host cannot be empty.") : FormValidation.ok();
    }

    /**
     * Output stream that writes each line to the provided delegate output
     * stream and also sends it to redis for logstash to consume.
     */
    private class LogstashOutputStream extends LineTransformationOutputStream {

        /**
         * The delegate output stream.
         */
        private final OutputStream delegate;
        // Connection established in the constructor; null when redis is
        // disabled or the connection attempt failed.
        // NOTE(review): the connection is never explicitly closed in
        // close() — presumably relies on process/GC cleanup; verify.
        private final Jedis jedis;
        // Set once a push fails so later lines skip redis entirely.
        private boolean connFailed;

        /**
         * Create a new {@link LogstashOutputStream}.
         * Attempts the redis connection eagerly; on any failure it reports
         * the problem to the build log, clears the outer useRedis flag and
         * continues with redis disabled.
         *
         * @param delegate
         *    the delegate output stream
         */
        private LogstashOutputStream(OutputStream delegate) {
            this.delegate = delegate;
            this.connFailed = false;
            if (LogstashBuildWrapper.this.useRedis) {
                Jedis jedis;
                try {
                    int port = (int)Integer.parseInt(LogstashBuildWrapper.this.redis.port);
                    jedis = new Jedis(LogstashBuildWrapper.this.redis.host, port);
                    String pass = LogstashBuildWrapper.this.redis.pass;
                    if (pass != null && !pass.isEmpty()) {
                        jedis.auth(pass);
                    }
                    int numb = (int)Integer.parseInt(LogstashBuildWrapper.this.redis.numb);
                    if (numb != 0) {
                        jedis.select(numb);
                    }
                } catch (java.lang.Throwable t) {
                    // Deliberately broad: any failure (bad config, parse
                    // error, connection refused) disables redis for good.
                    LogstashBuildWrapper.this.useRedis = false;
                    String error = "Unable to connect to redis: " + t.getMessage() + "\n";
                    try {
                        delegate.write(error.getBytes());
                        delegate.flush();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    jedis = null;
                }
                this.jedis = jedis;
            } else {
                // finals must be initialized
                this.jedis = null;
            }
            if (this.jedis != null) {
                // NOTE(review): status message has no trailing newline and
                // is not flushed here — confirm intended log formatting.
                String msg = new String("Logstash plugin enabling redis.");
                try {
                    delegate.write(msg.getBytes());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        /**
         * Forwards the line to the real log, strips control characters and
         * ANSI-conceal sequences, then pushes a logstash JSON event to redis.
         * {@inheritDoc}
         */
        @Override
        protected void eol(byte[] b, int len) throws IOException {
            // Always write the raw line to the real build log first.
            delegate.write(b, 0, len);
            delegate.flush();
            // Drop Unicode control characters before shipping to logstash.
            String line = new String(b, 0, len).trim().replaceAll("\\p{C}", "");
            //remove ansi-conceal sequences
            Pattern p = Pattern.compile(".*?\\[8m.*?\\[0m.*?");
            while (p.matcher(line).matches()) {
                int start = line.indexOf("[8m");
                int end = line.indexOf("[0m") + 3;
                line = line.substring(0, start) + line.substring(end);
            }
            if (LogstashBuildWrapper.this.redis != null && LogstashBuildWrapper.this.useRedis && !line.isEmpty() && !this.connFailed) {
                try {
                    // Build a logstash v0-style event: @fields/@type/@message.
                    JSONObject fields = new JSONObject();
                    fields.put("logsource", LogstashBuildWrapper.this.redis.type);
                    fields.put("program", "jenkins");
                    fields.put("job", LogstashBuildWrapper.this.jobName);
                    fields.put("build", LogstashBuildWrapper.this.buildNum);
                    fields.put("node", LogstashBuildWrapper.this.buildHost);
                    fields.put("root-job", LogstashBuildWrapper.this.rootJobName);
                    fields.put("root-build", LogstashBuildWrapper.this.rootBuildNum);
                    JSONObject json = new JSONObject();
                    json.put("@fields", fields);
                    json.put("@type", LogstashBuildWrapper.this.redis.type);
                    json.put("@message", line);
                    this.jedis.rpush(LogstashBuildWrapper.this.redis.key, json.toString());
                } catch (java.lang.Throwable t) {
                    // Broad on purpose: any push failure permanently disables
                    // logstash output rather than failing the build.
                    this.connFailed = true;
                    String msg = new String("Connection to redis failed. Disabling logstash output.");
                    delegate.write(msg.getBytes());
                    delegate.flush();
                }
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void close() throws IOException {
            delegate.close();
            super.close();
        }
    }

    /**
     * Registers {@link LogstashBuildWrapper} as a {@link BuildWrapper}.
     */
    @Extension
    public static class DescriptorImpl extends BuildWrapperDescriptor {

        public DescriptorImpl() {
            super(LogstashBuildWrapper.class);
            load();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public String getDisplayName() {
            return Messages.DisplayName();
        }

        /**
         * Applicable to every project type.
         * {@inheritDoc}
         */
        @Override
        public boolean isApplicable(AbstractProject<?, ?> item) {
            return true;
        }
    }
}
| src/main/java/hudson/plugins/logstash/LogstashBuildWrapper.java | /*
* The MIT License
*
* Copyright 2013 Hewlett-Packard Development Company, L.P.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.logstash;
import hudson.Extension;
import hudson.Launcher;
import hudson.console.LineTransformationOutputStream;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Run;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildWrapperDescriptor;
import hudson.util.FormValidation;
import java.io.IOException;
import java.io.OutputStream;
import java.util.regex.Pattern;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import redis.clients.jedis.Jedis;
/**
* Build wrapper that decorates the build's logger to insert a
* {@link LogstashNote} on each output line.
*
* @author K Jonathan Harker
*/
/**
 * Build wrapper that decorates the build's logger so every console line is
 * both written to the original log and pushed to a redis list for logstash
 * to consume.
 * <p>
 * Redis connectivity is strictly best-effort: any failure to connect or to
 * push disables the logstash output for the rest of the build and never
 * fails the build itself.
 *
 * @author K Jonathan Harker
 */
public class LogstashBuildWrapper extends BuildWrapper {

    /**
     * Encapsulate configuration data from the optionalBlock.
     * Fields are bound from the job-configuration form by Stapler.
     */
    public static class RedisBlock {
        public String host;       // redis server hostname
        public String port;       // redis server port (parsed later as int)
        public String numb;       // redis database number (0 = default db)
        public String pass;       // optional redis AUTH password
        public String dataType;
        public String key;        // redis list key that lines are RPUSHed to
        public String type;       // logstash "@type"/"logsource" value, fixed to "jenkins"

        @DataBoundConstructor
        public RedisBlock(String host, String port, String numb,
                String pass, String dataType, String key) {
            this.host = host;
            this.port = port;
            this.numb = numb;
            this.pass = pass;
            this.dataType = dataType;
            this.key = key;
            this.type = new String("jenkins");
        }
    }

    public RedisBlock redis;
    // Cleared as a kill switch when the initial redis connection fails.
    public boolean useRedis;

    // Build metadata captured in setUp() and attached to every event.
    private String jobName;
    private String buildHost;
    private int buildNum;
    private String rootJobName;
    private int rootBuildNum;

    /**
     * Create a new {@link LogstashBuildWrapper}.
     */
    @DataBoundConstructor
    public LogstashBuildWrapper(RedisBlock redis) {
        this.redis = redis;
        this.useRedis = true;
    }

    /**
     * Captures job/build/node identifiers for the event payload.
     * {@inheritDoc}
     */
    @Override
    public Environment setUp(AbstractBuild build, Launcher launcher,
            BuildListener listener) throws IOException, InterruptedException {
        this.jobName = build.getProject().getDisplayName();
        this.buildHost = build.getBuiltOn().getDisplayName();
        this.buildNum = ((Run)build).number;
        this.rootJobName = build.getProject().getRootProject().getDisplayName();
        this.rootBuildNum = ((Run)build.getRootBuild()).number;
        // No teardown work needed; return an empty environment.
        return new Environment() {
        };
    }

    /**
     * Wraps the build log stream with the redis-publishing decorator.
     * {@inheritDoc}
     */
    @Override
    public OutputStream decorateLogger(AbstractBuild build, OutputStream logger) {
        return new LogstashOutputStream(logger);
    }

    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }

    /** Form validation: the redis host field must not be blank. */
    public FormValidation doCheckHost(@QueryParameter final String value) {
        return (value.trim().length() == 0) ? FormValidation.error("Host cannot be empty.") : FormValidation.ok();
    }

    /**
     * Output stream that writes each line to the provided delegate output
     * stream and also sends it to redis for logstash to consume.
     */
    private class LogstashOutputStream extends LineTransformationOutputStream {

        /**
         * The delegate output stream.
         */
        private final OutputStream delegate;
        // Connection established in the constructor; null when redis is
        // disabled or the connection attempt failed.
        private final Jedis jedis;
        // Set once a push fails so later lines skip redis entirely.
        private boolean connFailed;

        /**
         * Create a new {@link LogstashOutputStream}.
         * Attempts the redis connection eagerly; on any failure it reports
         * the problem to the build log, clears the outer useRedis flag and
         * continues with redis disabled.
         *
         * @param delegate
         *    the delegate output stream
         */
        private LogstashOutputStream(OutputStream delegate) {
            this.delegate = delegate;
            this.connFailed = false;
            if (LogstashBuildWrapper.this.useRedis) {
                Jedis jedis;
                try {
                    int port = (int)Integer.parseInt(LogstashBuildWrapper.this.redis.port);
                    jedis = new Jedis(LogstashBuildWrapper.this.redis.host, port);
                    String pass = LogstashBuildWrapper.this.redis.pass;
                    if (pass != null && !pass.isEmpty()) {
                        jedis.auth(pass);
                    }
                    int numb = (int)Integer.parseInt(LogstashBuildWrapper.this.redis.numb);
                    if (numb != 0) {
                        jedis.select(numb);
                    }
                } catch (java.lang.Throwable t) {
                    // Deliberately broad: any failure (bad config, parse
                    // error, connection refused) disables redis for good.
                    LogstashBuildWrapper.this.useRedis = false;
                    String error = "Unable to connect to redis: " + t.getMessage() + "\n";
                    try {
                        delegate.write(error.getBytes());
                        delegate.flush();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    jedis = null;
                }
                this.jedis = jedis;
            } else {
                // finals must be initialized
                this.jedis = null;
            }
            if (this.jedis != null) {
                String msg = new String("Logstash plugin enabling redis.");
                try {
                    delegate.write(msg.getBytes());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        /**
         * Forwards the line to the real log, strips control characters and
         * ANSI-conceal sequences, then pushes a logstash JSON event to redis.
         * {@inheritDoc}
         */
        @Override
        protected void eol(byte[] b, int len) throws IOException {
            // Always write the raw line to the real build log first.
            delegate.write(b, 0, len);
            delegate.flush();
            // Drop Unicode control characters before shipping to logstash.
            String line = new String(b, 0, len).trim().replaceAll("\\p{C}", "");
            //remove ansi-conceal sequences
            Pattern p = Pattern.compile(".*?\\[8m.*?\\[0m.*?");
            while (p.matcher(line).matches()) {
                int start = line.indexOf("[8m");
                int end = line.indexOf("[0m") + 3;
                line = line.substring(0, start) + line.substring(end);
            }
            if (LogstashBuildWrapper.this.redis != null && LogstashBuildWrapper.this.useRedis && !line.isEmpty() && !this.connFailed) {
                try {
                    // Build a logstash v0-style event: @fields/@type/@message.
                    JSONObject fields = new JSONObject();
                    fields.put("logsource", LogstashBuildWrapper.this.redis.type);
                    fields.put("program", "jenkins");
                    fields.put("job", LogstashBuildWrapper.this.jobName);
                    fields.put("build", LogstashBuildWrapper.this.buildNum);
                    fields.put("node", LogstashBuildWrapper.this.buildHost);
                    fields.put("root-job", LogstashBuildWrapper.this.rootJobName);
                    fields.put("root-build", LogstashBuildWrapper.this.rootBuildNum);
                    JSONObject json = new JSONObject();
                    json.put("@fields", fields);
                    json.put("@type", LogstashBuildWrapper.this.redis.type);
                    json.put("@message", line);
                    this.jedis.rpush(LogstashBuildWrapper.this.redis.key, json.toString());
                } catch (java.lang.Throwable t) {
                    // FIX: previously only java.net.SocketException was
                    // caught here, but Jedis reports push failures as
                    // runtime exceptions (e.g. connection exceptions), so
                    // any other failure escaped eol() and broke the build's
                    // log handling. Catch everything — mirroring the
                    // constructor's fail-safe policy — and just disable
                    // logstash output for the remainder of the build.
                    this.connFailed = true;
                    String msg = new String("Connection to redis failed. Disabling logstash output.");
                    delegate.write(msg.getBytes());
                    delegate.flush();
                }
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void close() throws IOException {
            delegate.close();
            super.close();
        }
    }

    /**
     * Registers {@link LogstashBuildWrapper} as a {@link BuildWrapper}.
     */
    @Extension
    public static class DescriptorImpl extends BuildWrapperDescriptor {

        public DescriptorImpl() {
            super(LogstashBuildWrapper.class);
            load();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public String getDisplayName() {
            return Messages.DisplayName();
        }

        /**
         * Applicable to every project type.
         * {@inheritDoc}
         */
        @Override
        public boolean isApplicable(AbstractProject<?, ?> item) {
            return true;
        }
    }
}
| Any old exception should stop us
| src/main/java/hudson/plugins/logstash/LogstashBuildWrapper.java | Any old exception should stop us |
|
Java | agpl-3.0 | 3a1cc1eef555e140e1ba4f32bbe754e1247dd46e | 0 | KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver | package nl.mpi.kinnate.ui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.File;
import java.net.URI;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import nl.mpi.arbil.GuiHelper;
import nl.mpi.arbil.ImdiTable;
import nl.mpi.arbil.ImdiTableModel;
import nl.mpi.arbil.LinorgWindowManager;
import nl.mpi.kinnate.kindata.GraphSorter;
import nl.mpi.kinnate.svg.GraphPanel;
import nl.mpi.kinnate.SavePanel;
import nl.mpi.kinnate.entityindexer.EntityIndex;
import nl.mpi.kinnate.entityindexer.EntityCollection;
import nl.mpi.kinnate.entityindexer.EntityService;
import nl.mpi.kinnate.entityindexer.EntityServiceException;
/**
* Document : KinTypeStringTestPanel
* Created on : Sep 29, 2010, 12:52:01 PM
* Author : Peter Withers
*/
/**
 * Main kinship-diagram panel. Combines the kin type string input area, the
 * ego selection tree, the kin term panel and the SVG graph panel, plus a
 * table of selected nodes and the indexer parameter controls underneath.
 * <p>
 * The diagram is recomputed (via {@link #drawGraph()}) whenever the kin type
 * strings are edited or the set of ego nodes changes.
 *
 * Document   : KinTypeStringTestPanel
 * Created on : Sep 29, 2010, 12:52:01 PM
 * Author     : Peter Withers
 */
public class KinTypeEgoSelectionTestPanel extends JPanel implements SavePanel {

    private JTextArea kinTypeStringInput;      // free-text kin type string editor
    private GraphPanel graphPanel;             // SVG rendering of the kin diagram
    private GraphSorter graphData;             // node layout/sorting model fed to graphPanel
    private EgoSelectionPanel egoSelectionPanel;
    private KinTermPanel kinTermPanel;
    private EntityService entityIndex;         // queries the entity collection for relations
    // Placeholder/hint text shown in the input area until the user types.
    // (FIX: removed the duplicated word "the the" from the user-facing text.)
    private String defaultString = "# This test panel should provide a kin diagram based on selected egos and the kintype strings entered here.\n# Enter one string per line.\n# By default all relations of the selected entity will be shown.\n";
    // Current kin type query strings; empty until parsed from the input area.
    private String kinTypeStrings[] = new String[]{};

    /**
     * Builds the panel and, when an existing SVG file is given, loads the
     * previously saved diagram from it; otherwise starts with an empty graph.
     *
     * @param existingFile previously saved diagram file, or null for a new one
     */
    public KinTypeEgoSelectionTestPanel(File existingFile) {
        this.setLayout(new BorderLayout());
        graphPanel = new GraphPanel(this);
        egoSelectionPanel = new EgoSelectionPanel();
        kinTermPanel = new KinTermPanel(this, graphPanel.getkinTerms());
        kinTypeStringInput = new JTextArea(defaultString);
        kinTypeStringInput.setBorder(javax.swing.BorderFactory.createTitledBorder("Kin Type Strings"));
        JPanel kinGraphPanel = new JPanel(new BorderLayout());
        kinGraphPanel.add(kinTypeStringInput, BorderLayout.PAGE_START);
        // Collapsible side panes flank the central graph panel.
        JPanel kintermSplitPane = new JPanel(new BorderLayout());
        kintermSplitPane.add(new HidePane(egoSelectionPanel, "Ego Selection", BorderLayout.LINE_END), BorderLayout.LINE_START);
        kintermSplitPane.add(graphPanel, BorderLayout.CENTER);
        kintermSplitPane.add(new HidePane(kinTermPanel, "Kin Terms", BorderLayout.LINE_START), BorderLayout.LINE_END);
        kinGraphPanel.add(kintermSplitPane);
        // Table of the currently selected nodes, with drag support.
        ImdiTableModel imdiTableModel = new ImdiTableModel();
        ImdiTable imdiTable = new ImdiTable(imdiTableModel, "Selected Nodes");
        TableCellDragHandler tableCellDragHandler = new TableCellDragHandler();
        imdiTable.setTransferHandler(tableCellDragHandler);
        imdiTable.setDragEnabled(true);
        graphPanel.setImdiTableModel(imdiTableModel);
        JScrollPane tableScrollPane = new JScrollPane(imdiTable);
        // EntityCollection queries the xml collection to get the entity data.
        entityIndex = new EntityCollection();
        graphData = new GraphSorter();
        if (existingFile != null && existingFile.exists()) {
            // Restore a previously saved diagram.
            graphPanel.readSvg(existingFile);
        } else {
            // Start with an empty diagram.
            graphPanel.drawNodes(graphData);
        }
        IndexerParametersPanel indexerParametersPanel = new IndexerParametersPanel(this, graphPanel, tableCellDragHandler);
        JPanel advancedPanel = new JPanel(new BorderLayout());
        advancedPanel.add(tableScrollPane, BorderLayout.CENTER);
        advancedPanel.add(new HidePane(indexerParametersPanel, "Indexer Parameters", BorderLayout.LINE_START), BorderLayout.LINE_END);
        JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, kinGraphPanel, advancedPanel);
        this.add(splitPane);
        // Placeholder behaviour: clear the hint on focus, restore it when
        // the field is left empty.
        kinTypeStringInput.addFocusListener(new FocusListener() {

            public void focusGained(FocusEvent e) {
                if (kinTypeStringInput.getText().equals(defaultString)) {
                    kinTypeStringInput.setText("");
                    kinTypeStringInput.setForeground(Color.BLACK);
                }
            }

            public void focusLost(FocusEvent e) {
                if (kinTypeStringInput.getText().length() == 0) {
                    kinTypeStringInput.setText(defaultString);
                    kinTypeStringInput.setForeground(Color.lightGray);
                }
            }
        });
        // Re-parse the kin type strings and redraw after every keystroke.
        kinTypeStringInput.addKeyListener(new KeyListener() {

            public void keyTyped(KeyEvent e) {
            }

            public void keyPressed(KeyEvent e) {
            }

            public void keyReleased(KeyEvent e) {
                graphPanel.setKinTypeStrigs(kinTypeStringInput.getText().split("\n"));
                kinTypeStrings = graphPanel.getKinTypeStrigs();
                drawGraph();
            }
        });
        // Populate the input area from any stored kin type strings,
        // replacing the hint text on the first non-blank entry.
        boolean firstString = true;
        for (String currentKinTypeString : kinTypeStrings) {
            if (currentKinTypeString.trim().length() > 0) {
                if (firstString) {
                    kinTypeStringInput.setText("");
                    firstString = false;
                } else {
                    kinTypeStringInput.append("\n");
                }
                kinTypeStringInput.append(currentKinTypeString.trim());
            }
        }
    }

    /**
     * Re-queries the entity service for all relations of the current ego
     * nodes (filtered by the current kin type strings), refreshes the ego
     * selection tree and redraws the diagram. Query failures are logged and
     * reported to the user without aborting the redraw.
     */
    public void drawGraph() {
        try {
            graphData.setEgoNodes(entityIndex.getRelationsOfEgo(graphPanel.getEgoList(), graphPanel.getEgoUniquiIdentifiersList(), kinTypeStrings, graphPanel.getIndexParameters()));
        } catch (EntityServiceException exception) {
            GuiHelper.linorgBugCatcher.logError(exception);
            LinorgWindowManager.getSingleInstance().addMessageDialogToQueue("Failed to load an entity", "Kinnate");
        }
        egoSelectionPanel.setEgoNodes(graphPanel.getEgoList());
        kinTypeStrings = graphPanel.getKinTypeStrigs();
        graphPanel.drawNodes(graphData);
    }

    /** Replaces the current ego set and redraws the diagram. */
    public void setEgoNodes(URI[] egoSelection, String[] egoIdentifierArray) {
        graphPanel.setEgoList(egoSelection, egoIdentifierArray);
        drawGraph();
    }

    /** Adds the given entities to the ego set and redraws the diagram. */
    public void addEgoNodes(URI[] egoSelection, String[] egoIdentifierArray) {
        graphPanel.addEgo(egoSelection, egoIdentifierArray);
        drawGraph();
    }

    /** Removes the given entities from the ego set and redraws the diagram. */
    public void removeEgoNodes(URI[] egoSelection, String[] egoIdentifierArray) {
        graphPanel.removeEgo(egoSelection, egoIdentifierArray);
        drawGraph();
    }

    /** @return true when the diagram already has a target save file */
    public boolean hasSaveFileName() {
        return graphPanel.hasSaveFileName();
    }

    /** @return true when there are unsaved changes */
    public boolean requiresSave() {
        return graphPanel.requiresSave();
    }

    /** Saves the diagram to its existing file. */
    public void saveToFile() {
        graphPanel.saveToFile();
    }

    /** Saves the diagram to the given file. */
    public void saveToFile(File saveFile) {
        graphPanel.saveToFile(saveFile);
    }

    /** SavePanel hook: redraws the diagram. */
    public void updateGraph() {
        this.drawGraph();
    }
}
| src/main/java/nl/mpi/kinnate/ui/KinTypeEgoSelectionTestPanel.java | package nl.mpi.kinnate.ui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.File;
import java.net.URI;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import nl.mpi.arbil.GuiHelper;
import nl.mpi.arbil.ImdiTable;
import nl.mpi.arbil.ImdiTableModel;
import nl.mpi.arbil.LinorgWindowManager;
import nl.mpi.kinnate.kindata.GraphSorter;
import nl.mpi.kinnate.svg.GraphPanel;
import nl.mpi.kinnate.SavePanel;
import nl.mpi.kinnate.entityindexer.EntityIndex;
import nl.mpi.kinnate.entityindexer.EntityCollection;
import nl.mpi.kinnate.entityindexer.EntityService;
import nl.mpi.kinnate.entityindexer.EntityServiceException;
/**
* Document : KinTypeStringTestPanel
* Created on : Sep 29, 2010, 12:52:01 PM
* Author : Peter Withers
*/
/**
 * Main kinship-diagram panel. Combines the kin type string input area, the
 * ego selection tree, the kin term panel and the SVG graph panel, plus a
 * table of selected nodes and the indexer parameter controls underneath.
 *
 * Document   : KinTypeStringTestPanel
 * Created on : Sep 29, 2010, 12:52:01 PM
 * Author     : Peter Withers
 */
public class KinTypeEgoSelectionTestPanel extends JPanel implements SavePanel {

    private JTextArea kinTypeStringInput;      // free-text kin type string editor
    private GraphPanel graphPanel;             // SVG rendering of the kin diagram
    private GraphSorter graphData;             // node layout/sorting model fed to graphPanel
    private EgoSelectionPanel egoSelectionPanel;
    private KinTermPanel kinTermPanel;
    private EntityService entityIndex;         // queries the entity collection for relations
    // Placeholder/hint text shown in the input area until the user types.
    private String defaultString = "# This test panel should provide a kin diagram based on selected egos and the the kintype strings entered here.\n# Enter one string per line.\n# By default all relations of the selected entity will be shown.\n";
    // Current kin type query strings; empty until parsed from the input area.
    private String kinTypeStrings[] = new String[]{};

    /**
     * Builds the panel and, when an existing SVG file is given, loads the
     * previously saved diagram from it; otherwise starts with an empty graph.
     * Note: the initial entity query below duplicates the logic that
     * drawGraph() implements, but without the final drawNodes() call.
     *
     * @param existingFile previously saved diagram file, or null for a new one
     */
    public KinTypeEgoSelectionTestPanel(File existingFile) {
        this.setLayout(new BorderLayout());
        graphPanel = new GraphPanel(this);
        egoSelectionPanel = new EgoSelectionPanel();
        kinTermPanel = new KinTermPanel(this, graphPanel.getkinTerms());
        kinTypeStringInput = new JTextArea(defaultString);
        kinTypeStringInput.setBorder(javax.swing.BorderFactory.createTitledBorder("Kin Type Strings"));
        JPanel kinGraphPanel = new JPanel(new BorderLayout());
        kinGraphPanel.add(kinTypeStringInput, BorderLayout.PAGE_START);
        // JSplitPane egoSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
        // Collapsible side panes flank the central graph panel.
        JPanel kintermSplitPane = new JPanel(new BorderLayout());
        // kinGraphPanel.add(egoSplitPane, BorderLayout.CENTER);
        // outerSplitPane.setDividerLocation(0.5); // todo: add this to its parent so that the divider position sticks
        kintermSplitPane.add(new HidePane(egoSelectionPanel, "Ego Selection", BorderLayout.LINE_END), BorderLayout.LINE_START);
        kintermSplitPane.add(graphPanel, BorderLayout.CENTER);
        kintermSplitPane.add(new HidePane(kinTermPanel, "Kin Terms", BorderLayout.LINE_START), BorderLayout.LINE_END);
        kinGraphPanel.add(kintermSplitPane);
        // Table of the currently selected nodes, with drag support.
        ImdiTableModel imdiTableModel = new ImdiTableModel();
        ImdiTable imdiTable = new ImdiTable(imdiTableModel, "Selected Nodes");
        TableCellDragHandler tableCellDragHandler = new TableCellDragHandler();
        imdiTable.setTransferHandler(tableCellDragHandler);
        imdiTable.setDragEnabled(true);
        graphPanel.setImdiTableModel(imdiTableModel);
        JScrollPane tableScrollPane = new JScrollPane(imdiTable);
        // Dimension minimumSize = new Dimension(0, 0);
        // fieldListTabs.setMinimumSize(minimumSize);
        // tableScrollPane.setMinimumSize(minimumSize);
        // EntityIndex loads the xml files and reads the document for entity data
        // entityIndex = new EntityIndex(graphPanel.getIndexParameters());
        // EntityCollection queries the xml collection to get the entity data
        entityIndex = new EntityCollection();
        graphData = new GraphSorter();
        if (existingFile != null && existingFile.exists()) {
            // Restore a previously saved diagram.
            graphPanel.readSvg(existingFile);
        } else {
            // Start with an empty diagram.
            graphPanel.drawNodes(graphData);
            // todo: filter out the noise and only save or use the actual kin type strings
            // graphPanel.setKinTypeStrigs(kinTypeStringInput.getText().split("\n"));
            // kinTypeStrings = graphPanel.getKinTypeStrigs();
        }
        // Initial entity query; failures are logged and reported to the user.
        try {
            graphData.setEgoNodes(entityIndex.getRelationsOfEgo(graphPanel.getEgoList(), graphPanel.getEgoUniquiIdentifiersList(), kinTypeStrings, graphPanel.getIndexParameters()));
        } catch (EntityServiceException exception) {
            GuiHelper.linorgBugCatcher.logError(exception);
            LinorgWindowManager.getSingleInstance().addMessageDialogToQueue("Failed to load an entity", "Kinnate");
        }
        egoSelectionPanel.setEgoNodes(graphPanel.getEgoList());
        kinTypeStrings = graphPanel.getKinTypeStrigs();
        IndexerParametersPanel indexerParametersPanel = new IndexerParametersPanel(this, graphPanel, tableCellDragHandler);
        JPanel advancedPanel = new JPanel(new BorderLayout());
        advancedPanel.add(tableScrollPane, BorderLayout.CENTER);
        advancedPanel.add(new HidePane(indexerParametersPanel, "Indexer Parameters", BorderLayout.LINE_START), BorderLayout.LINE_END);
        JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, kinGraphPanel, advancedPanel);
        this.add(splitPane);
        // Placeholder behaviour: clear the hint on focus, restore it when
        // the field is left empty.
        kinTypeStringInput.addFocusListener(new FocusListener() {

            public void focusGained(FocusEvent e) {
                if (kinTypeStringInput.getText().equals(defaultString)) {
                    kinTypeStringInput.setText("");
                    kinTypeStringInput.setForeground(Color.BLACK);
                }
            }

            public void focusLost(FocusEvent e) {
                if (kinTypeStringInput.getText().length() == 0) {
                    kinTypeStringInput.setText(defaultString);
                    kinTypeStringInput.setForeground(Color.lightGray);
                }
            }
        });
        // Re-parse the kin type strings and redraw after every keystroke.
        kinTypeStringInput.addKeyListener(new KeyListener() {

            public void keyTyped(KeyEvent e) {
            }

            public void keyPressed(KeyEvent e) {
            }

            public void keyReleased(KeyEvent e) {
                graphPanel.setKinTypeStrigs(kinTypeStringInput.getText().split("\n"));
                kinTypeStrings = graphPanel.getKinTypeStrigs();
                drawGraph();
            }
        });
        // Populate the input area from any stored kin type strings,
        // replacing the hint text on the first non-blank entry.
        boolean firstString = true;
        for (String currentKinTypeString : kinTypeStrings) {
            if (currentKinTypeString.trim().length() > 0) {
                if (firstString) {
                    kinTypeStringInput.setText("");
                    firstString = false;
                } else {
                    kinTypeStringInput.append("\n");
                }
                kinTypeStringInput.append(currentKinTypeString.trim());
            }
        }
    }

    /**
     * Re-queries the entity service for all relations of the current ego
     * nodes (filtered by the current kin type strings) and redraws the
     * diagram. Query failures are logged and reported to the user without
     * aborting the redraw.
     */
    public void drawGraph() {
        try {
            graphData.setEgoNodes(entityIndex.getRelationsOfEgo(graphPanel.getEgoList(), graphPanel.getEgoUniquiIdentifiersList(), kinTypeStrings, graphPanel.getIndexParameters()));
        } catch (EntityServiceException exception) {
            GuiHelper.linorgBugCatcher.logError(exception);
            LinorgWindowManager.getSingleInstance().addMessageDialogToQueue("Failed to load an entity", "Kinnate");
        }
        graphPanel.drawNodes(graphData);
    }

    /**
     * Replaces the ego set with the given selection, redraws the diagram and
     * refreshes the ego selection tree.
     * NOTE(review): despite the "add" name this calls setEgoList, i.e. it
     * replaces rather than extends the ego set — confirm against callers.
     */
    public void addEgoNodes(URI[] egoSelection, String[] egoIdentifierArray) {
        graphPanel.setEgoList(egoSelection, egoIdentifierArray);
        drawGraph();
        egoSelectionPanel.setEgoNodes(graphPanel.getEgoList());
    }

    /** @return true when the diagram already has a target save file */
    public boolean hasSaveFileName() {
        return graphPanel.hasSaveFileName();
    }

    /** @return true when there are unsaved changes */
    public boolean requiresSave() {
        return graphPanel.requiresSave();
    }

    /** Saves the diagram to its existing file. */
    public void saveToFile() {
        graphPanel.saveToFile();
    }

    /** Saves the diagram to the given file. */
    public void saveToFile(File saveFile) {
        graphPanel.saveToFile(saveFile);
    }

    /** SavePanel hook: redraws the diagram. */
    public void updateGraph() {
        this.drawGraph();
    }
}
| src/main/java/nl/mpi/kinnate/ui/KinTypeEgoSelectionTestPanel.java | Changed the selected elements list to use the ID not the path and updated the way the imdi path is obtained to show the table. Improved the mouse actions used to select entities on the diagram. Prevented duplicate entity relations and prevented relations of hidden entities being shown on the graph. |
|
Java | agpl-3.0 | a3f9c198e124bbc4a83d766b81da89da94a53059 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 5995967c-2e61-11e5-9284-b827eb9e62be | hello.java | 59902ca0-2e61-11e5-9284-b827eb9e62be | 5995967c-2e61-11e5-9284-b827eb9e62be | hello.java | 5995967c-2e61-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | b0aa34b5c71b4b17e3af0717320effc5f5c106bf | 0 | Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker | package am.userInterface.canvas2.layouts;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Map.Entry;
import javax.swing.JOptionPane;
import javax.swing.event.ChangeEvent;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import com.hp.hpl.jena.ontology.ConversionException;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.OntResource;
import com.hp.hpl.jena.ontology.OntTools;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import am.Utility;
import am.app.Core;
import am.app.mappingEngine.AbstractMatcher;
import am.app.mappingEngine.Alignment;
import am.app.mappingEngine.AlignmentSet;
import am.app.mappingEngine.AbstractMatcher.alignType;
import am.app.ontology.Node;
import am.app.ontology.Ontology;
import am.userInterface.canvas2.Canvas2;
import am.userInterface.canvas2.graphical.GraphicalData;
import am.userInterface.canvas2.graphical.RectangleElement;
import am.userInterface.canvas2.graphical.TextElement;
import am.userInterface.canvas2.graphical.GraphicalData.NodeType;
import am.userInterface.canvas2.nodes.GraphicalNode;
import am.userInterface.canvas2.nodes.LegacyEdge;
import am.userInterface.canvas2.nodes.LegacyMapping;
import am.userInterface.canvas2.nodes.LegacyNode;
import am.userInterface.canvas2.popupmenus.CreateMappingMenu;
import am.userInterface.canvas2.popupmenus.DeleteMappingMenu;
import am.userInterface.canvas2.utility.Canvas2Edge;
import am.userInterface.canvas2.utility.Canvas2Layout;
import am.userInterface.canvas2.utility.Canvas2Vertex;
import am.userInterface.canvas2.utility.CanvasGraph;
import am.userInterface.canvas2.utility.GraphLocator;
import am.userInterface.canvas2.utility.GraphLocator.GraphType;
import am.userInterface.vertex.Vertex;
import am.utility.DirectedGraphEdge;
/**
 * This layout is responsible for the placement of nodes and edges.
 * The layout algorithm is the same one as the original Canvas class.
 *
 * @author cosmin
 *
 */
public class LegacyLayout extends Canvas2Layout {
/* FLAGS, and SETTINGS */
private boolean showLocalName = true;   // preference: display each resource's local name on its node
private boolean showLabel = false;      // preference: display each resource's rdfs:label on its node
// private String language = "EN";
private String labelAndNameSeparator = " || ";  // separator used when both local name and label are shown

private boolean[] pixelColumnDrawn; // used for a very special hack in LegacyEdge.draw(); Read about it in that method.
// It's done in order to avoid unnecessary draws() and speed up the paint() function.
private Rectangle pixelColumnViewport; // required to know the correct index in the pixelColumnDrawn array.

private HashMap<OntResource,LegacyNode> hashMap; // used in the graph building to avoid visiting the same nodes twice
private LegacyNode anonymousNode; // sentinel node shared by all anonymous (unnamed) ontology resources

private Dimension oldViewportDimensions; // this variable is used in the stateChanged handler.

/**
 * Graph Building Variables.
 * Offsets/indents (in pixels) used while laying out a subgraph top-to-bottom.
 */
private int subgraphXoffset = 20;  // x origin of the subgraph currently being built
private int subgraphYoffset = 20;  // y origin of the subgraph currently being built
private int depthIndent = 20;      // horizontal indent added per tree-depth level
private int marginBottom = 5;      // vertical gap between consecutive node rows
private int nodeHeight = 20;       // height of every node row

private int leftGraphX = 20;       // top-left corner of the graph shown on the left half of the canvas
private int leftGraphY = 42;
private int rightGraphX = 500;     // top-left corner of the graph shown on the right half of the canvas
private int rightGraphY = 42;
private int middleDividerLeftMargin = 10;  // gap kept between the left graph and the middle divider
private int middleDividerWidth = 1;
private int topDividerTopMargin = 20;

private GraphicalNode middleDivider;       // vertical line separating the source and target ontologies
private GraphicalNode topDivider;          // horizontal line under the ontology captions
private GraphicalNode sourceOntologyText;  // "Source Ontology" caption (left side)
private GraphicalNode targetOntologyText;  // "Target Ontology" caption (right side)
private OntClass owlThing;                 // cached owl:Thing of the model currently being laid out

private boolean leftSideLoaded = false;   // true once an ontology occupies the left half
private boolean rightSideLoaded = false;  // true once an ontology occupies the right half
private boolean leftSide;                 // side on which the ontology currently being built is placed
private int leftOntologyID = Ontology.ID_NONE; // the ontology ID of the graphs on the left side of the canvas layout
private int rightOntologyID = Ontology.ID_NONE; // the ontology ID of the graphs on the right side of the canvas layout

/** Mouse Event handlers Variables */
private ArrayList<LegacyNode> selectedNodes; // the list of currently selected nodes
private boolean PopupMenuActive = false;     // true while a popup menu is showing
private boolean SingleMappingView = false; // this is used when a mapping is doubleclicked with the left mouse button
// in order to show only that specific mapping
private ArrayList<LegacyMapping> SingleMappingMappings = new ArrayList<LegacyMapping>(); // we need to keep a list of the mappings we change for the SingleMappingView
private ArrayList<LegacyNode> SingleMappingMovedNodes = new ArrayList<LegacyNode>(); // we need to keep a list of the nodes we moved
/**
 * Creates the layout for the given visualization panel and builds its
 * permanent artifacts (dividers and ontology captions).
 *
 * @param vp the Canvas2 panel this layout draws into
 */
public LegacyLayout(Canvas2 vp) {
    super(vp);
    oldViewportDimensions = new Dimension(0, 0);
    hashMap = new HashMap<OntResource, LegacyNode>();
    selectedNodes = new ArrayList<LegacyNode>();
    layoutArtifactGraph = buildArtifactGraph(); // build the artifact graph
}
/**
 * Prepares the per-repaint bookkeeping: one boolean flag per pixel column of
 * the viewport, all initially false (meaning "nothing drawn in this column yet").
 */
@Override
public void getReadyForRepaint(Rectangle viewport) {
    pixelColumnViewport = viewport;
    pixelColumnDrawn = new boolean[viewport.width]; // one flag per viewport pixel column
}
/**
 * When a column of the viewport has been drawn in, set the corresponding array entry to true.
 * That way, it doesn't get drawn by every edge that shares that column.
 *
 * Columns that fall outside the current viewport are ignored: previously the
 * unchecked translation could produce a negative or too-large index (e.g. when
 * canvasColNum <= pixelColumnViewport.x) and throw ArrayIndexOutOfBoundsException.
 *
 * @param canvasColNum pixel column in canvas coordinates
 */
public void setPixelColumnDrawn(int canvasColNum) {
    int viewportColNum = canvasColNum - pixelColumnViewport.x - 1; // translate the canvasColNum to an array index
    if( viewportColNum >= 0 && viewportColNum < pixelColumnDrawn.length ) {
        pixelColumnDrawn[viewportColNum] = true; /* column has been filled in. */
    }
}
/**
 * LegacyEdge needs to know if a column has been drawn already.
 * Columns outside the current viewport report false instead of throwing
 * ArrayIndexOutOfBoundsException (the translated index can be negative or
 * past the end of the array at the viewport edges).
 *
 * @param canvasColNum pixel column in canvas coordinates
 * @return true if that column has already been filled in during this repaint
 */
public boolean isPixelColumnDrawn(int canvasColNum) {
    int viewportColNum = canvasColNum - pixelColumnViewport.x - 1;
    if( viewportColNum < 0 || viewportColNum >= pixelColumnDrawn.length ) return false;
    return pixelColumnDrawn[viewportColNum];
}

/** @return the viewport used to index the pixel-column bookkeeping (edges need it). */
public Rectangle getPixelColumnViewport() { return pixelColumnViewport; }
/**
 * Utility function called by LegacyNodes to build the text that should be
 * displayed for a node, honoring the showLocalName/showLabel preferences.
 *
 * @param d the graphical data of the node being labeled
 * @return the label text, or the empty string when nothing should be shown
 */
@Override
public String getNodeLabel(GraphicalData d ) {

    if( d.r == null ) {
        // not backed by an ontology resource; only text elements carry their own label
        return ( d.type == NodeType.TEXT_ELEMENT ) ? ((TextElement)d).getText() : "";
    }

    if( showLocalName && showLabel ) return d.r.getLocalName() + labelAndNameSeparator + d.r.getLabel(null);
    if( showLabel )                  return d.r.getLabel(null);
    if( showLocalName )              return d.r.getLocalName();
    return "";
}
// Display-preference accessors (see showLabel/showLocalName fields).
@Override public void setShowLabel(boolean shL ) { showLabel = shL; }
@Override public void setShowLocalName( boolean shLN ) { showLocalName = shLN; }
@Override public boolean getShowLabel() { return showLabel; }
@Override public boolean getShowLocalName() { return showLocalName; }

public int getDepthIndent() { return depthIndent; } // * Getter.
/** @return true while at least one side (left or right) of the canvas is still free. */
@Override
public boolean canDisplayMoreOntologies() {
    return !( leftSideLoaded && rightSideLoaded );
}
/**
 * Records which half of the canvas the given ontology occupies and makes all
 * of its graphs visible.
 *
 * @param graphRepository all graphs known to the canvas
 * @param ontologyID      the ontology whose graphs should be shown
 */
@Override
public void displayOntology( ArrayList<CanvasGraph> graphRepository, int ontologyID) {
    //TODO: Do graph positioning here, depending on which side is loaded into the layout.
    // Right now we assume the user will load source then target, in that order.

    // record which side of the canvas this ontology occupies (left fills first)
    if( !leftSideLoaded ) {
        leftSideLoaded = true;
        leftOntologyID = ontologyID;
    } else if( !rightSideLoaded ) {
        rightSideLoaded = true;
        rightOntologyID = ontologyID;
    }

    // make every graph belonging to this ontology visible
    for( CanvasGraph graph : GraphLocator.getGraphsByID(graphRepository, ontologyID) ) {
        graph.setVisible(true);
    }
}
/**
 * Removes an ontology from the layout: drops its graphs (and every matcher
 * graph, since matchings reference both ontologies), frees the side of the
 * canvas it occupied, and purges its nodes from the resource-to-node hashMap.
 *
 * @param graphs     all graphs currently held by the canvas; modified in place
 * @param ontologyID the ontology being removed
 */
@Override
public void removeOntology( ArrayList<CanvasGraph> graphs, int ontologyID ) {

    // iterate backwards so removal does not shift the indices we still have to visit
    for( int i = graphs.size() - 1; i >= 0; i-- ) {
        CanvasGraph gr = graphs.get(i);
        if( gr.getGraphType() == GraphLocator.GraphType.MATCHER_GRAPH || gr.getID() == ontologyID ) {
            gr.detachEdges(); // we must detach the graph from other visible graphs before removing.
            graphs.remove(i);
        }
    }

    if( leftOntologyID == ontologyID )  { leftOntologyID = Core.ID_NONE; leftSideLoaded = false; }
    if( rightOntologyID == ontologyID ) { rightOntologyID = Core.ID_NONE; rightSideLoaded = false; }

    // Remove this ontology's elements from the hashMap.
    // Iterator.remove() is safe while iterating the entry set, so no
    // intermediate "to be removed" list is required.
    Iterator<Entry<OntResource,LegacyNode>> entryIter = hashMap.entrySet().iterator();
    while( entryIter.hasNext() ) {
        if( entryIter.next().getValue().getObject().ontologyID == ontologyID ) {
            entryIter.remove();
        }
    }
}
/*
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
************************************************ GRAPH BULDING METHODS **********************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*/
/**
* This function will build the global graph for an ontology (classes graph, properties graph, all under one global graph).
* It does different things for different types of ontologies (OWL,RDFS,XML), but in the end it should all be under the
* main ontology graph, which is the return value.
*
* This includes
* - build the Class graph.
* - build the Properties graph.
* - Individuals? TODO
*
* @param ont The Ontology that we are building the visualization graphs for.
*
*/
@Override
public ArrayList<CanvasGraph> buildGlobalGraph( Ontology ont ) {

    ArrayList<CanvasGraph> ontologyGraphs = new ArrayList<CanvasGraph>();

    // Before we build the graph, update the preferences.
    showLabel = Core.getUI().getAppPreferences().getShowLabel();
    showLocalName = Core.getUI().getAppPreferences().getShowLocalname();

    // Decide which half of the canvas this ontology goes on; the left half fills first.
    if( !leftSideLoaded ) // source goes on the left.
        leftSide = true;
    else if( !rightSideLoaded )
        leftSide = false;
    else
        return ontologyGraphs; // we have ontologies loaded on both sides, do nothing for now.

    // Set the layout origin for the side we are building on.
    if( leftSide ) { // we're loading a graph on the left side of the canvas
        subgraphXoffset = leftGraphX + depthIndent;
        subgraphYoffset = leftGraphY + nodeHeight + marginBottom;
    } else { // we're loading a graph on the right side of the canvas.
        rightGraphX = middleDivider.getObject().x + 10; // start just right of the divider
        subgraphXoffset = rightGraphX + depthIndent;
        subgraphYoffset = rightGraphY + nodeHeight + marginBottom;
    }

    /*
    if( ont.getLanguage().equals("XML") ) {
        // build the visualization from an XML ontology
        // an XML ontology will only have classes.

        CanvasGraph classesGraph = new CanvasGraph( GraphLocator.GraphType.CLASSES_GRAPH, ont.getID() );
        Vertex classRootVertex = ont.getClassesTree();

        // Create the root LegacyNode in order to call the recusive method correctly
        TextElement gr = new TextElement(0*depthIndent + subgraphXoffset,
                classesGraph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
                0, nodeHeight, this, classesGraph.getID() );
        gr.setText("XML Classes Hierarchy");
        LegacyNode classRootNode = new LegacyNode( gr );
        classesGraph.insertVertex(classRootNode);

        recursiveBuildClassGraphXML( classRootVertex, classRootNode, classesGraph, 1 );

    } else */

    //	if ( ont.getLanguage().equals("OWL") || ont.getLanguage().endsWith("XML") ) {
    OntModel m = ont.getModel();
    if( m == null ) {
        // this is an ontology that is not loaded by jena
    } else {
        owlThing = m.getOntClass( OWL.Thing.getURI() ); // cache owl:Thing for later use
    }

    CanvasGraph classesGraph = new CanvasGraph( GraphLocator.GraphType.CLASSES_GRAPH, ont.getID() );
    // shared sentinel for anonymous resources (never displayed; prevents cycles in the hashMap)
    anonymousNode = new LegacyNode( new GraphicalData(0, 0, 0, 0, GraphicalData.NodeType.FAKE_NODE, this, ont.getID() ));

    LegacyNode classesRoot = buildClassGraph( m, classesGraph ); // build the class graph here

    // update the offsets to put the properties graph under the class graph.
    if( leftSide ) {
        subgraphYoffset = classesGraph.getBounds().y + classesGraph.getBounds().height + nodeHeight + marginBottom;
    } else {
        subgraphYoffset = classesGraph.getBounds().y + classesGraph.getBounds().height + nodeHeight + marginBottom;
    }

    CanvasGraph propertiesGraph = new CanvasGraph( GraphLocator.GraphType.PROPERTIES_GRAPH, ont.getID() );
    LegacyNode propertiesRoot = buildPropertiesGraph(m, propertiesGraph); // and the properties graph here

    CanvasGraph globalGraph = buildOntologyGraph(classesRoot, propertiesRoot, ont); // and put them all under a global graph

    // Compute the lowest and rightmost extents over both graphs; the layout
    // artifacts (dividers) must be resized to at least cover them.
    int deepestY = 0;
    if( (classesGraph.getBounds().y + classesGraph.getBounds().height) > (propertiesGraph.getBounds().y+propertiesGraph.getBounds().height) )
        deepestY = classesGraph.getBounds().y + classesGraph.getBounds().height;
    else
        deepestY = propertiesGraph.getBounds().y+propertiesGraph.getBounds().height;

    int rightmostX = 0;
    if( (classesGraph.getBounds().x + classesGraph.getBounds().width) > (propertiesGraph.getBounds().x + propertiesGraph.getBounds().width) )
        rightmostX = classesGraph.getBounds().x + classesGraph.getBounds().width;
    else
        rightmostX = propertiesGraph.getBounds().x + propertiesGraph.getBounds().width;

    updateArtifactGraph(deepestY, rightmostX , leftSide);

    // add all the graphs created to the ontologyGraphs in the Canvas2.
    ontologyGraphs.add(classesGraph);
    ontologyGraphs.add(propertiesGraph);
    ontologyGraphs.add(globalGraph);

    return ontologyGraphs;
}
/**
 * Called from the LegacyLayout constructor.
 * Builds the graph holding this layout's permanent artifacts:
 *  - the middle (vertical) divider,
 *  - the top (horizontal) divider,
 *  - the "Source Ontology" caption on the left of the canvas,
 *  - the "Target Ontology" caption on the right of the canvas.
 */
private CanvasGraph buildArtifactGraph() {

    CanvasGraph artifacts = new CanvasGraph(GraphLocator.GraphType.LAYOUT_GRAPH_IGNORE_BOUNDS, Ontology.ID_NONE);

    Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();

    // vertical divider splitting the canvas in half
    middleDivider = new GraphicalNode(
            new RectangleElement(screen.width/2, 0, middleDividerWidth, screen.height , this, Ontology.ID_NONE ) );

    // horizontal divider running under the ontology captions
    topDivider = new GraphicalNode(
            new RectangleElement(0, topDividerTopMargin, screen.width, 1, this, Ontology.ID_NONE) );

    // caption for the left half
    TextElement sourceText = new TextElement( 10, 15, 10, 20, this, Ontology.ID_NONE );
    sourceText.setBold(true);
    sourceText.setText("Source Ontology");
    sourceOntologyText = new GraphicalNode(sourceText);

    // caption for the right half, just right of the divider
    TextElement targetText = new TextElement( screen.width/2 + middleDividerWidth + 10, 15, 10, 20, this,Ontology.ID_NONE);
    targetText.setBold(true);
    targetText.setText("Target Ontology");
    targetOntologyText = new GraphicalNode(targetText);

    artifacts.insertVertex(middleDivider);
    artifacts.insertVertex(topDivider);
    artifacts.insertVertex(sourceOntologyText);
    artifacts.insertVertex(targetOntologyText);

    return artifacts;
}
/**
 * Repositions and resizes the layout artifacts (middle divider, top divider,
 * target caption) so they accommodate a graph whose lowest point is deepestY
 * and rightmost point is rightmostX.
 *
 * @param leftSide true when the graph that was just laid out sits on the left half
 */
private void updateArtifactGraph(int deepestY, int rightmostX, boolean leftSide ) {

    Rectangle viewportDim = vizpanel.getViewport().getBounds();
    Rectangle vizpanelDim = vizpanel.getBounds();

    // x position of the middle divider: pushed right if the left graph grew past the midpoint
    int dividerX = viewportDim.width/2;
    if( leftSide && rightmostX > dividerX )
        dividerX = rightmostX + middleDividerLeftMargin; // move the divider over to the right so it doesn't overlap the graph

    // divider must reach at least to the bottom of the graph, the panel and the viewport
    int dividerH = Math.max( deepestY, Math.max( vizpanelDim.height, viewportDim.height ) );

    middleDivider.setObject( new RectangleElement(dividerX, 0, middleDividerWidth, dividerH + vizpanel.Ypadding, this, Ontology.ID_NONE ) );

    // keep the "Target Ontology" caption to the right of the (possibly moved) divider
    targetOntologyText.getObject().x = dividerX + middleDividerWidth + 10;

    // stretch the top divider to cover the wider of the graph and the viewport
    // NOTE!: There's a condition that falls through here: when leftside == true and rightside == true.
    if( (leftSide && !rightSideLoaded) || !leftSide ) {
        topDivider.getObject().width = Math.max( rightmostX, viewportDim.x + viewportDim.width ) + vizpanel.Xpadding;
    }

    layoutArtifactGraph.recalculateBounds();
}
/**
 * Puts the classes graph and the properties graph under one global root node
 * labeled with the ontology title.
 *
 * @param classesRoot    root node of the classes graph
 * @param propertiesRoot root node of the properties graph
 * @param ont            the ontology these graphs belong to
 * @return the global graph, or null if the classes root carries no graphical data
 */
private CanvasGraph buildOntologyGraph(LegacyNode classesRoot,
        LegacyNode propertiesRoot, Ontology ont) {

    if( classesRoot.getObject() == null ) return null;

    CanvasGraph globalGraph = new CanvasGraph(GraphLocator.GraphType.GLOBAL_ROOT_GRAPH, ont.getID());

    // root node positioned at the top corner of whichever side we are building on
    TextElement rootData = leftSide
            ? new TextElement( leftGraphX, leftGraphY, 0, nodeHeight, this, ont.getID() )
            : new TextElement( rightGraphX, rightGraphY, 0, nodeHeight, this, ont.getID() );
    rootData.setText( ont.getTitle() );

    LegacyNode globalRoot = new LegacyNode(rootData);
    globalGraph.insertVertex(globalRoot);
    globalGraph.insertEdge( new LegacyEdge( globalRoot, classesRoot, null, this ) );
    globalGraph.insertEdge( new LegacyEdge( globalRoot, propertiesRoot, null, this ) );

    return globalGraph;
}
/**
 * Recursively builds the class tree for an XML ontology.
 *
 * @param currentVertex vertex of the legacy class tree being converted
 * @param parentNode    graphical parent the new node hangs from
 * @param graph         graph collecting the created nodes and edges
 * @param depth         tree depth, used for the horizontal indent
 * @return the LegacyNode created for currentVertex
 */
private LegacyNode recursiveBuildClassGraphXML( Vertex currentVertex, LegacyNode parentNode, CanvasGraph graph, int depth ) {

    // graphical representation of the current vertex; y position follows the
    // number of nodes already placed in the graph
    TextElement textData = new TextElement(depth*depthIndent + subgraphXoffset,
            graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
            100, nodeHeight, this, graph.getID() );
    textData.setText( currentVertex.getName() );

    LegacyNode node = new LegacyNode( textData );
    graph.insertVertex(node);
    graph.insertEdge( new LegacyEdge( parentNode, node, null, this) );

    // recurse over the children of the current vertex
    for( int i = 0; i < currentVertex.getChildCount(); i++ ) {
        recursiveBuildClassGraphXML( (Vertex) currentVertex.getChildAt(i), node, graph, depth+1);
    }

    return node;
}
/**
 * This function and the recursive version build the class graph.
 *
 * The reason I split it into two functions is because the first level of recursion has to call
 * OntTools.namedHierarchyRoots(m) while the rest of the levels use superClass.listSubClasses().
 *
 * Otherwise, the two functions are quite similar.
 *
 * They both add to the graph, and build it up.
 *
 * @return the root node ("OWL Classes Hierarchy") of the classes graph
 */
@SuppressWarnings("unchecked")  // this comes from OntTools.namedHierarchyRoots()
private LegacyNode buildClassGraph( OntModel m, CanvasGraph graph ) {

    // logging is only paid for when debugging
    Logger log = null;
    if( Core.DEBUG ) {
        log = Logger.getLogger(this.getClass());
        log.setLevel(Level.DEBUG);
    }

    int depth = 0;

    // create the root node;
    TextElement gr = new TextElement(depth*depthIndent + subgraphXoffset,
            graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
            0, nodeHeight, this, graph.getID() );
    gr.setText("OWL Classes Hierarchy");
    LegacyNode root = new LegacyNode( gr );
    graph.insertVertex(root);

    List<OntClass> classesList = OntTools.namedHierarchyRoots(m);

    depth++;
    Iterator<OntClass> clsIter = classesList.iterator();
    while( clsIter.hasNext() ) {
        OntClass cls = clsIter.next(); // get the current child
        if( cls.isAnon() ) { // if it is anonymous, don't add it, but we still need to recurse on its children
            hashMap.put(cls, anonymousNode); // avoid cycles between anonymous nodes
            if( Core.DEBUG ) log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
            recursiveBuildClassGraph(root, cls, depth, graph);
            continue;
        } else if( cls.equals(OWL.Nothing) ) // if it's OWL.Nothing (i.e. we recursed to the bottom of the heirarchy) skip it.
            continue;

        // cycle check at the root: the hashMap doubles as a visited-set
        if( hashMap.containsKey(cls) ) { // we have seen this node before, do NOT recurse again
            if( Core.DEBUG ) log.debug("Cycle detected. OntClass:" + cls );
            continue;
        }

        // the child class is not anonymous or OWL.Nothing, add it to the graph, with the correct relationships
        GraphicalData gr1 = new GraphicalData( depth*depthIndent + subgraphXoffset,
                graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
                100, nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this, graph.getID() );
        LegacyNode node = new LegacyNode( gr1);
        graph.insertVertex( node );
        LegacyEdge edge = new LegacyEdge( root, node, null, this );
        graph.insertEdge( edge );

        hashMap.put( cls, node);
        if( Core.DEBUG ) log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
        recursiveBuildClassGraph( node, cls, depth+1, graph );
    }

    return root;
}
/**
 * Recursive companion of buildClassGraph(): adds the subclasses of parentClass
 * to the graph, hanging them from parentNode. Anonymous classes are not added
 * themselves, but their children are recursed into at the same depth. The
 * hashMap doubles as a visited-set so class-hierarchy cycles do not recurse forever.
 */
private void recursiveBuildClassGraph(
        LegacyNode parentNode,
        OntClass parentClass,  // this has to be passed because of anonymous classes and the special root node
        int depth,
        CanvasGraph graph) {

    Logger log = null;
    if( Core.DEBUG ) {
        log = Logger.getLogger(this.getClass());
        log.setLevel(Level.DEBUG);
        log.debug(parentClass);
    }

    ExtendedIterator clsIter = parentClass.listSubClasses(true);
    while( clsIter.hasNext() ) {
        OntClass cls = (OntClass) clsIter.next();
        if( cls.isAnon() ) {
            hashMap.put(cls, anonymousNode); // avoid cycles between anonymous nodes
            if( Core.DEBUG ) log.debug(">> Inserted anonymous node " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
            // recurse at the same depth: the anonymous class itself is not displayed
            recursiveBuildClassGraph( parentNode, cls, depth, graph );
            continue;
        } else if( cls.equals( OWL.Nothing ) )
            continue;

        // this is the cycle check
        if( hashMap.containsKey(cls) ) { // we have seen this node before, do NOT recurse again
            if( Core.DEBUG ) log.debug("Cycle detected. OntClass:" + cls );
            continue;
        }

        GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
                graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
                100 , nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this, graph.getID() );
        LegacyNode node = new LegacyNode( gr);
        graph.insertVertex(node);
        LegacyEdge edge = new LegacyEdge( parentNode, node, null, this );
        graph.insertEdge( edge );

        hashMap.put(cls, node);
        if( Core.DEBUG ) {
            log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
            log.debug(">> Label: " + cls.getLabel(null));
        }
        recursiveBuildClassGraph( node, cls, depth+1, graph );
    }
}
/*
/**
* buildClassTree(), createFosterHome() and adoptRemainingOrphans(), and getVertexFromClass() are ported from OntoTree builder
*
* fixDepthDFS() is written because the depthIndent information cannot be passed with these functions, so it has to be set
* after the heirarchy has been finished.
* @return
/
protected LegacyNode buildClassTree( OntModel m, CanvasGraph graph) {
//HashMap<OntClass, Vertex> classesMap = new HashMap<OntClass, Vertex>(); // this maps between ontology classes and Vertices created for the each class
ExtendedIterator orphansItr = m.listClasses(); // right now the classes have no parents, so they are orphans.
while( orphansItr.hasNext() ) { // iterate through all the classes
OntClass currentOrphan = (OntClass) orphansItr.next(); // the current class we are looking at
if( !currentOrphan.isAnon() ) { // make sure this is a real class (anoynymous classes are not real classes)
createFosterHome( currentOrphan, graph ); // assign orphan classes to parent parent classes
}
}
// this is the root node of the class tree (think of it like owl:Thing)
// create the root node;
TextElement gr = new TextElement(0, 0, 0, nodeHeight, this, Core.getInstance().getOntologyIDbyModel(m));
gr.setText("OWL Classes Hierarchy");
LegacyNode root = new LegacyNode( gr );
// we may have classes that still don't have a parent. these orphans will be adopted by root.
adoptRemainingOrphans( root, graph );
fixDepthHeightDFS( root, 0, 0); // because the heirarchy was not built in any order, the height and depth must be fixed after it is built (not during).
return root;
}
private void createFosterHome( OntClass currentOrphan, CanvasGraph graph ) {
LegacyNode currentVertex = getVertexFromClass( currentOrphan );
ExtendedIterator parentsItr = currentOrphan.listSuperClasses( true ); // iterator of the current class' parents
while( parentsItr.hasNext() ) {
OntClass parentClass = (OntClass) parentsItr.next();
if( !parentClass.isAnon() && !parentClass.equals(owlThing) ) {
LegacyNode parentVertex = getVertexFromClass(parentClass); // create a new Vertex object or use an existing one.
//parentVertex.add( currentVertex ); // create the parent link between the parent and the child
}
}
}
private void adoptRemainingOrphans(LegacyNode root, CanvasGraph graph) {
/* // Alternative way of iterating through the classes (via the classesMap that was created).
*
Set< Entry<OntClass, Vertex>> classesSet = classesMap.entrySet();
Iterator<Entry<OntClass, Vertex>> classesItr = classesSet.iterator();
while( classesItr.hasNext() ) {
}
/
// We will just iterate through the classes again, and find any remaining orphans
ExtendedIterator classesItr = model.listClasses();
while( classesItr.hasNext() ) {
OntClass currentClass = (OntClass) classesItr.next();
if( !currentClass.isAnon() ) {
if( classesMap.containsKey(currentClass) ) {
Vertex currentVertex = classesMap.get(currentClass);
if( currentVertex.getParent() == null ) {
// this vertex has no parent, that means root needs to adopt it
root.add( currentVertex );
}
}
else {
// we should never get here
// if we do, it means we _somehow_ missed a class during our first iteration in buildClassTree();
System.err.println("Assertion failed: listClasses() returning different classes between calls.");
}
}
}
}
/**
* helper Function for buildClassesTree()
* @param classesMap
* @param currentClass
* @return
/
private LegacyNode getVertexFromClass( OntClass currentClass ) {
LegacyNode currentVertex = null;
if( hashMap.containsKey( currentClass ) ) { // we already have a Vertex for the currentClass (because it is the parent of some node)
currentVertex = hashMap.get( currentClass );
} else {
// we don't have a Vertex for the current class, create one;
//currentVertex = createNodeAndVertex( currentClass, true, ontology.getSourceOrTarget());
GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
100 , nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this );
LegacyNode node = new LegacyNode( gr);
hashMap.put(currentClass, currentVertex);
}
return currentVertex;
}
// fix the positions of all the nodes linked to this graph
private int fixDepthHeightDFS( DirectedGraphVertex<GraphicalData> root, int depth, int height ) {
root.getObject().x = depth*depthIndent + subgraphXoffset;
root.getObject().y = height * (nodeHeight+marginBottom) + subgraphYoffset;
height = height+1;
Iterator<DirectedGraphEdge<GraphicalData>> edgeIter = root.edgesOut();
while( edgeIter.hasNext() ) { height = fixDepthHeightDFS( edgeIter.next().getDestination(), depth+1, height ); } // DFS call
return height;
}
(((((((((((((((((((((((((((((((((((((((((((())))))))))))))))))))))))))))))))))))))))))))
*/
/**
 * This function and the recursive version build the properties graph. It is a
 * mirror of the class-graph building methods: the first level of recursion has
 * to find the roots of the properties hierarchy, while deeper levels use
 * listSubProperties(). Both levels add nodes and edges to the given graph.
 *
 * @return the root node ("OWL Properties Hierarchy") of the properties graph
 */
private LegacyNode buildPropertiesGraph( OntModel m, CanvasGraph graph ) {

    // Only create and use the logger when debugging, consistent with
    // buildClassGraph() (previously the logger was built unconditionally).
    Logger log = null;
    if( Core.DEBUG ) {
        log = Logger.getLogger(this.getClass());
        log.setLevel(Level.DEBUG);
    }

    int depth = 0;

    // create the root node;
    TextElement gr = new TextElement(depth*depthIndent + subgraphXoffset,
            graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
            0, nodeHeight, this, graph.getID() );
    gr.setText("OWL Properties Hierarchy");
    LegacyNode root = new LegacyNode( gr );
    graph.insertVertex(root);

    List<OntProperty> propertiesList = getPropertyHeirarchyRoots(m);

    depth++;
    Iterator<OntProperty> propIter = propertiesList.iterator();
    while( propIter.hasNext() ) {
        OntProperty prop = propIter.next(); // get the current child
        if( prop.isAnon() ) { // if it is anonymous, don't add it, but we still need to recurse on its children
            hashMap.put(prop, anonymousNode); // avoid cycles between anonymous nodes
            recursiveBuildPropertiesGraph(root, prop, depth, graph);
            continue;
        } else if( prop.equals(OWL.Nothing) ) // if it's OWL.Nothing (i.e. we recursed to the bottom of the heirarchy) skip it.
            continue;

        // this is the cycle check: the hashMap doubles as a visited-set
        if( hashMap.containsKey(prop) ) { // we have seen this node before, do NOT recurse again
            if( Core.DEBUG ) log.debug("Cycle detected. OntProperty:" + prop );
            continue;
        }

        // so the child property is not anonymous or OWL.Nothing, add it to the graph, with the correct relationships
        GraphicalData gr1 = new GraphicalData( depth*depthIndent + subgraphXoffset,
                graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
                0, nodeHeight, prop, GraphicalData.NodeType.PROPERTY_NODE, this, graph.getID() );
        LegacyNode node = new LegacyNode( gr1);
        graph.insertVertex( node );
        LegacyEdge edge = new LegacyEdge( root, node, null, this );
        graph.insertEdge( edge );

        hashMap.put( prop, node);
        recursiveBuildPropertiesGraph( node, prop, depth+1, graph );
    }

    return root;
}
/**
 * Recursive companion of buildPropertiesGraph(): adds the sub-properties of
 * parentProperty to the graph, hanging them from parentNode. Anonymous
 * properties are not added themselves, but their children are recursed into at
 * the same depth. The hashMap doubles as a visited-set to break cycles.
 */
private void recursiveBuildPropertiesGraph(
        LegacyNode parentNode,
        OntProperty parentProperty,  // this has to be passed because of anonymous classes and the special root node
        int depth,
        CanvasGraph graph) {

    Logger log = null;
    if( Core.DEBUG ) {
        log = Logger.getLogger(this.getClass());
        log.setLevel(Level.DEBUG);
        log.debug(parentProperty);
    }

    ExtendedIterator clsIter = null;
    try {
        clsIter = parentProperty.listSubProperties(true);
    } catch (ConversionException e ){
        // Jena could not view a sub-resource as an OntProperty (e.g. an
        // incomplete ontology); treat parentProperty as a leaf and stop here.
        e.printStackTrace();
        return;
    }

    while( clsIter.hasNext() ) {
        OntProperty prop = (OntProperty) clsIter.next();
        if( prop.isAnon() ) {
            hashMap.put(prop, anonymousNode); // avoid cycles between anonymous nodes
            // recurse at the same depth: the anonymous property itself is not displayed
            recursiveBuildPropertiesGraph( parentNode, prop, depth, graph );
            continue;
        } else if( prop.equals( OWL.Nothing ) )
            continue;

        // this is the cycle check
        if( hashMap.containsKey(prop) ) { // we have seen this node before, do NOT recurse again
            if( Core.DEBUG ) log.debug("Cycle detected. OntProperty:" + prop );
            continue;
        }

        GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
                graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
                100 , nodeHeight, prop, GraphicalData.NodeType.PROPERTY_NODE, this, graph.getID() );
        LegacyNode node = new LegacyNode( gr);
        graph.insertVertex(node);
        LegacyEdge edge = new LegacyEdge( parentNode, node, null, this );
        graph.insertEdge( edge );

        hashMap.put(prop, node);
        recursiveBuildPropertiesGraph( node, prop, depth+1, graph );
    }
}
/**
 * Identifies the root nodes of the property hierarchy of the ontology by
 * searching for object and datatype properties that do not have any named
 * super property.
 *
 * @return all root properties of the hierarchy
 */
private ArrayList<OntProperty> getPropertyHeirarchyRoots(OntModel m) {

    ArrayList<OntProperty> roots = new ArrayList<OntProperty>();

    // the object-property and datatype-property scans are identical, so they
    // share one helper (previously the loop body was duplicated verbatim)
    addHierarchyRoots( m.listObjectProperties(), roots );    // OBJECT PROPERTIES
    addHierarchyRoots( m.listDatatypeProperties(), roots );  // DATATYPE PROPERTIES

    return roots; // all the heirarchy roots
}

/**
 * Helper for getPropertyHeirarchyRoots(): adds to roots every property from
 * propIter that has no named (non-anonymous) super property other than itself.
 */
private void addHierarchyRoots( ExtendedIterator propIter, ArrayList<OntProperty> roots ) {
    while( propIter.hasNext() ) {
        OntProperty property = (OntProperty) propIter.next();

        boolean isRoot = true;
        ExtendedIterator superPropItr = property.listSuperProperties();
        while( superPropItr.hasNext() ) {
            OntProperty superProperty = (OntProperty) superPropItr.next();
            if( !property.equals(superProperty) && !superProperty.isAnon() ) {
                // this property has a valid superclass, therefore it is not a root property
                superPropItr.close();
                isRoot = false;
                break;
            }
        }
        if( isRoot ) roots.add(property);
    }
}
/**
 * Builds the graph of mapping edges produced by a matcher.  The class alignments and
 * the property alignments are added through the same helper (the two handling blocks
 * were previously duplicated verbatim).
 *
 * @param m the matcher whose alignment sets are visualized
 * @return a MATCHER_GRAPH CanvasGraph containing one LegacyMapping edge per alignment
 */
@Override
public CanvasGraph buildMatcherGraph(AbstractMatcher m) {

	CanvasGraph matcherGraph = new CanvasGraph( GraphType.MATCHER_GRAPH, m.getID() );

	// Get the Class alignments.
	addMatchingsToGraph( m.getClassAlignmentSet(), m, matcherGraph );

	// Get the Properties alignments.
	addMatchingsToGraph( m.getPropertyAlignmentSet(), m, matcherGraph );

	return matcherGraph;
}

/**
 * Helper for buildMatcherGraph(): inserts one LegacyMapping edge into matcherGraph for
 * every alignment in the set whose two endpoints are known to the layout (present in
 * hashMap).  Alignments with an unknown endpoint are logged as warnings and skipped.
 *
 * @param matchings    the alignment set to translate into edges; may be null (no-op)
 * @param m            the matcher the alignments came from (provides the matcher ID)
 * @param matcherGraph the graph the mapping edges are inserted into
 */
private void addMatchingsToGraph( AlignmentSet<Alignment> matchings, AbstractMatcher m, CanvasGraph matcherGraph ) {
	if( matchings == null ) return;

	Iterator<Alignment> alignmentIter = matchings.iterator();
	while( alignmentIter.hasNext() ) {
		Alignment alignment = alignmentIter.next();

		// TODO: Make AbstractMatchers work on Resource instead of Node.
		OntResource e1 = (OntResource) alignment.getEntity1().getResource().as(OntResource.class);  // translate from Node to OntResource
		OntResource e2 = (OntResource) alignment.getEntity2().getResource().as(OntResource.class);  // translate from Node to OntResource

		if( hashMap.containsKey(e1) && hashMap.containsKey(e2) ) {
			// great, our hashmap contains both entities
			Canvas2Vertex n1 = hashMap.get(e1);
			Canvas2Vertex n2 = hashMap.get(e2);
			LegacyMapping edge = new LegacyMapping( n1, n2, null, m.getID(),  Utility.getNoDecimalPercentFromDouble(alignment.getSimilarity()) );
			matcherGraph.insertEdge(edge);
		} else {
			// the hashMap doesn't contain the source or the target node.
			// something is wrong.
			// no idea how to fix this problem.
			// log it
			Logger log = Logger.getLogger(this.getClass());
			log.setLevel(Level.WARN);
			if( !hashMap.containsKey(e1) ) log.warn("Cannot find OntResource: " + e1.toString() + ".  Node container is: " + alignment.getEntity1().toString() );
			if( !hashMap.containsKey(e2) ) log.warn("Cannot find OntResource: " + e2.toString() + ".  Node container is: " + alignment.getEntity2().toString() );
		}
	}
}
/*
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*************************************************** EVENT LISTENERS *************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*/
/**
 * Handle Viewport resize events here.
 * NOTE(review): this handler is currently a deliberate no-op — the resize logic
 * below is commented out and the method returns immediately.
 */
@Override
public void stateChanged(ChangeEvent e) {  // the state of the viewport has changed.

	return;  // TODO: Get this resizing working.
	/*
	JViewport vp = vizpanel.getViewport();
	Dimension vpSize = vp.getSize();
	if( oldViewportDimensions.height != vpSize.height || oldViewportDimensions.width != vpSize.width ) {
		// the size of the viewport has changed
		// need to know what's on the left side and what's on the right side
		ArrayList<CanvasGraph> ontologyGraphs = vizpanel.getOntologyGraphs();
		ArrayList<CanvasGraph> leftSideGraphs = GraphLocator.getGraphsByID( ontologyGraphs, leftOntologyID);
		ArrayList<CanvasGraph> rightSideGraphs = GraphLocator.getGraphsByID( ontologyGraphs, rightOntologyID);

		Rectangle leftBounds = CanvasGraph.getCombinedBounds(leftSideGraphs);  // the combined bounds of all the graphs on the left side
		Rectangle rightBounds = CanvasGraph.getCombinedBounds(rightSideGraphs); // the combined bounds of all the graphs on the right side

		int viewportHalfwidth = vp.getBounds().width / 2;

		int leftBoundsLeftmostX = leftBounds.x + leftBounds.width;
		if( leftBoundsLeftmostX > viewportHalfwidth ) {  // if the left graphs move past the midpoint of the viewport even if the
			// viewport were scrolled all the way to the left, then layout everything end to end
			// TODO: do this part
		} else {
			// we have space between the middle line and the left side.
			// line up the middle divider so it is at the middle of the canvas
			int newMiddleDividerX = leftBounds.x + (viewportHalfwidth - leftBoundsLeftmostX);  // this should be where the middle divider should be moved to
			if( newMiddleDividerX != middleDivider.getObject().x ) {  // do we have to move anything?
				// yes, we have to move everything over.
				int deltaX = newMiddleDividerX - middleDivider.getObject().x;
				middleDivider.move(deltaX, 0);
				Iterator<CanvasGraph> graphIter = rightSideGraphs.iterator();
				while( graphIter.hasNext() ) { graphIter.next().moveGraph(deltaX, 0); }  // move all the rightside graphs over by deltaX
			}
		}
	}
	*/
}
/**
 * MOUSE EVENT listener functions
 */

/**
 * Dispatches single and double clicks of the three mouse buttons.
 * Relies on the hover tracking done in mouseMoved(): "hoveringOver" is the node the
 * cursor is currently over (or null for empty space).  Behavior summary, grounded in
 * the switch below:
 *  - LEFT double click: enable the SingleMappingView for the hovered node.
 *  - LEFT single click: select/deselect nodes, or open the CreateMappingMenu when the
 *    clicked node is in the other ontology than the current selection.
 *  - MIDDLE single click: print debugging info about the hovered node.
 *  - RIGHT single click: open the DeleteMappingMenu for the hovered node.
 * An open popup menu swallows the first click (it only cancels the menu).
 */
@Override
public void mouseClicked( MouseEvent e ) {
	// BUTTON1 = Left Click Button, BUTTON2 = Middle Click Button, BUTTON3 = Right Click Button

	Graphics g = vizpanel.getGraphics();   // used for any redrawing of nodes
	ArrayList<Canvas2Vertex> visibleVertices = vizpanel.getVisibleVertices();

	Logger log = Logger.getLogger(this.getClass());
	if( Core.DEBUG ) log.setLevel(Level.DEBUG);

	if( PopupMenuActive ) {  // if we have an active popup menu, cancel it
		PopupMenuActive = false;
		if( hoveringOver != null ) {
			hoveringOver.setHover(false);
			hoveringOver.draw(g);
			hoveringOver = null;  // clear the hover target, since the click can be anywhere and we didn't check again what we're hovering over
		}
	} else {
		// only process mouse clicks if there's not a popup menu active
		switch( e.getButton() ) {

			// because of the way Java (and most any platform) handles the difference between single and double clicks,
			// the single click action must be "complementary" to the double click action, as when you double click a
			// single click is always fired just before the double click is detected.
			// There is no way around this.  A single click event will *always* be fired just before a double click.

			// So then:
			//		- LEFT button SINGLE click = select NODE (or deselect if clicking empty space)
			//		- LEFT button DOUBLE click = line up two nodes by their mapping (do nothing if it's empty space)<- TODO

			// Jan 29, 2010 - Cosmin
			//   Ok now, we are adding menu support:
			//      1. User must single left click to select a node in one ontology graph, in order to select that node.
			//      2. User must single left click a node in the OTHER ontology graph in order to cause a menu to come up.
			//         If the user clicks a node in the same ontology, this new node becomes the selected node.
			//      These actions should work with MULTIPLE selections (using the Control key).

			// Feb 13th, 2010 - Cosmin
			//   Adding rightclick menu for deleting mappings.

			// June 17th, 2010 - Cosmin
			//   Added the SingleMappingView to replace SMO.  Activated by doubleclicking a node.

			case MouseEvent.BUTTON1:
				if( e.getClickCount() == 2 ) {  // double click with the left mouse button
					if( Core.DEBUG) log.debug("Double click with the LEFT mouse button detected.");
					//do stuff
					if( hoveringOver != null && SingleMappingView != true ) {
						enableSingleMappingView();
						vizpanel.repaint();
					}
				} else if( e.getClickCount() == 1 ) {  // single click with left mouse button
					if( SingleMappingView == true ) {
						// if we don't click on anything, cancel the single mapping view
						// restore the previous visibility of the nodes and edges
						if( hoveringOver == null ) {
							disableSingleMappingView();
							vizpanel.repaint();
						} else {
							// we doubleclicked on another node.
							disableSingleMappingView();
							// move the viewpane to the new node
							//vizpanel.getScrollPane().scrollRectToVisible( new Rectangle(0, vizpanel.getScrollPane().getSize().height, 1, 1) );
							vizpanel.getScrollPane().getViewport().setViewPosition( new Point(vizpanel.getScrollPane().getViewport().getLocation().x,
									 hoveringOver.getBounds().y - vizpanel.getScrollPane().getViewport().getHeight()/2 ));
							//System.out.print( "Moving viewport to: " + hoveringOver.getBounds().toString() );
							hoveringOver = null;
							vizpanel.repaint();
						}
					}

					if( hoveringOver == null ) {
						// we have clicked in an empty area, clear all the selected nodes
						Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
						while( nodeIter.hasNext() ) {
							LegacyNode selectedNode = nodeIter.next();
							selectedNode.setSelected(false); // deselect the node
							if( visibleVertices.contains( (Canvas2Vertex) selectedNode ) ) {
								// redraw only if it's currently visible
								//selectedNode.clearDrawArea(g);
								selectedNode.draw(g);
							}
						}
						selectedNodes.clear();
					} else {
						// user clicked over a node.

						// is it a node in the OTHER ontology?
						if( getSelectedNodesOntology() != Core.ID_NONE && getSelectedNodesOntology() != hoveringOver.getGraphicalData().ontologyID ) {
							// yes it is in the other ontology
							// bring up the Mapping Popup Menu, so the user can make a mapping
							CreateMappingMenu menuCreate = new CreateMappingMenu( this );
							menuCreate.show( vizpanel, e.getX(), e.getY());
							PopupMenuActive = true;
						} else {
							// the nodes are in the same ontology
							// we either add to the selection, or clear it and select the node that was just clicked
							if( e.isControlDown() ) {
								// if the user control clicked (CTRL+LEFTCLICK), we have to add this node to the list of selected nodes.
								if( selectedNodes.contains(hoveringOver) ) { // if it already is in the list, remove it
									selectedNodes.remove(hoveringOver);
									hoveringOver.setSelected(false);
								} else { // it's not in the list already, add it
									hoveringOver.setSelected(true);
									selectedNodes.add((LegacyNode) hoveringOver);
								}

								//hoveringOver.clearDrawArea(g);
								hoveringOver.draw(g);
							} else { // control is not pressed, clear any selections that there may be, and select single node

								Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
								while( nodeIter.hasNext() ) {
									LegacyNode selectedNode = nodeIter.next();
									selectedNode.setSelected(false); // deselect the node
									if( visibleVertices.contains( (Canvas2Vertex) selectedNode ) ) {
										// redraw only if it's currently visible
										//selectedNode.clearDrawArea(g);
										selectedNode.draw(g);
									}
								}
								selectedNodes.clear();

								// select single node
								hoveringOver.setSelected(true);
								selectedNodes.add( (LegacyNode)hoveringOver);

								//hoveringOver.clearDrawArea(g);
								hoveringOver.draw(g);
							}
						}
					}
				}
				break;

			case MouseEvent.BUTTON2:
				if( e.getClickCount() == 2 ) {
					// double click with the middle mouse button.
					log.debug("Double click with the MIDDLE mouse button detected.");
					//do stuff
				} else if( e.getClickCount() == 1 ) {
					// middle click, print out debugging info
					if( hoveringOver != null ) {  // relying on the hover code in MouseMove
						log.debug("\nResource: " + hoveringOver.getObject().r +
								"\nHashCode: " + hoveringOver.getObject().r.hashCode());
						log.debug("\nPosition" + e.getPoint().toString() );
					}
					//log.debug("Single click with the MIDDLE mouse button detected.");
				}
				break;

			case MouseEvent.BUTTON3:
				if( e.getClickCount() == 2 ) {
					// double click with the right mouse button.
					if( Core.DEBUG ) log.debug("Double click with the RIGHT mouse button detected.");
					//do stuff
				} else if( e.getClickCount() == 1 ) {
					// single right click, bring up delete menu
					if( hoveringOver != null ) {
						DeleteMappingMenu menuDelete = new DeleteMappingMenu( this, hoveringOver.getMappings() );
						menuDelete.show( vizpanel, e.getX(), e.getY());
						PopupMenuActive = true;
					}
					if( Core.DEBUG ) log.debug("Single click with the RIGHT mouse button detected.");
				}
				break;
		}
	}

	g.dispose(); // dispose of this graphics element, we don't need it anymore
}
/**
 * Tears down the SingleMappingView: restores the saved visibility of every vertex
 * and edge, returns the nodes that were lined up next to the doubleclicked node to
 * their saved positions, refreshes the affected mapping edges, and recalculates the
 * bounds of all graphs (the moves may have changed them).
 */
private void disableSingleMappingView() {
	// restore the pushed visibility state of every vertex and edge, in every graph
	for( Iterator<CanvasGraph> graphIterator = vizpanel.getGraphs().iterator(); graphIterator.hasNext(); ) {
		CanvasGraph currentGraph = graphIterator.next();

		Iterator<Canvas2Vertex> vertexIterator = currentGraph.vertices();
		while( vertexIterator.hasNext() ) {
			vertexIterator.next().popVisibility();
		}

		Iterator<Canvas2Edge> edgeIterator = currentGraph.edges();
		while( edgeIterator.hasNext() ) {
			edgeIterator.next().popVisibility();
		}
	}

	// put the nodes that were moved for the view back to their saved positions
	for( Iterator<LegacyNode> movedIterator = SingleMappingMovedNodes.iterator(); movedIterator.hasNext(); ) {
		movedIterator.next().popXY();
	}
	SingleMappingMovedNodes.clear();

	// the mapping edges need their endpoints recomputed now that the nodes moved back
	for( Iterator<LegacyMapping> mappingIterator = SingleMappingMappings.iterator(); mappingIterator.hasNext(); ) {
		mappingIterator.next().updateBounds();
	}
	SingleMappingMappings.clear();

	SingleMappingView = false;  // turn off the singlemappingview

	// because we have moved nodes, the bounds of the graphs have changed;
	// update the bounds of all the graphs.
	// TODO: This should not update ALL the graphs, but only the ones that contain the nodes that have moved.
	for( Iterator<CanvasGraph> boundsIterator = vizpanel.getGraphs().iterator(); boundsIterator.hasNext(); ) {
		boundsIterator.next().recalculateBounds();
	}
}
/**
 * This method activates the SingleMappingView after the user doubleclicks a concept.
 * It (1) pushes all vertices and edges invisible, (2) re-shows the selected nodes plus
 * everything directly connected to them, (3) collects the LegacyMapping edges touching
 * the selection, and (4) moves the node on the far end of each mapping so the mapped
 * concepts line up vertically next to the selection.  disableSingleMappingView()
 * undoes all of this via the pushed visibility/position state.
 */
private void enableSingleMappingView() {
	// Activate the SingleMappingView
	SingleMappingView = true;

	// turn off the visibility of all the nodes and edges
	Iterator<CanvasGraph> graphIter = vizpanel.getGraphs().iterator();
	while( graphIter.hasNext() ) {
		CanvasGraph graph = graphIter.next();

		// hide the vertices
		Iterator<Canvas2Vertex> nodeIter = graph.vertices();
		while( nodeIter.hasNext() ) {
			Canvas2Vertex node = nodeIter.next();
			node.pushVisibility(false);
		}

		// hide the edges
		Iterator<Canvas2Edge> edgeIter = graph.edges();
		while( edgeIter.hasNext() ) {
			Canvas2Edge edge = edgeIter.next();
			edge.pushVisibility(false);
		}
	}

	// now that all of the nodes and edges have been hidden, show only the ones we want to see
	// we will show all edges connected to the selectedNodes, and all nodes connected to the edges of the selectedNodes
	Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
	while( nodeIter.hasNext() ) {
		LegacyNode selectedNode = nodeIter.next();
		selectedNode.setVisible(true);
		selectedNode.setSelected(false); // unselect the nodes

		// show every incoming edge and the vertex on its other end
		Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = selectedNode.edgesIn();
		while( edgeInIter.hasNext() ) {
			Canvas2Edge connectedEdge = (Canvas2Edge) edgeInIter.next();
			connectedEdge.setVisible(true);
			if( selectedNode == connectedEdge.getOrigin() ) { ((Canvas2Vertex)connectedEdge.getDestination()).setVisible(true); }
			else { ((Canvas2Vertex)connectedEdge.getOrigin()).setVisible(true); }
		}

		// show every outgoing edge and the vertex on its other end
		Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = selectedNode.edgesOut();
		while( edgeOutIter.hasNext() ) {
			Canvas2Edge connectedEdge = (Canvas2Edge) edgeOutIter.next();
			connectedEdge.setVisible(true);
			if( selectedNode == connectedEdge.getOrigin() ) { ((Canvas2Vertex)connectedEdge.getDestination()).setVisible(true); }
			else { ((Canvas2Vertex)connectedEdge.getOrigin()).setVisible(true); }
		}
	}

	// we need to move the opposite side up to the side we clicked
	//ArrayList<LegacyMapping> mappingList = new ArrayList<LegacyMapping>();  // we have to keep a list of all the mappings to/from this node
	int uppermostY = -1;  // -1 is a dummy value.  Valid values are >= 0.
	Iterator<LegacyNode> nodeIter2 = selectedNodes.iterator();
	while( nodeIter2.hasNext() ) {
		LegacyNode selectedNode = nodeIter2.next();

		// update the uppermostY (the topmost y coordinate among the selected nodes)
		if( uppermostY < 0 || selectedNode.getObject().y < uppermostY ) {
			uppermostY = selectedNode.getObject().y;
		}

		// update the mappingList: collect all LegacyMapping edges touching this node
		Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = selectedNode.edgesIn();
		while( edgeInIter.hasNext() ) {
			DirectedGraphEdge<GraphicalData> connectedEdge = edgeInIter.next();
			if( connectedEdge instanceof LegacyMapping ) {
				SingleMappingMappings.add( (LegacyMapping) connectedEdge );
			}
		}

		Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = selectedNode.edgesOut();
		while( edgeOutIter.hasNext() ) {
			DirectedGraphEdge<GraphicalData> connectedEdge = edgeOutIter.next();
			if( connectedEdge instanceof LegacyMapping) {
				SingleMappingMappings.add( (LegacyMapping) connectedEdge );
			}
		}
	}

	// now we must move the mappings to the uppermostY.
	for( int i = 0; i < SingleMappingMappings.size(); i++ ) {
		// nodeheight marginbottom
		LegacyMapping currentMapping = SingleMappingMappings.get(i);

		if( selectedNodes.contains( currentMapping.getOrigin()) ) {
			// we doubleclicked on the origin of the mapping, so move the destination up.
			LegacyNode destinationNode = (LegacyNode) currentMapping.getDestination();
			destinationNode.pushXY( destinationNode.getGraphicalData().x , uppermostY + i*(nodeHeight+marginBottom) );
			SingleMappingMovedNodes.add(destinationNode);
			vizpanel.getVisibleVertices().add(destinationNode);
		} else {
			// we doubleclicked on the destination of the mapping, therefore we move the origin up
			LegacyNode originNode = (LegacyNode) currentMapping.getOrigin();
			originNode.pushXY( originNode.getGraphicalData().x , uppermostY + i*(nodeHeight+marginBottom) );
			SingleMappingMovedNodes.add(originNode);
			vizpanel.getVisibleVertices().add(originNode);
		}

		// update the bounds of the node.
		currentMapping.updateBounds();
	}

	selectedNodes.clear();

	// because we have moved nodes, the bounds of the graphs have changed.
	// Update the bounds of all the graphs.
	// TODO: This should not update ALL the graphs, but only the ones that contain the nodes that have moved.
	ArrayList<CanvasGraph> graphs = vizpanel.getGraphs();
	graphIter = graphs.iterator();
	while( graphIter.hasNext() ) {
		CanvasGraph g = graphIter.next();
		g.recalculateBounds();
	}
}
// The node the mouse cursor is currently over, or null for empty space.
// Written here, read by mouseClicked() and actionPerformed().
private Canvas2Vertex hoveringOver;

/**
 * Tracks which LegacyNode the cursor is over and paints/clears the hover highlight.
 * Only one node can be hovered at a time; moving off a node (or onto a different
 * one) un-hovers and redraws the previous node first.  Does nothing while a popup
 * menu is open, so the menu isn't painted over.
 */
@Override
public void mouseMoved(MouseEvent e) {

	// don't redraw over a popupmenu
	if( PopupMenuActive ) { return; }

	Graphics g = vizpanel.getGraphics();
	ArrayList<Canvas2Vertex> visibleVertices = vizpanel.getVisibleVertices();
	Iterator<Canvas2Vertex> vertIter = visibleVertices.iterator();
	boolean hoveringOverEmptySpace = true;
	while( vertIter.hasNext() ) {
		Canvas2Vertex vertex = vertIter.next();
		if( vertex instanceof LegacyNode )    // we only care about legacy nodes (for now)
		if( vertex.contains(e.getPoint()) ) {
			// we are hovering over vertex
			hoveringOverEmptySpace = false;
			// first, remove the hover from the last element we were hovering over
			if( hoveringOver == vertex ) {
				// we are still hoovering over this element, do nothing
				break;
			} else if( hoveringOver != null ) {
				// we had been hovering over something, but now we're not
				hoveringOver.setHover(false);
				//hoveringOver.clearDrawArea(g);
				hoveringOver.draw(g);
			}
			hoveringOver = vertex;
			hoveringOver.setHover(true);
			//hoveringOver.clearDrawArea(g);

			// redraw all the edges connected to this node.
			Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = hoveringOver.edgesIn();
			while( edgeInIter.hasNext() ) { ((Canvas2Edge)edgeInIter.next()).draw(g); }

			Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = hoveringOver.edgesOut();
			while( edgeOutIter.hasNext() ) { ((Canvas2Edge)edgeOutIter.next()).draw(g); }

			hoveringOver.draw(g);
			break;
		}

	}

	if( hoveringOverEmptySpace && hoveringOver != null) {
		// clear the hover
		hoveringOver.setHover(false);
		//hoveringOver.clearDrawArea(g);
		hoveringOver.draw(g);
		hoveringOver = null;
	}

	g.dispose();
}
/**
 * Handles the action commands fired by the CreateMappingMenu popup.  Depending on the
 * command, it may prompt the user for a similarity value and/or a relation type, then
 * builds one Alignment per currently selected node (paired with the hovered node in the
 * other ontology) and hands them to the control panel as user matchings.
 *
 * BUGFIX: action-command strings were previously compared with '==' (reference
 * identity).  That only works by accident when both sides are interned literals;
 * equals() is the correct value comparison.  Also removed a stray double semicolon.
 *
 * @param e the menu ActionEvent; its action command selects the mapping relation
 */
@Override
public void actionPerformed(ActionEvent e) {
	String actionCommand = e.getActionCommand();

	// these commands are from the Create Mappings popup menu
	if( "CREATE_DEFAULT".equals(actionCommand) ||
		"CREATE_EQUIVALENCE".equals(actionCommand) ||
		"CREATE_SUBSET".equals(actionCommand) ||
		"CREATE_SUBSETCOMPLETE".equals(actionCommand) ||
		"CREATE_SUPERSET".equals(actionCommand) ||
		"CREATE_SUPERSETCOMPLETE".equals(actionCommand) ||
		"CREATE_OTHER".equals(actionCommand) ) {

		String relation = Alignment.EQUIVALENCE;
		double sim = 0;
		ArrayList<Alignment> userMappings = new ArrayList<Alignment>();

		if( "CREATE_DEFAULT".equals(actionCommand) ) {
			relation = Alignment.EQUIVALENCE;
			sim = 1.0d;
		} else {
			// ask the user for the similarity value
			boolean correct = false;
			boolean abort = false;
			while(!correct && !abort) {
				String x = JOptionPane.showInputDialog(null, "Insert the similarity value.\nInsert a number between 0 and 100 using only numeric digits.\n Warning: the similarity should be higher than the threshold value.\nIf not, the similarity matrix will be modified but the alignment won't be selected and visualized.");
				try {
					if(x == null)
						abort = true;//USER SELECTED CANCEL
					else {
						sim = Double.parseDouble(x);
						if(sim >= 0 && sim <= 100) {
							correct = true;
							sim = sim/100;  // the dialog takes a percentage; alignments use [0,1]
						}
					}
				}
				catch(Exception ex) {//WRONG INPUT, ASK INPUT AGAIN
				}
			}
		}

		if( "CREATE_OTHER".equals(actionCommand) ){
			// ask the user for a free-form relation string
			boolean correct = false;
			boolean abort = false;
			while(!correct && !abort) {
				String x = JOptionPane.showInputDialog(null, "Insert the relation type:");
				try {
					if(x == null)
						abort = true;//USER SELECTED CANCEL
					else {
						relation = x;
						correct = true;
					}
				}
				catch(Exception ex) {//WRONG INPUT, ASK INPUT AGAIN
				}
			}
		}

		if( "CREATE_EQUIVALENCE".equals(actionCommand) ) relation = Alignment.EQUIVALENCE;
		if( "CREATE_SUBSET".equals(actionCommand) ) relation = Alignment.SUBSET;
		if( "CREATE_SUBSETCOMPLETE".equals(actionCommand) ) relation = Alignment.SUBSETCOMPLETE;
		if( "CREATE_SUPERSET".equals(actionCommand) ) relation = Alignment.SUPERSET;
		if( "CREATE_SUPERSETCOMPLETE".equals(actionCommand) ) relation = Alignment.SUPERSETCOMPLETE;

		// **************** create the alignments

		Iterator<LegacyNode> nodeIter = selectedNodes.iterator();

		// what type of nodes are we mapping
		alignType type = null;
		if( hoveringOver.getGraphicalData().type == NodeType.CLASS_NODE ) {
			type = AbstractMatcher.alignType.aligningClasses;
		} else if( hoveringOver.getGraphicalData().type == NodeType.PROPERTY_NODE ) {
			type = AbstractMatcher.alignType.aligningProperties;
		}

		// this is a little bit of a mess, but we have to support legacy code (meaning the Alignment class) - 1/29/2010 Cosmin
		Ontology o2 = Core.getInstance().getOntologyByID( hoveringOver.getGraphicalData().ontologyID );
		Node n2 = null;
		try {
			n2 = o2.getNodefromOntResource( hoveringOver.getGraphicalData().r, type );
		} catch (Exception e1) {
			// TODO Auto-generated catch block
			e1.printStackTrace();
		}

		while( nodeIter.hasNext() ) {
			LegacyNode ln = nodeIter.next();

			// again, this is necessary in order to be compatible with the way the Alignment class is at the moment - 1/29/2010 Cosmin
			Ontology o1 = Core.getInstance().getOntologyByID( ln.getGraphicalData().ontologyID );
			Node n1 = null;
			try {
				n1 = o1.getNodefromOntResource( ln.getGraphicalData().r, type );
			} catch (Exception e1) {
				// TODO Auto-generated catch block
				e1.printStackTrace();
			}

			Alignment a;
			if( ln.getGraphicalData().ontologyID == leftOntologyID ) {  // this if statement fixes a small display bug
				a = new Alignment( n1, n2, sim, relation, type);
			} else {
				a = new Alignment( n2, n1, sim, relation, type);
			}

			userMappings.add(a);
		}

		// add the mappings created to the user
		Core.getUI().getControlPanel().userMatching(userMappings);

		PopupMenuActive = false;  // the popup menu goes away when something is clicked on it
	}
}
/**
 * Determines which ontology the currently selected nodes belong to.
 *
 * @return the shared ontology ID of all nodes in selectedNodes, or Core.ID_NONE when
 *         the selection is empty or spans more than one ontology (which should not
 *         happen: a cross-ontology click pops up the menu and clears the selection).
 */
private int getSelectedNodesOntology() {
	Iterator<LegacyNode> selectionIterator = selectedNodes.iterator();

	if( !selectionIterator.hasNext() ) return Core.ID_NONE;  // empty selection

	// take the ontology of the first selected node as the reference
	int ontologyID = selectionIterator.next().getGraphicalData().ontologyID;

	// every remaining node must agree with it
	while( selectionIterator.hasNext() ) {
		if( selectionIterator.next().getGraphicalData().ontologyID != ontologyID ) {
			return Core.ID_NONE;  // mixed selection
		}
	}

	return ontologyID;
}
public boolean isSingleMappingView() { return SingleMappingView; }
}
| AgreementMaker/src/am/userInterface/canvas2/layouts/LegacyLayout.java | package am.userInterface.canvas2.layouts;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Map.Entry;
import javax.swing.JOptionPane;
import javax.swing.event.ChangeEvent;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import com.hp.hpl.jena.ontology.ConversionException;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.OntResource;
import com.hp.hpl.jena.ontology.OntTools;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import am.Utility;
import am.app.Core;
import am.app.mappingEngine.AbstractMatcher;
import am.app.mappingEngine.Alignment;
import am.app.mappingEngine.AlignmentSet;
import am.app.mappingEngine.AbstractMatcher.alignType;
import am.app.ontology.Node;
import am.app.ontology.Ontology;
import am.userInterface.canvas2.Canvas2;
import am.userInterface.canvas2.graphical.GraphicalData;
import am.userInterface.canvas2.graphical.RectangleElement;
import am.userInterface.canvas2.graphical.TextElement;
import am.userInterface.canvas2.graphical.GraphicalData.NodeType;
import am.userInterface.canvas2.nodes.GraphicalNode;
import am.userInterface.canvas2.nodes.LegacyEdge;
import am.userInterface.canvas2.nodes.LegacyMapping;
import am.userInterface.canvas2.nodes.LegacyNode;
import am.userInterface.canvas2.popupmenus.CreateMappingMenu;
import am.userInterface.canvas2.popupmenus.DeleteMappingMenu;
import am.userInterface.canvas2.utility.Canvas2Edge;
import am.userInterface.canvas2.utility.Canvas2Layout;
import am.userInterface.canvas2.utility.Canvas2Vertex;
import am.userInterface.canvas2.utility.CanvasGraph;
import am.userInterface.canvas2.utility.GraphLocator;
import am.userInterface.canvas2.utility.GraphLocator.GraphType;
import am.userInterface.vertex.Vertex;
import am.utility.DirectedGraphEdge;
/**
 * This layout is responsible for the placement of nodes and edges.
 * The layout algorithm is the same one as the original Canvas class.
 *
 * @author cosmin
 *
 */
public class LegacyLayout extends Canvas2Layout {
/* FLAGS, and SETTINGS */
private boolean showLocalName = true;
private boolean showLabel     = false;
//	private String  language = "EN";
private String labelAndNameSeparator = " || ";

private boolean[] pixelColumnDrawn;  // used for a very special hack in LegacyEdge.draw();  Read about it in that method.
										// It's done in order to avoid unnecessary draws() and speed up the paint() function.
private Rectangle pixelColumnViewport;  // required to know the correct index in the pixelColumnDrawn array.

private HashMap<OntResource,LegacyNode> hashMap; // used in the graph building to avoid visiting the same nodes twice
private LegacyNode anonymousNode;  // shared placeholder node mapped to anonymous ontology resources

private Dimension oldViewportDimensions;  // this variable is used in the stateChanged handler.

/**
 * Graph Building Variables.
 */
// pixel offsets and sizes used when laying out the ontology trees
private int subgraphXoffset = 20;
private int subgraphYoffset = 20;
private int depthIndent = 20;      // horizontal indent per hierarchy level
private int marginBottom = 5;      // vertical gap between stacked nodes
private int nodeHeight = 20;

private int leftGraphX = 20;
private int leftGraphY = 42;
private int rightGraphX = 500;
private int rightGraphY = 42;
private int middleDividerLeftMargin = 10;
private int middleDividerWidth = 1;
private int topDividerTopMargin = 20;

// layout artifact elements: the dividers and the ontology title texts
private GraphicalNode middleDivider;
private GraphicalNode topDivider;
private GraphicalNode sourceOntologyText;
private GraphicalNode targetOntologyText;
private OntClass owlThing;  // cached owl:Thing class, used as the hierarchy root

// which sides of the canvas are occupied, and by which ontology
private boolean leftSideLoaded  = false;
private boolean rightSideLoaded = false;
private boolean leftSide;
private int leftOntologyID = Ontology.ID_NONE;  // the ontology ID of the graphs on the left side of the canvas layout
private int rightOntologyID = Ontology.ID_NONE; // the ontology ID of the graphs on the right side of the canvas layout

/** Mouse Event handlers Variables */
private ArrayList<LegacyNode> selectedNodes;  // the list of currently selected nodes
private boolean PopupMenuActive = false;

private boolean SingleMappingView = false;  // this is used when a mapping is doubleclicked with the left mouse button
											// in order to show only that specific mapping
private ArrayList<LegacyMapping> SingleMappingMappings = new ArrayList<LegacyMapping>();  // we need to keep a list of the mappings we change for the SingleMappingView
private ArrayList<LegacyNode>   SingleMappingMovedNodes = new ArrayList<LegacyNode>();    // we need to keep a list of the nodes we moved
/**
 * Builds the layout for the given canvas: initializes the visited-node map and the
 * selection list, and constructs the artifact graph (dividers and ontology titles).
 *
 * @param vp the Canvas2 panel this layout draws into
 */
public LegacyLayout(Canvas2 vp) {
	super(vp);
	hashMap = new HashMap<OntResource, LegacyNode>();
	oldViewportDimensions = new Dimension(0,0);

	layoutArtifactGraph = buildArtifactGraph();  // build the artifact graph
	selectedNodes = new ArrayList<LegacyNode>();
}
/**
 * This function sets up the pixel column array.
 * Called once before each repaint; allocates one boolean per pixel column of the
 * viewport (all false) and remembers the viewport rectangle so canvas x coordinates
 * can be translated to array indices.
 */
@Override
public void getReadyForRepaint(Rectangle viewport) {
	pixelColumnDrawn = new boolean[viewport.width]; // the array has one entry for every pixel column of the viewport.  Values initialize to false.
	pixelColumnViewport = viewport;
}
/**
 * When a column of the viewport has been drawn in, set the corresponding array entry to true.
 * That way, it doesn't get draw by every edge that shares that column.
 * NOTE(review): canvasColNum is assumed to fall inside the current viewport — there is
 * no bounds check, so an out-of-viewport column would throw ArrayIndexOutOfBoundsException.
 * TODO: confirm callers guarantee the range.
 */
public void setPixelColumnDrawn(int canvasColNum) {
	int viewportColNum = canvasColNum - pixelColumnViewport.x - 1;  // translate the canvasColNum to an array index
	pixelColumnDrawn[viewportColNum] = true;  /* column has been filled in. */
}
/**
 * LegacyEdge needs to know if a column has been drawn already.
 * @param canvasColNum the pixel column, in canvas coordinates
 * @return true if that viewport column was already filled in this repaint pass;
 *         false for columns outside the current viewport
 */
public boolean isPixelColumnDrawn(int canvasColNum) {
	int viewportColNum = canvasColNum - pixelColumnViewport.x - 1;
	// columns outside the viewport were never drawn; avoid an out-of-bounds access
	if( viewportColNum < 0 || viewportColNum >= pixelColumnDrawn.length ) return false;
	return pixelColumnDrawn[viewportColNum];
}
/** @return the viewport rectangle captured by getReadyForRepaint(); LegacyEdge uses it to translate canvas columns. */
public Rectangle getPixelColumnViewport() { return pixelColumnViewport; }  // edge needs to know the viewport
/**
 * Utility function called by the LegacyNodes to compute their display text,
 * honoring the current showLabel / showLocalName preferences.
 * @param d the graphical data of the node being labeled
 * @return the node's display string; empty when neither preference applies
 *         or when a non-text node carries no ontology resource
 */
@Override
public String getNodeLabel(GraphicalData d ) {
	// nodes without an ontology resource: only text elements have something to show
	if( d.r == null ) {
		if( d.type == NodeType.TEXT_ELEMENT ) return ((TextElement)d).getText();
		return "";
	}

	// guard-clause form of the label/localname preference matrix
	if( showLabel && showLocalName ) return d.r.getLocalName() + labelAndNameSeparator + d.r.getLabel(null);
	if( showLabel )                  return d.r.getLabel(null);
	if( showLocalName )              return d.r.getLocalName();
	return "";
}
// Preference accessors: control whether nodes display the rdfs:label, the local name, or both (see getNodeLabel()).
@Override public void setShowLabel(boolean shL ) { showLabel = shL; }
@Override public void setShowLocalName( boolean shLN ) { showLocalName = shLN; }
@Override public boolean getShowLabel() { return showLabel; }
@Override public boolean getShowLocalName() { return showLocalName; }
public int getDepthIndent() { return depthIndent; }  // Getter: horizontal pixel indent applied per tree depth level.
/**
 * @return true while at least one side of the canvas (left or right) is still free.
 */
@Override
public boolean canDisplayMoreOntologies() {
	// room remains unless both halves of the canvas are occupied
	return !( leftSideLoaded && rightSideLoaded );
}
/**
 * Marks one side of the canvas as occupied by the given ontology and makes
 * all of that ontology's graphs visible.
 * @param graphRepository all graphs known to the canvas
 * @param ontologyID the ontology whose graphs should be shown
 */
@Override
public void displayOntology( ArrayList<CanvasGraph> graphRepository, int ontologyID) {
	//TODO: Do graph positioning here, depending on which side is loaded into the layout.
	//      Right now we assume the user will load source then target, in that order.

	// record which side of the canvas this ontology occupies (the left side fills first)
	if( !leftSideLoaded ) {
		leftSideLoaded = true;
		leftOntologyID = ontologyID;
	} else if( !rightSideLoaded ) {
		rightSideLoaded = true;
		rightOntologyID = ontologyID;
	}

	// make every graph belonging to this ontology visible
	for( CanvasGraph graph : GraphLocator.getGraphsByID(graphRepository, ontologyID) ) {
		graph.setVisible(true);
	}
}
/**
 * Removes an ontology from the layout: drops its graphs (and every matcher
 * graph, since matchers reference nodes of both ontologies), frees whichever
 * side of the canvas it occupied, and purges its entries from the hashMap.
 * @param graphs the canvas graph repository, modified in place
 * @param ontologyID the ontology being removed
 */
@Override
public void removeOntology( ArrayList<CanvasGraph> graphs, int ontologyID ) {
	// iterate backwards so index-based removal stays valid
	for( int i = graphs.size() - 1; i >= 0; i-- ) {
		CanvasGraph gr = graphs.get(i);
		if( gr.getGraphType() == GraphLocator.GraphType.MATCHER_GRAPH || gr.getID() == ontologyID ) {
			gr.detachEdges();  // we must detach the graph from other visible graphs before removing.
			graphs.remove(i);
		}
	}

	// free whichever side of the canvas held this ontology
	if( leftOntologyID == ontologyID )  { leftOntologyID = Core.ID_NONE;  leftSideLoaded = false;  }
	if( rightOntologyID == ontologyID ) { rightOntologyID = Core.ID_NONE; rightSideLoaded = false; }

	// Collect the hashMap keys belonging to this ontology first, then remove them;
	// we cannot remove entries while iterating over the entry set.
	ArrayList<OntResource> toBeRemoved = new ArrayList<OntResource>();
	for( Entry<OntResource,LegacyNode> entry : hashMap.entrySet() ) {
		if( entry.getValue().getObject().ontologyID == ontologyID ) {
			toBeRemoved.add( entry.getKey() );
		}
	}

	for( OntResource res : toBeRemoved ) {
		hashMap.remove( res );
	}
}
/*
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
************************************************ GRAPH BUILDING METHODS **********************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*/
/**
 * This function will build the global graph for an ontology (classes graph, properties graph, all under one global graph).
 * It does different things for different types of ontologies (OWL,RDFS,XML), but in the end it should all be under the
 * main ontology graph, which is the return value.
 *
 * This includes
 * 		- build the Class graph.
 * 		- build the Properties graph.
 * 		- Individuals? TODO
 *
 * @param ont The Ontology that we are building the visualization graphs for.
 * @return the list of graphs created: [classes graph, properties graph, global root graph],
 *         or an empty list when both sides of the canvas are already occupied
 */
@Override
public ArrayList<CanvasGraph> buildGlobalGraph( Ontology ont ) {

	ArrayList<CanvasGraph> ontologyGraphs = new ArrayList<CanvasGraph>();

	// Before we build the graph, update the preferences.
	showLabel = Core.getUI().getAppPreferences().getShowLabel();
	showLocalName = Core.getUI().getAppPreferences().getShowLocalname();

	// decide which half of the canvas this ontology occupies (left fills first)
	if( !leftSideLoaded )  // source goes on the left.
		leftSide = true;
	else if( !rightSideLoaded )
		leftSide = false;
	else
		return ontologyGraphs;  // we have ontologies loaded on both sides, do nothing for now.

	if( leftSide ) {  // we're loading a graph on the left side of the canvas
		subgraphXoffset = leftGraphX + depthIndent;
		subgraphYoffset = leftGraphY + nodeHeight + marginBottom;
	} else {  // we're loading a graph on the right side of the canvas.
		rightGraphX = middleDivider.getObject().x + 10;  // right graphs start just right of the middle divider
		subgraphXoffset = rightGraphX + depthIndent;
		subgraphYoffset = rightGraphY + nodeHeight + marginBottom;
	}

	/*
	if( ont.getLanguage().equals("XML") ) {
		// build the visualization from an XML ontology
		// an XML ontology will only have classes.

		CanvasGraph classesGraph = new CanvasGraph( GraphLocator.GraphType.CLASSES_GRAPH, ont.getID() );
		Vertex classRootVertex = ont.getClassesTree();

		// Create the root LegacyNode in order to call the recusive method correctly
		TextElement gr = new TextElement(0*depthIndent + subgraphXoffset,
				classesGraph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
				0, nodeHeight, this, classesGraph.getID() );
		gr.setText("XML Classes Hierarchy");
		LegacyNode classRootNode = new LegacyNode( gr );
		classesGraph.insertVertex(classRootNode);

		recursiveBuildClassGraphXML( classRootVertex, classRootNode, classesGraph, 1 );

	} else */

	// if ( ont.getLanguage().equals("OWL") || ont.getLanguage().endsWith("XML") ) {

	OntModel m = ont.getModel();
	if( m == null ) {
		// this is an ontology that is not loaded by jena
	} else {
		owlThing = m.getOntClass( OWL.Thing.getURI() );  // cache owl:Thing for later lookups
	}

	CanvasGraph classesGraph = new CanvasGraph( GraphLocator.GraphType.CLASSES_GRAPH, ont.getID() );
	// placeholder node used as the hashMap value for anonymous classes/properties (cycle avoidance)
	anonymousNode = new LegacyNode( new GraphicalData(0, 0, 0, 0, GraphicalData.NodeType.FAKE_NODE, this, ont.getID() ));

	LegacyNode classesRoot = buildClassGraph( m, classesGraph );  // build the class graph here

	// update the offsets to put the properties graph under the class graph.
	// NOTE(review): both branches below are identical; the if/else is redundant.
	if( leftSide ) {
		subgraphYoffset = classesGraph.getBounds().y + classesGraph.getBounds().height + nodeHeight + marginBottom;
	} else {
		subgraphYoffset = classesGraph.getBounds().y + classesGraph.getBounds().height + nodeHeight + marginBottom;
	}

	CanvasGraph propertiesGraph = new CanvasGraph( GraphLocator.GraphType.PROPERTIES_GRAPH, ont.getID() );
	LegacyNode propertiesRoot = buildPropertiesGraph(m, propertiesGraph);  // and the properties graph here

	CanvasGraph globalGraph = buildOntologyGraph(classesRoot, propertiesRoot, ont);  // and put them all under a global graph

	// compute the lowest and rightmost extent of the two hierarchies, to size the divider artifacts
	int deepestY = 0;
	if( (classesGraph.getBounds().y + classesGraph.getBounds().height) > (propertiesGraph.getBounds().y+propertiesGraph.getBounds().height) )
		deepestY = classesGraph.getBounds().y + classesGraph.getBounds().height;
	else
		deepestY = propertiesGraph.getBounds().y+propertiesGraph.getBounds().height;

	int rightmostX = 0;
	if( (classesGraph.getBounds().x + classesGraph.getBounds().width) > (propertiesGraph.getBounds().x + propertiesGraph.getBounds().width) )
		rightmostX = classesGraph.getBounds().x + classesGraph.getBounds().width;
	else
		rightmostX = propertiesGraph.getBounds().x + propertiesGraph.getBounds().width;

	updateArtifactGraph(deepestY, rightmostX , leftSide);

	// add all the graphs created to the ontologyGraphs in the Canvas2.
	ontologyGraphs.add(classesGraph);
	ontologyGraphs.add(propertiesGraph);
	ontologyGraphs.add(globalGraph);

	return ontologyGraphs;
}
/**
 * Called from the LegacyLayout constructor.
 * Builds the graph that displays this layout's artifacts:
 * the vertical middle divider, the horizontal top divider, and the
 * "Source Ontology" / "Target Ontology" labels on the left and right halves.
 * @return the artifact graph (its bounds are ignored by the layout)
 */
private CanvasGraph buildArtifactGraph() {

	CanvasGraph artifacts = new CanvasGraph(GraphLocator.GraphType.LAYOUT_GRAPH_IGNORE_BOUNDS, Ontology.ID_NONE);

	Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();

	// vertical divider splitting the canvas into a left and a right half
	middleDivider = new GraphicalNode( new RectangleElement(screen.width/2, 0, middleDividerWidth, screen.height , this, Ontology.ID_NONE ) );

	// horizontal divider drawn under the ontology title labels
	topDivider = new GraphicalNode( new RectangleElement(0, topDividerTopMargin, screen.width, 1, this, Ontology.ID_NONE) );

	// bold "Source Ontology" label on the left half
	TextElement sourceLabel = new TextElement( 10, 15, 10, 20, this, Ontology.ID_NONE );
	sourceLabel.setBold(true);
	sourceLabel.setText("Source Ontology");
	sourceOntologyText = new GraphicalNode(sourceLabel);

	// bold "Target Ontology" label just to the right of the middle divider
	TextElement targetLabel = new TextElement( screen.width/2 + middleDividerWidth + 10, 15, 10, 20, this,Ontology.ID_NONE);
	targetLabel.setBold(true);
	targetLabel.setText("Target Ontology");
	targetOntologyText = new GraphicalNode(targetLabel);

	artifacts.insertVertex(middleDivider);
	artifacts.insertVertex(topDivider);
	artifacts.insertVertex(sourceOntologyText);
	artifacts.insertVertex(targetOntologyText);

	return artifacts;
}
/**
 * Update the position of the middle divider according to this globalGraph.
 * Also resizes the divider height, moves the "Target Ontology" label along
 * with the divider, and widens the top divider to cover the graphs.
 * @param deepestY    bottom edge (canvas y) of the deepest graph just built
 * @param rightmostX  right edge (canvas x) of the widest graph just built
 * @param leftSide    true if the graph just built went on the left side
 */
private void updateArtifactGraph(int deepestY, int rightmostX, boolean leftSide ) {

	// update the x position of the middle divider
	Rectangle viewportDim = vizpanel.getViewport().getBounds();
	Rectangle vizpanelDim = vizpanel.getBounds();

	int dividerX = viewportDim.width/2;
	if( leftSide && rightmostX > dividerX )
		dividerX = rightmostX + middleDividerLeftMargin;  // move the divider over to the right so it doesn't overlap the graph

	// update the height of the middle divider: at least as tall as the panel and the viewport
	int dividerH = deepestY;
	if( dividerH < vizpanelDim.height ) dividerH = vizpanelDim.height;
	if( dividerH < viewportDim.height ) dividerH = viewportDim.height;

	RectangleElement dividerData = new RectangleElement(dividerX, 0, middleDividerWidth, dividerH + vizpanel.Ypadding, this, Ontology.ID_NONE );
	middleDivider.setObject(dividerData);

	// move the target ontology text in relation to the middle divider
	targetOntologyText.getObject().x = dividerX + middleDividerWidth + 10;  // move the "Target Ontology" text with the divider

	// update the width of the top divider
	// NOTE!: There's a condition that falls through here: when leftside == true and rightside == true.
	if( (leftSide && !rightSideLoaded) || !leftSide ) {  // loading an ontology on the left side
		if( (rightmostX) > (viewportDim.x + viewportDim.width) ) {
			topDivider.getObject().width = rightmostX + vizpanel.Xpadding;
		} else {
			topDivider.getObject().width = viewportDim.x + viewportDim.width + vizpanel.Xpadding;
		}
	}

	layoutArtifactGraph.recalculateBounds();
}
/**
 * This function puts the classes and properties graphs under one global root node,
 * labeled with the ontology title and positioned on the correct half of the canvas.
 * @param classesRoot    root node of the classes hierarchy graph
 * @param propertiesRoot root node of the properties hierarchy graph
 * @param ont            the ontology being displayed
 * @return the global root graph, or null when the classes root has no graphical data
 */
private CanvasGraph buildOntologyGraph(LegacyNode classesRoot,
		LegacyNode propertiesRoot, Ontology ont) {

	if( classesRoot.getObject() == null ) return null;

	CanvasGraph globalGraph = new CanvasGraph(GraphLocator.GraphType.GLOBAL_ROOT_GRAPH, ont.getID());

	// the root sits at the top-left corner of whichever half this ontology occupies
	TextElement rootData = leftSide
			? new TextElement( leftGraphX,  leftGraphY,  0, nodeHeight, this, ont.getID() )
			: new TextElement( rightGraphX, rightGraphY, 0, nodeHeight, this, ont.getID() );
	rootData.setText( ont.getTitle() );

	LegacyNode globalRoot = new LegacyNode(rootData);

	// connect the global root to both hierarchy roots
	LegacyEdge toClasses    = new LegacyEdge( globalRoot, classesRoot, null, this );
	LegacyEdge toProperties = new LegacyEdge( globalRoot, propertiesRoot, null, this );

	globalGraph.insertVertex(globalRoot);
	globalGraph.insertEdge(toClasses);
	globalGraph.insertEdge(toProperties);

	return globalGraph;
}
/**
 * This recursive function builds the class tree for an XML ontology.
 * Each vertex becomes a LegacyNode positioned by depth (x) and insertion
 * order (y), linked to its parent by a LegacyEdge.
 * @param currentVertex the XML tree vertex being converted
 * @param parentNode    the already-inserted LegacyNode of the parent vertex
 * @param graph         the canvas graph being populated
 * @param depth         current tree depth, used for horizontal indentation
 * @return the LegacyNode created for currentVertex
 */
private LegacyNode recursiveBuildClassGraphXML( Vertex currentVertex, LegacyNode parentNode, CanvasGraph graph, int depth ) {

	// 1. Create the LegacyNode representation of the current vertex.
	TextElement textData = new TextElement(depth*depthIndent + subgraphXoffset,
			graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
			100, nodeHeight, this, graph.getID() );
	textData.setText( currentVertex.getName() );

	LegacyNode node = new LegacyNode( textData );
	graph.insertVertex(node);
	graph.insertEdge( new LegacyEdge( parentNode, node, null, this) );

	// 2. Recurse into every child of the current vertex.
	int childCount = currentVertex.getChildCount();
	for( int childIndex = 0; childIndex < childCount; childIndex++ ) {
		recursiveBuildClassGraphXML( (Vertex) currentVertex.getChildAt(childIndex), node, graph, depth+1);
	}

	// 3. Return the node created for this vertex.
	return node;
}
/**
 * This function and the recursive version build the class graph.
 *
 * The reason I split it into two functions is because the first level of recursion has to call
 * OntTools.namedHierarchyRoots(m) while the rest of the levels use superClass.listSubClasses().
 *
 * Otherwise, the two functions are quite similar.
 *
 * They both add to the graph, and build it up.
 *
 * @param m the Jena model of the ontology
 * @param graph the canvas graph to populate with class nodes and edges
 * @return the root node ("OWL Classes Hierarchy") of the classes graph
 */
@SuppressWarnings("unchecked")   // this comes from OntTools.namedHierarchyRoots()
private LegacyNode buildClassGraph( OntModel m, CanvasGraph graph ) {

	Logger log = null;
	if( Core.DEBUG ) {
		log = Logger.getLogger(this.getClass());
		log.setLevel(Level.DEBUG);
	}

	int depth = 0;

	// create the root node;
	TextElement gr = new TextElement(depth*depthIndent + subgraphXoffset,
			graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
			0, nodeHeight, this, graph.getID() );
	gr.setText("OWL Classes Hierarchy");
	LegacyNode root = new LegacyNode( gr );
	graph.insertVertex(root);

	List<OntClass> classesList = OntTools.namedHierarchyRoots(m);

	depth++;
	Iterator<OntClass> clsIter = classesList.iterator();
	while( clsIter.hasNext() ) {
		OntClass cls = clsIter.next();  // get the current child
		if( cls.isAnon() ) {  // if it is anonymous, don't add it, but we still need to recurse on its children
			hashMap.put(cls, anonymousNode);  // avoid cycles between anonymous nodes
			if( Core.DEBUG ) log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
			recursiveBuildClassGraph(root, cls, depth, graph);
			continue;
		} else if( cls.equals(OWL.Nothing) )  // if it's OWL.Nothing (i.e. we recursed to the bottom of the heirarchy) skip it.
			continue;

		// cycle check at the root
		if( hashMap.containsKey(cls) ) {  // we have seen this node before, do NOT recurse again
			if( Core.DEBUG ) log.debug("Cycle detected.  OntClass:" + cls );
			continue;
		}

		// the child class is not anonymous or OWL.Nothing, add it to the graph, with the correct relationships
		GraphicalData gr1 = new GraphicalData( depth*depthIndent + subgraphXoffset,
				graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
				100, nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this, graph.getID() );
		LegacyNode node = new LegacyNode( gr1);
		graph.insertVertex( node );
		LegacyEdge edge = new LegacyEdge( root, node, null, this );
		graph.insertEdge( edge );

		hashMap.put( cls, node);
		if( Core.DEBUG ) log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());

		recursiveBuildClassGraph( node, cls, depth+1, graph );
	}

	return root;
}
/**
 * Recursive companion of buildClassGraph(): adds the subclasses of
 * parentClass to the graph, skipping anonymous classes (recursing through
 * them under the same parent) and OWL.Nothing, and using hashMap to break
 * cycles in the subclass relation.
 * @param parentNode  the graph node the new child nodes attach to
 * @param parentClass this has to be passed because of anonymous classes and the special root node
 * @param depth       current depth, used for horizontal indentation
 * @param graph       the canvas graph being populated
 */
private void recursiveBuildClassGraph(
		LegacyNode parentNode,
		OntClass parentClass,
		int depth,
		CanvasGraph graph) {

	Logger log = null;
	if( Core.DEBUG ) {
		log = Logger.getLogger(this.getClass());
		log.setLevel(Level.DEBUG);
		log.debug(parentClass);
	}

	// NOTE(review): raw-typed ExtendedIterator; elements are cast to OntClass below.
	ExtendedIterator clsIter = parentClass.listSubClasses(true);
	while( clsIter.hasNext() ) {
		OntClass cls = (OntClass) clsIter.next();
		if( cls.isAnon() ) {
			hashMap.put(cls, anonymousNode);  // avoid cycles between anonymous nodes
			if( Core.DEBUG ) log.debug(">> Inserted anonymous node " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
			// recurse through the anonymous class, keeping the same visible parent and depth
			recursiveBuildClassGraph( parentNode, cls, depth, graph );
			continue;
		} else if( cls.equals( OWL.Nothing ) )
			continue;

		// this is the cycle check
		if( hashMap.containsKey(cls) ) {  // we have seen this node before, do NOT recurse again
			if( Core.DEBUG ) log.debug("Cycle detected.  OntClass:" + cls );
			continue;
		}

		GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
				graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
				100 , nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this, graph.getID() );
		LegacyNode node = new LegacyNode( gr);
		graph.insertVertex(node);
		LegacyEdge edge = new LegacyEdge( parentNode, node, null, this );
		graph.insertEdge( edge );

		hashMap.put(cls, node);
		if( Core.DEBUG ) {
			log.debug(">> Inserted " + cls + " into hashmap. HASHCODE: " + cls.hashCode());
			log.debug(">> Label: " + cls.getLabel(null));
		}

		recursiveBuildClassGraph( node, cls, depth+1, graph );
	}
}
/*
/**
* buildClassTree(), createFosterHome() and adoptRemainingOrphans(), and getVertexFromClass() are ported from OntoTree builder
*
* fixDepthDFS() is written because the depthIndent information cannot be passed with these functions, so it has to be set
* after the heirarchy has been finished.
* @return
/
protected LegacyNode buildClassTree( OntModel m, CanvasGraph graph) {
//HashMap<OntClass, Vertex> classesMap = new HashMap<OntClass, Vertex>(); // this maps between ontology classes and Vertices created for the each class
ExtendedIterator orphansItr = m.listClasses(); // right now the classes have no parents, so they are orphans.
while( orphansItr.hasNext() ) { // iterate through all the classes
OntClass currentOrphan = (OntClass) orphansItr.next(); // the current class we are looking at
if( !currentOrphan.isAnon() ) { // make sure this is a real class (anoynymous classes are not real classes)
createFosterHome( currentOrphan, graph ); // assign orphan classes to parent parent classes
}
}
// this is the root node of the class tree (think of it like owl:Thing)
// create the root node;
TextElement gr = new TextElement(0, 0, 0, nodeHeight, this, Core.getInstance().getOntologyIDbyModel(m));
gr.setText("OWL Classes Hierarchy");
LegacyNode root = new LegacyNode( gr );
// we may have classes that still don't have a parent. these orphans will be adopted by root.
adoptRemainingOrphans( root, graph );
fixDepthHeightDFS( root, 0, 0); // because the heirarchy was not built in any order, the height and depth must be fixed after it is built (not during).
return root;
}
private void createFosterHome( OntClass currentOrphan, CanvasGraph graph ) {
LegacyNode currentVertex = getVertexFromClass( currentOrphan );
ExtendedIterator parentsItr = currentOrphan.listSuperClasses( true ); // iterator of the current class' parents
while( parentsItr.hasNext() ) {
OntClass parentClass = (OntClass) parentsItr.next();
if( !parentClass.isAnon() && !parentClass.equals(owlThing) ) {
LegacyNode parentVertex = getVertexFromClass(parentClass); // create a new Vertex object or use an existing one.
//parentVertex.add( currentVertex ); // create the parent link between the parent and the child
}
}
}
private void adoptRemainingOrphans(LegacyNode root, CanvasGraph graph) {
/* // Alternative way of iterating through the classes (via the classesMap that was created).
*
Set< Entry<OntClass, Vertex>> classesSet = classesMap.entrySet();
Iterator<Entry<OntClass, Vertex>> classesItr = classesSet.iterator();
while( classesItr.hasNext() ) {
}
/
// We will just iterate through the classes again, and find any remaining orphans
ExtendedIterator classesItr = model.listClasses();
while( classesItr.hasNext() ) {
OntClass currentClass = (OntClass) classesItr.next();
if( !currentClass.isAnon() ) {
if( classesMap.containsKey(currentClass) ) {
Vertex currentVertex = classesMap.get(currentClass);
if( currentVertex.getParent() == null ) {
// this vertex has no parent, that means root needs to adopt it
root.add( currentVertex );
}
}
else {
// we should never get here
// if we do, it means we _somehow_ missed a class during our first iteration in buildClassTree();
System.err.println("Assertion failed: listClasses() returning different classes between calls.");
}
}
}
}
/**
* helper Function for buildClassesTree()
* @param classesMap
* @param currentClass
* @return
/
private LegacyNode getVertexFromClass( OntClass currentClass ) {
LegacyNode currentVertex = null;
if( hashMap.containsKey( currentClass ) ) { // we already have a Vertex for the currentClass (because it is the parent of some node)
currentVertex = hashMap.get( currentClass );
} else {
// we don't have a Vertex for the current class, create one;
//currentVertex = createNodeAndVertex( currentClass, true, ontology.getSourceOrTarget());
GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
100 , nodeHeight, cls, GraphicalData.NodeType.CLASS_NODE, this );
LegacyNode node = new LegacyNode( gr);
hashMap.put(currentClass, currentVertex);
}
return currentVertex;
}
// fix the positions of all the nodes linked to this graph
private int fixDepthHeightDFS( DirectedGraphVertex<GraphicalData> root, int depth, int height ) {
root.getObject().x = depth*depthIndent + subgraphXoffset;
root.getObject().y = height * (nodeHeight+marginBottom) + subgraphYoffset;
height = height+1;
Iterator<DirectedGraphEdge<GraphicalData>> edgeIter = root.edgesOut();
while( edgeIter.hasNext() ) { height = fixDepthHeightDFS( edgeIter.next().getDestination(), depth+1, height ); } // DFS call
return height;
}
(((((((((((((((((((((((((((((((((((((((((((())))))))))))))))))))))))))))))))))))))))))))
*/
/**
 * This function, and the recursive version, build the properties graph.  It's a copy of the Class building methods.
 *
 * The reason I split it into two functions is because the first level of recursion has to
 * find the roots of the properties hierarchy, while the rest of the levels use listSubProperties().
 *
 * Otherwise, the two functions are quite similar.
 *
 * They both add to the graph, and build it up.
 *
 * @param m the Jena model of the ontology
 * @param graph the canvas graph to populate with property nodes and edges
 * @return the root node ("OWL Properties Hierarchy") of the properties graph
 */
private LegacyNode buildPropertiesGraph( OntModel m, CanvasGraph graph ) {

	// Consistency fix: like buildClassGraph(), only create and use the debug
	// logger when Core.DEBUG is set (previously it was built unconditionally).
	Logger log = null;
	if( Core.DEBUG ) {
		log = Logger.getLogger(this.getClass());
		log.setLevel(Level.DEBUG);
	}

	int depth = 0;

	// create the root node;
	TextElement gr = new TextElement(depth*depthIndent + subgraphXoffset,
			graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
			0, nodeHeight, this, graph.getID() );
	gr.setText("OWL Properties Hierarchy");
	LegacyNode root = new LegacyNode( gr );
	graph.insertVertex(root);

	List<OntProperty> propertiesList = getPropertyHeirarchyRoots(m);

	depth++;
	Iterator<OntProperty> propIter = propertiesList.iterator();
	while( propIter.hasNext() ) {
		OntProperty prop = propIter.next();  // get the current child
		if( prop.isAnon() ) {  // if it is anonymous, don't add it, but we still need to recurse on its children
			hashMap.put(prop, anonymousNode);  // avoid cycles between anonymous nodes
			recursiveBuildPropertiesGraph(root, prop, depth, graph);
			continue;
		} else if( prop.equals(OWL.Nothing) )  // if it's OWL.Nothing (i.e. we recursed to the bottom of the heirarchy) skip it.
			continue;

		// this is the cycle check
		if( hashMap.containsKey(prop) ) {  // we have seen this node before, do NOT recurse again
			if( Core.DEBUG ) log.debug("Cycle detected.  OntProperty:" + prop );
			continue;
		}

		// so the child property is not anonymous or OWL.Nothing, add it to the graph, with the correct relationships
		GraphicalData gr1 = new GraphicalData( depth*depthIndent + subgraphXoffset,
				graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
				0, nodeHeight, prop, GraphicalData.NodeType.PROPERTY_NODE, this, graph.getID() );
		LegacyNode node = new LegacyNode( gr1);
		graph.insertVertex( node );
		LegacyEdge edge = new LegacyEdge( root, node, null, this );
		graph.insertEdge( edge );

		hashMap.put( prop, node);

		recursiveBuildPropertiesGraph( node, prop, depth+1, graph );
	}

	return root;
}
/**
 * Recursive companion of buildPropertiesGraph(): adds the subproperties of
 * parentProperty to the graph, skipping anonymous properties (recursing
 * through them under the same parent) and OWL.Nothing, and using hashMap to
 * break cycles in the subproperty relation.
 * @param parentNode     the graph node the new child nodes attach to
 * @param parentProperty this has to be passed because of anonymous classes and the special root node
 * @param depth          current depth, used for horizontal indentation
 * @param graph          the canvas graph being populated
 */
private void recursiveBuildPropertiesGraph(
		LegacyNode parentNode,
		OntProperty parentProperty,
		int depth,
		CanvasGraph graph) {

	Logger log = null;
	if( Core.DEBUG ) {
		log = Logger.getLogger(this.getClass());
		log.setLevel(Level.DEBUG);
		log.debug(parentProperty);
	}

	ExtendedIterator clsIter = null;
	try {
		clsIter = parentProperty.listSubProperties(true);
	} catch (ConversionException e ){
		// Some ontologies declare subproperty statements on resources Jena cannot
		// convert to OntProperty; log with context instead of printStackTrace()
		// and skip this subtree.
		Logger.getLogger(this.getClass()).warn("Cannot list subproperties of " + parentProperty, e);
		return;
	}

	while( clsIter.hasNext() ) {
		OntProperty prop = (OntProperty) clsIter.next();
		if( prop.isAnon() ) {
			hashMap.put(prop, anonymousNode);  // avoid cycles between anonymous nodes
			// recurse through the anonymous property, keeping the same visible parent and depth
			recursiveBuildPropertiesGraph( parentNode, prop, depth, graph );
			continue;
		} else if( prop.equals( OWL.Nothing ) )
			continue;

		// this is the cycle check
		if( hashMap.containsKey(prop) ) {  // we have seen this node before, do NOT recurse again
			if( Core.DEBUG ) log.debug("Cycle detected.  OntProperty:" + prop );
			continue;
		}

		GraphicalData gr = new GraphicalData( depth*depthIndent + subgraphXoffset,
				graph.numVertices() * (nodeHeight+marginBottom) + subgraphYoffset,
				100 , nodeHeight, prop, GraphicalData.NodeType.PROPERTY_NODE, this, graph.getID() );
		LegacyNode node = new LegacyNode( gr);
		graph.insertVertex(node);
		LegacyEdge edge = new LegacyEdge( parentNode, node, null, this );
		graph.insertEdge( edge );

		hashMap.put(prop, node);

		recursiveBuildPropertiesGraph( node, prop, depth+1, graph );
	}
}
/**
 * This function tries to identify the root nodes of the Property hierarchy of the ontology by
 * searching for properties that do not have any (named) super properties.
 * Object and datatype properties are scanned with the same logic, now shared
 * in addRootProperties() instead of being duplicated.
 * @param m the Jena model of the ontology
 * @return all object and datatype properties with no named superproperty
 */
private ArrayList<OntProperty> getPropertyHeirarchyRoots(OntModel m) {

	ArrayList<OntProperty> roots = new ArrayList<OntProperty>();

	addRootProperties( m.listObjectProperties(), roots );    // OBJECT PROPERTIES
	addRootProperties( m.listDatatypeProperties(), roots );  // DATATYPE PROPERTIES

	return roots;  // all the hierarchy roots
}

/**
 * Scans a property iterator and adds to roots every property that has no
 * superproperty other than itself or an anonymous one.
 * @param propItr iterator over OntProperty instances (raw, as returned by Jena's list methods)
 * @param roots   the accumulator list of root properties
 */
private void addRootProperties( ExtendedIterator propItr, ArrayList<OntProperty> roots ) {
	while( propItr.hasNext() ) {
		OntProperty property = (OntProperty) propItr.next();

		boolean isRoot = true;
		ExtendedIterator superPropItr = property.listSuperProperties();
		while( superPropItr.hasNext() ) {
			OntProperty superProperty = (OntProperty) superPropItr.next();
			if( !property.equals(superProperty) && !superProperty.isAnon() ) {
				// this property has a valid superproperty, therefore it is not a root property
				superPropItr.close();
				isRoot = false;
				break;
			}
		}

		if( isRoot ) roots.add(property);
	}
}
/**
 * Builds the canvas graph of mapping edges produced by a matcher.
 * The class and property alignment sets were handled by two identical
 * copy-pasted loops; the shared logic now lives in addAlignmentsToGraph().
 * @param m the matcher whose alignments should be displayed
 * @return a MATCHER_GRAPH containing one LegacyMapping edge per alignment
 *         whose endpoints are both present on the canvas
 */
@Override
public CanvasGraph buildMatcherGraph(AbstractMatcher m) {

	CanvasGraph matcherGraph = new CanvasGraph( GraphType.MATCHER_GRAPH, m.getID() );

	addAlignmentsToGraph( m.getClassAlignmentSet(), matcherGraph, m.getID() );     // the Class alignments
	addAlignmentsToGraph( m.getPropertyAlignmentSet(), matcherGraph, m.getID() );  // the Properties alignments

	return matcherGraph;
}

/**
 * Adds one LegacyMapping edge per alignment to the matcher graph.
 * Alignments whose source or target entity has no node on the canvas are
 * logged as warnings and skipped.
 * @param matchings    the alignment set (may be null, in which case nothing happens)
 * @param matcherGraph the graph the mapping edges are inserted into
 * @param matcherID    the ID of the matcher that produced these alignments
 */
private void addAlignmentsToGraph( AlignmentSet<Alignment> matchings, CanvasGraph matcherGraph, int matcherID ) {
	if( matchings == null ) return;

	Iterator<Alignment> alignmentIter = matchings.iterator();
	while( alignmentIter.hasNext() ) {
		Alignment alignment = alignmentIter.next();

		// TODO: Make AbstractMatchers work on Resource instead of Node.
		OntResource e1 = (OntResource) alignment.getEntity1().getResource().as(OntResource.class);  // translate from Node to OntResource
		OntResource e2 = (OntResource) alignment.getEntity2().getResource().as(OntResource.class);  // translate from Node to OntResource

		if( hashMap.containsKey(e1) && hashMap.containsKey(e2) ) {
			// great, our hashmap contains both entities
			Canvas2Vertex n1 = hashMap.get(e1);
			Canvas2Vertex n2 = hashMap.get(e2);
			LegacyMapping edge = new LegacyMapping( n1, n2, null, matcherID,  Utility.getNoDecimalPercentFromDouble(alignment.getSimilarity()) );
			matcherGraph.insertEdge(edge);
		} else {
			// the hashMap doesn't contain the source or the target node.
			// something is wrong.  log it so the missing entity can be tracked down.
			Logger log = Logger.getLogger(this.getClass());
			log.setLevel(Level.WARN);
			if( !hashMap.containsKey(e1) ) log.warn("Cannot find OntResource: " + e1.toString() + ".  Node container is: " + alignment.getEntity1().toString() );
			if( !hashMap.containsKey(e2) ) log.warn("Cannot find OntResource: " + e2.toString() + ".  Node container is: " + alignment.getEntity2().toString() );
		}
	}
}
/*
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*************************************************** EVENT LISTENERS *************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*********************************************************************************************************************************
*/
/**
 * Handle Viewport resize events here.
 * Currently a no-op: the re-centering logic below is disabled until it is
 * finished (see TODO).  The commented-out draft recomputes the middle
 * divider position and shifts the right-side graphs when the viewport size
 * changes.
 * @param e the viewport change event (unused while disabled)
 */
@Override
public void stateChanged(ChangeEvent e) {  // the state of the viewport has changed.

	return;  // TODO: Get this resizing working.

	/*
	JViewport vp = vizpanel.getViewport();
	Dimension vpSize = vp.getSize();
	if( oldViewportDimensions.height != vpSize.height || oldViewportDimensions.width != vpSize.width ) {
		// the size of the viewport has changed

		// need to know what's on the left side and what's on the right side
		ArrayList<CanvasGraph> ontologyGraphs = vizpanel.getOntologyGraphs();

		ArrayList<CanvasGraph> leftSideGraphs = GraphLocator.getGraphsByID( ontologyGraphs, leftOntologyID);
		ArrayList<CanvasGraph> rightSideGraphs = GraphLocator.getGraphsByID( ontologyGraphs, rightOntologyID);

		Rectangle leftBounds = CanvasGraph.getCombinedBounds(leftSideGraphs);  // the combined bounds of all the graphs on the left side
		Rectangle rightBounds = CanvasGraph.getCombinedBounds(rightSideGraphs);  // the combined bounds of all the graphs on the right side

		int viewportHalfwidth = vp.getBounds().width / 2;

		int leftBoundsLeftmostX = leftBounds.x + leftBounds.width;
		if( leftBoundsLeftmostX > viewportHalfwidth ) {  // if the left graphs move past the midpoint of the viewport even if the
			// viewport were scrolled all the way to the left, then layout everything end to end

			// TODO: do this part

		} else {
			// we have space between the middle line and the left side.
			// line up the middle divider so it is at the middle of the canvas

			int newMiddleDividerX = leftBounds.x + (viewportHalfwidth - leftBoundsLeftmostX);  // this should be where the middle divider should be moved to
			if( newMiddleDividerX != middleDivider.getObject().x ) {  // do we have to move anything?
				// yes, we have to move everything over.
				int deltaX = newMiddleDividerX - middleDivider.getObject().x;

				middleDivider.move(deltaX, 0);
				Iterator<CanvasGraph> graphIter = rightSideGraphs.iterator();
				while( graphIter.hasNext() ) { graphIter.next().moveGraph(deltaX, 0); }  // move all the rightside graphs over by deltaX
			}
		}
	}
	*/
}
/**
 * MOUSE EVENT listener function.
 * <p>
 * Dispatches on the mouse button and click count:
 * <ul>
 * <li>LEFT single click: select/deselect nodes, extend the selection with CTRL,
 *     or pop up the Create-Mapping menu when clicking a node in the opposite
 *     ontology from the current selection;</li>
 * <li>LEFT double click on a node: enter the single-mapping view;</li>
 * <li>MIDDLE single click: log debugging info for the hovered node;</li>
 * <li>RIGHT single click on a node: pop up the Delete-Mapping menu.</li>
 * </ul>
 * If a popup menu is active, any click simply dismisses it.
 * Relies on {@code hoveringOver} being kept up to date by {@code mouseMoved}.
 */
@Override
public void mouseClicked( MouseEvent e ) {
	// BUTTON1 = Left Click Button, BUTTON2 = Middle Click Button, BUTTON3 = Right Click Button

	Graphics g = vizpanel.getGraphics();   // used for any redrawing of nodes
	ArrayList<Canvas2Vertex> visibleVertices = vizpanel.getVisibleVertices();

	Logger log = Logger.getLogger(this.getClass());
	if( Core.DEBUG ) log.setLevel(Level.DEBUG);

	if( PopupMenuActive ) { // if we have an active popup menu, cancel it
		PopupMenuActive = false;
		if( hoveringOver != null ) {
			hoveringOver.setHover(false);
			hoveringOver.draw(g);
			hoveringOver = null; // clear the hover target, since the click can be anywhere and we didn't check again what we're hovering over
		}
	} else {
		// only process mouse clicks if there's not a popup menu active
		switch( e.getButton() ) {

		// because of the way Java (and most any platform) handles the difference between single and double clicks,
		// the single click action must be "complementary" to the double click action, as when you double click a
		// single click is always fired just before the double click is detected.
		// There is no way around this.  A single click event will *always* be fired just before a double click.

		// So then:
		//		- LEFT button SINGLE click = select NODE (or deselect if clicking empty space)
		//		- LEFT button DOUBLE click = line up two nodes by their mapping (do nothing if it's empty space)<- TODO

		// Jan 29, 2010 - Cosmin
		//   Ok now, we are adding menu support:
		//      1. User must single left click to select a node in one ontology graph, in order to select that node.
		//      2. User must single left click a node in the OTHER ontology graph in order to cause a menu to come up.
		//         If the user clicks a node in the same ontology, this new node becomes the selected node.
		//      These actions should work with MULTIPLE selections (using the Control key).

		// Feb 13th, 2010 - Cosmin
		//   Adding rightclick menu for deleting mappings.

		// June 17th, 2010 - Cosmin
		//   Added the SingleMappingView to replace SMO.  Activated by doubleclicking a node.

		case MouseEvent.BUTTON1:
			if( e.getClickCount() == 2 ) {  // double click with the left mouse button
				if( Core.DEBUG) log.debug("Double click with the LEFT mouse button detected.");
				//do stuff
				if( hoveringOver != null && SingleMappingView != true ) {
					// double-clicked a node while not already in single-mapping view: enter it
					enableSingleMappingView();
					vizpanel.repaint();
				}
			} else if( e.getClickCount() == 1 ) {  // single click with left mouse button
				if( SingleMappingView == true ) {
					// if we don't click on anything, cancel the single mapping view
					// restore the previous visibility of the nodes and edges
					if( hoveringOver == null ) {
						disableSingleMappingView();
						vizpanel.repaint();
					} else {
						// we doubleclicked on another node.
						disableSingleMappingView();

						// move the viewpane to the new node (vertically center the clicked node)
						//vizpanel.getScrollPane().scrollRectToVisible( new Rectangle(0, vizpanel.getScrollPane().getSize().height, 1, 1) );
						vizpanel.getScrollPane().getViewport().setViewPosition( new Point(vizpanel.getScrollPane().getViewport().getLocation().x,
								hoveringOver.getBounds().y - vizpanel.getScrollPane().getViewport().getHeight()/2 ));
						//System.out.print( "Moving viewport to: " + hoveringOver.getBounds().toString() );
						hoveringOver = null;
						vizpanel.repaint();
					}
				}

				if( hoveringOver == null ) {
					// we have clicked in an empty area, clear all the selected nodes
					Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
					while( nodeIter.hasNext() ) {
						LegacyNode selectedNode = nodeIter.next();
						selectedNode.setSelected(false); // deselect the node
						if( visibleVertices.contains( (Canvas2Vertex) selectedNode ) ) {
							// redraw only if it's currently visible
							//selectedNode.clearDrawArea(g);
							selectedNode.draw(g);
						}
					}
					selectedNodes.clear();
				} else {
					// user clicked over a node.

					// is it a node in the OTHER ontology?
					if( getSelectedNodesOntology() != Core.ID_NONE && getSelectedNodesOntology() != hoveringOver.getGraphicalData().ontologyID ) {
						// yes it is in the other ontology
						// bring up the Mapping Popup Menu, so the user can make a mapping
						CreateMappingMenu menuCreate = new CreateMappingMenu( this );
						menuCreate.show( vizpanel, e.getX(), e.getY());
						PopupMenuActive = true;
					} else {
						// the nodes are in the same ontology
						// we either add to the selection, or clear it and select the node that was just clicked
						if( e.isControlDown() ) {
							// if the user control clicked (CTRL+LEFTCLICK), we have to add this node to the list of selected nodes.
							if( selectedNodes.contains(hoveringOver) ) { // if it already is in the list, remove it
								selectedNodes.remove(hoveringOver);
								hoveringOver.setSelected(false);
							} else { // it's not in the list already, add it
								hoveringOver.setSelected(true);
								selectedNodes.add((LegacyNode) hoveringOver);
							}
							//hoveringOver.clearDrawArea(g);
							hoveringOver.draw(g);
						} else { // control is not pressed, clear any selections that there may be, and select single node
							Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
							while( nodeIter.hasNext() ) {
								LegacyNode selectedNode = nodeIter.next();
								selectedNode.setSelected(false); // deselect the node
								if( visibleVertices.contains( (Canvas2Vertex) selectedNode ) ) {
									// redraw only if it's currently visible
									//selectedNode.clearDrawArea(g);
									selectedNode.draw(g);
								}
							}
							selectedNodes.clear();

							// select single node
							hoveringOver.setSelected(true);
							selectedNodes.add( (LegacyNode)hoveringOver);
							//hoveringOver.clearDrawArea(g);
							hoveringOver.draw(g);
						}
					}
				}
			}
			break;

		case MouseEvent.BUTTON2:
			if( e.getClickCount() == 2 ) {
				// double click with the middle mouse button.
				log.debug("Double click with the MIDDLE mouse button detected.");
				//do stuff
			} else if( e.getClickCount() == 1 ) {
				// middle click, print out debugging info
				if( hoveringOver != null ) { // relying on the hover code in MouseMove
					log.debug("\nResource: " + hoveringOver.getObject().r +
							  "\nHashCode: " + hoveringOver.getObject().r.hashCode());
					log.debug("\nPosition" + e.getPoint().toString() );
				}
				//log.debug("Single click with the MIDDLE mouse button detected.");
			}
			break;

		case MouseEvent.BUTTON3:
			if( e.getClickCount() == 2 ) {
				// double click with the right mouse button.
				if( Core.DEBUG ) log.debug("Double click with the RIGHT mouse button detected.");
				//do stuff
			} else if( e.getClickCount() == 1 ) {
				// single right click, bring up delete menu
				if( hoveringOver != null ) {
					DeleteMappingMenu menuDelete = new DeleteMappingMenu( this, hoveringOver.getMappings() );
					menuDelete.show( vizpanel, e.getX(), e.getY());
					PopupMenuActive = true;
				}
				if( Core.DEBUG ) log.debug("Single click with the RIGHT mouse button detected.");
			}
			break;
		}
	}
	g.dispose(); // dispose of this graphics element, we don't need it anymore
}
/**
 * Exits the single-mapping view.
 * <p>
 * Restores the saved visibility state of every vertex and edge in all graphs
 * (undoing the {@code pushVisibility(false)} done when the view was enabled),
 * moves the temporarily repositioned nodes back to their saved coordinates,
 * refreshes the bounds of the affected mappings, and clears the bookkeeping
 * lists before turning the view flag off.
 */
private void disableSingleMappingView() {
	// Undo the visibility push on every vertex and edge of every graph.
	for( Iterator<CanvasGraph> graphs = vizpanel.getGraphs().iterator(); graphs.hasNext(); ) {
		CanvasGraph currentGraph = graphs.next();

		Iterator<Canvas2Vertex> vertices = currentGraph.vertices();
		while( vertices.hasNext() ) {
			vertices.next().popVisibility();
		}

		Iterator<Canvas2Edge> edges = currentGraph.edges();
		while( edges.hasNext() ) {
			edges.next().popVisibility();
		}
	}

	// Return every node we moved to its original position.
	for( Iterator<LegacyNode> moved = SingleMappingMovedNodes.iterator(); moved.hasNext(); ) {
		moved.next().popXY();
	}
	SingleMappingMovedNodes.clear();

	// Recompute the bounds of the mappings now that their endpoints are back.
	for( Iterator<LegacyMapping> mappings = SingleMappingMappings.iterator(); mappings.hasNext(); ) {
		mappings.next().updateBounds();
	}
	SingleMappingMappings.clear();

	SingleMappingView = false; // leave single-mapping mode
}
/**
 * Enters the single-mapping view for the nodes currently in {@code selectedNodes}.
 * <p>
 * Hides every vertex and edge (saving their current visibility with
 * {@code pushVisibility(false)}), then shows only the selected nodes, their
 * incident edges, and the nodes on the far end of those edges.  Finally, the
 * nodes on the opposite side of each mapping are stacked vertically next to the
 * selection (their old positions saved with {@code pushXY} so
 * {@code disableSingleMappingView()} can restore them).
 * <p>
 * NOTE(review): the moved nodes may end up outside their graph's cached bounds;
 * this method does not recalculate graph bounds after moving them -- confirm
 * whether the bounds of the affected graphs need to be recomputed here.
 */
private void enableSingleMappingView() {
	// TODO Auto-generated method stub

	//Activate the SingleMappingView
	SingleMappingView = true;

	// turn off the visibility of all the nodes and edges
	Iterator<CanvasGraph> graphIter = vizpanel.getGraphs().iterator();
	while( graphIter.hasNext() ) {
		CanvasGraph graph = graphIter.next();

		// hide the vertices
		Iterator<Canvas2Vertex> nodeIter = graph.vertices();
		while( nodeIter.hasNext() ) {
			Canvas2Vertex node = nodeIter.next();
			node.pushVisibility(false);
		}

		// hide the edges
		Iterator<Canvas2Edge> edgeIter = graph.edges();
		while( edgeIter.hasNext() ) {
			Canvas2Edge edge = edgeIter.next();
			edge.pushVisibility(false);
		}
	}

	// now that all of the nodes and edges have been hidden, show only the ones we want to see
	// we will show all edges connected to the selectedNodes, and all nodes connected to the edges of the selectedNodes
	Iterator<LegacyNode> nodeIter = selectedNodes.iterator();
	while( nodeIter.hasNext() ) {
		LegacyNode selectedNode = nodeIter.next();
		selectedNode.setVisible(true);
		selectedNode.setSelected(false); // unselect the nodes

		// show every incoming edge and the vertex at its other end
		Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = selectedNode.edgesIn();
		while( edgeInIter.hasNext() ) {
			Canvas2Edge connectedEdge = (Canvas2Edge) edgeInIter.next();
			connectedEdge.setVisible(true);
			if( selectedNode == connectedEdge.getOrigin() ) { ((Canvas2Vertex)connectedEdge.getDestination()).setVisible(true); }
			else { ((Canvas2Vertex)connectedEdge.getOrigin()).setVisible(true); }
		}

		// show every outgoing edge and the vertex at its other end
		Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = selectedNode.edgesOut();
		while( edgeOutIter.hasNext() ) {
			Canvas2Edge connectedEdge = (Canvas2Edge) edgeOutIter.next();
			connectedEdge.setVisible(true);
			if( selectedNode == connectedEdge.getOrigin() ) { ((Canvas2Vertex)connectedEdge.getDestination()).setVisible(true); }
			else { ((Canvas2Vertex)connectedEdge.getOrigin()).setVisible(true); }
		}
	}

	// we need to move the opposite side up to the side we clicked

	//ArrayList<LegacyMapping> mappingList = new ArrayList<LegacyMapping>();  // we have to keep a list of all the mappings to/from this node
	int uppermostY = -1;  // -1 is a dummy value.  Valid values are >= 0.  (topmost y among the selected nodes)
	Iterator<LegacyNode> nodeIter2 = selectedNodes.iterator();
	while( nodeIter2.hasNext() ) {
		LegacyNode selectedNode = nodeIter2.next();

		// update the uppermostY
		if( uppermostY < 0 || selectedNode.getObject().y < uppermostY ) {
			uppermostY = selectedNode.getObject().y;
		}

		// update the mappingList: collect every LegacyMapping incident to the selection
		Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = selectedNode.edgesIn();
		while( edgeInIter.hasNext() ) {
			DirectedGraphEdge<GraphicalData> connectedEdge = edgeInIter.next();
			if( connectedEdge instanceof LegacyMapping ) {
				SingleMappingMappings.add( (LegacyMapping) connectedEdge );
			}
		}

		Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = selectedNode.edgesOut();
		while( edgeOutIter.hasNext() ) {
			DirectedGraphEdge<GraphicalData> connectedEdge = edgeOutIter.next();
			if( connectedEdge instanceof LegacyMapping) {
				SingleMappingMappings.add( (LegacyMapping) connectedEdge );
			}
		}
	}

	// now we must move the mappings to the uppermostY.
	// Stack the far-end node of each mapping below uppermostY, one node height
	// (plus margin) apart per mapping.
	for( int i = 0; i < SingleMappingMappings.size(); i++ ) {
		// nodeheight marginbottom
		LegacyMapping currentMapping = SingleMappingMappings.get(i);

		if( selectedNodes.contains( currentMapping.getOrigin()) ) {
			// we doubleclicked on the origin of the mapping, so move the destination up.
			LegacyNode destinationNode = (LegacyNode) currentMapping.getDestination();
			destinationNode.pushXY( destinationNode.getGraphicalData().x , uppermostY + i*(nodeHeight+marginBottom) );
			SingleMappingMovedNodes.add(destinationNode);
			vizpanel.getVisibleVertices().add(destinationNode);
		} else {
			// we doubleclicked on the destination of the mapping, therefore we move the origin up
			LegacyNode originNode = (LegacyNode) currentMapping.getOrigin();
			originNode.pushXY( originNode.getGraphicalData().x , uppermostY + i*(nodeHeight+marginBottom) );
			SingleMappingMovedNodes.add(originNode);
			vizpanel.getVisibleVertices().add(originNode);
		}
		currentMapping.updateBounds();
	}

	selectedNodes.clear();
}
private Canvas2Vertex hoveringOver;
/**
 * Tracks which node the mouse is hovering over.
 * <p>
 * Updates {@code hoveringOver}, toggling the hover highlight and redrawing the
 * affected node (and its connected edges) as the cursor enters or leaves it.
 * Only {@code LegacyNode} vertices participate in hovering.  No redrawing is
 * done while a popup menu is active.
 */
@Override
public void mouseMoved(MouseEvent e)    {
	// don't redraw over a popupmenu
	if( PopupMenuActive ) { return; }

	Graphics g = vizpanel.getGraphics();
	ArrayList<Canvas2Vertex> visibleVertices = vizpanel.getVisibleVertices();
	Iterator<Canvas2Vertex> vertIter = visibleVertices.iterator();
	boolean hoveringOverEmptySpace = true;
	while( vertIter.hasNext() ) {
		Canvas2Vertex vertex = vertIter.next();
		if( vertex instanceof LegacyNode )    // we only care about legacy nodes (for now)
		if( vertex.contains(e.getPoint()) ) {
			// we are hovering over vertex
			hoveringOverEmptySpace = false;
			// first, remove the hover from the last element we were hovering over
			if( hoveringOver == vertex ) {
				// we are still hoovering over this element, do nothing
				break;
			} else if( hoveringOver != null ) {
				// we had been hovering over something, but now we're not
				hoveringOver.setHover(false);
				//hoveringOver.clearDrawArea(g);
				hoveringOver.draw(g);
			}
			hoveringOver = vertex;
			hoveringOver.setHover(true);
			//hoveringOver.clearDrawArea(g);
			// redraw all the edges connected to this node, so the node's highlight draws on top of them.
			Iterator<DirectedGraphEdge<GraphicalData>> edgeInIter = hoveringOver.edgesIn();
			while( edgeInIter.hasNext() ) { ((Canvas2Edge)edgeInIter.next()).draw(g); }
			Iterator<DirectedGraphEdge<GraphicalData>> edgeOutIter = hoveringOver.edgesOut();
			while( edgeOutIter.hasNext() ) { ((Canvas2Edge)edgeOutIter.next()).draw(g); }
			hoveringOver.draw(g);
			break;
		}
	}

	if( hoveringOverEmptySpace && hoveringOver != null) {
		// the cursor left the node: clear the hover
		hoveringOver.setHover(false);
		//hoveringOver.clearDrawArea(g);
		hoveringOver.draw(g);
		hoveringOver = null;
	}
	g.dispose();
}
/**
 * Handles the menu items of the Create-Mapping popup menu.
 * <p>
 * Depending on the action command, determines the mapping relation and
 * similarity (prompting the user where required), builds one {@link Alignment}
 * per selected node paired with the hovered node, and hands the list to the
 * matching framework via the control panel.
 * <p>
 * Fix: action commands are now compared with {@code equals()}; the previous
 * reference comparison ({@code ==}) only worked by accident of string literal
 * interning and would silently fail for any non-interned command string.
 */
@Override
public void actionPerformed(ActionEvent e) {
	String actionCommand = e.getActionCommand();

	// these commands are from the Create Mappings popup menu
	if( actionCommand.equals("CREATE_DEFAULT")         ||
		actionCommand.equals("CREATE_EQUIVALENCE")     ||
		actionCommand.equals("CREATE_SUBSET")          ||
		actionCommand.equals("CREATE_SUBSETCOMPLETE")  ||
		actionCommand.equals("CREATE_SUPERSET")        ||
		actionCommand.equals("CREATE_SUPERSETCOMPLETE")||
		actionCommand.equals("CREATE_OTHER") ) {

		String relation = Alignment.EQUIVALENCE; // default relation (was "EQUIVALENCE;;" -- stray semicolon removed)
		double sim = 0;
		ArrayList<Alignment> userMappings = new ArrayList<Alignment>();

		if( actionCommand.equals("CREATE_DEFAULT") ) {
			relation = Alignment.EQUIVALENCE;
			sim = 1.0d;
		} else {
			// ask the user for the similarity value (0-100); re-prompt until valid or cancelled
			boolean correct = false;
			boolean abort = false;
			while(!correct && !abort) {
				String x = JOptionPane.showInputDialog(null, "Insert the similarity value.\nInsert a number between 0 and 100 using only numeric digits.\n Warning: the similarity should be higher than the threshold value.\nIf not, the similarity matrix will be modified but the alignment won't be selected and visualized.");
				try {
					if(x == null)
						abort = true;//USER SELECTED CANCEL
					else {
						sim = Double.parseDouble(x);
						if(sim >= 0 && sim <= 100) {
							correct = true;
							sim = sim/100; // normalize the percentage to [0,1]
						}
					}
				}
				catch(Exception ex) {
					// WRONG INPUT (not a number): fall through and ask again
				}
			}
		}

		if( actionCommand.equals("CREATE_OTHER") ){
			// user supplies a free-form relation string; re-prompt until given or cancelled
			boolean correct = false;
			boolean abort = false;
			while(!correct && !abort) {
				String x = JOptionPane.showInputDialog(null, "Insert the relation type:");
				try {
					if(x == null)
						abort = true;//USER SELECTED CANCEL
					else {
						relation = x;
						correct = true;
					}
				}
				catch(Exception ex) {
					// WRONG INPUT: ask again
				}
			}
		}

		if( actionCommand.equals("CREATE_EQUIVALENCE") )     relation = Alignment.EQUIVALENCE;
		if( actionCommand.equals("CREATE_SUBSET") )          relation = Alignment.SUBSET;
		if( actionCommand.equals("CREATE_SUBSETCOMPLETE") )  relation = Alignment.SUBSETCOMPLETE;
		if( actionCommand.equals("CREATE_SUPERSET") )        relation = Alignment.SUPERSET;
		if( actionCommand.equals("CREATE_SUPERSETCOMPLETE")) relation = Alignment.SUPERSETCOMPLETE;

		// **************** create the alignments

		Iterator<LegacyNode> nodeIter = selectedNodes.iterator();

		// what type of nodes are we mapping
		// NOTE(review): hoveringOver is assumed non-null here (the menu is only shown
		// while hovering over a node) -- TODO confirm this invariant holds.
		alignType type = null;
		if( hoveringOver.getGraphicalData().type == NodeType.CLASS_NODE ) {
			type = AbstractMatcher.alignType.aligningClasses;
		} else if( hoveringOver.getGraphicalData().type == NodeType.PROPERTY_NODE ) {
			type = AbstractMatcher.alignType.aligningProperties;
		}

		// this is a little bit of a mess, but we have to support legacy code (meaning the Alignment class)- 1/29/2010 Cosmin
		Ontology o2 = Core.getInstance().getOntologyByID( hoveringOver.getGraphicalData().ontologyID );
		Node n2 = null;
		try {
			n2 = o2.getNodefromOntResource( hoveringOver.getGraphicalData().r, type );
		} catch (Exception e1) {
			// lookup failed: n2 stays null; log and continue so the user sees the stack trace
			e1.printStackTrace();
		}

		while( nodeIter.hasNext() ) {
			LegacyNode ln = nodeIter.next();

			// again, this is necessary in order to be compatible with the way the Alignment class is at the moment - 1/29/2010 Cosmin
			Ontology o1 = Core.getInstance().getOntologyByID( ln.getGraphicalData().ontologyID );
			Node n1 = null;
			try {
				n1 = o1.getNodefromOntResource( ln.getGraphicalData().r, type );
			} catch (Exception e1) {
				e1.printStackTrace();
			}

			Alignment a;
			if( ln.getGraphicalData().ontologyID == leftOntologyID ) {  // this if statement fixes a small display bug
				a = new Alignment( n1, n2, sim, relation, type);
			} else {
				a = new Alignment( n2, n1, sim, relation, type);
			}

			userMappings.add(a);
		}

		// add the mappings created to the user
		Core.getUI().getControlPanel().userMatching(userMappings);

		PopupMenuActive = false; // the popup menu goes away when something is clicked on it
	}
}
/**
 * Determines which ontology the current selection belongs to.
 *
 * @return the ontology id shared by every node in {@code selectedNodes}, or
 *         {@code Core.ID_NONE} when the selection is empty or (unexpectedly)
 *         spans more than one ontology.
 */
private int getSelectedNodesOntology() {
	Iterator<LegacyNode> selection = selectedNodes.iterator();
	if( !selection.hasNext() ) {
		return Core.ID_NONE; // empty selection
	}

	// The first node fixes the ontology that all the others must match.
	final int firstOntologyID = selection.next().getGraphicalData().ontologyID;

	while( selection.hasNext() ) {
		if( selection.next().getGraphicalData().ontologyID != firstOntologyID ) {
			// A mixed-ontology selection should not happen (when it would, the
			// mapping menu pops up and the selection is cleared), but guard anyway.
			return Core.ID_NONE;
		}
	}

	// every selected node belongs to the ontology with this id
	return firstOntologyID;
}
public boolean isSingleMappingView() { return SingleMappingView; }
}
| When displaying SingleMappingView, you must recalculate the bounds of the graphs.
| AgreementMaker/src/am/userInterface/canvas2/layouts/LegacyLayout.java | When displaying SingleMappingView, you must recalculate the bounds of the graphs. |
|
Java | apache-2.0 | 47bd7a63a7fa7df69a014208df88a53803d3606b | 0 | apache/incubator-taverna-engine,apache/incubator-taverna-engine | /*******************************************************************************
* Copyright (C) 2007 The University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
******************************************************************************/
package net.sf.taverna.t2.activities.stringconstant;
import java.util.List;
import net.sf.taverna.t2.workflowmodel.Processor;
import net.sf.taverna.t2.workflowmodel.health.HealthCheck;
import net.sf.taverna.t2.workflowmodel.health.HealthChecker;
import net.sf.taverna.t2.visit.VisitReport;
import net.sf.taverna.t2.visit.VisitReport.Status;
/**
 * Health checker for {@link StringConstantActivity}: flags a constant whose
 * value is {@code null} (severe) or still set to the default placeholder text
 * (warning); otherwise reports that the activity is OK.
 */
public class StringConstantActivityHealthChecker implements HealthChecker<StringConstantActivity> {

	/**
	 * This checker only visits {@link StringConstantActivity} instances.
	 * ({@code instanceof} is already {@code false} for {@code null}, so the
	 * previous explicit null check was redundant and has been removed.)
	 */
	public boolean canVisit(Object subject) {
		return subject instanceof StringConstantActivity;
	}

	/**
	 * Inspects the activity's configured value and reports its health,
	 * attached to the enclosing {@link Processor}.
	 *
	 * @return a {@link VisitReport} describing the value's state, or
	 *         {@code null} when no enclosing processor is found in the
	 *         ancestor list.
	 */
	public VisitReport visit(StringConstantActivity activity, List<Object> ancestors) {
		Processor p = (Processor) VisitReport.findAncestor(ancestors, Processor.class);
		if (p == null) {
			// not contained in a processor: nothing meaningful to report on
			return null;
		}
		String value = activity.getConfiguration().getValue();
		if (value == null) {
			return new VisitReport(HealthCheck.getInstance(), p, "StringConstant value is null", HealthCheck.NULL_VALUE, Status.SEVERE);
		}
		// constant-first equals() avoids an NPE and matches the placeholder exactly
		if ("Add your own value here".equals(value)) {
			return new VisitReport(HealthCheck.getInstance(), p, "StringConstant value is still the default", HealthCheck.DEFAULT_VALUE, Status.WARNING);
		}
		return new VisitReport(HealthCheck.getInstance(), p, "StringConstant is OK", HealthCheck.NO_PROBLEM, Status.OK);
	}

	/** Checking a constant's value is cheap; never time consuming. */
	public boolean isTimeConsuming() {
		return false;
	}
}
| src/main/java/net/sf/taverna/t2/activities/stringconstant/StringConstantActivityHealthChecker.java | /*******************************************************************************
* Copyright (C) 2007 The University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
******************************************************************************/
package net.sf.taverna.t2.activities.stringconstant;
import net.sf.taverna.t2.workflowmodel.health.HealthChecker;
import net.sf.taverna.t2.workflowmodel.health.HealthReport;
import net.sf.taverna.t2.workflowmodel.health.HealthReport.Status;
public class StringConstantActivityHealthChecker implements HealthChecker<StringConstantActivity> {
public boolean canHandle(Object subject) {
return subject!=null && subject instanceof StringConstantActivity;
}
public HealthReport checkHealth(StringConstantActivity activity) {
String value = activity.getConfiguration().getValue();
if (value==null) {
return new HealthReport("StringConstant Activity","The value is null",Status.SEVERE);
}
if ("edit me!".equals(value)) {
return new HealthReport("StringConstant Activity","The value is still the default",Status.WARNING);
}
return new HealthReport("StringConstant Activity","OK",Status.OK);
}
}
| Modified health checkers to cope with DisabledActivity and new VisitReport as per T2-1238 and T2-1231
git-svn-id: 862cecc6b6024acb2a5742ea390146ac2ada5e04@10490 bf327186-88b3-11dd-a302-d386e5130c1c
| src/main/java/net/sf/taverna/t2/activities/stringconstant/StringConstantActivityHealthChecker.java | Modified health checkers to cope with DisabledActivity and new VisitReport as per T2-1238 and T2-1231 |
|
Java | apache-2.0 | 5adcefa0c3b5ecabbd5c846c57fdc957ee9c4f3e | 0 | evanv/titan,thinkaurelius/titan,graben1437/titan,hortonworks/titan,samanalysis/titan,nvoron23/titan,wangbf/titan,kangkot/titan,xlcupid/titan,tomersagi/titan,mbrukman/titan,anuragkh/titan,nvoron23/titan,hortonworks/titan,anuragkh/titan,infochimps-forks/titan,elubow/titan,xlcupid/titan,elubow/titan,boorad/titan,tomersagi/titan,dylanht/titan,twilmes/titan,mbrukman/titan,ThiagoGarciaAlves/titan,hortonworks/titan,kangkot/titan,graben1437/titan,infochimps-forks/titan,dylanht/titan,qiuqiyuan/titan,elubow/titan,elkingtonmcb/titan,samanalysis/titan,englishtown/titan,CYPP/titan,samanalysis/titan,samanalysis/titan,mwpnava/titan,pluradj/titan,anvie/titan,wangbf/titan,hortonworks/titan,xlcupid/titan,nvoron23/titan,elkingtonmcb/titan,jamestyack/titan,infochimps-forks/titan,thinkaurelius/titan,boorad/titan,mwpnava/titan,banjiewen/titan,kalatestimine/titan,anuragkh/titan,twilmes/titan,anvie/titan,hortonworks/titan,kangkot/titan,jankotek/titan,dylanht/titan,mwpnava/titan,jamestyack/titan,jankotek/titan,jankotek/titan,CYPP/titan,wangbf/titan,CYPP/titan,anvie/titan,mbrukman/titan,wangbf/titan,mbrukman/titan,pluradj/titan,xlcupid/titan,xlcupid/titan,qiuqiyuan/titan,ThiagoGarciaAlves/titan,fengshao0907/titan,englishtown/titan,anvie/titan,thinkaurelius/titan,kalatestimine/titan,elkingtonmcb/titan,elkingtonmcb/titan,fengshao0907/titan,kalatestimine/titan,englishtown/titan,kangkot/titan,fengshao0907/titan,qiuqiyuan/titan,twilmes/titan,ThiagoGarciaAlves/titan,kalatestimine/titan,twilmes/titan,fengshao0907/titan,graben1437/titan,amcp/titan,qiuqiyuan/titan,ThiagoGarciaAlves/titan,amcp/titan,jankotek/titan,jankotek/titan,jamestyack/titan,infochimps-forks/titan,tomersagi/titan,elkingtonmcb/titan,mwpnava/titan,boorad/titan,fengshao0907/titan,elubow/titan,twilmes/titan,infochimps-forks/titan,CYPP/titan,kangkot/titan,mwpnava/titan,jamestyack/titan,boorad/titan,tomersagi/titan,mbrukman/titan,evanv/titan,evanv/titan,pluradj/titan,qiu
qiyuan/titan,anuragkh/titan,wangbf/titan,jamestyack/titan,pluradj/titan,englishtown/titan,banjiewen/titan,banjiewen/titan,anvie/titan,evanv/titan,amcp/titan,banjiewen/titan,kalatestimine/titan,amcp/titan,dylanht/titan,graben1437/titan,ThiagoGarciaAlves/titan,nvoron23/titan,thinkaurelius/titan,anuragkh/titan,tomersagi/titan,evanv/titan | package com.thinkaurelius.titan.diskstorage.hazelcast;
import java.util.Iterator;
import javax.annotation.Nullable;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.thinkaurelius.titan.diskstorage.StaticBuffer;
import com.thinkaurelius.titan.diskstorage.StorageException;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.StoreTransaction;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.CacheStore;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.CacheUpdateException;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.KeySelector;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.KeyValueEntry;
import com.thinkaurelius.titan.diskstorage.util.RecordIterator;
import com.thinkaurelius.titan.diskstorage.util.StaticArrayBuffer;
import com.thinkaurelius.titan.diskstorage.util.StaticByteBuffer;
/**
 * {@link CacheStore} implementation backed by a Hazelcast distributed
 * {@link IMap}.  Keys and values are stored as raw {@code byte[]} arrays
 * obtained from the {@link StaticBuffer} wrappers.  Locking and local key
 * partitioning are not supported by this store.
 */
public class HazelcastCacheStore implements CacheStore {
    private static final String UPDATE_EXCEPTION_FORMAT = "Key: %s, has current value different from %s, can't replace with %s.";

    // Backing Hazelcast distributed map; name doubles as the store name.
    private final IMap<byte[], byte[]> cache;

    /**
     * @param name    the name of the Hazelcast map to use (and of this store)
     * @param manager the Hazelcast instance the map is obtained from
     */
    public HazelcastCacheStore(String name, HazelcastInstance manager) {
        this.cache = manager.getMap(name);
    }

    /**
     * Atomically replaces the value under {@code key}, but only if the current
     * value equals {@code oldValue} (compare-and-set semantics).
     *
     * @throws CacheUpdateException if the stored value differs from {@code oldValue}
     */
    @Override
    public void replace(StaticBuffer key, StaticBuffer newValue, StaticBuffer oldValue, StoreTransaction txh) throws CacheUpdateException {
        byte[] rawKey = key.as(StaticArrayBuffer.ARRAY_FACTORY);
        byte[] rawNewValue = newValue.as(StaticArrayBuffer.ARRAY_FACTORY);

        // Hazelcast doesn't replace a value when old value was null
        // so we have to look and use putIfAbsent(new) if oldValue == null, otherwise use replace(old, new)
        if (oldValue == null) {
            if (cache.putIfAbsent(rawKey, rawNewValue) != null)
                throw new CacheUpdateException(String.format(UPDATE_EXCEPTION_FORMAT, key, oldValue, newValue));
        } else if (!cache.replace(rawKey, oldValue.as(StaticArrayBuffer.ARRAY_FACTORY), rawNewValue)) {
            throw new CacheUpdateException(String.format(UPDATE_EXCEPTION_FORMAT, key, oldValue, newValue));
        }
    }

    /** Removes the entry for {@code key}; no-op if absent. */
    @Override
    public void delete(StaticBuffer key, StoreTransaction txh) throws StorageException {
        cache.remove(key.as(StaticArrayBuffer.ARRAY_FACTORY));
    }

    /** @return the stored value for {@code key}, or null when absent. */
    @Override
    public StaticBuffer get(StaticBuffer key, StoreTransaction txh) throws StorageException {
        byte[] value = cache.get(key.as(StaticArrayBuffer.ARRAY_FACTORY));
        return value == null ? null : new StaticByteBuffer(value);
    }

    @Override
    public boolean containsKey(StaticBuffer key, StoreTransaction txh) throws StorageException {
        return cache.containsKey(key.as(StaticArrayBuffer.ARRAY_FACTORY));
    }

    /**
     * Iterates over the entries whose keys are accepted by {@code selector}.
     * <p>
     * NOTE(review): the iterator's next() re-fetches the value with
     * cache.get(key), so an entry removed concurrently between keySet()
     * snapshot and next() would yield a null value -- confirm whether callers
     * tolerate that.  The filter also assumes selector.include() tracks the
     * selection limit consulted by reachedLimit() -- TODO confirm.
     */
    @Override
    public RecordIterator<KeyValueEntry> getKeys(final KeySelector selector, StoreTransaction txh) throws StorageException {
        final Iterator<byte[]> keys = Iterators.filter(cache.keySet().iterator(), new Predicate<byte[]>() {
            @Override
            public boolean apply(@Nullable byte[] key) {
                return selector.include(new StaticArrayBuffer(key)) && !selector.reachedLimit();
            }
        });

        return new RecordIterator<KeyValueEntry>() {
            @Override
            public boolean hasNext() {
                return keys.hasNext();
            }

            @Override
            public KeyValueEntry next() {
                byte[] key = keys.next();
                return new KeyValueEntry(new StaticArrayBuffer(key), new StaticArrayBuffer(cache.get(key)));
            }

            @Override
            public void close() {
                // nothing to do
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /** Locking is not supported by this store; calls are silently ignored. */
    @Override
    public void acquireLock(StaticBuffer key, StaticBuffer expectedValue, StoreTransaction txh) throws StorageException {
        // not supported
    }

    @Override
    public StaticBuffer[] getLocalKeyPartition() throws StorageException {
        throw new UnsupportedOperationException();
    }

    /** @return the name of the backing Hazelcast map. */
    @Override
    public String getName() {
        return cache.getName();
    }

    /** Removes all entries from the backing map. */
    @Override
    public void clearStore() {
        cache.clear();
    }

    /** Destroys the backing Hazelcast map and releases its resources. */
    @Override
    public void close() throws StorageException {
        cache.destroy();
    }
}
import java.util.Iterator;
import javax.annotation.Nullable;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.thinkaurelius.titan.diskstorage.StaticBuffer;
import com.thinkaurelius.titan.diskstorage.StorageException;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.StoreTransaction;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.CacheStore;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.CacheUpdateException;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.KeySelector;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue.KeyValueEntry;
import com.thinkaurelius.titan.diskstorage.util.RecordIterator;
import com.thinkaurelius.titan.diskstorage.util.StaticArrayBuffer;
import com.thinkaurelius.titan.diskstorage.util.StaticByteBuffer;
/**
 * {@link CacheStore} implementation backed by a distributed Hazelcast
 * {@link IMap} whose keys and values are raw byte arrays.
 */
public class HazelcastCacheStore implements CacheStore {
private static final String UPDATE_EXCEPTION_FORMAT = "Key: %s, has current value different from %s, can't replace with %s.";
// Backing distributed map; keys/values are byte[] copies of the buffers.
private final IMap<byte[], byte[]> cache;
/** Creates (or attaches to) the Hazelcast map with the given name. */
public HazelcastCacheStore(String name, HazelcastInstance manager) {
this.cache = manager.getMap(name);
}
/**
 * Atomically replaces the value for {@code key}, expecting the current value
 * to equal {@code oldValue} (or to be absent when {@code oldValue} is null).
 *
 * @throws CacheUpdateException if the current value does not match the expectation
 */
@Override
public void replace(StaticBuffer key, StaticBuffer newValue, StaticBuffer oldValue, StoreTransaction txh) throws CacheUpdateException {
byte[] rawKey = key.as(StaticArrayBuffer.ARRAY_FACTORY);
byte[] rawNewValue = newValue.as(StaticArrayBuffer.ARRAY_FACTORY);
// Hazelcast doesn't replace a value when old value was null
// so we have to look and use putIfAbsent(new) if oldValue == null, otherwise use replace(old, new)
if (oldValue == null) {
if (cache.putIfAbsent(rawKey, rawNewValue) != null)
throw new CacheUpdateException(String.format(UPDATE_EXCEPTION_FORMAT, key, oldValue, newValue));
} else if (!cache.replace(rawKey, oldValue.as(StaticArrayBuffer.ARRAY_FACTORY), rawNewValue)) {
throw new CacheUpdateException(String.format(UPDATE_EXCEPTION_FORMAT, key, oldValue, newValue));
}
}
/** Removes the entry for the given key; no-op if the key is absent. */
@Override
public void delete(StaticBuffer key, StoreTransaction txh) throws StorageException {
cache.remove(key.as(StaticArrayBuffer.ARRAY_FACTORY));
}
/** Returns the stored value for the key, or null if there is none. */
@Override
public StaticBuffer get(StaticBuffer key, StoreTransaction txh) throws StorageException {
byte[] value = cache.get(key.as(StaticArrayBuffer.ARRAY_FACTORY));
return value == null ? null : new StaticArrayBuffer(value);
}
@Override
public boolean containsKey(StaticBuffer key, StoreTransaction txh) throws StorageException {
return cache.containsKey(key.as(StaticArrayBuffer.ARRAY_FACTORY));
}
/**
 * Returns a lazy iterator over entries whose keys are accepted by the
 * selector; keys stop being admitted once the selector reaches its limit.
 */
@Override
public RecordIterator<KeyValueEntry> getKeys(final KeySelector selector, StoreTransaction txh) throws StorageException {
final Iterator<byte[]> keys = Iterators.filter(cache.keySet().iterator(), new Predicate<byte[]>() {
@Override
public boolean apply(@Nullable byte[] key) {
return selector.include(new StaticArrayBuffer(key)) && !selector.reachedLimit();
}
});
return new RecordIterator<KeyValueEntry>() {
@Override
public boolean hasNext() {
return keys.hasNext();
}
@Override
public KeyValueEntry next() {
byte[] key = keys.next();
// NOTE(review): cache.get(key) may be null if the entry is removed
// concurrently between key iteration and value lookup — TODO confirm.
return new KeyValueEntry(new StaticArrayBuffer(key), new StaticArrayBuffer(cache.get(key)));
}
@Override
public void close() {
// nothing to do
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
/** Locking is not supported by this store; the call is silently ignored. */
@Override
public void acquireLock(StaticBuffer key, StaticBuffer expectedValue, StoreTransaction txh) throws StorageException {
// not supported
}
/** Local key partitioning is not available for this store. */
@Override
public StaticBuffer[] getLocalKeyPartition() throws StorageException {
throw new UnsupportedOperationException();
}
/** The store name is the name of the underlying Hazelcast map. */
@Override
public String getName() {
return cache.getName();
}
/** Removes all entries from the backing map. */
@Override
public void clearStore() {
cache.clear();
}
/** Destroys the distributed map, releasing its cluster-wide resources. */
@Override
public void close() throws StorageException {
cache.destroy();
}
}
| reverted change in HazelcastCacheStore that caused tests to fail.
| titan-hazelcast/src/main/java/com/thinkaurelius/titan/diskstorage/hazelcast/HazelcastCacheStore.java | reverted change in HazelcastCacheStore that caused tests to fail. |
|
Java | apache-2.0 | bd01bb57f0fddf1c84326f7ba14325eafc97948b | 0 | fubuki/elasticsearch,vorce/es-metrics,aparo/elasticsearch,vorce/es-metrics,aparo/elasticsearch,fubuki/elasticsearch,aparo/elasticsearch,fubuki/elasticsearch,aparo/elasticsearch,fubuki/elasticsearch,vorce/es-metrics,fubuki/elasticsearch,vorce/es-metrics,fubuki/elasticsearch,aparo/elasticsearch,vorce/es-metrics,aparo/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.aliases;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesMissingException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.facet.FacetBuilders;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Sets.newHashSet;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.test.hamcrest.CollectionAssertions.hasKey;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class IndexAliasesTests extends ElasticsearchIntegrationTest {
/**
 * Verifies that an alias routes indexing requests to the index it currently
 * points at, and that re-pointing the alias re-routes subsequent requests.
 */
@Test
public void testAliases() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1]");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
logger.info("--> indexing against [alias1], should work now");
IndexResponse indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test"));
logger.info("--> creating index [test_x]");
createIndex("test_x");
ensureGreen();
logger.info("--> remove [alias1], Aliasing index [test_x] with [alias1]");
assertAcked(admin().indices().prepareAliases().removeAlias("test", "alias1").addAlias("test_x", "alias1"));
logger.info("--> indexing against [alias1], should work against [test_x]");
indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test_x"));
}
/**
 * Verifies that adding an alias with a broken filter is rejected: both
 * malformed JSON and well-formed JSON that is not a valid filter must fail
 * with an ElasticsearchIllegalArgumentException carrying the parse message.
 */
@Test
public void testFailedFilter() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
//invalid filter, invalid json
IndicesAliasesRequestBuilder indicesAliasesRequestBuilder = admin().indices().prepareAliases().addAlias("test", "alias1", "abcde");
try {
indicesAliasesRequestBuilder.get();
fail("put alias should have been failed due to invalid filter");
} catch (ElasticsearchIllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("failed to parse filter for alias [alias1]"));
}
//valid json , invalid filter
indicesAliasesRequestBuilder = admin().indices().prepareAliases().addAlias("test", "alias1", "{ \"test\": {} }");
try {
indicesAliasesRequestBuilder.get();
fail("put alias should have been failed due to invalid filter");
} catch (ElasticsearchIllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("failed to parse filter for alias [alias1]"));
}
}
/**
 * Verifies that a filter supplied with an alias is stored verbatim in the
 * index metadata of the cluster state.
 */
@Test
public void testFilteringAliases() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1] and filter [user:kimchy]");
FilterBuilder filter = termFilter("user", "kimchy");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", filter));
// For now just making sure that filter was stored with the alias
logger.info("--> making sure that filter was stored with alias [alias1] and filter [user:kimchy]");
ClusterState clusterState = admin().cluster().prepareState().get().getState();
IndexMetaData indexMd = clusterState.metaData().index("test");
assertThat(indexMd.aliases().get("alias1").filter().string(), equalTo("{\"term\":{\"user\":\"kimchy\"}}"));
}
/**
 * Verifies that an alias can be created with an empty ({}) filter source.
 */
@Test
public void testEmptyFilter() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1] and empty filter");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", "{}"));
}
/**
 * Exercises searching a single index through plain and filtering aliases:
 * filtered hits, wildcard alias resolution, sorting, global vs. non-global
 * facets, and combining aliases with the concrete index name.
 */
@Test
public void testSearchingFilteringAliasesSingleIndex() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> adding filtering aliases to index [test]");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias2"));
assertAcked(admin().indices().prepareAliases().addAlias("test", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test", "bars", termFilter("name", "bar")));
assertAcked(admin().indices().prepareAliases().addAlias("test", "tests", termFilter("name", "test")));
logger.info("--> indexing against [test]");
client().index(indexRequest("test").type("type1").id("1").source(source("1", "foo test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("2").source(source("2", "bar test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("3").source(source("3", "baz test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("4").source(source("4", "something else")).refresh(true)).actionGet();
logger.info("--> checking single filtering alias search");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");
logger.info("--> checking single filtering alias wildcard search");
searchResponse = client().prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3");
logger.info("--> checking single filtering alias search with sort");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_uid", SortOrder.ASC).get();
assertHits(searchResponse.getHits(), "1", "2", "3");
logger.info("--> checking single filtering alias search with global facets");
// Global facets see all 4 docs even though the alias filter narrows the hits.
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
.get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
logger.info("--> checking single filtering alias search with global facets and sort");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
.addSort("_uid", SortOrder.ASC).get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
logger.info("--> checking single filtering alias search with non-global facets");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(false))
.addSort("_uid", SortOrder.ASC).get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(2));
searchResponse = client().prepareSearch("foos", "bars").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2");
logger.info("--> checking single non-filtering alias search");
searchResponse = client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking non-filtering alias and filtering alias search");
searchResponse = client().prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking index and filtering alias search");
searchResponse = client().prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking index and alias wildcard search");
searchResponse = client().prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
}
/**
 * Exercises filtering aliases spanning two indices, including mixing a
 * cross-index filtered alias with a concrete index name and with plain
 * aliases, checking both search hits and count totals.
 */
@Test
public void testSearchingFilteringAliasesTwoIndices() throws Exception {
logger.info("--> creating index [test1]");
createIndex("test1");
logger.info("--> creating index [test2]");
createIndex("test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
logger.info("--> indexing against [test2]");
client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
refresh();
logger.info("--> checking filtering alias for two indices");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "5");
assertThat(client().prepareCount("foos").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(2L));
logger.info("--> checking filtering alias for one index");
searchResponse = client().prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "2");
assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
logger.info("--> checking filtering alias for two indices and one complete index");
searchResponse = client().prepareSearch("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
assertThat(client().prepareCount("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for one index");
searchResponse = client().prepareSearch("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
assertThat(client().prepareCount("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertThat(searchResponse.getHits().totalHits(), equalTo(8L));
assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get();
assertHits(searchResponse.getHits(), "4", "8");
assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get().getCount(), equalTo(2L));
}
/**
 * Exercises combinations of filtering aliases spanning three indices,
 * verifying hit sets and counts when multiple filtered aliases (and a
 * concrete index) are queried together.
 */
@Test
public void testSearchingFilteringAliasesMultipleIndices() throws Exception {
logger.info("--> creating indices");
createIndex("test1", "test2", "test3");
ensureGreen();
logger.info("--> adding aliases to indices");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "alias12"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "alias12"));
logger.info("--> adding filtering aliases to indices");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter1", termFilter("name", "test1")));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "filter23", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter23", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter13", termFilter("name", "baz")));
assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter13", termFilter("name", "baz")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("11").source(source("11", "foo test1"))).get();
client().index(indexRequest("test1").type("type1").id("12").source(source("12", "bar test1"))).get();
client().index(indexRequest("test1").type("type1").id("13").source(source("13", "baz test1"))).get();
client().index(indexRequest("test2").type("type1").id("21").source(source("21", "foo test2"))).get();
client().index(indexRequest("test2").type("type1").id("22").source(source("22", "bar test2"))).get();
client().index(indexRequest("test2").type("type1").id("23").source(source("23", "baz test2"))).get();
client().index(indexRequest("test3").type("type1").id("31").source(source("31", "foo test3"))).get();
client().index(indexRequest("test3").type("type1").id("32").source(source("32", "bar test3"))).get();
client().index(indexRequest("test3").type("type1").id("33").source(source("33", "baz test3"))).get();
refresh();
logger.info("--> checking filtering alias for multiple indices");
SearchResponse searchResponse = client().prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "31", "13", "33");
assertThat(client().prepareCount("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
searchResponse = client().prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "31", "11", "12", "13");
assertThat(client().prepareCount("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
searchResponse = client().prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "33");
assertThat(client().prepareCount("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
searchResponse = client().prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "21", "31", "33");
assertThat(client().prepareCount("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
searchResponse = client().prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "22", "23", "31", "13", "33");
assertThat(client().prepareCount("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
searchResponse = client().prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "21", "22", "23", "31", "33");
assertThat(client().prepareCount("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
}
/**
 * Verifies delete-by-query honors alias filters: deleting through a filtered
 * alias removes only documents matching both the query and the alias filter,
 * across one alias, a multi-index alias, and an alias mixed with an index.
 */
@Test
public void testDeletingByQueryFilteringAliases() throws Exception {
// Fix: log message previously read "[test1] and [test2" (missing bracket).
logger.info("--> creating index [test1] and [test2]");
createIndex("test1", "test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "tests", termFilter("name", "test")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "tests", termFilter("name", "test")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
logger.info("--> indexing against [test2]");
client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
refresh();
logger.info("--> checking counts before delete");
assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
logger.info("--> delete by query from a single alias");
client().prepareDeleteByQuery("bars").setQuery(QueryBuilders.termQuery("name", "test")).get();
logger.info("--> verify that only one record was deleted");
assertThat(client().prepareCount("test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(3L));
logger.info("--> delete by query from an aliases pointing to two indices");
client().prepareDeleteByQuery("foos").setQuery(QueryBuilders.matchAllQuery()).get();
logger.info("--> verify that proper records were deleted");
SearchResponse searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "3", "4", "6", "7", "8");
logger.info("--> delete by query from an aliases and an index");
client().prepareDeleteByQuery("tests", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
logger.info("--> verify that proper records were deleted");
searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "4");
}
/**
 * Adds several (plain and filtering) aliases to two indices, removes them
 * all in one request, and verifies none of them exist afterwards.
 */
@Test
public void testDeleteAliases() throws Exception {
logger.info("--> creating index [test1] and [test2]");
createIndex("test1", "test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1")
.addAlias("test1", "aliasToTests")
.addAlias("test1", "foos", termFilter("name", "foo"))
.addAlias("test1", "bars", termFilter("name", "bar"))
.addAlias("test1", "tests", termFilter("name", "test")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2")
.addAlias("test2", "aliasToTests")
.addAlias("test2", "foos", termFilter("name", "foo"))
.addAlias("test2", "tests", termFilter("name", "test")));
String[] indices = {"test1", "test2"};
String[] aliases = {"aliasToTest1", "foos", "bars", "tests", "aliasToTest2", "aliasToTests"};
// Consistency fix: assert the removal is acknowledged like every other
// alias operation in this suite, instead of ignoring the response.
assertAcked(admin().indices().prepareAliases().removeAlias(indices, aliases));
AliasesExistResponse response = admin().indices().prepareAliasesExist(aliases).get();
assertThat(response.exists(), equalTo(false));
}
/**
 * Verifies that alias creation waits for the cluster state to be applied,
 * so indexing through a freshly created alias succeeds (multi-shard index).
 */
@Test
public void testWaitForAliasCreationMultipleShards() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
for (int i = 0; i < 10; i++) {
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias" + i));
client().index(indexRequest("alias" + i).type("type1").id("1").source(source("1", "test"))).get();
}
}
/**
 * Same as the multi-shard variant, but on a single-shard, zero-replica index.
 */
@Test
public void testWaitForAliasCreationSingleShard() throws Exception {
logger.info("--> creating index [test]");
// NOTE(review): camelCase keys "index.numberOfReplicas"/"index.numberOfShards"
// rely on the settings parser accepting camel case — verify they are honored.
assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0).put("index.numberOfShards", 1))).get());
ensureGreen();
for (int i = 0; i < 10; i++) {
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias" + i));
client().index(indexRequest("alias" + i).type("type1").id("1").source(source("1", "test"))).get();
}
}
/**
 * Submits many concurrent alias-creation tasks and verifies that each alias
 * is usable for indexing as soon as its creation call returns.
 */
@Test
public void testWaitForAliasSimultaneousUpdate() throws Exception {
final int aliasCount = 10;
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
ExecutorService executor = Executors.newFixedThreadPool(aliasCount);
for (int i = 0; i < aliasCount; i++) {
final String aliasName = "alias" + i;
executor.submit(new Runnable() {
@Override
public void run() {
// Index through the alias immediately after creating it; this only
// works if alias creation waits for the cluster state to be applied.
assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName));
client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"))).actionGet();
}
});
}
executor.shutdown();
boolean done = executor.awaitTermination(10, TimeUnit.SECONDS);
// Bug fix: force-stop stragglers BEFORE asserting. Previously shutdownNow()
// was placed after assertThat(done, ...), so on a timeout the assertion
// threw first and the cleanup was unreachable, leaking worker threads.
if (!done) {
executor.shutdownNow();
}
assertThat(done, equalTo(true));
}
/**
 * Verifies that re-adding an identical alias (with or without a filter) is a
 * fast no-op, that changing the filter actually updates the stored alias
 * metadata, and that each operation completes within the request timeout.
 */
@Test
public void testSameAlias() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> creating alias1 ");
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1")));
TimeValue timeout = TimeValue.timeValueSeconds(2);
logger.info("--> recreating alias1 ");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1").setTimeout(timeout)));
// Each operation is timed individually; lastTaskTime covers the span
// between the preceding start() and this stop().
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> modifying alias1 to have a filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> recreating alias1 with the same filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> recreating alias1 with a different filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "bar")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> verify that filter was updated");
AliasMetaData aliasMetaData = cluster().clusterService().state().metaData().aliases().get("alias1").get("test");
assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":\"bar\"}}"));
logger.info("--> deleting alias1");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1").setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
}
/**
 * Removing an alias that does not exist must fail with
 * {@link AliasesMissingException} (surfaced as HTTP 404 over REST).
 */
@Test(expected = AliasesMissingException.class)
public void testIndicesRemoveNonExistingAliasResponds404() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> deleting alias1 which does not exist");
// Expected to throw before assertAcked can complete.
assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1")));
}
/**
 * End-to-end coverage of the get-aliases and aliases-exist APIs: exact alias
 * names, wildcards on alias names, wildcards on index names, aliases carrying
 * filters and routing, and finally alias removal.
 */
@Test
public void testIndicesGetAliases() throws Exception {
    // single shard / no replicas keeps the five throwaway indices cheap and quickly green
    Settings indexSettings = ImmutableSettings.settingsBuilder()
            .put("index.number_of_shards", 1)
            .put("index.number_of_replicas", 0)
            .build();
    logger.info("--> creating indices [foobar, test, test123, foobarbaz, bazbar]");
    assertAcked(prepareCreate("foobar").setSettings(indexSettings));
    assertAcked(prepareCreate("test").setSettings(indexSettings));
    assertAcked(prepareCreate("test123").setSettings(indexSettings));
    assertAcked(prepareCreate("foobarbaz").setSettings(indexSettings));
    assertAcked(prepareCreate("bazbar").setSettings(indexSettings));
    ensureGreen();
    logger.info("--> creating aliases [alias1, alias2]");
    assertAcked(admin().indices().prepareAliases().addAlias("foobar", "alias1").addAlias("foobar", "alias2"));
    // exact alias name: only foobar/alias1, with no filter and no routing
    logger.info("--> getting alias1");
    GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // wildcard on the alias name: both aliases of foobar come back
    // NOTE(review): the assertions rely on alias2 being listed before alias1 — ordering assumed stable
    logger.info("--> getting all aliases that start with alias*");
    getResponse = admin().indices().prepareGetAliases("alias*").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias2"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias1"));
    assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("alias*").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // NOTE(review): log message says [bar, baz, foo] but the filtered alias actually created is "bac"
    logger.info("--> creating aliases [bar, baz, foo]");
    assertAcked(admin().indices().prepareAliases()
            .addAlias("bazbar", "bar")
            .addAlias("bazbar", "bac", termFilter("field", "value"))
            .addAlias("foobar", "foo"));
    // "bac" on foobar additionally carries routing "bla" (applies to both index and search routing)
    assertAcked(admin().indices().prepareAliases()
            .addAliasAction(new AliasAction(AliasAction.Type.ADD, "foobar", "bac").routing("bla")));
    // exact alias names restricted to one index
    logger.info("--> getting bar and baz for index bazbar");
    getResponse = admin().indices().prepareGetAliases("bar", "bac").addIndices("bazbar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("bar", "bac")
            .addIndices("bazbar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // wildcard alias name combined with wildcard index name
    logger.info("--> getting *b* for index baz*");
    getResponse = admin().indices().prepareGetAliases("*b*").addIndices("baz*").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("*b*")
            .addIndices("baz*").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // b* across *bar hits both indices; the foobar copy of "bac" exposes the "bla" routing
    logger.info("--> getting *b* for index *bar");
    getResponse = admin().indices().prepareGetAliases("b*").addIndices("*bar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), equalTo("bla"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), equalTo("bla"));
    existsResponse = admin().indices().prepareAliasesExist("b*")
            .addIndices("*bar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting f* for index *bar");
    getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("f*")
            .addIndices("*bar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // alias at work: the index wildcard *bac is resolved through the "bac" alias onto foobar
    logger.info("--> getting f* for index *bac");
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("foo")
            .addIndices("*bac").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting foo for index foobar");
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("foo")
            .addIndices("foobar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // alias at work again: "*" across the alias-resolved indices returns everything
    // (foobar has alias1, alias2, foo, bac -> 4; bazbar has bar, bac -> 2)
    logger.info("--> getting * for index *bac");
    getResponse = admin().indices().prepareGetAliases("*").addIndices("*bac").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(2));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(4));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    existsResponse = admin().indices().prepareAliasesExist("*")
            .addIndices("*bac").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // after removal, neither get nor exists must see the alias anymore
    assertAcked(admin().indices().prepareAliases()
            .removeAlias("foobar", "foo"));
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
    assertThat(getResponse.getAliases().isEmpty(), equalTo(true));
    existsResponse = admin().indices().prepareAliasesExist("foo").addIndices("foobar").get();
    assertThat(existsResponse.exists(), equalTo(false));
}
/** Adding an alias for a {@code null} index name must fail with {@link IndexMissingException}. */
@Test(expected = IndexMissingException.class)
public void testAddAliasNullIndex() {
    AliasAction addWithNullIndex = AliasAction.newAddAliasAction(null, "alias1");
    admin().indices().prepareAliases().addAliasAction(addWithNullIndex).get();
}
/** Adding an alias for an empty index name must fail request validation. */
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasEmptyIndex() {
    AliasAction addWithEmptyIndex = AliasAction.newAddAliasAction("", "alias1");
    admin().indices().prepareAliases().addAliasAction(addWithEmptyIndex).get();
}
/** Adding a {@code null} alias name must fail request validation. */
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasNullAlias() {
    AliasAction addWithNullAlias = AliasAction.newAddAliasAction("index1", null);
    admin().indices().prepareAliases().addAliasAction(addWithNullAlias).get();
}
/** Adding an empty alias name must fail request validation. */
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasEmptyAlias() {
    AliasAction addWithEmptyAlias = AliasAction.newAddAliasAction("index1", "");
    admin().indices().prepareAliases().addAliasAction(addWithEmptyAlias).get();
}
/**
 * Adding with both index and alias {@code null} must be rejected with exactly
 * one validation error.
 */
@Test
public void testAddAliasNullAliasNullIndex() {
    try {
        admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, null)).get();
        // fail() is the idiomatic JUnit way to flag a missing exception
        // (previously assertTrue(msg, false), inconsistent with the sibling remove test)
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(1));
    }
}
/**
 * Adding with both index and alias empty must be rejected with two validation
 * errors (one per invalid field).
 */
@Test
public void testAddAliasEmptyAliasEmptyIndex() {
    try {
        admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "")).get();
        // fail() instead of assertTrue(msg, false), for consistency with the remove-variant tests
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
/**
 * Removing an alias with a {@code null} index name must fail request validation.
 * Renamed from {@code tesRemoveAliasNullIndex} (typo); JUnit discovers tests by
 * annotation, so the rename is behavior-neutral.
 */
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasNullIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, "alias1")).get();
}
/**
 * Removing an alias with an empty index name must fail request validation.
 * Renamed from {@code tesRemoveAliasEmptyIndex} (typo).
 */
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasEmptyIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "alias1")).get();
}
/**
 * Removing a {@code null} alias name must fail request validation.
 * Renamed from {@code tesRemoveAliasNullAlias} (typo).
 */
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasNullAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", null)).get();
}
/**
 * Removing an empty alias name must fail request validation.
 * Renamed from {@code tesRemoveAliasEmptyAlias} (typo).
 */
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasEmptyAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", "")).get();
}
/**
 * Removing with both index and alias {@code null} must be rejected with two
 * validation errors.
 */
@Test
public void testRemoveAliasNullAliasNullIndex() {
    try {
        AliasAction removeBothNull = AliasAction.newRemoveAliasAction(null, null);
        admin().indices().prepareAliases().addAliasAction(removeBothNull).get();
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
/**
 * Removing with both index and alias empty must be rejected with two
 * validation errors.
 */
@Test
public void testRemoveAliasEmptyAliasEmptyIndex() {
    try {
        // bug fix: this remove-variant test previously invoked newAddAliasAction
        // (copy-paste from testAddAliasEmptyAliasEmptyIndex), so the remove path was never exercised
        admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "")).get();
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
/**
 * A get-aliases request without any alias name must return the aliases of
 * every index that has one.
 */
@Test
public void testGetAllAliasesWorks() {
    createIndex("index1");
    createIndex("index2");
    ensureYellow();
    assertAcked(admin().indices().prepareAliases().addAlias("index1", "alias1").addAlias("index2", "alias2"));
    GetAliasesResponse response = admin().indices().prepareGetAliases().get();
    assertThat(response.getAliases(), hasKey("index1"));
    // bug fix: the second assertion duplicated the "index1" check and never verified index2
    assertThat(response.getAliases(), hasKey("index2"));
}
/**
 * Aliases declared directly on the create-index request builder must all be
 * applied; verified via {@link #checkAliases()}.
 */
@Test
public void testCreateIndexWithAliases() throws Exception {
    // one plain alias, one with a filter, one with index/search routing
    Alias plain = new Alias("alias1");
    Alias filtered = new Alias("alias2").filter(FilterBuilders.missingFilter("field"));
    Alias routed = new Alias("alias3").indexRouting("index").searchRouting("search");
    assertAcked(prepareCreate("test").addAlias(plain).addAlias(filtered).addAlias(routed));
    checkAliases();
}
/**
 * Aliases can also be supplied inside the full create-index source under the
 * top-level "aliases" key; verified via {@link #checkAliases()}.
 */
@Test
public void testCreateIndexWithAliasesInSource() throws Exception {
    assertAcked(prepareCreate("test").setSource("{\n" +
            " \"aliases\" : {\n" +
            " \"alias1\" : {},\n" +
            " \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
            " \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
            " }\n" +
            "}"));
    checkAliases();
}
/**
 * Aliases can be supplied through the dedicated setAliases(...) source, i.e.
 * without the surrounding "aliases" wrapper; verified via {@link #checkAliases()}.
 */
@Test
public void testCreateIndexWithAliasesSource() throws Exception {
    assertAcked(prepareCreate("test").setAliases("{\n" +
            " \"alias1\" : {},\n" +
            " \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
            " \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
            "}"));
    checkAliases();
}
/**
 * Alias filters supplied at index-creation time are validated up front:
 * both malformed JSON and well-formed JSON that is not a filter are rejected.
 */
@Test
public void testCreateIndexWithAliasesFilterNotValid() {
    // case 1: malformed JSON in the alias filter
    CreateIndexRequestBuilder invalidFilterRequest = prepareCreate("test").addAlias(new Alias("alias2").filter("f"));
    try {
        invalidFilterRequest.get();
        fail("create index should have failed due to invalid alias filter");
    } catch (ElasticsearchIllegalArgumentException e) {
        assertThat(e.getMessage(), equalTo("failed to parse filter for alias [alias2]"));
    }
    // case 2: syntactically valid JSON that does not describe a known filter
    invalidFilterRequest = prepareCreate("test").addAlias(new Alias("alias2").filter("{ \"test\": {} }"));
    try {
        invalidFilterRequest.get();
        fail("create index should have failed due to invalid alias filter");
    } catch (ElasticsearchIllegalArgumentException e) {
        assertThat(e.getMessage(), equalTo("failed to parse filter for alias [alias2]"));
    }
}
/**
 * Shared assertions for the create-index-with-aliases tests: alias1 is plain,
 * alias2 carries a filter, and alias3 carries index/search routing.
 */
private void checkAliases() {
    GetAliasesResponse response = admin().indices().prepareGetAliases("alias1").get();
    assertThat(response.getAliases().get("test").size(), equalTo(1));
    AliasMetaData alias1 = response.getAliases().get("test").get(0);
    assertThat(alias1.alias(), equalTo("alias1"));
    assertThat(alias1.filter(), nullValue());
    assertThat(alias1.indexRouting(), nullValue());
    assertThat(alias1.searchRouting(), nullValue());

    response = admin().indices().prepareGetAliases("alias2").get();
    assertThat(response.getAliases().get("test").size(), equalTo(1));
    AliasMetaData alias2 = response.getAliases().get("test").get(0);
    assertThat(alias2.alias(), equalTo("alias2"));
    assertThat(alias2.filter(), notNullValue());
    assertThat(alias2.indexRouting(), nullValue());
    assertThat(alias2.searchRouting(), nullValue());

    response = admin().indices().prepareGetAliases("alias3").get();
    assertThat(response.getAliases().get("test").size(), equalTo(1));
    AliasMetaData alias3 = response.getAliases().get("test").get(0);
    assertThat(alias3.alias(), equalTo("alias3"));
    assertThat(alias3.filter(), nullValue());
    assertThat(alias3.indexRouting(), equalTo("index"));
    assertThat(alias3.searchRouting(), equalTo("search"));
}
/**
 * Asserts that {@code hits} contains exactly the documents with the given ids,
 * in any order.
 */
private void assertHits(SearchHits hits, String... ids) {
    assertThat(hits.totalHits(), equalTo((long) ids.length));
    Set<String> actualIds = newHashSet();
    SearchHit[] allHits = hits.getHits();
    for (int i = 0; i < allHits.length; i++) {
        actualIds.add(allHits[i].id());
    }
    assertThat(actualIds, containsInAnyOrder(ids));
}
/** Builds a minimal JSON document with the given id and name values. */
private String source(String id, String nameValue) {
    StringBuilder doc = new StringBuilder();
    doc.append("{ \"id\" : \"").append(id).append("\", \"name\" : \"").append(nameValue).append("\" }");
    return doc.toString();
}
}
| src/test/java/org/elasticsearch/aliases/IndexAliasesTests.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.aliases;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesMissingException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.facet.FacetBuilders;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Sets.newHashSet;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.test.hamcrest.CollectionAssertions.hasKey;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class IndexAliasesTests extends ElasticsearchIntegrationTest {
/**
 * An alias can be created, used for indexing, and atomically moved from one
 * index to another; writes through the alias then land in the new index.
 */
@Test
public void testAliases() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> aliasing index [test] with [alias1]");
    assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
    logger.info("--> indexing against [alias1], should work now");
    IndexResponse response = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
    assertThat(response.getIndex(), equalTo("test"));
    logger.info("--> creating index [test_x]");
    createIndex("test_x");
    ensureGreen();
    // remove + add in a single request swaps the alias atomically
    logger.info("--> remove [alias1], Aliasing index [test_x] with [alias1]");
    assertAcked(admin().indices().prepareAliases().removeAlias("test", "alias1").addAlias("test_x", "alias1"));
    logger.info("--> indexing against [alias1], should work against [test_x]");
    response = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
    assertThat(response.getIndex(), equalTo("test_x"));
}
/** An alias with a malformed filter string must be rejected. */
@Test
public void testFailedFilter() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> aliasing index [test] with [alias1] and filter [t]");
    try {
        admin().indices().prepareAliases().addAlias("test", "alias1", "{ t }").get();
        fail();
    } catch (Exception e) {
        // all is well: the malformed filter was rejected
        // (fail() throws AssertionError, which this catch does not swallow)
    }
}
/**
 * A filter supplied when adding an alias must be stored verbatim in the
 * index metadata of the cluster state.
 */
@Test
public void testFilteringAliases() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> aliasing index [test] with [alias1] and filter [user:kimchy]");
    FilterBuilder userFilter = termFilter("user", "kimchy");
    assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", userFilter));
    // For now just making sure that filter was stored with the alias
    logger.info("--> making sure that filter was stored with alias [alias1] and filter [user:kimchy]");
    ClusterState state = admin().cluster().prepareState().get().getState();
    IndexMetaData testIndexMetaData = state.metaData().index("test");
    assertThat(testIndexMetaData.aliases().get("alias1").filter().string(), equalTo("{\"term\":{\"user\":\"kimchy\"}}"));
}
/** An empty JSON object ("{}") must be accepted as an alias filter. */
@Test
public void testEmptyFilter() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> aliasing index [test] with [alias1] and empty filter");
    assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", "{}"));
}
/**
 * Filtered and unfiltered aliases on a single index: searching through a
 * filtering alias must only return documents matching its filter, while
 * combining with an unfiltered alias or the index itself widens the result.
 */
@Test
public void testSearchingFilteringAliasesSingleIndex() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    // alias1/alias2 are unfiltered; foos/bars/tests filter on the "name" field
    logger.info("--> adding filtering aliases to index [test]");
    assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
    assertAcked(admin().indices().prepareAliases().addAlias("test", "alias2"));
    assertAcked(admin().indices().prepareAliases().addAlias("test", "foos", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test", "bars", termFilter("name", "bar")));
    assertAcked(admin().indices().prepareAliases().addAlias("test", "tests", termFilter("name", "test")));
    logger.info("--> indexing against [test]");
    client().index(indexRequest("test").type("type1").id("1").source(source("1", "foo test")).refresh(true)).actionGet();
    client().index(indexRequest("test").type("type1").id("2").source(source("2", "bar test")).refresh(true)).actionGet();
    client().index(indexRequest("test").type("type1").id("3").source(source("3", "baz test")).refresh(true)).actionGet();
    client().index(indexRequest("test").type("type1").id("4").source(source("4", "something else")).refresh(true)).actionGet();
    logger.info("--> checking single filtering alias search");
    SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1");
    logger.info("--> checking single filtering alias wildcard search");
    searchResponse = client().prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1");
    searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3");
    logger.info("--> checking single filtering alias search with sort");
    searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_uid", SortOrder.ASC).get();
    assertHits(searchResponse.getHits(), "1", "2", "3");
    // global facets see past the alias filter (4 entries here vs 2 for the non-global case below)
    logger.info("--> checking single filtering alias search with global facets");
    searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
            .addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
            .get();
    assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
    logger.info("--> checking single filtering alias search with global facets and sort");
    searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
            .addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
            .addSort("_uid", SortOrder.ASC).get();
    assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
    logger.info("--> checking single filtering alias search with non-global facets");
    searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
            .addFacet(FacetBuilders.termsFacet("test").field("name").global(false))
            .addSort("_uid", SortOrder.ASC).get();
    assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(2));
    // two filtering aliases combine as a union of their matches
    searchResponse = client().prepareSearch("foos", "bars").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2");
    logger.info("--> checking single non-filtering alias search");
    searchResponse = client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4");
    logger.info("--> checking non-filtering alias and filtering alias search");
    searchResponse = client().prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4");
    logger.info("--> checking index and filtering alias search");
    searchResponse = client().prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4");
    logger.info("--> checking index and alias wildcard search");
    searchResponse = client().prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4");
}
/**
 * Filtering aliases spanning two indices: a shared filtered alias ("foos")
 * resolves against both indices, and mixing it with concrete indices or
 * unfiltered aliases widens the result to their union.
 */
@Test
public void testSearchingFilteringAliasesTwoIndices() throws Exception {
    logger.info("--> creating index [test1]");
    createIndex("test1");
    logger.info("--> creating index [test2]");
    createIndex("test2");
    ensureGreen();
    // aliasToTest1/aliasToTests are unfiltered; foos/bars filter on "name"
    logger.info("--> adding filtering aliases to index [test1]");
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
    logger.info("--> adding filtering aliases to index [test2]");
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
    logger.info("--> indexing against [test1]");
    client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
    client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
    client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
    client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
    logger.info("--> indexing against [test2]");
    client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
    client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
    client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
    client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
    refresh();
    logger.info("--> checking filtering alias for two indices");
    SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "5");
    assertThat(client().prepareCount("foos").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(2L));
    logger.info("--> checking filtering alias for one index");
    searchResponse = client().prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "2");
    assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
    // the concrete index contributes all of its docs; the filtered alias still filters the other index
    logger.info("--> checking filtering alias for two indices and one complete index");
    searchResponse = client().prepareSearch("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
    assertThat(client().prepareCount("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
    logger.info("--> checking filtering alias for two indices and non-filtering alias for one index");
    searchResponse = client().prepareSearch("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
    assertThat(client().prepareCount("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
    logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
    searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
    assertThat(searchResponse.getHits().totalHits(), equalTo(8L));
    assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
    logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
    searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get();
    assertHits(searchResponse.getHits(), "4", "8");
    assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get().getCount(), equalTo(2L));
}
@Test
// Verifies search and count across several filtering aliases, each spanning one or
// more indices with a per-index term filter: results must be the union of the
// documents matched by each alias' filter on its own indices.
public void testSearchingFilteringAliasesMultipleIndices() throws Exception {
    logger.info("--> creating indices");
    createIndex("test1", "test2", "test3");
    ensureGreen();
    logger.info("--> adding aliases to indices");
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "alias12"));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "alias12"));
    logger.info("--> adding filtering aliases to indices");
    // filter1  -> test1 docs whose name contains "test1"
    // filter23 -> test2+test3 docs whose name contains "foo"
    // filter13 -> test1+test3 docs whose name contains "baz"
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter1", termFilter("name", "test1")));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "filter23", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter23", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter13", termFilter("name", "baz")));
    assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter13", termFilter("name", "baz")));
    logger.info("--> indexing against [test1]");
    // NOTE(review): despite the log message above, documents go into all three
    // indices. Ids encode index + position (e.g. "21" = test2, first doc).
    client().index(indexRequest("test1").type("type1").id("11").source(source("11", "foo test1"))).get();
    client().index(indexRequest("test1").type("type1").id("12").source(source("12", "bar test1"))).get();
    client().index(indexRequest("test1").type("type1").id("13").source(source("13", "baz test1"))).get();
    client().index(indexRequest("test2").type("type1").id("21").source(source("21", "foo test2"))).get();
    client().index(indexRequest("test2").type("type1").id("22").source(source("22", "bar test2"))).get();
    client().index(indexRequest("test2").type("type1").id("23").source(source("23", "baz test2"))).get();
    client().index(indexRequest("test3").type("type1").id("31").source(source("31", "foo test3"))).get();
    client().index(indexRequest("test3").type("type1").id("32").source(source("32", "bar test3"))).get();
    client().index(indexRequest("test3").type("type1").id("33").source(source("33", "baz test3"))).get();
    refresh();
    logger.info("--> checking filtering alias for multiple indices");
    SearchResponse searchResponse = client().prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "21", "31", "13", "33");
    assertThat(client().prepareCount("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
    searchResponse = client().prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "21", "31", "11", "12", "13");
    assertThat(client().prepareCount("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
    searchResponse = client().prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "11", "12", "13", "33");
    assertThat(client().prepareCount("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
    searchResponse = client().prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "11", "12", "13", "21", "31", "33");
    assertThat(client().prepareCount("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
    // Mixing a filtering alias with a concrete index name: the concrete index
    // contributes all of its documents, unfiltered.
    searchResponse = client().prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "21", "22", "23", "31", "13", "33");
    assertThat(client().prepareCount("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
    searchResponse = client().prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "11", "12", "13", "21", "22", "23", "31", "33");
    assertThat(client().prepareCount("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
}
@Test
// Delete-by-query issued through filtering aliases must only delete documents
// that match both the alias filter and the delete query.
public void testDeletingByQueryFilteringAliases() throws Exception {
    logger.info("--> creating index [test1] and [test2]"); // fixed truncated log message ("[test2")
    createIndex("test1", "test2");
    ensureGreen();
    logger.info("--> adding filtering aliases to index [test1]");
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "tests", termFilter("name", "test")));
    logger.info("--> adding filtering aliases to index [test2]");
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "tests", termFilter("name", "test")));
    logger.info("--> indexing against [test1]");
    client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
    client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
    client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
    client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
    logger.info("--> indexing against [test2]");
    client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
    client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
    client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
    client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
    refresh();
    logger.info("--> checking counts before delete");
    assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
    logger.info("--> delete by query from a single alias");
    // "bars" only exists on test1 and matches doc 2; only that doc may go.
    client().prepareDeleteByQuery("bars").setQuery(QueryBuilders.termQuery("name", "test")).get();
    logger.info("--> verify that only one record was deleted");
    assertThat(client().prepareCount("test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(3L));
    logger.info("--> delete by query from an aliases pointing to two indices");
    // "foos" spans both indices; deletes docs 1 and 5.
    client().prepareDeleteByQuery("foos").setQuery(QueryBuilders.matchAllQuery()).get();
    logger.info("--> verify that proper records were deleted");
    SearchResponse searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "3", "4", "6", "7", "8");
    logger.info("--> delete by query from an aliases and an index");
    // "tests" filters to name:test; "test2" is a plain index, so everything left
    // in test2 goes as well — only doc 4 ("something else" in test1) survives.
    client().prepareDeleteByQuery("tests", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
    logger.info("--> verify that proper records were deleted");
    searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
    assertHits(searchResponse.getHits(), "4");
}
@Test
// Creates a mix of plain and filtering aliases on two indices, removes all of
// them in one request, and verifies none of them resolve any more.
public void testDeleteAliases() throws Exception {
    logger.info("--> creating index [test1] and [test2]");
    createIndex("test1", "test2");
    ensureGreen();

    logger.info("--> adding filtering aliases to index [test1]");
    assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1")
            .addAlias("test1", "aliasToTests")
            .addAlias("test1", "foos", termFilter("name", "foo"))
            .addAlias("test1", "bars", termFilter("name", "bar"))
            .addAlias("test1", "tests", termFilter("name", "test")));

    logger.info("--> adding filtering aliases to index [test2]");
    assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2")
            .addAlias("test2", "aliasToTests")
            .addAlias("test2", "foos", termFilter("name", "foo"))
            .addAlias("test2", "tests", termFilter("name", "test")));

    // Remove every alias from both indices with a single bulk alias request.
    String[] allIndices = {"test1", "test2"};
    String[] allAliases = {"aliasToTest1", "foos", "bars", "tests", "aliasToTest2", "aliasToTests"};
    admin().indices().prepareAliases().removeAlias(allIndices, allAliases).get();

    AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist(allAliases).get();
    assertThat(existsResponse.exists(), equalTo(false));
}
@Test
// Alias creation must block until the alias is usable: each iteration adds an
// alias and immediately indexes through it on a multi-shard index.
public void testWaitForAliasCreationMultipleShards() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    for (int iteration = 0; iteration < 10; iteration++) {
        final String alias = "alias" + iteration;
        assertAcked(admin().indices().prepareAliases().addAlias("test", alias));
        client().index(indexRequest(alias).type("type1").id("1").source(source("1", "test"))).get();
    }
}
@Test
// Same as the multi-shard variant, but on an index with a single shard and no
// replicas: add an alias, then index through it right away.
public void testWaitForAliasCreationSingleShard() throws Exception {
    logger.info("--> creating index [test]");
    assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0).put("index.numberOfShards", 1))).get());
    ensureGreen();
    for (int iteration = 0; iteration < 10; iteration++) {
        final String alias = "alias" + iteration;
        assertAcked(admin().indices().prepareAliases().addAlias("test", alias));
        client().index(indexRequest(alias).type("type1").id("1").source(source("1", "test"))).get();
    }
}
@Test
// Adds aliases from many threads at once; every thread must be able to index
// through its alias immediately after the add-alias call is acknowledged.
public void testWaitForAliasSimultaneousUpdate() throws Exception {
    final int aliasCount = 10;
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    ExecutorService executor = Executors.newFixedThreadPool(aliasCount);
    for (int i = 0; i < aliasCount; i++) {
        final String aliasName = "alias" + i;
        // NOTE(review): failures (including assertion errors) inside these
        // Runnables are swallowed by the executor since the returned Futures are
        // discarded — consider collecting and get()-ing them. TODO confirm intent.
        executor.submit(new Runnable() {
            @Override
            public void run() {
                assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName));
                client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"))).actionGet();
            }
        });
    }
    executor.shutdown();
    boolean done = executor.awaitTermination(10, TimeUnit.SECONDS);
    // Force-stop stragglers *before* failing: previously the shutdownNow() branch
    // was dead code because assertThat(done, ...) threw first on a timeout,
    // leaking the still-running pool.
    if (!done) {
        executor.shutdownNow();
    }
    assertThat(done, equalTo(true));
}
@Test
// Re-adding an alias that already exists (with the same or a different filter)
// must be acknowledged well within the request timeout, and a changed filter
// must actually replace the old one in the cluster state.
public void testSameAlias() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> creating alias1 ");
    assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1")));
    TimeValue timeout = TimeValue.timeValueSeconds(2);
    logger.info("--> recreating alias1 ");
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    // Identical re-creation is a no-op and must return quickly.
    assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1").setTimeout(timeout)));
    assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
    logger.info("--> modifying alias1 to have a filter");
    stopWatch.start();
    assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
    assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
    logger.info("--> recreating alias1 with the same filter");
    stopWatch.start();
    assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
    assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
    logger.info("--> recreating alias1 with a different filter");
    stopWatch.start();
    assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "bar")).setTimeout(timeout)));
    assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
    logger.info("--> verify that filter was updated");
    // Read the alias straight out of the cluster state to confirm the new filter.
    AliasMetaData aliasMetaData = cluster().clusterService().state().metaData().aliases().get("alias1").get("test");
    assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":\"bar\"}}"));
    logger.info("--> deleting alias1");
    stopWatch.start();
    assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1").setTimeout(timeout)));
    assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
}
@Test(expected = AliasesMissingException.class)
// Removing an alias that was never created must fail with AliasesMissingException
// (surfaced as a 404 on the REST layer, per the method name).
public void testIndicesRemoveNonExistingAliasResponds404() throws Exception {
    logger.info("--> creating index [test]");
    createIndex("test");
    ensureGreen();
    logger.info("--> deleting alias1 which does not exist");
    // The exception is expected to be thrown when the request is executed here.
    assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1")));
}
@Test
// Exercises the get-aliases and aliases-exist APIs with concrete names and
// wildcards, on both alias names and index names, including filters and routing.
// Fixes: several log messages named the wrong aliases/patterns ("bar and baz",
// "[bar, baz, foo]", "*b*", "f*") — corrected to match the actual requests.
public void testIndicesGetAliases() throws Exception {
    Settings indexSettings = ImmutableSettings.settingsBuilder()
            .put("index.number_of_shards", 1)
            .put("index.number_of_replicas", 0)
            .build();
    logger.info("--> creating indices [foobar, test, test123, foobarbaz, bazbar]");
    assertAcked(prepareCreate("foobar").setSettings(indexSettings));
    assertAcked(prepareCreate("test").setSettings(indexSettings));
    assertAcked(prepareCreate("test123").setSettings(indexSettings));
    assertAcked(prepareCreate("foobarbaz").setSettings(indexSettings));
    assertAcked(prepareCreate("bazbar").setSettings(indexSettings));
    ensureGreen();
    logger.info("--> creating aliases [alias1, alias2]");
    assertAcked(admin().indices().prepareAliases().addAlias("foobar", "alias1").addAlias("foobar", "alias2"));
    logger.info("--> getting alias1");
    GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting all aliases that start with alias*");
    getResponse = admin().indices().prepareGetAliases("alias*").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias2"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias1"));
    assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("alias*").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> creating aliases [bar, bac, foo]");
    assertAcked(admin().indices().prepareAliases()
            .addAlias("bazbar", "bar")
            .addAlias("bazbar", "bac", termFilter("field", "value"))
            .addAlias("foobar", "foo"));
    // "bac" also exists on foobar, with explicit routing.
    assertAcked(admin().indices().prepareAliases()
            .addAliasAction(new AliasAction(AliasAction.Type.ADD, "foobar", "bac").routing("bla")));
    logger.info("--> getting bar and bac for index bazbar");
    getResponse = admin().indices().prepareGetAliases("bar", "bac").addIndices("bazbar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("bar", "bac")
            .addIndices("bazbar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting *b* for index baz*");
    getResponse = admin().indices().prepareGetAliases("*b*").addIndices("baz*").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("*b*")
            .addIndices("baz*").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting b* for index *bar");
    getResponse = admin().indices().prepareGetAliases("b*").addIndices("*bar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
    assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
    assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
    // The foobar "bac" alias carries the routing set via AliasAction above.
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("bac"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), equalTo("bla"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), equalTo("bla"));
    existsResponse = admin().indices().prepareAliasesExist("b*")
            .addIndices("*bar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting f* for index *bar");
    getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("f*")
            .addIndices("*bar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // Index patterns also match alias names ("*bac" resolves through the alias).
    logger.info("--> getting foo for index *bac");
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("foo")
            .addIndices("*bac").get();
    assertThat(existsResponse.exists(), equalTo(true));
    logger.info("--> getting foo for index foobar");
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(1));
    assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
    assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
    assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
    existsResponse = admin().indices().prepareAliasesExist("foo")
            .addIndices("foobar").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // alias name resolution at work again
    logger.info("--> getting * for index *bac");
    getResponse = admin().indices().prepareGetAliases("*").addIndices("*bac").get();
    assertThat(getResponse, notNullValue());
    assertThat(getResponse.getAliases().size(), equalTo(2));
    assertThat(getResponse.getAliases().get("foobar").size(), equalTo(4));
    assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
    existsResponse = admin().indices().prepareAliasesExist("*")
            .addIndices("*bac").get();
    assertThat(existsResponse.exists(), equalTo(true));
    // After removal the alias must be gone from both APIs.
    assertAcked(admin().indices().prepareAliases()
            .removeAlias("foobar", "foo"));
    getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
    assertThat(getResponse.getAliases().isEmpty(), equalTo(true));
    existsResponse = admin().indices().prepareAliasesExist("foo").addIndices("foobar").get();
    assertThat(existsResponse.exists(), equalTo(false));
}
@Test(expected = IndexMissingException.class)
// A null index on add-alias surfaces as IndexMissingException — unlike the
// empty-string case below, which fails request validation instead.
public void testAddAliasNullIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
// An empty index name must be rejected by request validation.
public void testAddAliasEmptyIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
// A null alias name must be rejected by request validation.
public void testAddAliasNullAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", null)).get();
}
@Test(expected = ActionRequestValidationException.class)
// An empty alias name must be rejected by request validation.
public void testAddAliasEmptyAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", "")).get();
}
@Test
// Null alias + null index: only the alias is reported by validation (the null
// index takes a different failure path — see testAddAliasNullIndex).
public void testAddAliasNullAliasNullIndex() {
    try {
        admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, null)).get();
        // Use fail() instead of assertTrue(msg, false), consistent with the
        // testRemoveAlias* variants.
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(1));
    }
}
@Test
// Empty alias + empty index: both problems must be reported by validation.
public void testAddAliasEmptyAliasEmptyIndex() {
    try {
        admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "")).get();
        // Use fail() instead of assertTrue(msg, false), consistent with the
        // testRemoveAlias* variants.
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
@Test(expected = ActionRequestValidationException.class)
// NOTE(review): method name is missing the 't' of "test" (tesRemove...); the
// @Test annotation still picks it up. A null index on remove-alias fails
// validation (unlike add-alias, where it is IndexMissingException).
public void tesRemoveAliasNullIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
// NOTE(review): method name is missing the 't' of "test" (tesRemove...).
// An empty index name on remove-alias must be rejected by validation.
public void tesRemoveAliasEmptyIndex() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
// NOTE(review): method name is missing the 't' of "test" (tesRemove...).
// A null alias name on remove-alias must be rejected by validation.
public void tesRemoveAliasNullAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", null)).get();
}
@Test(expected = ActionRequestValidationException.class)
// NOTE(review): method name is missing the 't' of "test" (tesRemove...).
// An empty alias name on remove-alias must be rejected by validation.
public void tesRemoveAliasEmptyAlias() {
    admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", "")).get();
}
@Test
// Null alias + null index on remove-alias: both problems must be reported.
public void testRemoveAliasNullAliasNullIndex() {
    try {
        admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, null)).get();
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
@Test
// Empty alias + empty index on remove-alias: both problems must be reported.
public void testRemoveAliasEmptyAliasEmptyIndex() {
    try {
        // Fixed copy/paste slip: this test previously built newAddAliasAction,
        // so the remove-alias validation path was never exercised.
        admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "")).get();
        fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
    } catch (ActionRequestValidationException e) {
        assertThat(e.validationErrors(), notNullValue());
        assertThat(e.validationErrors().size(), equalTo(2));
    }
}
@Test
// Fetching aliases without any name filter must return the aliases of all indices.
public void testGetAllAliasesWorks() {
    createIndex("index1");
    createIndex("index2");
    ensureYellow();
    assertAcked(admin().indices().prepareAliases().addAlias("index1", "alias1").addAlias("index2", "alias2"));
    GetAliasesResponse response = admin().indices().prepareGetAliases().get();
    assertThat(response.getAliases(), hasKey("index1"));
    // Fixed: the second assertion previously re-checked "index1" and never
    // verified that index2's alias was returned.
    assertThat(response.getAliases(), hasKey("index2"));
}
@Test
// Create-index with Alias objects supplied via the builder API: a plain alias,
// a filtered alias, and an alias with index/search routing.
public void testCreateIndexWithAliases() throws Exception {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias1"))
    .addAlias(new Alias("alias2").filter(FilterBuilders.missingFilter("field")))
    .addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")));
    checkAliases();
}
@Test
// Same three aliases as testCreateIndexWithAliases, but declared inside the
// create-index source under the "aliases" key.
public void testCreateIndexWithAliasesInSource() throws Exception {
    assertAcked(prepareCreate("test").setSource("{\n" +
    "    \"aliases\" : {\n" +
    "        \"alias1\" : {},\n" +
    "        \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
    "        \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
    "    }\n" +
    "}"));
    checkAliases();
}
@Test
// Same three aliases again, this time passed as a standalone aliases document
// via setAliases().
public void testCreateIndexWithAliasesSource() throws Exception {
    assertAcked(prepareCreate("test").setAliases("{\n" +
    "        \"alias1\" : {},\n" +
    "        \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
    "        \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
    "}"));
    checkAliases();
}
@Test (expected = ElasticsearchIllegalArgumentException.class)
// An alias filter that cannot be parsed ("f") must make index creation fail
// with ElasticsearchIllegalArgumentException.
public void testCreateIndexWithAliasesFilterNotValid() {
    prepareCreate("test").addAlias(new Alias("alias1"))
    .addAlias(new Alias("alias2").filter("f"))
    .addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")).get();
}
/**
 * Shared assertions for the testCreateIndexWithAliases* variants: alias1 is a
 * plain alias, alias2 carries a filter, and alias3 carries index/search routing,
 * all on index "test".
 */
private void checkAliases() {
    GetAliasesResponse getAliasesResponse = admin().indices().prepareGetAliases("alias1").get();
    assertThat(getAliasesResponse.getAliases().get("test").size(), equalTo(1));
    AliasMetaData aliasMetaData = getAliasesResponse.getAliases().get("test").get(0);
    assertThat(aliasMetaData.alias(), equalTo("alias1"));
    assertThat(aliasMetaData.filter(), nullValue());
    assertThat(aliasMetaData.indexRouting(), nullValue());
    assertThat(aliasMetaData.searchRouting(), nullValue());
    getAliasesResponse = admin().indices().prepareGetAliases("alias2").get();
    assertThat(getAliasesResponse.getAliases().get("test").size(), equalTo(1));
    aliasMetaData = getAliasesResponse.getAliases().get("test").get(0);
    assertThat(aliasMetaData.alias(), equalTo("alias2"));
    assertThat(aliasMetaData.filter(), notNullValue());
    assertThat(aliasMetaData.indexRouting(), nullValue());
    assertThat(aliasMetaData.searchRouting(), nullValue());
    getAliasesResponse = admin().indices().prepareGetAliases("alias3").get();
    assertThat(getAliasesResponse.getAliases().get("test").size(), equalTo(1));
    aliasMetaData = getAliasesResponse.getAliases().get("test").get(0);
    assertThat(aliasMetaData.alias(), equalTo("alias3"));
    assertThat(aliasMetaData.filter(), nullValue());
    assertThat(aliasMetaData.indexRouting(), equalTo("index"));
    assertThat(aliasMetaData.searchRouting(), equalTo("search"));
}
/**
 * Asserts that {@code hits} contains exactly the documents with the given ids,
 * in any order, and that the total hit count matches.
 */
private void assertHits(SearchHits hits, String... ids) {
    assertThat(hits.totalHits(), equalTo((long) ids.length));
    Set<String> actualIds = newHashSet();
    for (SearchHit searchHit : hits.getHits()) {
        actualIds.add(searchHit.id());
    }
    assertThat(actualIds, containsInAnyOrder(ids));
}
/** Builds the one-line JSON document body used by the tests: {"id": ..., "name": ...}. */
private String source(String id, String nameValue) {
    return String.format("{ \"id\" : \"%s\", \"name\" : \"%s\" }", id, nameValue);
}
}
| [TEST] Added tests for alias filter that is valid json but not a valid filter
| src/test/java/org/elasticsearch/aliases/IndexAliasesTests.java | [TEST] Added tests for alias filter that is valid json but not a valid filter |
|
Java | apache-2.0 | 59c390541ac97a6a0e27682ac59018395e5ee3a6 | 0 | httpcache4j/httpcache4j,httpcache4j/httpcache4j,codehaus/httpcache4j,codehaus/httpcache4j | /*
* Copyright (c) 2009. The Codehaus. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.httpcache4j.cache;
import org.apache.commons.io.input.NullInputStream;
import org.apache.commons.io.IOUtils;
import org.codehaus.httpcache4j.*;
import org.codehaus.httpcache4j.payload.InputStreamPayload;
import org.junit.After;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.io.IOException;
import java.io.InputStream;
/**
 * Abstract concurrency test for {@link CacheStorage} implementations: hammers a storage
 * instance with parallel insert/get/update operations and verifies that stored responses
 * stay readable and the entry count is correct. Subclasses supply the concrete storage
 * via {@link #createCacheStorage()}.
 *
 * @author <a href="mailto:[email protected]">Erlend Hamnaberg</a>
 * @version $Revision: $
 */
public abstract class ConcurrentCacheStorageAbstractTest {
    // One worker thread per available core so storage operations genuinely overlap.
    private ExecutorService service = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    protected CacheStorage cacheStorage;

    @Before
    public void setUp() {
        cacheStorage = createCacheStorage();
    }

    /** Factory for the storage implementation under test. */
    protected abstract CacheStorage createCacheStorage();

    @Test
    public void test100Concurrent() throws InterruptedException {
        testIterations(100, 100);
    }

    /**
     * Runs insert/get/update cycles for {@code numberOfIterations} distinct URIs in parallel
     * and asserts the storage ends up holding {@code expected} entries.
     */
    protected void testIterations(int numberOfIterations, int expected) throws InterruptedException {
        List<Callable<HTTPResponse>> calls = new ArrayList<Callable<HTTPResponse>>();
        for (int i = 0; i < numberOfIterations; i++) {
            final URI uri = URI.create(String.valueOf(i));
            final HTTPRequest request = new HTTPRequest(uri);
            Callable<HTTPResponse> call = new Callable<HTTPResponse>() {
                public HTTPResponse call() throws Exception {
                    HTTPResponse cached = cacheStorage.insert(request, createCacheResponse());
                    assertResponse(cached);
                    CacheItem cacheItem = cacheStorage.get(request);
                    HTTPResponse response = cacheItem.getResponse();
                    assertResponse(response);
                    // An update must produce a response distinct from the one the item held.
                    cached = cacheStorage.update(request, createCacheResponse());
                    assertNotSame(cached, cacheItem.getResponse());
                    assertResponse(cached);
                    return cached;
                }
            };
            calls.add(call);
        }
        List<Future<HTTPResponse>> responses = service.invokeAll(calls);
        for (Future<HTTPResponse> responseFuture : responses) {
            try {
                responseFuture.get();
            } catch (ExecutionException e) {
                // Print the stack trace so failing concurrent tasks can be diagnosed.
                e.printStackTrace();
                fail(e.getCause().getMessage());
            }
        }
        assertEquals(expected, cacheStorage.size());
    }

    /** 1000 parallel inserts of the same URI must collapse into a single stored entry. */
    @Test
    public void test1000InsertsOfSameURI() throws InterruptedException {
        final HTTPRequest request = new HTTPRequest(URI.create("GET"));
        List<Callable<HTTPResponse>> calls = new ArrayList<Callable<HTTPResponse>>();
        for (int i = 0; i < 1000; i++) {
            calls.add(new Callable<HTTPResponse>() {
                public HTTPResponse call() throws Exception {
                    return cacheStorage.insert(request, createCacheResponse());
                }
            });
        }
        List<Future<HTTPResponse>> responses = service.invokeAll(calls);
        for (Future<HTTPResponse> response : responses) {
            try {
                HTTPResponse real = response.get();
                assertResponse(real);
            } catch (ExecutionException e) {
                e.printStackTrace();
                fail(e.getCause().getMessage());
            }
        }
        assertEquals(1, cacheStorage.size());
    }

    // Builds a dummy OK response with a 40-byte payload and one header.
    private HTTPResponse createCacheResponse() {
        return new HTTPResponse(new InputStreamPayload(new NullInputStream(40), MIMEType.APPLICATION_OCTET_STREAM), Status.OK, new Headers().add("Foo", "Bar"));
    }

    // Asserts the response carries an available payload whose stream can be fully read.
    protected void assertResponse(final HTTPResponse response) {
        assertNotNull("Response was null", response);
        assertTrue("Payload was not here", response.hasPayload());
        assertTrue("Payload was not available", response.getPayload().isAvailable());
        InputStream is = response.getPayload().getInputStream();
        try {
            IOUtils.toString(is);
        } catch (IOException e) {
            e.printStackTrace();
            fail("unable to create string from stream");
        }
        finally {
            // Always release the payload stream, even on failure.
            IOUtils.closeQuietly(is);
        }
    }

    @After
    public void tearDown() {
        if (cacheStorage != null) {
            cacheStorage.clear();
        }
        service.shutdownNow();
    }
}
| httpcache4j-storage-api/src/test/java/org/codehaus/httpcache4j/cache/ConcurrentCacheStorageAbstractTest.java | /*
* Copyright (c) 2009. The Codehaus. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.httpcache4j.cache;
import org.apache.commons.io.input.NullInputStream;
import org.apache.commons.io.IOUtils;
import org.codehaus.httpcache4j.*;
import org.codehaus.httpcache4j.payload.InputStreamPayload;
import org.junit.After;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.io.IOException;
import java.io.InputStream;
/**
 * Abstract concurrency test for {@link CacheStorage} implementations (earlier revision of the
 * same class): exercises parallel insert/get/update operations against the storage supplied by
 * {@link #createCacheStorage()}.
 *
 * @author <a href="mailto:[email protected]">Erlend Hamnaberg</a>
 * @version $Revision: $
 */
public abstract class ConcurrentCacheStorageAbstractTest {
    // One worker thread per available core so storage operations genuinely overlap.
    private ExecutorService service = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    protected CacheStorage cacheStorage;

    @Before
    public void setUp() {
        cacheStorage = createCacheStorage();
    }

    /** Factory for the storage implementation under test. */
    protected abstract CacheStorage createCacheStorage();

    @Test
    public void test100Concurrent() throws InterruptedException {
        testIterations(100, 100);
    }

    /**
     * Runs insert/get/update cycles for {@code numberOfIterations} distinct URIs in parallel
     * and asserts the storage ends up holding {@code expected} entries.
     */
    protected void testIterations(int numberOfIterations, int expected) throws InterruptedException {
        List<Callable<HTTPResponse>> calls = new ArrayList<Callable<HTTPResponse>>();
        for (int i = 0; i < numberOfIterations; i++) {
            final URI uri = URI.create(String.valueOf(i));
            final HTTPRequest request = new HTTPRequest(uri);
            Callable<HTTPResponse> call = new Callable<HTTPResponse>() {
                public HTTPResponse call() throws Exception {
                    HTTPResponse cached = cacheStorage.insert(request, createCacheResponse());
                    assertResponse(cached);
                    CacheItem cacheItem = cacheStorage.get(request);
                    HTTPResponse response = cacheItem.getResponse();
                    assertResponse(response);
                    // An update must produce a response distinct from the one the item held.
                    cached = cacheStorage.update(request, createCacheResponse());
                    assertNotSame(cached, cacheItem.getResponse());
                    assertResponse(cached);
                    return cached;
                }
            };
            calls.add(call);
        }
        List<Future<HTTPResponse>> responses = service.invokeAll(calls);
        for (Future<HTTPResponse> responseFuture : responses) {
            try {
                responseFuture.get();
            } catch (ExecutionException e) {
                e.printStackTrace();
                fail(e.getCause().getMessage());
            }
        }
        assertEquals(expected, cacheStorage.size());
    }

    /** 1000 parallel inserts of the same URI must collapse into a single stored entry. */
    @Test
    public void test1000InsertsOfSameURI() throws InterruptedException {
        final HTTPRequest request = new HTTPRequest(URI.create("GET"));
        List<Callable<HTTPResponse>> calls = new ArrayList<Callable<HTTPResponse>>();
        for (int i = 0; i < 1000; i++) {
            calls.add(new Callable<HTTPResponse>() {
                public HTTPResponse call() throws Exception {
                    return cacheStorage.insert(request, createCacheResponse());
                }
            });
        }
        List<Future<HTTPResponse>> responses = service.invokeAll(calls);
        for (Future<HTTPResponse> response : responses) {
            try {
                HTTPResponse real = response.get();
                assertResponse(real);
            } catch (ExecutionException e) {
                fail(e.getCause().getMessage());
            }
        }
        assertEquals(1, cacheStorage.size());
    }

    // Builds a dummy OK response with a 40-byte payload and one header.
    private HTTPResponse createCacheResponse() {
        return new HTTPResponse(new InputStreamPayload(new NullInputStream(40), MIMEType.APPLICATION_OCTET_STREAM), Status.OK, new Headers().add("Foo", "Bar"));
    }

    // Asserts the response carries an available payload whose stream can be fully read.
    protected void assertResponse(final HTTPResponse response) {
        assertNotNull("Response was null", response);
        assertTrue("Payload was not here", response.hasPayload());
        assertTrue("Payload was not available", response.getPayload().isAvailable());
        InputStream is = response.getPayload().getInputStream();
        try {
            IOUtils.toString(is);
        } catch (IOException e) {
            e.printStackTrace();
            fail("unable to write string from stream");
        }
        finally {
            // Always release the payload stream, even on failure.
            IOUtils.closeQuietly(is);
        }
    }

    @After
    public void tearDown() {
        if (cacheStorage != null) {
            cacheStorage.clear();
        }
        service.shutdownNow();
    }
}
| - Added stack trace to be able to figure out why the test failed
| httpcache4j-storage-api/src/test/java/org/codehaus/httpcache4j/cache/ConcurrentCacheStorageAbstractTest.java | - Added stack trace to be able to figure out why the test failed |
|
Java | apache-2.0 | 352c9baaea46061d79eb2b318653ae137a6a130c | 0 | Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK | /*
* Copyright 2013, Leanplum, Inc. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.leanplum.internal;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.leanplum.Leanplum;
import com.leanplum.utils.SharedPreferencesUtil;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
/**
* Leanplum request class.
*
* @author Andrew First
*/
public class Request {
  // Batched sends are throttled: dev mode uses a short adaptive delay, production waits a minute.
  private static final long DEVELOPMENT_MIN_DELAY_MS = 100;
  private static final long DEVELOPMENT_MAX_DELAY_MS = 5000;
  private static final long PRODUCTION_DELAY = 60000;
  // Upper bound on the number of stored events bundled into a single "multi" API call.
  static final int MAX_EVENTS_PER_API_CALL;
  // Name of the SharedPreferences file used by the SDK.
  static final String LEANPLUM = "__leanplum__";
  static final String UUID_KEY = "uuid";
  private static String appId;
  private static String accessKey;
  private static String deviceId;
  private static String userId;
  // Dispatches response/error callbacks for events persisted in the local database.
  private static final LeanplumEventCallbackManager eventCallbackManager =
      new LeanplumEventCallbackManager();
  // Tracks which files are currently being uploaded/downloaded, keyed by path/filename.
  private static final Map<String, Boolean> fileTransferStatus = new HashMap<>();
  private static int pendingDownloads;
  private static NoPendingDownloadsCallback noPendingDownloadsBlock;
  // The token is saved primarily for legacy SharedPreferences decryption. This could
  // likely be removed in the future.
  private static String token = null;
  // Bookkeeping used by printUploadProgress() to report transfer progress.
  private static final Map<File, Long> fileUploadSize = new HashMap<>();
  private static final Map<File, Double> fileUploadProgress = new HashMap<>();
  private static String fileUploadProgressString = "";
  private static long lastSendTimeMs;
  // Serializes file uploads so the app and server are not overloaded by parallel transfers.
  private static final Object uploadFileLock = new Object();
  private final String httpMethod;
  private final String apiMethod;
  private final Map<String, Object> params;
  private ResponseCallback response;
  private ErrorCallback error;
  private boolean sent;
  // Index of this request's event row in the local database; -1 until persisted.
  private long dataBaseIndex;
  private static ApiResponseCallback apiResponse;
  // Log-action requests recorded while LeanplumEventDataManager.willSendErrorLog is set
  // (e.g. after a SQLite failure); these are sent preferentially over stored events.
  private static List<Map<String, Object>> localErrors = new ArrayList<>();
  static {
    // Older devices (API level <= 17) get a smaller batch size to limit memory pressure.
    if (Build.VERSION.SDK_INT <= 17) {
      MAX_EVENTS_PER_API_CALL = 5000;
    } else {
      MAX_EVENTS_PER_API_CALL = 10000;
    }
  }
  /** Sets the app ID and access key; values are trimmed to guard against pasted whitespace. */
  public static void setAppId(String appId, String accessKey) {
    if (!TextUtils.isEmpty(appId)) {
      Request.appId = appId.trim();
    }
    if (!TextUtils.isEmpty(accessKey)) {
      Request.accessKey = accessKey.trim();
    }
  }
  public static void setDeviceId(String deviceId) {
    Request.deviceId = deviceId;
  }
  public static void setUserId(String userId) {
    Request.userId = userId;
  }
  public static void setToken(String token) {
    Request.token = token;
  }
  public static String token() {
    return token;
  }
  /**
   * Since requests are batched there can be a case where other Request can take future Request
   * events. We need to have for each Request database index for handle response, error or start
   * callbacks.
   *
   * @return Index of event at database.
   */
  public long getDataBaseIndex() {
    return dataBaseIndex;
  }
  // Update index of event at database.
  public void setDataBaseIndex(long dataBaseIndex) {
    this.dataBaseIndex = dataBaseIndex;
  }
  /** Restores the legacy token from SharedPreferences, if one was previously saved. */
  public static void loadToken() {
    Context context = Leanplum.getContext();
    SharedPreferences defaults = context.getSharedPreferences(
        LEANPLUM, Context.MODE_PRIVATE);
    String token = defaults.getString(Constants.Defaults.TOKEN_KEY, null);
    if (token == null) {
      return;
    }
    setToken(token);
  }
  /** Persists the current token to SharedPreferences. */
  public static void saveToken() {
    Context context = Leanplum.getContext();
    SharedPreferences defaults = context.getSharedPreferences(
        LEANPLUM, Context.MODE_PRIVATE);
    SharedPreferences.Editor editor = defaults.edit();
    editor.putString(Constants.Defaults.TOKEN_KEY, Request.token());
    SharedPreferencesUtil.commitChanges(editor);
  }
  public static String appId() {
    return appId;
  }
  public static String deviceId() {
    return deviceId;
  }
  public static String userId() {
    return Request.userId;
  }
  /**
   * Creates a request for the given API action.
   *
   * @param httpMethod "GET" or "POST".
   * @param apiMethod The Leanplum API action name (see {@code Constants.Methods}).
   * @param params Action-specific arguments; may be null.
   */
  public Request(String httpMethod, String apiMethod, Map<String, Object> params) {
    this.httpMethod = httpMethod;
    this.apiMethod = apiMethod;
    this.params = params != null ? params : new HashMap<String, Object>();
    // Check if it is error and here was SQLite exception.
    if (Constants.Methods.LOG.equals(apiMethod) && LeanplumEventDataManager.willSendErrorLog) {
      localErrors.add(createArgsDictionary());
    }
    // Make sure the Handler is initialized on the main thread.
    OsHandler.getInstance();
    dataBaseIndex = -1;
  }
  /** Builds a GET request via the {@link RequestFactory}; log actions are logged at DEBUG. */
  public static Request get(String apiMethod, Map<String, Object> params) {
    Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
        Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
    Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
    return RequestFactory.getInstance().createRequest("GET", apiMethod, params);
  }
  /** Builds a POST request via the {@link RequestFactory}; log actions are logged at DEBUG. */
  public static Request post(String apiMethod, Map<String, Object> params) {
    Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
        Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
    Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
    return RequestFactory.getInstance().createRequest("POST", apiMethod, params);
  }
  public void onResponse(ResponseCallback response) {
    this.response = response;
  }
  public void onError(ErrorCallback error) {
    this.error = error;
  }
  // NOTE(review): despite the instance-style name, this assigns a static field shared by all
  // Request instances — the last caller wins.
  public void onApiResponse(ApiResponseCallback apiResponse) {
    Request.apiResponse = apiResponse;
  }
private Map<String, Object> createArgsDictionary() {
Map<String, Object> args = new HashMap<>();
args.put(Constants.Params.DEVICE_ID, deviceId);
args.put(Constants.Params.USER_ID, userId);
args.put(Constants.Params.ACTION, apiMethod);
args.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
args.put(Constants.Params.DEV_MODE, Boolean.toString(Constants.isDevelopmentModeEnabled));
args.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
if (token != null) {
args.put(Constants.Params.TOKEN, token);
}
args.putAll(params);
return args;
}
  // Persists this request's arguments to the local event database on a background thread.
  // Events share a batch UUID (stored in SharedPreferences) that is rotated every
  // MAX_EVENTS_PER_API_CALL events so batch boundaries can be reconstructed later.
  private void saveRequestForLater(final Map<String, Object> args) {
    final Request currentRequest = this;
    LeanplumEventDataManager.executeAsyncTask(new AsyncTask<Void, Void, Void>() {
      @Override
      protected Void doInBackground(Void... params) {
        try {
          synchronized (Request.class) {
            Context context = Leanplum.getContext();
            SharedPreferences preferences = context.getSharedPreferences(
                LEANPLUM, Context.MODE_PRIVATE);
            SharedPreferences.Editor editor = preferences.edit();
            long count = LeanplumEventDataManager.getEventsCount();
            String uuid = preferences.getString(Constants.Defaults.UUID_KEY, null);
            // Rotate the batch UUID when none exists yet or a batch boundary is reached.
            if (uuid == null || count % MAX_EVENTS_PER_API_CALL == 0) {
              uuid = UUID.randomUUID().toString();
              editor.putString(Constants.Defaults.UUID_KEY, uuid);
              SharedPreferencesUtil.commitChanges(editor);
            }
            args.put(UUID_KEY, uuid);
            LeanplumEventDataManager.insertEvent(JsonConverter.toJson(args));
            dataBaseIndex = count;
            // Checks if here response and/or error callback for this request. We need to add callbacks to
            // eventCallbackManager only if here was internet connection, otherwise triggerErrorCallback
            // will handle error callback for this event.
            // NOTE(review): due to operator precedence this parses as
            // response != null || (error != null && !Util.isConnected()), i.e. the error callback is
            // registered when the device is OFFLINE — which appears to contradict the comment above;
            // confirm the intended grouping/negation before changing.
            if (response != null || error != null && !Util.isConnected()) {
              eventCallbackManager.addCallbacks(currentRequest, response, error);
            }
          }
        } catch (Throwable t) {
          Util.handleException(t);
        }
        return null;
      }
    });
  }
  /**
   * Queues the request and, in development mode, schedules a near-immediate send: after the
   * minimum delay when idle, otherwise at the end of the current maximum-delay window so
   * rapid-fire calls are coalesced into one batch.
   */
  public void send() {
    this.sendEventually();
    if (Constants.isDevelopmentModeEnabled) {
      long currentTimeMs = System.currentTimeMillis();
      long delayMs;
      if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > DEVELOPMENT_MAX_DELAY_MS) {
        delayMs = DEVELOPMENT_MIN_DELAY_MS;
      } else {
        // Wait out the remainder of the window that started at the last send.
        delayMs = (lastSendTimeMs + DEVELOPMENT_MAX_DELAY_MS) - currentTimeMs;
      }
      OsHandler.getInstance().postDelayed(new Runnable() {
        @Override
        public void run() {
          try {
            sendIfConnected();
          } catch (Throwable t) {
            Util.handleException(t);
          }
        }
      }, delayMs);
    }
  }
  /**
   * Wait 1 second for potential other API calls, and then sends the call synchronously if no other
   * call has been sent within 1 minute.
   */
  public void sendIfDelayed() {
    sendEventually();
    OsHandler.getInstance().postDelayed(new Runnable() {
      @Override
      public void run() {
        try {
          sendIfDelayedHelper();
        } catch (Throwable t) {
          Util.handleException(t);
        }
      }
    }, 1000);
  }
  /**
   * Sends the call synchronously if no other call has been sent within 1 minute.
   */
  private void sendIfDelayedHelper() {
    if (Constants.isDevelopmentModeEnabled) {
      send();
    } else {
      long currentTimeMs = System.currentTimeMillis();
      // In production only send if the PRODUCTION_DELAY window has elapsed since the last send.
      if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > PRODUCTION_DELAY) {
        sendIfConnected();
      }
    }
  }
public void sendIfConnected() {
if (Util.isConnected()) {
this.sendNow();
} else {
this.sendEventually();
Log.i("Device is offline, will send later");
triggerErrorCallback(new Exception("Not connected to the Internet"));
}
}
  // Notifies this request's error callback (if any) and, when a global ApiResponseCallback is
  // registered, hands it the currently stored (still unsent) requests with a null response.
  private void triggerErrorCallback(Exception e) {
    if (error != null) {
      error.error(e);
    }
    if (apiResponse != null) {
      List<Map<String, Object>> requests = getUnsentRequests();
      List<Map<String, Object>> requestsToSend = removeIrrelevantBackgroundStartRequests(requests);
      apiResponse.response(requestsToSend, null, requests.size());
    }
  }
  // Attaches app ID, client key and client name to the given dict.
  // Returns false (and logs an error) when the API keys have not been configured.
  @SuppressWarnings("BooleanMethodIsAlwaysInverted")
  private static boolean attachApiKeys(Map<String, Object> dict) {
    if (appId == null || accessKey == null) {
      Log.e("API keys are not set. Please use Leanplum.setAppIdForDevelopmentMode or "
          + "Leanplum.setAppIdForProductionMode.");
      return false;
    }
    dict.put(Constants.Params.APP_ID, appId);
    dict.put(Constants.Params.CLIENT_KEY, accessKey);
    dict.put(Constants.Params.CLIENT, Constants.CLIENT);
    return true;
  }
  /** Callback invoked with the parsed JSON body of a successful response. */
  public interface ResponseCallback {
    void response(JSONObject response);
  }
  /** Callback invoked with the sent request batch, the raw response body and the event count. */
  public interface ApiResponseCallback {
    void response(List<Map<String, Object>> requests, JSONObject response, int countOfEvents);
  }
  /** Callback invoked when a request fails. */
  public interface ErrorCallback {
    void error(Exception e);
  }
  /** Callback invoked when all pending file downloads have completed. */
  public interface NoPendingDownloadsCallback {
    void noPendingDownloads();
  }
  /**
   * Parse response body from server. Invoke potential error or response callbacks for all events
   * of this request.
   *
   * @param responseBody JSONObject with response body from server.
   * @param requestsToSend List of requests that were sent to the server.
   * @param error Exception.
   * @param unsentRequestsSize Size of unsent request, that we will delete.
   */
  private void parseResponseBody(JSONObject responseBody, List<Map<String, Object>>
      requestsToSend, Exception error, int unsentRequestsSize) {
    synchronized (Request.class) {
      if (responseBody == null && error != null) {
        // Invoke potential error callbacks for all events of this request.
        eventCallbackManager.invokeAllCallbacksWithError(error, unsentRequestsSize);
        return;
      } else if (responseBody == null) {
        // Nothing to parse and no error to report.
        return;
      }
      // Response for last start call.
      if (apiResponse != null) {
        apiResponse.response(requestsToSend, responseBody, unsentRequestsSize);
      }
      // We will replace it with error from response body, if we found it.
      Exception lastResponseError = error;
      // Valid response, parse and handle response body.
      int numResponses = Request.numResponses(responseBody);
      for (int i = 0; i < numResponses; i++) {
        JSONObject response = Request.getResponseAt(responseBody, i);
        if (Request.isResponseSuccess(response)) {
          continue; // If event response is successful, proceed with next one.
        }
        // If event response was not successful, handle error.
        String errorMessage = getReadableErrorMessage(Request.getResponseError(response));
        Log.e(errorMessage);
        // Throw an exception if last event response is negative.
        if (i == numResponses - 1) {
          lastResponseError = new Exception(errorMessage);
        }
      }
      if (lastResponseError != null) {
        // Invoke potential error callbacks for all events of this request.
        eventCallbackManager.invokeAllCallbacksWithError(lastResponseError, unsentRequestsSize);
      } else {
        // Invoke potential response callbacks for all events of this request.
        eventCallbackManager.invokeAllCallbacksForResponse(responseBody, unsentRequestsSize);
      }
    }
  }
/**
* Parse error message from server response and return readable error message.
*
* @param errorMessage String of error from server response.
* @return String of readable error message.
*/
@NonNull
private String getReadableErrorMessage(String errorMessage) {
if (errorMessage == null || errorMessage.length() == 0) {
errorMessage = "API error";
} else if (errorMessage.startsWith("App not found")) {
errorMessage = "No app matching the provided app ID was found.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Invalid access key")) {
errorMessage = "The access key you provided is not valid for this app.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Development mode requested but not permitted")) {
errorMessage = "A call to Leanplum.setAppIdForDevelopmentMode "
+ "with your production key was made, which is not permitted.";
Constants.isInPermanentFailureState = true;
} else {
errorMessage = "API error: " + errorMessage;
}
return errorMessage;
}
  // Queues the request and immediately kicks off a background send, provided the SDK is not in
  // test mode and both API keys have been configured.
  private void sendNow() {
    if (Constants.isTestMode) {
      return;
    }
    if (appId == null) {
      Log.e("Cannot send request. appId is not set.");
      return;
    }
    if (accessKey == null) {
      Log.e("Cannot send request. accessKey is not set.");
      return;
    }
    // Persist first so nothing is lost if the process dies mid-send.
    this.sendEventually();
    Util.executeAsyncTask(true, new AsyncTask<Void, Void, Void>() {
      @Override
      protected Void doInBackground(Void... params) {
        try {
          sendRequests();
        } catch (Throwable t) {
          Util.handleException(t);
        }
        return null;
      }
    });
  }
  // Bundles the raw stored requests, the filtered subset actually sent, and their JSON encoding.
  private static class RequestsWithEncoding {
    List<Map<String, Object>> unsentRequests;
    List<Map<String, Object>> requestsToSend;
    String jsonEncodedString;
  }
  // Builds the batch from locally recorded error logs only (used when the event database
  // itself failed); all errors share a freshly generated batch UUID.
  private RequestsWithEncoding getRequestsWithEncodedStringForErrors() {
    List<Map<String, Object>> unsentRequests = new ArrayList<>();
    List<Map<String, Object>> requestsToSend;
    String jsonEncodedRequestsToSend;
    String uuid = UUID.randomUUID().toString();
    for (Map<String, Object> error : localErrors) {
      error.put(UUID_KEY, uuid);
      unsentRequests.add(error);
    }
    requestsToSend = unsentRequests;
    jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
    RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
    requestsWithEncoding.unsentRequests = unsentRequests;
    requestsWithEncoding.requestsToSend = requestsToSend;
    requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
    return requestsWithEncoding;
  }
  // Builds the batch from events persisted in the local database; irrelevant background-start
  // events are filtered out of the set actually sent, but the full set is still encoded.
  private RequestsWithEncoding getRequestsWithEncodedStringStoredRequests() {
    List<Map<String, Object>> unsentRequests;
    List<Map<String, Object>> requestsToSend;
    String jsonEncodedRequestsToSend;
    RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
    unsentRequests = getUnsentRequests();
    requestsToSend = removeIrrelevantBackgroundStartRequests(unsentRequests);
    jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
    requestsWithEncoding.unsentRequests = unsentRequests;
    requestsWithEncoding.requestsToSend = requestsToSend;
    requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
    return requestsWithEncoding;
  }
private RequestsWithEncoding getRequestsWithEncodedString() {
RequestsWithEncoding requestsWithEncoding;
// Check if we have localErrors, if yes then we will send only errors to the server.
if (localErrors.size() != 0) {
requestsWithEncoding = getRequestsWithEncodedStringForErrors();
} else {
requestsWithEncoding = getRequestsWithEncodedStringStoredRequests()
}
return requestsWithEncoding;
}
private void sendRequests() {
RequestsWithEncoding requestsWithEncoding = getRequestsWithEncodedString();
List<Map<String, Object>> unsentRequests = requestsWithEncoding.unsentRequests;
List<Map<String, Object>> requestsToSend = requestsWithEncoding.requestsToSend;
String jsonEncodedString = requestsWithEncoding.jsonEncodedString;
if (requestsToSend.isEmpty()) {
return;
}
final Map<String, Object> multiRequestArgs = new HashMap<>();
if (!Request.attachApiKeys(multiRequestArgs)) {
return;
}
multiRequestArgs.put(Constants.Params.DATA, jsonEncodedString);
multiRequestArgs.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
multiRequestArgs.put(Constants.Params.ACTION, Constants.Methods.MULTI);
multiRequestArgs.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
JSONObject responseBody;
HttpURLConnection op = null;
try {
try {
op = Util.operation(
Constants.API_HOST_NAME,
Constants.API_SERVLET,
multiRequestArgs,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS);
responseBody = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
Exception errorException;
if (statusCode >= 200 && statusCode <= 299) {
if (responseBody == null) {
errorException = new Exception("Response JSON is null.");
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, errorException, unsentRequests.size());
return;
}
Exception exception = null;
// Checks if we received the same number of responses as a number of sent request.
int numResponses = Request.numResponses(responseBody);
if (numResponses != requestsToSend.size()) {
Log.w("Sent " + requestsToSend.size() + " requests but only" +
" received " + numResponses);
}
parseResponseBody(responseBody, requestsToSend, null, unsentRequests.size());
// Clear localErrors list.
localErrors.clear();
deleteSentRequests(unsentRequests.size());
// Send another request if the last request had maximum events per api call.
if (unsentRequests.size() == MAX_EVENTS_PER_API_CALL) {
sendRequests();
}
} else {
errorException = new Exception("HTTP error " + statusCode);
if (statusCode != -1 && statusCode != 408 && !(statusCode >= 500 && statusCode <= 599)) {
deleteSentRequests(unsentRequests.size());
parseResponseBody(responseBody, requestsToSend, errorException, unsentRequests.size());
}
}
} catch (JSONException e) {
Log.e("Error parsing JSON response: " + e.toString() + "\n" + Log.getStackTraceString(e));
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, e, unsentRequests.size());
} catch (Exception e) {
Log.e("Unable to send request: " + e.toString() + "\n" + Log.getStackTraceString(e));
} finally {
if (op != null) {
op.disconnect();
}
}
} catch (Throwable t) {
Util.handleException(t);
}
}
  /**
   * Persists this request to the local event database for a later batched send. Idempotent per
   * request instance; skipped entirely in test mode or while the database is in an error state.
   */
  public void sendEventually() {
    if (Constants.isTestMode) {
      return;
    }
    if (LeanplumEventDataManager.willSendErrorLog) {
      return;
    }
    if (!sent) {
      sent = true;
      Map<String, Object> args = createArgsDictionary();
      saveRequestForLater(args);
    }
  }
  // Deletes the oldest requestsCount rows from the event database (no-op for zero).
  static void deleteSentRequests(int requestsCount) {
    if (requestsCount == 0) {
      return;
    }
    synchronized (Request.class) {
      LeanplumEventDataManager.deleteEvents(requestsCount);
    }
  }
  // Reads up to MAX_EVENTS_PER_API_CALL stored events, records the send time, and clears the
  // batch UUID so the next persisted event starts a new batch.
  private static List<Map<String, Object>> getUnsentRequests() {
    List<Map<String, Object>> requestData;
    synchronized (Request.class) {
      lastSendTimeMs = System.currentTimeMillis();
      Context context = Leanplum.getContext();
      SharedPreferences preferences = context.getSharedPreferences(
          LEANPLUM, Context.MODE_PRIVATE);
      SharedPreferences.Editor editor = preferences.edit();
      requestData = LeanplumEventDataManager.getEvents(MAX_EVENTS_PER_API_CALL);
      editor.remove(Constants.Defaults.UUID_KEY);
      SharedPreferencesUtil.commitChanges(editor);
    }
    return requestData;
  }
/**
* In various scenarios we can end up batching a big number of requests (e.g. device is offline,
* background sessions), which could make the stored API calls batch look something like:
* <p>
* <code>start(B), start(B), start(F), track, start(B), track, start(F), resumeSession</code>
* <p>
* where <code>start(B)</code> indicates a start in the background, and <code>start(F)</code>
* one in the foreground.
* <p>
* In this case the first two <code>start(B)</code> can be dropped because they don't contribute
* any relevant information for the batch call.
* <p>
* Essentially we drop every <code>start(B)</code> call, that is directly followed by any kind of
* a <code>start</code> call.
*
* @param requestData A list of the requests, stored on the device.
* @return A list of only these requests, which contain relevant information for the API call.
*/
private static List<Map<String, Object>> removeIrrelevantBackgroundStartRequests(
List<Map<String, Object>> requestData) {
List<Map<String, Object>> relevantRequests = new ArrayList<>();
int requestCount = requestData.size();
if (requestCount > 0) {
for (int i = 0; i < requestCount; i++) {
Map<String, Object> currentRequest = requestData.get(i);
if (i < requestCount - 1
&& Constants.Methods.START.equals(requestData.get(i + 1).get(Constants.Params.ACTION))
&& Constants.Methods.START.equals(currentRequest.get(Constants.Params.ACTION))
&& Boolean.TRUE.toString().equals(currentRequest.get(Constants.Params.BACKGROUND))) {
continue;
}
relevantRequests.add(currentRequest);
}
}
return relevantRequests;
}
private static String jsonEncodeUnsentRequests(List<Map<String, Object>> requestData) {
Map<String, Object> data = new HashMap<>();
data.put(Constants.Params.DATA, requestData);
return JsonConverter.toJson(data);
}
private static String getSizeAsString(int bytes) {
if (bytes < (1 << 10)) {
return bytes + " B";
} else if (bytes < (1 << 20)) {
return (bytes >> 10) + " KB";
} else {
return (bytes >> 20) + " MB";
}
}
  // Aggregates per-file upload progress into a single human-readable status line and logs it,
  // but only when the line has changed since the last call (avoids log spam).
  private static void printUploadProgress() {
    int totalFiles = fileUploadSize.size();
    int sentFiles = 0;
    int totalBytes = 0;
    int sentBytes = 0;
    for (Map.Entry<File, Long> entry : fileUploadSize.entrySet()) {
      File file = entry.getKey();
      long fileSize = entry.getValue();
      double fileProgress = fileUploadProgress.get(file);
      // Progress of 1 means the file has been fully transferred.
      if (fileProgress == 1) {
        sentFiles++;
      }
      sentBytes += (int) (fileSize * fileProgress);
      totalBytes += fileSize;
    }
    String progressString = "Uploading resources. " +
        sentFiles + '/' + totalFiles + " files completed; " +
        getSizeAsString(sentBytes) + '/' + getSizeAsString(totalBytes) + " transferred.";
    if (!fileUploadProgressString.equals(progressString)) {
      fileUploadProgressString = progressString;
      Log.i(progressString);
    }
  }
  /**
   * Uploads the given files to the API on a background thread. Files already in transfer are
   * skipped; uploads are serialized via {@code uploadFileLock} so the app and server are not
   * overloaded. Response/error callbacks registered on this request are invoked on completion.
   *
   * @param filenames Paths of the files to upload (parallel to {@code streams}).
   * @param streams Open input streams for the files; used to determine sizes and upload content.
   */
  public void sendFilesNow(final List<String> filenames, final List<InputStream> streams) {
    if (Constants.isTestMode) {
      return;
    }
    final Map<String, Object> dict = createArgsDictionary();
    if (!attachApiKeys(dict)) {
      return;
    }
    final List<File> filesToUpload = new ArrayList<>();
    // First set up the files for upload
    for (int i = 0; i < filenames.size(); i++) {
      String filename = filenames.get(i);
      // Skip null names and files already being transferred.
      if (filename == null || Boolean.TRUE.equals(fileTransferStatus.get(filename))) {
        continue;
      }
      File file = new File(filename);
      long size;
      try {
        size = streams.get(i).available();
      } catch (IOException e) {
        // Fall back to the on-disk size when the stream cannot report it.
        size = file.length();
      } catch (NullPointerException e) {
        // Not good. Can't read asset.
        Log.e("Unable to read file " + filename);
        continue;
      }
      fileTransferStatus.put(filename, true);
      filesToUpload.add(file);
      fileUploadSize.put(file, size);
      fileUploadProgress.put(file, 0.0);
    }
    if (filesToUpload.size() == 0) {
      return;
    }
    printUploadProgress();
    // Now upload the files
    Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
      @Override
      protected Void doInBackground(Void... params) {
        synchronized (uploadFileLock) { // Don't overload app and server with many upload tasks
          JSONObject result;
          HttpURLConnection op = null;
          try {
            op = Util.uploadFilesOperation(
                Constants.Params.FILE,
                filesToUpload,
                streams,
                Constants.API_HOST_NAME,
                Constants.API_SERVLET,
                dict,
                httpMethod,
                Constants.API_SSL,
                60);
            if (op != null) {
              result = Util.getJsonResponse(op);
              int statusCode = op.getResponseCode();
              if (statusCode != 200) {
                throw new Exception("Leanplum: Error sending request: " + statusCode);
              }
              if (Request.this.response != null) {
                Request.this.response.response(result);
              }
            } else {
              if (error != null) {
                error.error(new Exception("Leanplum: Unable to read file."));
              }
            }
          } catch (JSONException e) {
            Log.e("Unable to convert to JSON.", e);
            if (error != null) {
              error.error(e);
            }
          } catch (SocketTimeoutException e) {
            Log.e("Timeout uploading files. Try again or limit the number of files " +
                "to upload with parameters to syncResourcesAsync.");
            if (error != null) {
              error.error(e);
            }
          } catch (Exception e) {
            Log.e("Unable to send file.", e);
            if (error != null) {
              error.error(e);
            }
          } finally {
            if (op != null) {
              op.disconnect();
            }
          }
          // Mark everything complete for progress reporting, regardless of outcome.
          for (File file : filesToUpload) {
            fileUploadProgress.put(file, 1.0);
          }
          printUploadProgress();
          return null;
        }
      }
    });
    // TODO: Upload progress
  }
void downloadFile(final String path, final String url) {
if (Constants.isTestMode) {
return;
}
if (Boolean.TRUE.equals(fileTransferStatus.get(path))) {
return;
}
pendingDownloads++;
Log.i("Downloading resource " + path);
fileTransferStatus.put(path, true);
final Map<String, Object> dict = createArgsDictionary();
dict.put(Constants.Keys.FILENAME, path);
if (!attachApiKeys(dict)) {
return;
}
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
downloadHelper(Constants.API_HOST_NAME, Constants.API_SERVLET, path, url, dict);
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
// TODO: Download progress
}
  /**
   * Performs the actual resource download and writes the result under the documents
   * directory at {@code path}. When {@code url} is null, the request goes through the
   * standard API operation against {@code hostName}/{@code servlet}; otherwise the
   * file is fetched directly from {@code url}. Decrements the pending-download
   * counter and fires the response/error and no-pending-downloads callbacks.
   *
   * @param hostName API host to contact when no direct URL is given.
   * @param servlet API servlet path (or, on the EOF retry path, a full redirect URL).
   * @param path Relative destination path of the resource.
   * @param url Optional direct download URL; may be null.
   * @param dict Request arguments for the API operation.
   */
  private void downloadHelper(String hostName, String servlet, final String path, final String url,
      final Map<String, Object> dict) {
    HttpURLConnection op = null;
    URL originalURL = null;
    try {
      if (url == null) {
        op = Util.operation(
            hostName,
            servlet,
            dict,
            httpMethod,
            Constants.API_SSL,
            Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
      } else {
        op = Util.createHttpUrlConnection(url, httpMethod, url.startsWith("https://"),
            Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
      }
      originalURL = op.getURL();
      op.connect();
      int statusCode = op.getResponseCode();
      if (statusCode != 200) {
        throw new Exception("Leanplum: Error sending request to: " + hostName +
            ", HTTP status code: " + statusCode);
      }
      // Create every missing parent directory of the destination, top-down.
      Stack<String> dirs = new Stack<>();
      String currentDir = path;
      while ((currentDir = new File(currentDir).getParent()) != null) {
        dirs.push(currentDir);
      }
      while (!dirs.isEmpty()) {
        String directory = FileManager.fileRelativeToDocuments(dirs.pop());
        boolean isCreated = new File(directory).mkdir();
        if (!isCreated) {
          // mkdir also returns false when the directory already exists, so this
          // warning is expected on repeat downloads.
          Log.w("Failed to create directory: ", directory);
        }
      }
      // NOTE(review): the stream is not closed here — presumably Util.saveResponse
      // closes it after writing; confirm to rule out a file-handle leak.
      FileOutputStream out = new FileOutputStream(
          new File(FileManager.fileRelativeToDocuments(path)));
      Util.saveResponse(op, out);
      pendingDownloads--;
      if (Request.this.response != null) {
        Request.this.response.response(null);
      }
      if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
        noPendingDownloadsBlock.noPendingDownloads();
      }
    } catch (Exception e) {
      if (e instanceof EOFException) {
        // Premature EOF after the connection was redirected: retry once, passing the
        // redirected URL as the servlet. NOTE(review): when url is non-null the retry
        // takes the url branch again and refetches the original url, ignoring the
        // redirect — confirm this is intended.
        if (op != null && !op.getURL().equals(originalURL)) {
          downloadHelper(null, op.getURL().toString(), path, url, new HashMap<String, Object>());
          return;  // The recursive call owns the counter decrement and callbacks.
        }
      }
      Log.e("Error downloading resource:" + path, e);
      pendingDownloads--;
      if (error != null) {
        error.error(e);
      }
      if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
        noPendingDownloadsBlock.noPendingDownloads();
      }
    } finally {
      if (op != null) {
        op.disconnect();
      }
    }
  }
  /** Returns the number of file downloads currently in flight. */
  public static int numPendingDownloads() {
    return pendingDownloads;
  }
  /**
   * Registers a callback invoked when the pending-download count drops to zero.
   * Replaces any previously registered callback.
   *
   * @param block Callback to invoke; may be null to clear.
   */
  public static void onNoPendingDownloads(NoPendingDownloadsCallback block) {
    noPendingDownloadsBlock = block;
  }
public static int numResponses(JSONObject response) {
if (response == null) {
return 0;
}
try {
return response.getJSONArray("response").length();
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return 0;
}
}
public static JSONObject getResponseAt(JSONObject response, int index) {
try {
return response.getJSONArray("response").getJSONObject(index);
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
public static JSONObject getLastResponse(JSONObject response) {
int numResponses = numResponses(response);
if (numResponses > 0) {
return getResponseAt(response, numResponses - 1);
} else {
return null;
}
}
public static boolean isResponseSuccess(JSONObject response) {
if (response == null) {
return false;
}
try {
return response.getBoolean("success");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return false;
}
}
public static String getResponseError(JSONObject response) {
if (response == null) {
return null;
}
try {
JSONObject error = response.optJSONObject("error");
if (error == null) {
return null;
}
return error.getString("message");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
}
| AndroidSDKCore/src/main/java/com/leanplum/internal/Request.java | /*
* Copyright 2013, Leanplum, Inc. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.leanplum.internal;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.leanplum.Leanplum;
import com.leanplum.utils.SharedPreferencesUtil;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
/**
* Leanplum request class.
*
* @author Andrew First
*/
public class Request {
private static final long DEVELOPMENT_MIN_DELAY_MS = 100;
private static final long DEVELOPMENT_MAX_DELAY_MS = 5000;
private static final long PRODUCTION_DELAY = 60000;
static final int MAX_EVENTS_PER_API_CALL;
static final String LEANPLUM = "__leanplum__";
static final String UUID_KEY = "uuid";
private static String appId;
private static String accessKey;
private static String deviceId;
private static String userId;
private static final LeanplumEventCallbackManager eventCallbackManager =
new LeanplumEventCallbackManager();
private static final Map<String, Boolean> fileTransferStatus = new HashMap<>();
private static int pendingDownloads;
private static NoPendingDownloadsCallback noPendingDownloadsBlock;
// The token is saved primarily for legacy SharedPreferences decryption. This could
// likely be removed in the future.
private static String token = null;
private static final Map<File, Long> fileUploadSize = new HashMap<>();
private static final Map<File, Double> fileUploadProgress = new HashMap<>();
private static String fileUploadProgressString = "";
private static long lastSendTimeMs;
private static final Object uploadFileLock = new Object();
private final String httpMethod;
private final String apiMethod;
private final Map<String, Object> params;
private ResponseCallback response;
private ErrorCallback error;
private boolean sent;
private long dataBaseIndex;
private static ApiResponseCallback apiResponse;
private static List<Map<String, Object>> localErrors = new ArrayList<>();
static {
if (Build.VERSION.SDK_INT <= 17) {
MAX_EVENTS_PER_API_CALL = 5000;
} else {
MAX_EVENTS_PER_API_CALL = 10000;
}
}
public static void setAppId(String appId, String accessKey) {
if (!TextUtils.isEmpty(appId)) {
Request.appId = appId.trim();
}
if (!TextUtils.isEmpty(accessKey)) {
Request.accessKey = accessKey.trim();
}
}
public static void setDeviceId(String deviceId) {
Request.deviceId = deviceId;
}
public static void setUserId(String userId) {
Request.userId = userId;
}
public static void setToken(String token) {
Request.token = token;
}
public static String token() {
return token;
}
/**
* Since requests are batched there can be a case where other Request can take future Request
* events. We need to have for each Request database index for handle response, error or start
* callbacks.
*
* @return Index of event at database.
*/
public long getDataBaseIndex() {
return dataBaseIndex;
}
// Update index of event at database.
public void setDataBaseIndex(long dataBaseIndex) {
this.dataBaseIndex = dataBaseIndex;
}
public static void loadToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
String token = defaults.getString(Constants.Defaults.TOKEN_KEY, null);
if (token == null) {
return;
}
setToken(token);
}
public static void saveToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = defaults.edit();
editor.putString(Constants.Defaults.TOKEN_KEY, Request.token());
SharedPreferencesUtil.commitChanges(editor);
}
public static String appId() {
return appId;
}
public static String deviceId() {
return deviceId;
}
public static String userId() {
return Request.userId;
}
public Request(String httpMethod, String apiMethod, Map<String, Object> params) {
this.httpMethod = httpMethod;
this.apiMethod = apiMethod;
this.params = params != null ? params : new HashMap<String, Object>();
// Check if it is error and here was SQLite exception.
if (Constants.Methods.LOG.equals(apiMethod) && LeanplumEventDataManager.willSendErrorLog) {
localErrors.add(createArgsDictionary());
}
// Make sure the Handler is initialized on the main thread.
OsHandler.getInstance();
dataBaseIndex = -1;
}
public static Request get(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("GET", apiMethod, params);
}
public static Request post(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("POST", apiMethod, params);
}
public void onResponse(ResponseCallback response) {
this.response = response;
}
public void onError(ErrorCallback error) {
this.error = error;
}
public void onApiResponse(ApiResponseCallback apiResponse) {
Request.apiResponse = apiResponse;
}
private Map<String, Object> createArgsDictionary() {
Map<String, Object> args = new HashMap<>();
args.put(Constants.Params.DEVICE_ID, deviceId);
args.put(Constants.Params.USER_ID, userId);
args.put(Constants.Params.ACTION, apiMethod);
args.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
args.put(Constants.Params.DEV_MODE, Boolean.toString(Constants.isDevelopmentModeEnabled));
args.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
if (token != null) {
args.put(Constants.Params.TOKEN, token);
}
args.putAll(params);
return args;
}
private void saveRequestForLater(final Map<String, Object> args) {
final Request currentRequest = this;
LeanplumEventDataManager.executeAsyncTask(new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
synchronized (Request.class) {
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
long count = LeanplumEventDataManager.getEventsCount();
String uuid = preferences.getString(Constants.Defaults.UUID_KEY, null);
if (uuid == null || count % MAX_EVENTS_PER_API_CALL == 0) {
uuid = UUID.randomUUID().toString();
editor.putString(Constants.Defaults.UUID_KEY, uuid);
SharedPreferencesUtil.commitChanges(editor);
}
args.put(UUID_KEY, uuid);
LeanplumEventDataManager.insertEvent(JsonConverter.toJson(args));
dataBaseIndex = count;
// Checks if here response and/or error callback for this request. We need to add callbacks to
// eventCallbackManager only if here was internet connection, otherwise triggerErrorCallback
// will handle error callback for this event.
if (response != null || error != null && !Util.isConnected()) {
eventCallbackManager.addCallbacks(currentRequest, response, error);
}
}
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
public void send() {
this.sendEventually();
if (Constants.isDevelopmentModeEnabled) {
long currentTimeMs = System.currentTimeMillis();
long delayMs;
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > DEVELOPMENT_MAX_DELAY_MS) {
delayMs = DEVELOPMENT_MIN_DELAY_MS;
} else {
delayMs = (lastSendTimeMs + DEVELOPMENT_MAX_DELAY_MS) - currentTimeMs;
}
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfConnected();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, delayMs);
}
}
/**
* Wait 1 second for potential other API calls, and then sends the call synchronously if no other
* call has been sent within 1 minute.
*/
public void sendIfDelayed() {
sendEventually();
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfDelayedHelper();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, 1000);
}
/**
* Sends the call synchronously if no other call has been sent within 1 minute.
*/
private void sendIfDelayedHelper() {
if (Constants.isDevelopmentModeEnabled) {
send();
} else {
long currentTimeMs = System.currentTimeMillis();
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > PRODUCTION_DELAY) {
sendIfConnected();
}
}
}
public void sendIfConnected() {
if (Util.isConnected()) {
this.sendNow();
} else {
this.sendEventually();
Log.i("Device is offline, will send later");
triggerErrorCallback(new Exception("Not connected to the Internet"));
}
}
private void triggerErrorCallback(Exception e) {
if (error != null) {
error.error(e);
}
if (apiResponse != null) {
List<Map<String, Object>> requests = getUnsentRequests();
List<Map<String, Object>> requestsToSend = removeIrrelevantBackgroundStartRequests(requests);
apiResponse.response(requestsToSend, null, requests.size());
}
}
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private static boolean attachApiKeys(Map<String, Object> dict) {
if (appId == null || accessKey == null) {
Log.e("API keys are not set. Please use Leanplum.setAppIdForDevelopmentMode or "
+ "Leanplum.setAppIdForProductionMode.");
return false;
}
dict.put(Constants.Params.APP_ID, appId);
dict.put(Constants.Params.CLIENT_KEY, accessKey);
dict.put(Constants.Params.CLIENT, Constants.CLIENT);
return true;
}
public interface ResponseCallback {
void response(JSONObject response);
}
public interface ApiResponseCallback {
void response(List<Map<String, Object>> requests, JSONObject response, int countOfEvents);
}
public interface ErrorCallback {
void error(Exception e);
}
public interface NoPendingDownloadsCallback {
void noPendingDownloads();
}
/**
* Parse response body from server. Invoke potential error or response callbacks for all events
* of this request.
*
* @param responseBody JSONObject with response body from server.
* @param requestsToSend List of requests that were sent to the server/
* @param error Exception.
* @param unsentRequestsSize Size of unsent request, that we will delete.
*/
private void parseResponseBody(JSONObject responseBody, List<Map<String, Object>>
requestsToSend, Exception error, int unsentRequestsSize) {
synchronized (Request.class) {
if (responseBody == null && error != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(error, unsentRequestsSize);
return;
} else if (responseBody == null) {
return;
}
// Response for last start call.
if (apiResponse != null) {
apiResponse.response(requestsToSend, responseBody, unsentRequestsSize);
}
// We will replace it with error from response body, if we found it.
Exception lastResponseError = error;
// Valid response, parse and handle response body.
int numResponses = Request.numResponses(responseBody);
for (int i = 0; i < numResponses; i++) {
JSONObject response = Request.getResponseAt(responseBody, i);
if (Request.isResponseSuccess(response)) {
continue; // If event response is successful, proceed with next one.
}
// If event response was not successful, handle error.
String errorMessage = getReadableErrorMessage(Request.getResponseError(response));
Log.e(errorMessage);
// Throw an exception if last event response is negative.
if (i == numResponses - 1) {
lastResponseError = new Exception(errorMessage);
}
}
if (lastResponseError != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(lastResponseError, unsentRequestsSize);
} else {
// Invoke potential response callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksForResponse(responseBody, unsentRequestsSize);
}
}
}
/**
* Parse error message from server response and return readable error message.
*
* @param errorMessage String of error from server response.
* @return String of readable error message.
*/
@NonNull
private String getReadableErrorMessage(String errorMessage) {
if (errorMessage == null || errorMessage.length() == 0) {
errorMessage = "API error";
} else if (errorMessage.startsWith("App not found")) {
errorMessage = "No app matching the provided app ID was found.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Invalid access key")) {
errorMessage = "The access key you provided is not valid for this app.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Development mode requested but not permitted")) {
errorMessage = "A call to Leanplum.setAppIdForDevelopmentMode "
+ "with your production key was made, which is not permitted.";
Constants.isInPermanentFailureState = true;
} else {
errorMessage = "API error: " + errorMessage;
}
return errorMessage;
}
private void sendNow() {
if (Constants.isTestMode) {
return;
}
if (appId == null) {
Log.e("Cannot send request. appId is not set.");
return;
}
if (accessKey == null) {
Log.e("Cannot send request. accessKey is not set.");
return;
}
this.sendEventually();
Util.executeAsyncTask(true, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
sendRequests();
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
private static class RequestsWithEncoding {
List<Map<String, Object>> unsentRequests;
List<Map<String, Object>> requestsToSend;
String jsonEncodedString;
}
private RequestsWithEncoding getRequestsWithEncodedString() {
List<Map<String, Object>> unsentRequests = new ArrayList<>();
List<Map<String, Object>> requestsToSend;
// Check if we have localErrors, if yes then we will send only errors to the server.
if (localErrors.size() != 0) {
String uuid = UUID.randomUUID().toString();
for (Map<String, Object> error : localErrors) {
error.put(UUID_KEY, uuid);
unsentRequests.add(error);
}
requestsToSend = unsentRequests;
} else {
unsentRequests = getUnsentRequests();
requestsToSend = removeIrrelevantBackgroundStartRequests(unsentRequests);
}
String jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
requestsWithEncoding.unsentRequests = unsentRequests;
requestsWithEncoding.requestsToSend= requestsToSend;
requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
return requestsWithEncoding;
}
private void sendRequests() {
RequestsWithEncoding requestsWithEncoding = getRequestsWithEncodedString();
List<Map<String, Object>> unsentRequests = requestsWithEncoding.unsentRequests;
List<Map<String, Object>> requestsToSend = requestsWithEncoding.requestsToSend;
String jsonEncodedString = requestsWithEncoding.jsonEncodedString;
if (requestsToSend.isEmpty()) {
return;
}
final Map<String, Object> multiRequestArgs = new HashMap<>();
if (!Request.attachApiKeys(multiRequestArgs)) {
return;
}
multiRequestArgs.put(Constants.Params.DATA, jsonEncodedString);
multiRequestArgs.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
multiRequestArgs.put(Constants.Params.ACTION, Constants.Methods.MULTI);
multiRequestArgs.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
JSONObject responseBody;
HttpURLConnection op = null;
try {
try {
op = Util.operation(
Constants.API_HOST_NAME,
Constants.API_SERVLET,
multiRequestArgs,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS);
responseBody = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
Exception errorException;
if (statusCode >= 200 && statusCode <= 299) {
if (responseBody == null) {
errorException = new Exception("Response JSON is null.");
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, errorException, unsentRequests.size());
return;
}
Exception exception = null;
// Checks if we received the same number of responses as a number of sent request.
int numResponses = Request.numResponses(responseBody);
if (numResponses != requestsToSend.size()) {
Log.w("Sent " + requestsToSend.size() + " requests but only" +
" received " + numResponses);
}
parseResponseBody(responseBody, requestsToSend, null, unsentRequests.size());
// Clear localErrors list.
localErrors.clear();
deleteSentRequests(unsentRequests.size());
// Send another request if the last request had maximum events per api call.
if (unsentRequests.size() == MAX_EVENTS_PER_API_CALL) {
sendRequests();
}
} else {
errorException = new Exception("HTTP error " + statusCode);
if (statusCode != -1 && statusCode != 408 && !(statusCode >= 500 && statusCode <= 599)) {
deleteSentRequests(unsentRequests.size());
parseResponseBody(responseBody, requestsToSend, errorException, unsentRequests.size());
}
}
} catch (JSONException e) {
Log.e("Error parsing JSON response: " + e.toString() + "\n" + Log.getStackTraceString(e));
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, e, unsentRequests.size());
} catch (Exception e) {
Log.e("Unable to send request: " + e.toString() + "\n" + Log.getStackTraceString(e));
} finally {
if (op != null) {
op.disconnect();
}
}
} catch (Throwable t) {
Util.handleException(t);
}
}
public void sendEventually() {
if (Constants.isTestMode) {
return;
}
if (LeanplumEventDataManager.willSendErrorLog) {
return;
}
if (!sent) {
sent = true;
Map<String, Object> args = createArgsDictionary();
saveRequestForLater(args);
}
}
static void deleteSentRequests(int requestsCount) {
if (requestsCount == 0) {
return;
}
synchronized (Request.class) {
LeanplumEventDataManager.deleteEvents(requestsCount);
}
}
private static List<Map<String, Object>> getUnsentRequests() {
List<Map<String, Object>> requestData;
synchronized (Request.class) {
lastSendTimeMs = System.currentTimeMillis();
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
requestData = LeanplumEventDataManager.getEvents(MAX_EVENTS_PER_API_CALL);
editor.remove(Constants.Defaults.UUID_KEY);
SharedPreferencesUtil.commitChanges(editor);
}
return requestData;
}
/**
* In various scenarios we can end up batching a big number of requests (e.g. device is offline,
* background sessions), which could make the stored API calls batch look something like:
* <p>
* <code>start(B), start(B), start(F), track, start(B), track, start(F), resumeSession</code>
* <p>
* where <code>start(B)</code> indicates a start in the background, and <code>start(F)</code>
* one in the foreground.
* <p>
* In this case the first two <code>start(B)</code> can be dropped because they don't contribute
* any relevant information for the batch call.
* <p>
* Essentially we drop every <code>start(B)</code> call, that is directly followed by any kind of
* a <code>start</code> call.
*
* @param requestData A list of the requests, stored on the device.
* @return A list of only these requests, which contain relevant information for the API call.
*/
private static List<Map<String, Object>> removeIrrelevantBackgroundStartRequests(
List<Map<String, Object>> requestData) {
List<Map<String, Object>> relevantRequests = new ArrayList<>();
int requestCount = requestData.size();
if (requestCount > 0) {
for (int i = 0; i < requestCount; i++) {
Map<String, Object> currentRequest = requestData.get(i);
if (i < requestCount - 1
&& Constants.Methods.START.equals(requestData.get(i + 1).get(Constants.Params.ACTION))
&& Constants.Methods.START.equals(currentRequest.get(Constants.Params.ACTION))
&& Boolean.TRUE.toString().equals(currentRequest.get(Constants.Params.BACKGROUND))) {
continue;
}
relevantRequests.add(currentRequest);
}
}
return relevantRequests;
}
private static String jsonEncodeUnsentRequests(List<Map<String, Object>> requestData) {
Map<String, Object> data = new HashMap<>();
data.put(Constants.Params.DATA, requestData);
return JsonConverter.toJson(data);
}
private static String getSizeAsString(int bytes) {
if (bytes < (1 << 10)) {
return bytes + " B";
} else if (bytes < (1 << 20)) {
return (bytes >> 10) + " KB";
} else {
return (bytes >> 20) + " MB";
}
}
private static void printUploadProgress() {
int totalFiles = fileUploadSize.size();
int sentFiles = 0;
int totalBytes = 0;
int sentBytes = 0;
for (Map.Entry<File, Long> entry : fileUploadSize.entrySet()) {
File file = entry.getKey();
long fileSize = entry.getValue();
double fileProgress = fileUploadProgress.get(file);
if (fileProgress == 1) {
sentFiles++;
}
sentBytes += (int) (fileSize * fileProgress);
totalBytes += fileSize;
}
String progressString = "Uploading resources. " +
sentFiles + '/' + totalFiles + " files completed; " +
getSizeAsString(sentBytes) + '/' + getSizeAsString(totalBytes) + " transferred.";
if (!fileUploadProgressString.equals(progressString)) {
fileUploadProgressString = progressString;
Log.i(progressString);
}
}
public void sendFilesNow(final List<String> filenames, final List<InputStream> streams) {
if (Constants.isTestMode) {
return;
}
final Map<String, Object> dict = createArgsDictionary();
if (!attachApiKeys(dict)) {
return;
}
final List<File> filesToUpload = new ArrayList<>();
// First set up the files for upload
for (int i = 0; i < filenames.size(); i++) {
String filename = filenames.get(i);
if (filename == null || Boolean.TRUE.equals(fileTransferStatus.get(filename))) {
continue;
}
File file = new File(filename);
long size;
try {
size = streams.get(i).available();
} catch (IOException e) {
size = file.length();
} catch (NullPointerException e) {
// Not good. Can't read asset.
Log.e("Unable to read file " + filename);
continue;
}
fileTransferStatus.put(filename, true);
filesToUpload.add(file);
fileUploadSize.put(file, size);
fileUploadProgress.put(file, 0.0);
}
if (filesToUpload.size() == 0) {
return;
}
printUploadProgress();
// Now upload the files
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
synchronized (uploadFileLock) { // Don't overload app and server with many upload tasks
JSONObject result;
HttpURLConnection op = null;
try {
op = Util.uploadFilesOperation(
Constants.Params.FILE,
filesToUpload,
streams,
Constants.API_HOST_NAME,
Constants.API_SERVLET,
dict,
httpMethod,
Constants.API_SSL,
60);
if (op != null) {
result = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request: " + statusCode);
}
if (Request.this.response != null) {
Request.this.response.response(result);
}
} else {
if (error != null) {
error.error(new Exception("Leanplum: Unable to read file."));
}
}
} catch (JSONException e) {
Log.e("Unable to convert to JSON.", e);
if (error != null) {
error.error(e);
}
} catch (SocketTimeoutException e) {
Log.e("Timeout uploading files. Try again or limit the number of files " +
"to upload with parameters to syncResourcesAsync.");
if (error != null) {
error.error(e);
}
} catch (Exception e) {
Log.e("Unable to send file.", e);
if (error != null) {
error.error(e);
}
} finally {
if (op != null) {
op.disconnect();
}
}
for (File file : filesToUpload) {
fileUploadProgress.put(file, 1.0);
}
printUploadProgress();
return null;
}
}
});
// TODO: Upload progress
}
void downloadFile(final String path, final String url) {
if (Constants.isTestMode) {
return;
}
if (Boolean.TRUE.equals(fileTransferStatus.get(path))) {
return;
}
pendingDownloads++;
Log.i("Downloading resource " + path);
fileTransferStatus.put(path, true);
final Map<String, Object> dict = createArgsDictionary();
dict.put(Constants.Keys.FILENAME, path);
if (!attachApiKeys(dict)) {
return;
}
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
downloadHelper(Constants.API_HOST_NAME, Constants.API_SERVLET, path, url, dict);
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
// TODO: Download progress
}
private void downloadHelper(String hostName, String servlet, final String path, final String url,
final Map<String, Object> dict) {
HttpURLConnection op = null;
URL originalURL = null;
try {
if (url == null) {
op = Util.operation(
hostName,
servlet,
dict,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
} else {
op = Util.createHttpUrlConnection(url, httpMethod, url.startsWith("https://"),
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
}
originalURL = op.getURL();
op.connect();
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request to: " + hostName +
", HTTP status code: " + statusCode);
}
Stack<String> dirs = new Stack<>();
String currentDir = path;
while ((currentDir = new File(currentDir).getParent()) != null) {
dirs.push(currentDir);
}
while (!dirs.isEmpty()) {
String directory = FileManager.fileRelativeToDocuments(dirs.pop());
boolean isCreated = new File(directory).mkdir();
if (!isCreated) {
Log.w("Failed to create directory: ", directory);
}
}
FileOutputStream out = new FileOutputStream(
new File(FileManager.fileRelativeToDocuments(path)));
Util.saveResponse(op, out);
pendingDownloads--;
if (Request.this.response != null) {
Request.this.response.response(null);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} catch (Exception e) {
if (e instanceof EOFException) {
if (op != null && !op.getURL().equals(originalURL)) {
downloadHelper(null, op.getURL().toString(), path, url, new HashMap<String, Object>());
return;
}
}
Log.e("Error downloading resource:" + path, e);
pendingDownloads--;
if (error != null) {
error.error(e);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} finally {
if (op != null) {
op.disconnect();
}
}
}
  /** Returns the number of resource downloads that have been started but not yet completed. */
  public static int numPendingDownloads() {
    return pendingDownloads;
  }
  /**
   * Registers the callback fired when the pending download count reaches zero. Only one callback
   * is kept; registering a new one replaces the previous one.
   */
  public static void onNoPendingDownloads(NoPendingDownloadsCallback block) {
    noPendingDownloadsBlock = block;
  }
public static int numResponses(JSONObject response) {
if (response == null) {
return 0;
}
try {
return response.getJSONArray("response").length();
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return 0;
}
}
public static JSONObject getResponseAt(JSONObject response, int index) {
try {
return response.getJSONArray("response").getJSONObject(index);
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
public static JSONObject getLastResponse(JSONObject response) {
int numResponses = numResponses(response);
if (numResponses > 0) {
return getResponseAt(response, numResponses - 1);
} else {
return null;
}
}
public static boolean isResponseSuccess(JSONObject response) {
if (response == null) {
return false;
}
try {
return response.getBoolean("success");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return false;
}
}
public static String getResponseError(JSONObject response) {
if (response == null) {
return null;
}
try {
JSONObject error = response.optJSONObject("error");
if (error == null) {
return null;
}
return error.getString("message");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
}
| better refactor
| AndroidSDKCore/src/main/java/com/leanplum/internal/Request.java | better refactor |
|
Java | apache-2.0 | 240a1abed481b80d28da269ac544e2b33fd36ee9 | 0 | Arcnor/libgdx,Deftwun/libgdx,gdos/libgdx,KrisLee/libgdx,toloudis/libgdx,tell10glu/libgdx,FredGithub/libgdx,antag99/libgdx,ThiagoGarciaAlves/libgdx,nrallakis/libgdx,petugez/libgdx,jsjolund/libgdx,nave966/libgdx,BlueRiverInteractive/libgdx,Arcnor/libgdx,tommyettinger/libgdx,stickyd/libgdx,haedri/libgdx-1,saqsun/libgdx,EsikAntony/libgdx,Deftwun/libgdx,jasonwee/libgdx,thepullman/libgdx,Badazdz/libgdx,del-sol/libgdx,revo09/libgdx,hyvas/libgdx,ztv/libgdx,zommuter/libgdx,309746069/libgdx,bladecoder/libgdx,MikkelTAndersen/libgdx,nooone/libgdx,MikkelTAndersen/libgdx,309746069/libgdx,fwolff/libgdx,zhimaijoy/libgdx,alireza-hosseini/libgdx,MetSystem/libgdx,luischavez/libgdx,MetSystem/libgdx,youprofit/libgdx,bgroenks96/libgdx,js78/libgdx,haedri/libgdx-1,KrisLee/libgdx,tommyettinger/libgdx,nooone/libgdx,Arcnor/libgdx,NathanSweet/libgdx,FredGithub/libgdx,ninoalma/libgdx,BlueRiverInteractive/libgdx,fwolff/libgdx,Heart2009/libgdx,toloudis/libgdx,andyvand/libgdx,PedroRomanoBarbosa/libgdx,1yvT0s/libgdx,shiweihappy/libgdx,stickyd/libgdx,snovak/libgdx,SidneyXu/libgdx,Badazdz/libgdx,jsjolund/libgdx,Dzamir/libgdx,mumer92/libgdx,thepullman/libgdx,tommycli/libgdx,MadcowD/libgdx,Dzamir/libgdx,alex-dorokhov/libgdx,MadcowD/libgdx,anserran/libgdx,toa5/libgdx,kagehak/libgdx,saqsun/libgdx,toa5/libgdx,tommycli/libgdx,ya7lelkom/libgdx,FredGithub/libgdx,ninoalma/libgdx,FredGithub/libgdx,azakhary/libgdx,NathanSweet/libgdx,saqsun/libgdx,Thotep/libgdx,saqsun/libgdx,anserran/libgdx,nudelchef/libgdx,toloudis/libgdx,curtiszimmerman/libgdx,Zonglin-Li6565/libgdx,gouessej/libgdx,fiesensee/libgdx,jasonwee/libgdx,bgroenks96/libgdx,antag99/libgdx,toloudis/libgdx,ya7lelkom/libgdx,josephknight/libgdx,SidneyXu/libgdx,tommycli/libgdx,codepoke/libgdx,nooone/libgdx,libgdx/libgdx,codepoke/libgdx,titovmaxim/libgdx,codepoke/libgdx,czyzby/libgdx,JDReutt/libgdx,bladecoder/libgdx,nrallakis/libgdx,nudelchef/libgdx,Senth/libgdx,ttencate/libgdx,antag99/libgdx,
nelsonsilva/libgdx,Deftwun/libgdx,FyiurAmron/libgdx,Heart2009/libgdx,MetSystem/libgdx,antag99/libgdx,davebaol/libgdx,del-sol/libgdx,tell10glu/libgdx,noelsison2/libgdx,samskivert/libgdx,Wisienkas/libgdx,JDReutt/libgdx,bladecoder/libgdx,junkdog/libgdx,JFixby/libgdx,mumer92/libgdx,del-sol/libgdx,davebaol/libgdx,EsikAntony/libgdx,MadcowD/libgdx,jasonwee/libgdx,yangweigbh/libgdx,alex-dorokhov/libgdx,collinsmith/libgdx,saqsun/libgdx,azakhary/libgdx,luischavez/libgdx,cypherdare/libgdx,nave966/libgdx,jasonwee/libgdx,alireza-hosseini/libgdx,basherone/libgdxcn,ricardorigodon/libgdx,xpenatan/libgdx-LWJGL3,ThiagoGarciaAlves/libgdx,saltares/libgdx,srwonka/libGdx,designcrumble/libgdx,titovmaxim/libgdx,gouessej/libgdx,SidneyXu/libgdx,xpenatan/libgdx-LWJGL3,bgroenks96/libgdx,Thotep/libgdx,czyzby/libgdx,Heart2009/libgdx,Gliby/libgdx,xranby/libgdx,alex-dorokhov/libgdx,EsikAntony/libgdx,katiepino/libgdx,czyzby/libgdx,czyzby/libgdx,BlueRiverInteractive/libgdx,1yvT0s/libgdx,zhimaijoy/libgdx,PedroRomanoBarbosa/libgdx,ya7lelkom/libgdx,KrisLee/libgdx,realitix/libgdx,FyiurAmron/libgdx,nave966/libgdx,sjosegarcia/libgdx,anserran/libgdx,tommycli/libgdx,nave966/libgdx,GreenLightning/libgdx,antag99/libgdx,MovingBlocks/libgdx,Zomby2D/libgdx,anserran/libgdx,basherone/libgdxcn,noelsison2/libgdx,JDReutt/libgdx,nelsonsilva/libgdx,sinistersnare/libgdx,Gliby/libgdx,petugez/libgdx,snovak/libgdx,junkdog/libgdx,zommuter/libgdx,ninoalma/libgdx,mumer92/libgdx,Senth/libgdx,TheAks999/libgdx,TheAks999/libgdx,Badazdz/libgdx,youprofit/libgdx,MovingBlocks/libgdx,Deftwun/libgdx,ttencate/libgdx,ricardorigodon/libgdx,curtiszimmerman/libgdx,basherone/libgdxcn,cypherdare/libgdx,309746069/libgdx,josephknight/libgdx,alireza-hosseini/libgdx,djom20/libgdx,firefly2442/libgdx,sarkanyi/libgdx,collinsmith/libgdx,Gliby/libgdx,copystudy/libgdx,alex-dorokhov/libgdx,Senth/libgdx,copystudy/libgdx,petugez/libgdx,kotcrab/libgdx,Deftwun/libgdx,FyiurAmron/libgdx,junkdog/libgdx,FredGithub/libgdx,alireza-hosseini/libgdx,xpenatan/libgdx-
LWJGL3,ThiagoGarciaAlves/libgdx,saltares/libgdx,nelsonsilva/libgdx,codepoke/libgdx,Arcnor/libgdx,djom20/libgdx,junkdog/libgdx,tommycli/libgdx,stinsonga/libgdx,MikkelTAndersen/libgdx,azakhary/libgdx,thepullman/libgdx,nelsonsilva/libgdx,Badazdz/libgdx,gouessej/libgdx,josephknight/libgdx,Zonglin-Li6565/libgdx,sarkanyi/libgdx,gouessej/libgdx,jasonwee/libgdx,nrallakis/libgdx,ztv/libgdx,Thotep/libgdx,MovingBlocks/libgdx,zhimaijoy/libgdx,MovingBlocks/libgdx,curtiszimmerman/libgdx,djom20/libgdx,haedri/libgdx-1,ThiagoGarciaAlves/libgdx,nave966/libgdx,xoppa/libgdx,codepoke/libgdx,youprofit/libgdx,xranby/libgdx,billgame/libgdx,fiesensee/libgdx,flaiker/libgdx,yangweigbh/libgdx,MadcowD/libgdx,nave966/libgdx,tommyettinger/libgdx,sjosegarcia/libgdx,josephknight/libgdx,srwonka/libGdx,bgroenks96/libgdx,jasonwee/libgdx,designcrumble/libgdx,nudelchef/libgdx,realitix/libgdx,MikkelTAndersen/libgdx,snovak/libgdx,youprofit/libgdx,copystudy/libgdx,basherone/libgdxcn,fiesensee/libgdx,UnluckyNinja/libgdx,UnluckyNinja/libgdx,MetSystem/libgdx,junkdog/libgdx,luischavez/libgdx,curtiszimmerman/libgdx,djom20/libgdx,zommuter/libgdx,tommycli/libgdx,bsmr-java/libgdx,Badazdz/libgdx,cypherdare/libgdx,curtiszimmerman/libgdx,kotcrab/libgdx,Zomby2D/libgdx,js78/libgdx,youprofit/libgdx,sinistersnare/libgdx,fiesensee/libgdx,Zonglin-Li6565/libgdx,del-sol/libgdx,zhimaijoy/libgdx,bgroenks96/libgdx,del-sol/libgdx,curtiszimmerman/libgdx,shiweihappy/libgdx,ricardorigodon/libgdx,gdos/libgdx,srwonka/libGdx,TheAks999/libgdx,Badazdz/libgdx,codepoke/libgdx,Dzamir/libgdx,MovingBlocks/libgdx,bsmr-java/libgdx,cypherdare/libgdx,xoppa/libgdx,gf11speed/libgdx,tell10glu/libgdx,mumer92/libgdx,309746069/libgdx,xoppa/libgdx,katiepino/libgdx,Xhanim/libgdx,ricardorigodon/libgdx,gouessej/libgdx,Dzamir/libgdx,katiepino/libgdx,saqsun/libgdx,haedri/libgdx-1,MetSystem/libgdx,nrallakis/libgdx,bladecoder/libgdx,sjosegarcia/libgdx,Badazdz/libgdx,hyvas/libgdx,flaiker/libgdx,GreenLightning/libgdx,nelsonsilva/libgdx,PedroRomanoBarbosa/libgdx
,firefly2442/libgdx,ricardorigodon/libgdx,Wisienkas/libgdx,katiepino/libgdx,Zomby2D/libgdx,jsjolund/libgdx,Dzamir/libgdx,PedroRomanoBarbosa/libgdx,billgame/libgdx,luischavez/libgdx,tommycli/libgdx,JFixby/libgdx,JDReutt/libgdx,snovak/libgdx,czyzby/libgdx,anserran/libgdx,luischavez/libgdx,Xhanim/libgdx,Heart2009/libgdx,billgame/libgdx,djom20/libgdx,TheAks999/libgdx,stickyd/libgdx,haedri/libgdx-1,ninoalma/libgdx,Heart2009/libgdx,xranby/libgdx,srwonka/libGdx,ztv/libgdx,hyvas/libgdx,ThiagoGarciaAlves/libgdx,xoppa/libgdx,nudelchef/libgdx,snovak/libgdx,gdos/libgdx,gf11speed/libgdx,xpenatan/libgdx-LWJGL3,Xhanim/libgdx,Dzamir/libgdx,sjosegarcia/libgdx,billgame/libgdx,yangweigbh/libgdx,collinsmith/libgdx,jsjolund/libgdx,Thotep/libgdx,ttencate/libgdx,NathanSweet/libgdx,Senth/libgdx,xpenatan/libgdx-LWJGL3,UnluckyNinja/libgdx,UnluckyNinja/libgdx,youprofit/libgdx,nave966/libgdx,Dzamir/libgdx,fwolff/libgdx,saqsun/libgdx,hyvas/libgdx,josephknight/libgdx,bsmr-java/libgdx,junkdog/libgdx,nudelchef/libgdx,petugez/libgdx,saltares/libgdx,JFixby/libgdx,tell10glu/libgdx,Senth/libgdx,jberberick/libgdx,Zonglin-Li6565/libgdx,TheAks999/libgdx,gdos/libgdx,petugez/libgdx,MikkelTAndersen/libgdx,hyvas/libgdx,revo09/libgdx,JDReutt/libgdx,MadcowD/libgdx,ttencate/libgdx,andyvand/libgdx,samskivert/libgdx,toloudis/libgdx,srwonka/libGdx,hyvas/libgdx,haedri/libgdx-1,PedroRomanoBarbosa/libgdx,toloudis/libgdx,MadcowD/libgdx,309746069/libgdx,snovak/libgdx,zommuter/libgdx,EsikAntony/libgdx,sjosegarcia/libgdx,Heart2009/libgdx,josephknight/libgdx,noelsison2/libgdx,KrisLee/libgdx,fiesensee/libgdx,xoppa/libgdx,codepoke/libgdx,SidneyXu/libgdx,bgroenks96/libgdx,Zomby2D/libgdx,srwonka/libGdx,designcrumble/libgdx,jasonwee/libgdx,Wisienkas/libgdx,ttencate/libgdx,FredGithub/libgdx,cypherdare/libgdx,NathanSweet/libgdx,ttencate/libgdx,saltares/libgdx,curtiszimmerman/libgdx,davebaol/libgdx,firefly2442/libgdx,kotcrab/libgdx,FyiurAmron/libgdx,shiweihappy/libgdx,Gliby/libgdx,xoppa/libgdx,bgroenks96/libgdx,davebaol/libgdx,ya
7lelkom/libgdx,luischavez/libgdx,thepullman/libgdx,Thotep/libgdx,PedroRomanoBarbosa/libgdx,del-sol/libgdx,UnluckyNinja/libgdx,kotcrab/libgdx,JDReutt/libgdx,toa5/libgdx,sinistersnare/libgdx,FredGithub/libgdx,gouessej/libgdx,1yvT0s/libgdx,designcrumble/libgdx,nrallakis/libgdx,nave966/libgdx,MetSystem/libgdx,EsikAntony/libgdx,TheAks999/libgdx,shiweihappy/libgdx,tommycli/libgdx,davebaol/libgdx,andyvand/libgdx,ThiagoGarciaAlves/libgdx,revo09/libgdx,luischavez/libgdx,fwolff/libgdx,gdos/libgdx,antag99/libgdx,sinistersnare/libgdx,copystudy/libgdx,titovmaxim/libgdx,thepullman/libgdx,kagehak/libgdx,KrisLee/libgdx,gf11speed/libgdx,kagehak/libgdx,kotcrab/libgdx,fwolff/libgdx,ttencate/libgdx,Zonglin-Li6565/libgdx,sarkanyi/libgdx,anserran/libgdx,FyiurAmron/libgdx,shiweihappy/libgdx,zhimaijoy/libgdx,tell10glu/libgdx,Senth/libgdx,GreenLightning/libgdx,js78/libgdx,BlueRiverInteractive/libgdx,js78/libgdx,haedri/libgdx-1,SidneyXu/libgdx,bsmr-java/libgdx,ThiagoGarciaAlves/libgdx,ninoalma/libgdx,snovak/libgdx,antag99/libgdx,copystudy/libgdx,MikkelTAndersen/libgdx,alireza-hosseini/libgdx,zommuter/libgdx,nrallakis/libgdx,zhimaijoy/libgdx,jberberick/libgdx,alireza-hosseini/libgdx,ricardorigodon/libgdx,revo09/libgdx,EsikAntony/libgdx,kagehak/libgdx,djom20/libgdx,fwolff/libgdx,ya7lelkom/libgdx,flaiker/libgdx,fiesensee/libgdx,kagehak/libgdx,saqsun/libgdx,tell10glu/libgdx,ninoalma/libgdx,FyiurAmron/libgdx,realitix/libgdx,jberberick/libgdx,1yvT0s/libgdx,libgdx/libgdx,MadcowD/libgdx,tell10glu/libgdx,josephknight/libgdx,noelsison2/libgdx,collinsmith/libgdx,xoppa/libgdx,Xhanim/libgdx,Wisienkas/libgdx,davebaol/libgdx,czyzby/libgdx,noelsison2/libgdx,realitix/libgdx,revo09/libgdx,ninoalma/libgdx,JDReutt/libgdx,ninoalma/libgdx,junkdog/libgdx,Dzamir/libgdx,ttencate/libgdx,srwonka/libGdx,stickyd/libgdx,EsikAntony/libgdx,jsjolund/libgdx,noelsison2/libgdx,del-sol/libgdx,KrisLee/libgdx,hyvas/libgdx,sjosegarcia/libgdx,nrallakis/libgdx,nudelchef/libgdx,alireza-hosseini/libgdx,FyiurAmron/libgdx,fiesensee/libg
dx,stickyd/libgdx,saltares/libgdx,noelsison2/libgdx,sarkanyi/libgdx,Wisienkas/libgdx,UnluckyNinja/libgdx,KrisLee/libgdx,nudelchef/libgdx,samskivert/libgdx,jberberick/libgdx,gouessej/libgdx,sarkanyi/libgdx,Heart2009/libgdx,shiweihappy/libgdx,ricardorigodon/libgdx,MikkelTAndersen/libgdx,Deftwun/libgdx,MetSystem/libgdx,nooone/libgdx,Arcnor/libgdx,zhimaijoy/libgdx,ya7lelkom/libgdx,xranby/libgdx,JFixby/libgdx,azakhary/libgdx,Gliby/libgdx,SidneyXu/libgdx,toloudis/libgdx,titovmaxim/libgdx,djom20/libgdx,firefly2442/libgdx,Wisienkas/libgdx,zhimaijoy/libgdx,Wisienkas/libgdx,js78/libgdx,JFixby/libgdx,sarkanyi/libgdx,gdos/libgdx,kagehak/libgdx,toloudis/libgdx,tommyettinger/libgdx,Gliby/libgdx,Heart2009/libgdx,collinsmith/libgdx,ztv/libgdx,SidneyXu/libgdx,Gliby/libgdx,yangweigbh/libgdx,BlueRiverInteractive/libgdx,bsmr-java/libgdx,titovmaxim/libgdx,Thotep/libgdx,xranby/libgdx,TheAks999/libgdx,katiepino/libgdx,samskivert/libgdx,stinsonga/libgdx,sjosegarcia/libgdx,mumer92/libgdx,saltares/libgdx,sinistersnare/libgdx,js78/libgdx,toa5/libgdx,Deftwun/libgdx,GreenLightning/libgdx,noelsison2/libgdx,kagehak/libgdx,jsjolund/libgdx,josephknight/libgdx,libgdx/libgdx,firefly2442/libgdx,sinistersnare/libgdx,shiweihappy/libgdx,xpenatan/libgdx-LWJGL3,realitix/libgdx,junkdog/libgdx,azakhary/libgdx,BlueRiverInteractive/libgdx,katiepino/libgdx,fwolff/libgdx,copystudy/libgdx,gf11speed/libgdx,TheAks999/libgdx,MovingBlocks/libgdx,GreenLightning/libgdx,PedroRomanoBarbosa/libgdx,1yvT0s/libgdx,ricardorigodon/libgdx,xpenatan/libgdx-LWJGL3,zommuter/libgdx,Zonglin-Li6565/libgdx,revo09/libgdx,JDReutt/libgdx,tommyettinger/libgdx,designcrumble/libgdx,collinsmith/libgdx,sarkanyi/libgdx,kotcrab/libgdx,toa5/libgdx,thepullman/libgdx,stinsonga/libgdx,petugez/libgdx,shiweihappy/libgdx,stickyd/libgdx,designcrumble/libgdx,309746069/libgdx,stinsonga/libgdx,xoppa/libgdx,alex-dorokhov/libgdx,GreenLightning/libgdx,haedri/libgdx-1,andyvand/libgdx,1yvT0s/libgdx,Wisienkas/libgdx,JFixby/libgdx,fiesensee/libgdx,flaiker/libgdx,
mumer92/libgdx,codepoke/libgdx,snovak/libgdx,realitix/libgdx,srwonka/libGdx,flaiker/libgdx,czyzby/libgdx,jberberick/libgdx,gouessej/libgdx,revo09/libgdx,anserran/libgdx,saltares/libgdx,petugez/libgdx,zommuter/libgdx,nooone/libgdx,nrallakis/libgdx,firefly2442/libgdx,kotcrab/libgdx,toa5/libgdx,SidneyXu/libgdx,ThiagoGarciaAlves/libgdx,samskivert/libgdx,Arcnor/libgdx,Badazdz/libgdx,js78/libgdx,bgroenks96/libgdx,Zonglin-Li6565/libgdx,collinsmith/libgdx,thepullman/libgdx,billgame/libgdx,collinsmith/libgdx,BlueRiverInteractive/libgdx,youprofit/libgdx,tell10glu/libgdx,basherone/libgdxcn,samskivert/libgdx,xranby/libgdx,katiepino/libgdx,gdos/libgdx,1yvT0s/libgdx,libgdx/libgdx,stinsonga/libgdx,Deftwun/libgdx,yangweigbh/libgdx,firefly2442/libgdx,ztv/libgdx,sjosegarcia/libgdx,Zomby2D/libgdx,Zonglin-Li6565/libgdx,ztv/libgdx,MadcowD/libgdx,copystudy/libgdx,youprofit/libgdx,designcrumble/libgdx,ya7lelkom/libgdx,gdos/libgdx,libgdx/libgdx,nudelchef/libgdx,alex-dorokhov/libgdx,yangweigbh/libgdx,BlueRiverInteractive/libgdx,alireza-hosseini/libgdx,realitix/libgdx,djom20/libgdx,del-sol/libgdx,bsmr-java/libgdx,bsmr-java/libgdx,JFixby/libgdx,ya7lelkom/libgdx,jasonwee/libgdx,UnluckyNinja/libgdx,MikkelTAndersen/libgdx,1yvT0s/libgdx,EsikAntony/libgdx,jberberick/libgdx,Senth/libgdx,sarkanyi/libgdx,mumer92/libgdx,andyvand/libgdx,stickyd/libgdx,billgame/libgdx,ztv/libgdx,UnluckyNinja/libgdx,firefly2442/libgdx,MetSystem/libgdx,PedroRomanoBarbosa/libgdx,Senth/libgdx,gf11speed/libgdx,katiepino/libgdx,js78/libgdx,luischavez/libgdx,NathanSweet/libgdx,ztv/libgdx,billgame/libgdx,GreenLightning/libgdx,jsjolund/libgdx,realitix/libgdx,JFixby/libgdx,curtiszimmerman/libgdx,gf11speed/libgdx,andyvand/libgdx,andyvand/libgdx,titovmaxim/libgdx,zommuter/libgdx,titovmaxim/libgdx,MovingBlocks/libgdx,titovmaxim/libgdx,andyvand/libgdx,bladecoder/libgdx,MovingBlocks/libgdx,kotcrab/libgdx,designcrumble/libgdx,fwolff/libgdx,FredGithub/libgdx,jberberick/libgdx,nooone/libgdx,309746069/libgdx,billgame/libgdx,xpenatan/libgd
x-LWJGL3,xranby/libgdx,309746069/libgdx,nelsonsilva/libgdx,Xhanim/libgdx,Gliby/libgdx,Thotep/libgdx,gf11speed/libgdx,xranby/libgdx,flaiker/libgdx,flaiker/libgdx,toa5/libgdx,revo09/libgdx,anserran/libgdx,FyiurAmron/libgdx,GreenLightning/libgdx,mumer92/libgdx,alex-dorokhov/libgdx,thepullman/libgdx,copystudy/libgdx,azakhary/libgdx,kagehak/libgdx,yangweigbh/libgdx,gf11speed/libgdx,antag99/libgdx,yangweigbh/libgdx,toa5/libgdx,KrisLee/libgdx,czyzby/libgdx,hyvas/libgdx,Thotep/libgdx,Xhanim/libgdx,basherone/libgdxcn,stickyd/libgdx,jsjolund/libgdx,saltares/libgdx,samskivert/libgdx,bsmr-java/libgdx,alex-dorokhov/libgdx,Xhanim/libgdx,jberberick/libgdx,petugez/libgdx,Xhanim/libgdx,flaiker/libgdx,samskivert/libgdx | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.pay.android.ouya;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.X509EncodedKeySpec;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.json.JSONException;
import org.json.JSONObject;
import tv.ouya.console.api.CancelIgnoringOuyaResponseListener;
import tv.ouya.console.api.OuyaEncryptionHelper;
import tv.ouya.console.api.OuyaErrorCodes;
import tv.ouya.console.api.OuyaFacade;
import tv.ouya.console.api.OuyaResponseListener;
import tv.ouya.console.api.Product;
import tv.ouya.console.api.Purchasable;
import tv.ouya.console.api.Receipt;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ParseException;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Base64;
import android.widget.Toast;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.pay.PurchaseManager;
import com.badlogic.gdx.pay.PurchaseManagerConfig;
import com.badlogic.gdx.pay.PurchaseObserver;
import com.badlogic.gdx.pay.Transaction;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.GdxRuntimeException;
/** The purchase manager implementation for OUYA.
 * <p>
 * To integrate on OUYA/Android do the following:
 * <ul>
 * <li>AndroidManifest.xml: add the required permissions (see <a href="https://github.com/onepf/OpenIAB">OpenIAB on GitHub</a>).
 * <li>proguard.cfg: add the required proguard settings (see <a href="https://github.com/onepf/OpenIAB">OpenIAB on GitHub</a>).
 * <li>AndroidApplication/member variables: add "PurchaseManagerAndroidOUYA manager;"
 * <li>AndroidApplication.onCreate(...): add "manager = new PurchaseManagerAndroidOUYA(this);"
 * <li>AndroidApplication.onDispose(): add "manager.dispose(); manager = null;"
 * <li>optional: AndroidApplication.onActivityResult(...): add "manager.onActivityResult(requestCode, resultCode, data);"
 * </ul>
 *
 * @author just4phil */
public class PurchaseManagerAndroidOUYA implements PurchaseManager, Disposable {
	/** Debug tag for logging. */
	private static final String TAG = "GdxPay/OUYA";
	/** Enables debug log output via showMessage(). */
	private static final boolean LOGDEBUG = true;
	/** Enables on-screen Toast messages via showMessage(). */
	private static final boolean SHOWTOASTS = true;
	/** showMessage() type: informational log. */
	private static final int LOGTYPELOG = 0;
	/** showMessage() type: error log. */
	private static final int LOGTYPEERROR = 1;
	/** Our Android activity. */
	private Activity activity;
	/** The request code to use for onActivityResult (arbitrary chosen). */
	private int requestCode;
	/** The registered observer. */
	PurchaseObserver observer;
	/** The configuration. */
	PurchaseManagerConfig config;
	/** the ouya helper */
	private OuyaFacade ouyaFacade;
	/** The OUYA cryptographic key for the application */
	public PublicKey ouyaPublicKey;
	// Raw DER-encoded key bytes from the config, used to build ouyaPublicKey in install().
	private byte[] applicationKey;
	public List<Purchasable> productIDList; //--- This is the set of OUYA product IDs which our app knows about
	// Purchase requests sent but not yet answered, keyed by the per-request UUID; guarded by
	// synchronizing on the map itself.
	private final Map<String, Product> ouyaOutstandingPurchaseRequests = new HashMap<String, Product>();
	// Listener used for restoring previous purchases (requestPurchaseRestore()).
	ReceiptListener myOUYAreceiptListener = new ReceiptListener();
	private List<Receipt> mReceiptList; // the list of purchased items, sorted
	// Products fetched from the OUYA server; null when the fetch failed (see productListListener).
	private ArrayList<Product> productList = new ArrayList<Product>();
	private Purchasable purchasable; // for a concrete purchase
	// The product currently being purchased; read by the handler when requestOUYApurchase fires.
	Product OUYApurchaseProduct;
	com.badlogic.gdx.pay.PurchaseListener appPurchaseListener; // this is the listener from the app that will be informed after a purchase
	//------- for Toasts (debugging) -----
	// Text and duration for the next Toast shown by the handler (showToast message).
	public String toastText;
	public int duration;
	/** Creates the manager with the default request code (1001). */
	public PurchaseManagerAndroidOUYA (Activity activity) {
		this(activity, 1001); // NOTE: requestCode here is an arbitrarily chosen number!
	}
	/**
	 * @param activity the hosting Android activity.
	 * @param requestCode request code for onActivityResult; kept for API symmetry, not used by OUYA.
	 */
	public PurchaseManagerAndroidOUYA (Activity activity, int requestCode) {
		this.activity = activity;
		this.requestCode = requestCode; // TODO: the request code for onActivityResult, not needed for OUYA!
	}
	/** Returns the store identifier, {@link PurchaseManagerConfig#STORE_NAME_ANDROID_OUYA}. */
	@Override
	public String storeName() {
		return PurchaseManagerConfig.STORE_NAME_ANDROID_OUYA;
	}
	/**
	 * Initializes the OUYA facade, builds the RSA public key from the configured application key,
	 * kicks off the asynchronous product-list fetch and notifies the observer.
	 *
	 * <p>NOTE(review): handleInstall() is called before the asynchronous product list has
	 * arrived, so purchases attempted immediately after install may not find their product yet —
	 * confirm this is acceptable for callers.
	 */
	@Override
	public void install (final PurchaseObserver observer, PurchaseManagerConfig config) {
		this.observer = observer;
		this.config = config;
		ouyaFacade = OuyaFacade.getInstance();
		ouyaFacade.init((Context)activity, config.getDeveloperID());
		// store the OUYA application key (raw DER bytes)
		applicationKey = config.getApplicationKey();
		//--- copy all available products to the list of purchasables
		productIDList = new ArrayList<Purchasable>(config.getOfferCount());
		for (int i = 0; i < config.getOfferCount(); i++) {
			productIDList.add(new Purchasable(config.getOffer(i).getIdentifier()));
		}
		// Create a PublicKey object from the key data downloaded from the developer portal.
		try {
			// Read in the key.der file (downloaded from the developer portal)
			// TODO: needed: path to key??! -> inputstream
			// InputStream inputStream = getResources().openRawResource(R.raw.key);
			// byte[] applicationKey = new byte[inputStream.available()];
			// inputStream.read(applicationKey);
			// inputStream.close();
			X509EncodedKeySpec keySpec = new X509EncodedKeySpec(applicationKey); // by now we just take the ready-to-go applicationkey
			KeyFactory keyFactory = KeyFactory.getInstance("RSA");
			ouyaPublicKey = keyFactory.generatePublic(keySpec);
			showMessage(LOGTYPELOG, "succesfully created publicKey");
			//---- request the productlist (asynchronous, via the handler) ---------
			requestProductList();
			// notify of successful initialization
			observer.handleInstall();
		} catch (Exception e) {
			// notify about the problem
			showMessage(LOGTYPEERROR, "Problem setting up in-app billing: Unable to create encryption key");
			observer.handleInstallError(new GdxRuntimeException("Problem setting up in-app billing: Unable to create encryption key: " + e));
		}
	}
	//----- Handler --------------------
	// Marshals OUYA facade calls and Toasts onto the thread this manager was created on.
	// NOTE(review): a non-static Handler inner class holds a reference to the enclosing manager;
	// confirm the manager's lifetime matches the activity's to avoid leaks.
	Handler handler = new HandlerExtension();
	// Message "what" codes understood by HandlerExtension:
	final static int showToast = 0;
	final static int requestOUYAproducts = 1;
	final static int requestOUYApurchase = 2;
	final static int requestPurchaseRestore = 3;
	private final class HandlerExtension extends Handler {
		@Override
		public void handleMessage(Message msg) {
			switch(msg.what) {
			case requestOUYAproducts:
				// fetch the product catalog; result arrives at productListListener
				ouyaFacade.requestProductList(productIDList, productListListener);
				break;
			case requestOUYApurchase:
				// start the purchase prepared by requestPurchase(Product)
				ouyaFacade.requestPurchase(purchasable, new PurchaseListener(OUYApurchaseProduct));
				break;
			case requestPurchaseRestore:
				// fetch previous purchases; result arrives at myOUYAreceiptListener
				ouyaFacade.requestReceipts(myOUYAreceiptListener);
				break;
			case showToast:
				// toastText/duration are set by showMessage() before this message is posted
				Toast toast = Toast.makeText(activity, toastText, duration);
				toast.show();
				break;
			}
		}
	}
//------------------------------------------------
	/**
	 * Requests the receipts of the user's previous purchases from the server. The result is
	 * delivered asynchronously to {@link ReceiptListener}.
	 */
	public void requestPurchaseRestore() {
		handler.sendEmptyMessage(requestPurchaseRestore);
	}
	/**
	 * Requests the available products from the server. The result is delivered asynchronously to
	 * {@code productListListener}.
	 */
	public void requestProductList() {
		handler.sendEmptyMessage(requestOUYAproducts);
	}
/**
* make a purchase
*/
@Override
public void purchase(com.badlogic.gdx.pay.PurchaseListener listener, String identifier) {
// String payload = null;
this.appPurchaseListener = listener; // store the listener
OUYApurchaseProduct = getProduct(identifier);
if (OUYApurchaseProduct != null) {
try {
requestPurchase(OUYApurchaseProduct);
handler.sendEmptyMessage(requestOUYApurchase);
} catch (UnsupportedEncodingException e) {
appPurchaseListener.handlePurchaseError(e);
e.printStackTrace();
} catch (GeneralSecurityException e) {
appPurchaseListener.handlePurchaseError(e);
e.printStackTrace();
} catch (JSONException e) {
appPurchaseListener.handlePurchaseError(e);
e.printStackTrace();
}
}
else {
showMessage(LOGTYPEERROR, "There has been a Problem with your Internet connection. Please try again later");
appPurchaseListener.handlePurchaseError(new GdxRuntimeException("There has been a Problem with your Internet connection. Please try again later"));
}
}
//-------------------------------------------------------------
/**
* The callback for when the list of user receipts has been requested.
*/
public class ReceiptListener implements OuyaResponseListener<String> {
/**
* Handle the successful fetching of the data for the receipts from the server.
*
* @param receiptResponse The response from the server.
*/
@Override
public void onSuccess(String receiptResponse) {
OuyaEncryptionHelper helper = new OuyaEncryptionHelper();
List<Receipt> receipts = null;
try {
JSONObject response = new JSONObject(receiptResponse);
if(response.has("key") && response.has("iv")) {
receipts = helper.decryptReceiptResponse(response, ouyaPublicKey);
}
else receipts = helper.parseJSONReceiptResponse(receiptResponse);
} catch (ParseException e) {
observer.handleRestoreError(e);
throw new RuntimeException(e);
} catch (JSONException e) {
observer.handleRestoreError(e);
throw new RuntimeException(e);
} catch (GeneralSecurityException e) {
observer.handleRestoreError(e);
throw new RuntimeException(e);
} catch (IOException e) {
observer.handleRestoreError(e);
throw new RuntimeException(e);
} catch (java.text.ParseException e) {
observer.handleRestoreError(e);
e.printStackTrace();
}
Collections.sort(receipts, new Comparator<Receipt>() {
@Override
public int compare(Receipt lhs, Receipt rhs) {
return rhs.getPurchaseDate().compareTo(lhs.getPurchaseDate());
}
});
mReceiptList = receipts;
List<Transaction> transactions = new ArrayList<Transaction>( mReceiptList.size());
for (int i = 0; i < mReceiptList.size(); i++) {
transactions.add(convertToTransaction(mReceiptList.get(i)));
}
// send inventory to observer
observer.handleRestore(transactions.toArray(new Transaction[transactions.size()]));
//========= not sure if this is needed?? ---- shouldnt this be the task of the app ??
// if the observer above didn't throw an error, we consume all consumeables as needed
// for (int i = 0; i < mReceiptList.size(); i++) {
// Receipt receipt = mReceiptList.get(i);
// Offer offer = config.getOffer(receipt.getIdentifier());
// if (offer == null) {
// Gdx.app.debug(TAG, "Offer not found for: " + receipt.getIdentifier());
// }
// else if (offer.getType() == OfferType.CONSUMABLE) {
//
// Gdx.app.log("TODO", "we have to consume incoming receipts?!");
//
// // it's a consumable, so we consume right away!
// helper.consumeAsync(purchase, new IabHelper.OnConsumeFinishedListener() {
// @Override
// public void onConsumeFinished (Purchase purchase, IabResult result) {
// if (!result.isSuccess()) {
// // NOTE: we should only rarely have an exception due to e.g. network outages etc.
// Gdx.app.error(TAG, "Error while consuming: " + result);
// }
// }
// });
// }
// }
}
@Override
public void onCancel() {
// observer.handleRestoreError(new GdxRuntimeException("");
showMessage(LOGTYPELOG, "receiptlistener: user canceled");
}
@Override
public void onFailure(int arg0, String arg1, Bundle arg2) {
// observer.handleRestoreError(e);
showMessage(LOGTYPEERROR, "receiptlistener: onFailure!");
}
}
//----------------------------
	/**
	 * Receives the asynchronously requested product catalog. On failure {@code productList} is
	 * reset to null, so {@link #getProduct(String)} must cope with a null list.
	 */
	OuyaResponseListener<ArrayList<Product>> productListListener = new CancelIgnoringOuyaResponseListener<ArrayList<Product>>() {
		@Override
		public void onSuccess(ArrayList<Product> products) {
			productList = products;
			showMessage(LOGTYPELOG, "successfully loaded productlist. " + productList.size() + " products found");
		}
		@Override
		public void onFailure(int errorCode, String errorMessage, Bundle errorBundle) {
			// leaves the catalog unavailable; getProduct() will then return null
			productList = null;
			showMessage(LOGTYPEERROR, "failed to load productlist!");
		}
	};
//---------------------------
/** search for a specific product by identifier */
public Product getProduct(String identifier) {
Product returnProduct = null;
for (int i = 0; i < productList.size(); i++) {
if (productList.get(i).getIdentifier().equals(identifier)) {
returnProduct = productList.get(i);
break;
}
}
return returnProduct;
}
//------------------------------------------------
	/**
	 * Prepares an encrypted {@link Purchasable} for the given product and stores the request in
	 * {@code ouyaOutstandingPurchaseRequests} so the response can be matched later.
	 *
	 * <p>The purchase request JSON is AES/CBC-encrypted with a random key; that key is then
	 * RSA-wrapped with the OUYA public key. The result is stored in {@code purchasable} for the
	 * handler to pass to {@code OuyaFacade.requestPurchase}.
	 *
	 * <p>NOTE(review): the explicit "BC" (BouncyCastle) provider is requested; confirm it is
	 * available on all targeted Android versions.
	 *
	 * @throws GeneralSecurityException if any cryptographic step fails.
	 * @throws UnsupportedEncodingException if UTF-8 is unavailable (never on Android).
	 * @throws JSONException if the request JSON cannot be built.
	 */
	public void requestPurchase(final Product product)
		throws GeneralSecurityException, UnsupportedEncodingException, JSONException {
		SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
		// This is an ID that allows you to associate a successful purchase with
		// it's original request. The server does nothing with this string except
		// pass it back to you, so it only needs to be unique within this instance
		// of your app to allow you to pair responses with requests.
		String uniqueId = Long.toHexString(sr.nextLong());
		JSONObject purchaseRequest = new JSONObject();
		purchaseRequest.put("uuid", uniqueId);
		purchaseRequest.put("identifier", product.getIdentifier());
		// purchaseRequest.put("testing", "true"); // This value is only needed for testing, not setting it results in a live purchase
		String purchaseRequestJson = purchaseRequest.toString();
		// Encrypt the request JSON with a fresh random AES key and IV.
		byte[] keyBytes = new byte[16];
		sr.nextBytes(keyBytes);
		SecretKey key = new SecretKeySpec(keyBytes, "AES");
		byte[] ivBytes = new byte[16];
		sr.nextBytes(ivBytes);
		IvParameterSpec iv = new IvParameterSpec(ivBytes);
		Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding", "BC");
		cipher.init(Cipher.ENCRYPT_MODE, key, iv);
		byte[] payload = cipher.doFinal(purchaseRequestJson.getBytes("UTF-8"));
		// Wrap the AES key with the application's RSA public key.
		cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding", "BC");
		cipher.init(Cipher.ENCRYPT_MODE, ouyaPublicKey);
		byte[] encryptedKey = cipher.doFinal(keyBytes);
		purchasable =
				new Purchasable(
						product.getIdentifier(),
						Base64.encodeToString(encryptedKey, Base64.NO_WRAP),
						Base64.encodeToString(ivBytes, Base64.NO_WRAP),
						Base64.encodeToString(payload, Base64.NO_WRAP) );
		// Remember the request so the encrypted response can be paired with it.
		synchronized (ouyaOutstandingPurchaseRequests) {
			ouyaOutstandingPurchaseRequests.put(uniqueId, product);
		}
	}
//-----------------------------------------------------------
/**
* The callback for when the user attempts to purchase something. We're not worried about
* the user cancelling the purchase so we extend CancelIgnoringOuyaResponseListener, if
* you want to handle cancelations differently you should extend OuyaResponseListener and
* implement an onCancel method.
*
* @see tv.ouya.console.api.CancelIgnoringOuyaResponseListener
* @see tv.ouya.console.api.OuyaResponseListener#onCancel()
*/
private class PurchaseListener implements OuyaResponseListener<String> {
		/**
		 * The ID of the product the user is trying to purchase. This is used in the
		 * onFailure method to start a re-purchase if the user wishes to do so.
		 */
		private Product mProduct;
		/** Creates a listener bound to the product being purchased. */
		PurchaseListener(final Product product) {
			mProduct = product;
		}
/**
* Handle a successful purchase.
*
* @param result The response from the server.
*/
@Override
public void onSuccess(String result) {
Product product = null;
Product storedProduct = null;
String id;
try {
OuyaEncryptionHelper helper = new OuyaEncryptionHelper();
JSONObject response = new JSONObject(result);
if(response.has("key") && response.has("iv")) {
id = helper.decryptPurchaseResponse(response, ouyaPublicKey);
synchronized (ouyaOutstandingPurchaseRequests) {
storedProduct = ouyaOutstandingPurchaseRequests.remove(id);
// showMessage("PurchaseListener: looks good ....");
}
if(storedProduct == null || !storedProduct.getIdentifier().equals(mProduct.getIdentifier())) {
showMessage(LOGTYPEERROR, "Purchased product is not the same as purchase request product");
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, "Purchased product is not the same as purchase request product", Bundle.EMPTY);
return;
}
} else {
product = new Product(new JSONObject(result));
if(!mProduct.getIdentifier().equals(product.getIdentifier())) {
showMessage(LOGTYPEERROR, "Purchased product is not the same as purchase request product");
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, "Purchased product is not the same as purchase request product", Bundle.EMPTY);
return;
}
}
} catch (ParseException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
} catch (JSONException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (IOException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (GeneralSecurityException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (java.text.ParseException e) {
appPurchaseListener.handlePurchaseError(e);
e.printStackTrace();
return;
}
// evrything is ok ...
// purchaseRestore(); // check for purchases ..... would work but is not intended here
if (storedProduct != null) {
// convert product to transaction
Transaction trans = convertPurchasedProductToTransaction(storedProduct);
// inform the listener
appPurchaseListener.handlePurchase(trans);
}
else {
// appPurchaseListener.handlePurchaseError(e);
showMessage(LOGTYPEERROR, "PurchaseListener: storedProduct == null!");
}
}
/**
* Handle a failure. Because displaying the receipts is not critical to the application we just show an error
* message rather than asking the user to authenticate themselves just to start the application up.
*
* @param errorCode An HTTP error code between 0 and 999, if there was one. Otherwise, an internal error code from the
* Ouya server, documented in the {@link OuyaErrorCodes} class.
*
* @param errorMessage Empty for HTTP error codes. Otherwise, a brief, non-localized, explanation of the error.
*
* @param optionalData A Map of optional key/value pairs which provide additional information.
*/
@Override
public void onFailure(int arg0, String e, Bundle arg2) {
showMessage(LOGTYPEERROR, "PurchaseListener: onFailure :(");
}
@Override
public void onCancel() {
showMessage(LOGTYPELOG, "PurchaseListener: onCancel ...");
}
}
//---------------------------------------------
/** Requests previously purchased items; results arrive asynchronously via {@link ReceiptListener}. */
@Override
public void purchaseRestore () {
	// posted to the handler so the facade call runs on the handler's thread
	handler.sendEmptyMessage(requestPurchaseRestore);
}
//--------------------------------------------
/**
 * Builds a {@link Transaction} for a product that has just been purchased;
 * the purchase time is set to "now" because a fresh purchase has no receipt date yet.
 */
Transaction convertPurchasedProductToTransaction (Product product) {
	final Transaction tx = new Transaction();
	tx.setIdentifier(product.getIdentifier());
	tx.setStoreName(storeName());
	tx.setPurchaseTime(new Date());
	// TODO: GdxPay: parse cost/currency and reversal (cancel/refund) info once available.
	showMessage(LOGTYPELOG, "converted purchased product to transaction.");
	return tx;
}
/**
 * Builds a {@link Transaction} from a {@link Receipt} returned by the OUYA server,
 * carrying over the receipt's identifier and purchase date.
 */
Transaction convertToTransaction (Receipt receipt) {
	final Transaction tx = new Transaction();
	tx.setIdentifier(receipt.getIdentifier());
	tx.setStoreName(storeName());
	tx.setPurchaseTime(receipt.getPurchaseDate());
	// TODO: GdxPay: parse cost/currency and reversal (cancel/refund) info once available.
	showMessage(LOGTYPELOG, "converted receipt to transaction.");
	return tx;
}
/**
 * No-op for OUYA: purchase results are delivered through the OuyaFacade listeners,
 * not via activity results. Kept so callers can forward results uniformly across stores.
 */
public void onActivityResult (int requestCode, int resultCode, Intent data) {
	// intentionally empty - OUYA does not use onActivityResult for purchases
}
/** @return a short human-readable name of this purchase manager ("OUYA"). */
@Override
public String toString () {
	return "OUYA";
}
/**
 * Logs the given message and/or shows it as a toast, depending on the compile-time
 * LOGDEBUG / SHOWTOASTS switches. Error messages are routed to the error log and
 * prefixed for the toast.
 */
private void showMessage(final int type, final String message) {
	final boolean isError = (type == LOGTYPEERROR);
	if (LOGDEBUG) {
		if (isError) {
			Gdx.app.error(TAG, message);
		} else if (type == LOGTYPELOG) {
			Gdx.app.log(TAG, message);
		}
	}
	if (SHOWTOASTS) {
		if (isError) {
			showToast("error: " + message);
		} else if (type == LOGTYPELOG) {
			showToast(message);
		}
	}
}
/**
 * Stores the toast text/duration and posts a message so the toast is shown on the
 * handler's (UI) thread.
 * NOTE: {@code showToast} in sendEmptyMessage refers to the static message-id constant,
 * not to this method.
 */
public void showToast(String toastText) {
	this.duration = Toast.LENGTH_SHORT;
	this.toastText = toastText;
	handler.sendEmptyMessage(showToast);
}
/** @return true while the OUYA facade is initialized, i.e. between install() and dispose(). */
@Override
public boolean installed () {
	return ouyaFacade != null;
}
/** Shuts down the OUYA facade and releases observer/config references. Safe to call repeatedly. */
@Override
public void dispose () {
	if (ouyaFacade != null) {
		ouyaFacade.shutdown();
		ouyaFacade = null;
		// drop observer and config so they can be garbage collected
		observer = null;
		config = null;
		showMessage(LOGTYPELOG, "disposed all the OUYA IAP stuff.");
	}
}
}
| extensions/gdx-pay/gdx-pay-android-ouya/src/com/badlogic/gdx/pay/android/ouya/PurchaseManagerAndroidOUYA.java | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.pay.android.ouya;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.X509EncodedKeySpec;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.json.JSONException;
import org.json.JSONObject;
import tv.ouya.console.api.CancelIgnoringOuyaResponseListener;
import tv.ouya.console.api.OuyaEncryptionHelper;
import tv.ouya.console.api.OuyaErrorCodes;
import tv.ouya.console.api.OuyaFacade;
import tv.ouya.console.api.OuyaResponseListener;
import tv.ouya.console.api.Product;
import tv.ouya.console.api.Purchasable;
import tv.ouya.console.api.Receipt;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ParseException;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Base64;
import android.widget.Toast;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.pay.PurchaseManager;
import com.badlogic.gdx.pay.PurchaseManagerConfig;
import com.badlogic.gdx.pay.PurchaseObserver;
import com.badlogic.gdx.pay.Transaction;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.GdxRuntimeException;
/** The purchase manager implementation for OUYA
* <p>
* To integrate on OUYA/Android do the following:
* <ul>
* <li>AndroidManifest.xml: add the required permissions (see <a href="https://github.com/onepf/OpenIAB">OpenIAB on GitHub</a>).
* <li>proguard.cfg: add the required proguard settings (see <a href="https://github.com/onepf/OpenIAB">OpenIAB on GitHub</a>).
* <li>AndroidApplication/member variables: add "PurchaseManagerAndroidOpenIAB manager;"
* <li>AndroidApplication.onCreate(...): add "manager = new PurchaseManagerOUYA(this);"
* <li>AndroidApplication.onDispose(): add "manager.dispose(); manager = null;"
* <li>optional: AndroidApplication.onActivityResult(...): add "manager.onActivityResult(requestCode, resultCode, data);"
* <ul>
*
* @author just4phil */
public class PurchaseManagerAndroidOUYA implements PurchaseManager, Disposable {
/** Debug tag for logging. */
private static final String TAG = "GdxPay/OUYA";
private static final boolean LOGDEBUG = true;
private static final boolean SHOWTOASTS = true;
private static final int LOGTYPELOG = 0;
private static final int LOGTYPEERROR = 1;
/** Our Android activity. */
private Activity activity;
/** The request code to use for onActivityResult (arbitrary chosen). */
private int requestCode;
/** The registered observer. */
PurchaseObserver observer;
/** The configuration. */
PurchaseManagerConfig config;
/** the ouya helper */
private OuyaFacade ouyaFacade;
/** The OUYA cryptographic key for the application */
public PublicKey ouyaPublicKey;
private byte[] applicationKey;
public List<Purchasable> productIDList; //--- This is the set of OUYA product IDs which our app knows about
private final Map<String, Product> ouyaOutstandingPurchaseRequests = new HashMap<String, Product>();
ReceiptListener myOUYAreceiptListener = new ReceiptListener();
private List<Receipt> mReceiptList; // the list of purchased items, sorted
private ArrayList<Product> productList = new ArrayList<Product>();
private Purchasable purchasable; // for a concrete purchase
Product OUYApurchaseProduct;
com.badlogic.gdx.pay.PurchaseListener appPurchaseListener; // this is the listener from the app that will be informed after a purchase
//------- for Toasts (debugging) -----
public String toastText;
public int duration;
//--------------------------------------------------
/** Creates the manager with a default (arbitrarily chosen) request code of 1001. */
public PurchaseManagerAndroidOUYA (Activity activity) {
	this(activity, 1001); // NOTE: requestCode here is an arbitrarily chosen number!
}
/**
 * Creates the manager.
 *
 * @param activity    the Android activity hosting the purchases
 * @param requestCode kept for API symmetry with other stores; OUYA does not use onActivityResult
 */
public PurchaseManagerAndroidOUYA (Activity activity, int requestCode) {
	this.activity = activity;
	this.requestCode = requestCode; // TODO: the request code for onActivityResult, not needed for OUYA!
}
/** @return the canonical store name constant for OUYA. */
@Override
public String storeName() {
	return PurchaseManagerConfig.STORE_NAME_OUYA;
}
/**
 * Initializes the OUYA facade, builds the list of purchasable product IDs from the
 * configuration and creates the RSA public key used to encrypt purchase requests.
 * Notifies the observer via handleInstall() on success or handleInstallError() on failure.
 */
@Override
public void install (final PurchaseObserver observer, PurchaseManagerConfig config) {
	this.observer = observer;
	this.config = config;

	ouyaFacade = OuyaFacade.getInstance();
	ouyaFacade.init((Context)activity, config.getDeveloperID());

	// store the OUYA application key (X.509-encoded, downloaded from the developer portal)
	applicationKey = config.getApplicationKey();

	//--- copy all available products to the list of purchasables
	productIDList = new ArrayList<Purchasable>(config.getOfferCount());
	for (int i = 0; i < config.getOfferCount(); i++) {
		productIDList.add(new Purchasable(config.getOffer(i).getIdentifier()));
	}

	// Create a PublicKey object from the key data downloaded from the developer portal.
	// TODO: optionally support reading the key from a key.der resource instead of the config.
	try {
		X509EncodedKeySpec keySpec = new X509EncodedKeySpec(applicationKey); // by now we just take the ready-to-go applicationkey
		KeyFactory keyFactory = KeyFactory.getInstance("RSA");
		ouyaPublicKey = keyFactory.generatePublic(keySpec);
		showMessage(LOGTYPELOG, "succesfully created publicKey");

		//---- request the productlist ---------
		requestProductList();

		// notify of successful initialization
		observer.handleInstall();
	} catch (Exception e) {
		// notify about the problem
		showMessage(LOGTYPEERROR, "Problem setting up in-app billing: Unable to create encryption key");
		observer.handleInstallError(new GdxRuntimeException("Problem setting up in-app billing: Unable to create encryption key: " + e));
	}
}
//----- Handler --------------------
Handler handler = new HandlerExtension();
final static int showToast = 0;
final static int requestOUYAproducts = 1;
final static int requestOUYApurchase = 2;
final static int requestPurchaseRestore = 3;
/** Handler whose messages trigger the OUYA facade calls on the handler's thread. */
private final class HandlerExtension extends Handler {
	@Override
	public void handleMessage(Message msg) {
		switch(msg.what) {
		case requestOUYAproducts:
			// fetch the product catalog for the configured identifiers
			ouyaFacade.requestProductList(productIDList, productListListener);
			break;
		case requestOUYApurchase:
			// start the purchase prepared by requestPurchase(Product)
			ouyaFacade.requestPurchase(purchasable, new PurchaseListener(OUYApurchaseProduct));
			break;
		case requestPurchaseRestore:
			// fetch the user's receipts
			ouyaFacade.requestReceipts(myOUYAreceiptListener);
			break;
		case showToast:
			Toast toast = Toast.makeText(activity, toastText, duration);
			toast.show();
			break;
		}
	}
}
//------------------------------------------------
/**
 * Request the receipts from the user's previous purchases from the server.
 * Results are delivered asynchronously to {@link ReceiptListener}.
 */
public void requestPurchaseRestore() {
	handler.sendEmptyMessage(requestPurchaseRestore);
}
/**
 * Request the available products from the server; the result is cached in
 * {@code productList} by {@code productListListener}.
 */
public void requestProductList() {
	handler.sendEmptyMessage(requestOUYAproducts);
}
/**
 * Starts a purchase for the product with the given identifier.
 * <p>
 * The product must be present in the previously loaded product list; the purchase request
 * is prepared synchronously (see {@code requestPurchase(Product)}) and then dispatched to
 * the handler thread.
 *
 * @param listener   callback informed about success or failure of the purchase
 * @param identifier the identifier of the product to buy
 */
@Override
public void purchase(com.badlogic.gdx.pay.PurchaseListener listener, String identifier) {
	this.appPurchaseListener = listener; // store the listener; PurchaseListener reports back to it
	OUYApurchaseProduct = getProduct(identifier);

	if (OUYApurchaseProduct != null) {
		try {
			// encrypt/register the purchase request, then trigger it on the handler thread
			requestPurchase(OUYApurchaseProduct);
			handler.sendEmptyMessage(requestOUYApurchase);
		} catch (UnsupportedEncodingException e) {
			appPurchaseListener.handlePurchaseError(e);
			e.printStackTrace();
		} catch (GeneralSecurityException e) {
			appPurchaseListener.handlePurchaseError(e);
			e.printStackTrace();
		} catch (JSONException e) {
			appPurchaseListener.handlePurchaseError(e);
			e.printStackTrace();
		}
	}
	else {
		// product not found: the product list was not (successfully) loaded
		showMessage(LOGTYPEERROR, "There has been a Problem with your Internet connection. Please try again later");
		appPurchaseListener.handlePurchaseError(new GdxRuntimeException("There has been a Problem with your Internet connection. Please try again later"));
	}
}
//-------------------------------------------------------------
/**
 * The callback for when the list of user receipts has been requested.
 * <p>
 * On success the (possibly encrypted) receipt response is parsed, sorted by purchase date
 * (newest first), converted to {@link Transaction}s and handed to the observer.
 */
public class ReceiptListener implements OuyaResponseListener<String> {

	/**
	 * Handle the successful fetching of the data for the receipts from the server.
	 *
	 * @param receiptResponse The raw JSON response from the server.
	 */
	@Override
	public void onSuccess(String receiptResponse) {
		OuyaEncryptionHelper helper = new OuyaEncryptionHelper();
		List<Receipt> receipts = null;
		try {
			JSONObject response = new JSONObject(receiptResponse);
			if (response.has("key") && response.has("iv")) {
				// encrypted response (real device/server)
				receipts = helper.decryptReceiptResponse(response, ouyaPublicKey);
			} else {
				// plain JSON response
				receipts = helper.parseJSONReceiptResponse(receiptResponse);
			}
		} catch (ParseException e) {
			observer.handleRestoreError(e);
			throw new RuntimeException(e);
		} catch (JSONException e) {
			observer.handleRestoreError(e);
			throw new RuntimeException(e);
		} catch (GeneralSecurityException e) {
			observer.handleRestoreError(e);
			throw new RuntimeException(e);
		} catch (IOException e) {
			observer.handleRestoreError(e);
			throw new RuntimeException(e);
		} catch (java.text.ParseException e) {
			observer.handleRestoreError(e);
			e.printStackTrace();
			// BUGFIX: previously fell through with receipts == null, causing an NPE
			// at Collections.sort() below
			return;
		}

		// newest purchases first
		Collections.sort(receipts, new Comparator<Receipt>() {
			@Override
			public int compare(Receipt lhs, Receipt rhs) {
				return rhs.getPurchaseDate().compareTo(lhs.getPurchaseDate());
			}
		});
		mReceiptList = receipts;

		List<Transaction> transactions = new ArrayList<Transaction>( mReceiptList.size());
		for (int i = 0; i < mReceiptList.size(); i++) {
			transactions.add(convertToTransaction(mReceiptList.get(i)));
		}

		// send inventory to observer
		observer.handleRestore(transactions.toArray(new Transaction[transactions.size()]));

		// TODO: decide whether consumable receipts should be consumed here or left to the app.
	}

	@Override
	public void onCancel() {
		showMessage(LOGTYPELOG, "receiptlistener: user canceled");
	}

	@Override
	public void onFailure(int arg0, String arg1, Bundle arg2) {
		// NOTE(review): consider forwarding the failure to observer.handleRestoreError()
		showMessage(LOGTYPEERROR, "receiptlistener: onFailure!");
	}
}
//----------------------------
/** Listener for the asynchronous product-list request; caches the result in {@code productList}. */
OuyaResponseListener<ArrayList<Product>> productListListener = new CancelIgnoringOuyaResponseListener<ArrayList<Product>>() {
	@Override
	public void onSuccess(ArrayList<Product> products) {
		productList = products;
		showMessage(LOGTYPELOG, "successfully loaded productlist. " + productList.size() + " products found");
	}

	@Override
	public void onFailure(int errorCode, String errorMessage, Bundle errorBundle) {
		// NOTE(review): productList becomes null here, so getProduct() would NPE afterwards -
		// callers rely on a prior successful load; verify this is intended.
		productList = null;
		showMessage(LOGTYPEERROR, "failed to load productlist!");
	}
};
//---------------------------
/**
 * Looks up a product in the cached product list by its identifier.
 *
 * @return the matching product, or null if no product with that identifier is cached
 */
public Product getProduct(String identifier) {
	for (final Product candidate : productList) {
		if (candidate.getIdentifier().equals(identifier)) {
			return candidate;
		}
	}
	return null;
}
//------------------------------------------------
/**
 * Prepares an encrypted {@link Purchasable} for the given product and records it as an
 * outstanding request so the response can later be matched in {@code PurchaseListener}.
 * <p>
 * Scheme (as implemented below): the JSON request ({@code uuid} + product identifier) is
 * encrypted with a freshly generated AES-128 key (CBC/PKCS5), and that AES key is in turn
 * RSA-wrapped with {@code ouyaPublicKey}. Key, IV and payload are Base64-encoded into the
 * Purchasable.
 *
 * @param product the product to purchase
 */
public void requestPurchase(final Product product)
	throws GeneralSecurityException, UnsupportedEncodingException, JSONException {
	SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");

	// This is an ID that allows you to associate a successful purchase with
	// it's original request. The server does nothing with this string except
	// pass it back to you, so it only needs to be unique within this instance
	// of your app to allow you to pair responses with requests.
	String uniqueId = Long.toHexString(sr.nextLong());

	JSONObject purchaseRequest = new JSONObject();
	purchaseRequest.put("uuid", uniqueId);
	purchaseRequest.put("identifier", product.getIdentifier());
	// purchaseRequest.put("testing", "true"); // This value is only needed for testing, not setting it results in a live purchase
	String purchaseRequestJson = purchaseRequest.toString();

	// random AES-128 key and IV for the payload encryption
	byte[] keyBytes = new byte[16];
	sr.nextBytes(keyBytes);
	SecretKey key = new SecretKeySpec(keyBytes, "AES");

	byte[] ivBytes = new byte[16];
	sr.nextBytes(ivBytes);
	IvParameterSpec iv = new IvParameterSpec(ivBytes);

	// encrypt the JSON request with the AES key
	Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding", "BC");
	cipher.init(Cipher.ENCRYPT_MODE, key, iv);
	byte[] payload = cipher.doFinal(purchaseRequestJson.getBytes("UTF-8"));

	// wrap the AES key with the application's RSA public key
	cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding", "BC");
	cipher.init(Cipher.ENCRYPT_MODE, ouyaPublicKey);
	byte[] encryptedKey = cipher.doFinal(keyBytes);

	purchasable =
		new Purchasable(
			product.getIdentifier(),
			Base64.encodeToString(encryptedKey, Base64.NO_WRAP),
			Base64.encodeToString(ivBytes, Base64.NO_WRAP),
			Base64.encodeToString(payload, Base64.NO_WRAP) );

	// remember the request so the asynchronous response can be matched by uniqueId
	synchronized (ouyaOutstandingPurchaseRequests) {
		ouyaOutstandingPurchaseRequests.put(uniqueId, product);
	}
}
//-----------------------------------------------------------
/**
* The callback for when the user attempts to purchase something. We're not worried about
* the user cancelling the purchase so we extend CancelIgnoringOuyaResponseListener, if
* you want to handle cancelations differently you should extend OuyaResponseListener and
* implement an onCancel method.
*
* @see tv.ouya.console.api.CancelIgnoringOuyaResponseListener
* @see tv.ouya.console.api.OuyaResponseListener#onCancel()
*/
private class PurchaseListener implements OuyaResponseListener<String> {
/**
* The ID of the product the user is trying to purchase. This is used in the
* onFailure method to start a re-purchase if the user wishes to do so.
*/
private Product mProduct;
PurchaseListener(final Product product) {
mProduct = product;
}
/**
* Handle a successful purchase.
*
* @param result The response from the server.
*/
@Override
public void onSuccess(String result) {
Product product = null;
Product storedProduct = null;
String id;
try {
OuyaEncryptionHelper helper = new OuyaEncryptionHelper();
JSONObject response = new JSONObject(result);
if(response.has("key") && response.has("iv")) {
id = helper.decryptPurchaseResponse(response, ouyaPublicKey);
synchronized (ouyaOutstandingPurchaseRequests) {
storedProduct = ouyaOutstandingPurchaseRequests.remove(id);
// showMessage("PurchaseListener: looks good ....");
}
if(storedProduct == null || !storedProduct.getIdentifier().equals(mProduct.getIdentifier())) {
showMessage(LOGTYPEERROR, "Purchased product is not the same as purchase request product");
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, "Purchased product is not the same as purchase request product", Bundle.EMPTY);
return;
}
} else {
product = new Product(new JSONObject(result));
if(!mProduct.getIdentifier().equals(product.getIdentifier())) {
showMessage(LOGTYPEERROR, "Purchased product is not the same as purchase request product");
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, "Purchased product is not the same as purchase request product", Bundle.EMPTY);
return;
}
}
} catch (ParseException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
} catch (JSONException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (IOException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (GeneralSecurityException e) {
appPurchaseListener.handlePurchaseError(e);
onFailure(OuyaErrorCodes.THROW_DURING_ON_SUCCESS, e.getMessage(), Bundle.EMPTY);
return;
} catch (java.text.ParseException e) {
appPurchaseListener.handlePurchaseError(e);
e.printStackTrace();
return;
}
// evrything is ok ...
// purchaseRestore(); // check for purchases ..... would work but is not intended here
if (storedProduct != null) {
// convert product to transaction
Transaction trans = convertPurchasedProductToTransaction(storedProduct);
// inform the listener
appPurchaseListener.handlePurchase(trans);
}
else {
// appPurchaseListener.handlePurchaseError(e);
showMessage(LOGTYPEERROR, "PurchaseListener: storedProduct == null!");
}
}
/**
* Handle a failure. Because displaying the receipts is not critical to the application we just show an error
* message rather than asking the user to authenticate themselves just to start the application up.
*
* @param errorCode An HTTP error code between 0 and 999, if there was one. Otherwise, an internal error code from the
* Ouya server, documented in the {@link OuyaErrorCodes} class.
*
* @param errorMessage Empty for HTTP error codes. Otherwise, a brief, non-localized, explanation of the error.
*
* @param optionalData A Map of optional key/value pairs which provide additional information.
*/
@Override
public void onFailure(int arg0, String e, Bundle arg2) {
showMessage(LOGTYPEERROR, "PurchaseListener: onFailure :(");
}
@Override
public void onCancel() {
showMessage(LOGTYPELOG, "PurchaseListener: onCancel ...");
}
}
//---------------------------------------------
@Override
public void purchaseRestore () {
handler.sendEmptyMessage(requestPurchaseRestore);
}
//--------------------------------------------
/** Converts a product to our transaction object. */
Transaction convertPurchasedProductToTransaction (Product product) {
// build the transaction from the purchase object
Transaction transaction = new Transaction();
transaction.setIdentifier(product.getIdentifier());
transaction.setStoreName(storeName());
//transaction.setOrderId(receipt.getOrderId());
transaction.setPurchaseTime(new Date());
// transaction.setPurchaseText(skuDetails != null ? "Purchased: " + skuDetails.getTitle() : "Purchased");
//GeneratedDate - when the receipt was created
//Gamer - the gamer that purchased the product
//UUID - the identifier of the gamer that purchased the product
//transaction.setPurchaseCost(-1); // TODO: GdxPay: impl. parsing of COST + CURRENCY via skuDetails.getPrice()!
//transaction.setPurchaseCostCurrency(null);
//if (purchase.getPurchaseState() != 0) {
// order has been refunded or cancelled
//transaction.setReversalTime(new Date());
//transaction.setReversalText(purchase.getPurchaseState() == 1 ? "Cancelled" : "Refunded");
//} else {
// still valid!
//transaction.setReversalTime(null);
//transaction.setReversalText(null);
//}
//transaction.setTransactionData(purchase.getOriginalJson());
//transaction.setTransactionDataSignature(purchase.getSignature());
showMessage(LOGTYPELOG, "converted purchased product to transaction.");
return transaction;
}
/** Converts a purchase to our transaction object. */
Transaction convertToTransaction (Receipt receipt) {
// build the transaction from the purchase object
Transaction transaction = new Transaction();
transaction.setIdentifier(receipt.getIdentifier());
transaction.setStoreName(storeName());
//transaction.setOrderId(receipt.getOrderId());
transaction.setPurchaseTime(receipt.getPurchaseDate());
// transaction.setPurchaseText(skuDetails != null ? "Purchased: " + skuDetails.getTitle() : "Purchased");
//GeneratedDate - when the receipt was created
//Gamer - the gamer that purchased the product
//UUID - the identifier of the gamer that purchased the product
//transaction.setPurchaseCost(-1); // TODO: GdxPay: impl. parsing of COST + CURRENCY via skuDetails.getPrice()!
//transaction.setPurchaseCostCurrency(null);
//if (purchase.getPurchaseState() != 0) {
// order has been refunded or cancelled
//transaction.setReversalTime(new Date());
//transaction.setReversalText(purchase.getPurchaseState() == 1 ? "Cancelled" : "Refunded");
//} else {
// still valid!
//transaction.setReversalTime(null);
//transaction.setReversalText(null);
//}
//transaction.setTransactionData(purchase.getOriginalJson());
//transaction.setTransactionDataSignature(purchase.getSignature());
showMessage(LOGTYPELOG, "converted receipt to transaction.");
return transaction;
}
public void onActivityResult (int requestCode, int resultCode, Intent data) {
// forwards activities to OpenIAB for processing
// this is only relevant for android
}
@Override
public String toString () {
return "OUYA";
}
private void showMessage(final int type, final String message) {
if (LOGDEBUG) {
if (type == LOGTYPELOG) Gdx.app.log(TAG, message);
if (type == LOGTYPEERROR) Gdx.app.error(TAG, message);
}
if (SHOWTOASTS) {
if (type == LOGTYPELOG) showToast(message);
if (type == LOGTYPEERROR) showToast("error: " + message);
}
}
//---- saves the toast text and displays it
public void showToast(String toastText) {
this.duration = Toast.LENGTH_SHORT;
this.toastText = toastText;
handler.sendEmptyMessage(showToast);
}
@Override
public boolean installed () {
return ouyaFacade != null;
}
@Override
public void dispose () {
if (ouyaFacade != null) {
ouyaFacade.shutdown();
ouyaFacade = null;
// remove observer and config as well
observer = null;
config = null;
showMessage(LOGTYPELOG, "disposed all the OUYA IAP stuff.");
}
}
}
| GdxPay: bugfix (refactoring wasn't quite done yet...).
| extensions/gdx-pay/gdx-pay-android-ouya/src/com/badlogic/gdx/pay/android/ouya/PurchaseManagerAndroidOUYA.java | GdxPay: bugfix (refactoring wasn't quite done yet...). |
|
Java | apache-2.0 | 214945a1781081d69d94c81b3a52b5737897fb32 | 0 | Teradata/kylo,Teradata/kylo,Teradata/kylo,Teradata/kylo,Teradata/kylo | package com.thinkbiganalytics.kylo.catalog.file;
import com.thinkbiganalytics.kylo.catalog.ConnectorPluginManager;
/*-
* #%L
* kylo-catalog-core
* %%
* Copyright (C) 2017 - 2018 ThinkBig Analytics, a Teradata Company
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.kylo.catalog.connector.ConnectorUtil;
import com.thinkbiganalytics.kylo.catalog.datasource.DataSourceUtil;
import com.thinkbiganalytics.kylo.catalog.rest.model.Connector;
import com.thinkbiganalytics.kylo.catalog.rest.model.ConnectorPluginDescriptor;
import com.thinkbiganalytics.kylo.catalog.rest.model.DataSet;
import com.thinkbiganalytics.kylo.catalog.rest.model.DataSource;
import com.thinkbiganalytics.kylo.catalog.spi.ConnectorPlugin;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
/**
* Validates data set paths.
*/
@Component
public class PathValidator {
private static final Pattern FILENAME_REGEX = Pattern.compile("^([^\\./:]{1,2}|[^/:]{3,})$");
@Inject
private ConnectorPluginManager pluginManager;
/**
* Expected connector tab srefs for file systems
*/
@Nonnull
private List<String> fileSystemSrefs = Collections.emptyList();
/**
* Expected connector tab srefs for uploading files
*/
@Nonnull
private List<String> uploadSrefs = Collections.emptyList();
/**
 * Sets the connector tab srefs used to identify file-system connectors.
 * Falls back to a single ".browse" entry when the property is not configured.
 */
@Value("${catalog.collection.filesystem.sref:#{null}}")
public void setFileSystemSrefs(@Nullable final List<String> sref) {
    if (sref != null) {
        fileSystemSrefs = sref;
    } else {
        fileSystemSrefs = Collections.singletonList(".browse");
    }
}
/**
* Sets the connector tab srefs for uploading files.
*/
@Value("${catalog.collection.upload.sref:#{null}}")
public void setUploadSrefs(@Nullable final List<String> sref) {
uploadSrefs = (sref != null) ? sref : Collections.singletonList(".upload");
}
/**
* Determines if the specified path is allowed for the specified data set.
*/
public boolean isPathAllowed(@Nonnull final Path path, @Nonnull final DataSet dataSet) {
return isPathAllowed(path, dataSet.getId(), dataSet.getDataSource());
}
/**
* Determines if the specified path is allowed for the specified data source.
*/
public boolean isPathAllowed(@Nonnull final Path path, @Nonnull final DataSource dataSource) {
return isPathAllowed(path, null, dataSource);
}
/**
* Indicates if the specified file name is valid.
*/
public boolean isValidFileName(@Nonnull final String fileName) {
return FILENAME_REGEX.matcher(fileName).matches() && fileName.chars().noneMatch(Character::isIdentifierIgnorable);
}
/**
* Determines if the specified path is allowed for the specified data set and data source.
*/
private boolean isPathAllowed(@Nonnull final Path path, @Nullable final String dataSetId, @Nonnull final DataSource dataSource) {
final Optional<List<String>> dataSourcePaths = DataSourceUtil.getPaths(dataSource);
if (dataSourcePaths.isPresent()) {
final Stream<String> allowedPaths = dataSourcePaths.get().stream();
final String pluginId = dataSource.getConnector().getPluginId();
final Optional<ConnectorPlugin> plugin = this.pluginManager.getPlugin(pluginId);
if (plugin.isPresent()) {
if (ConnectorUtil.hasAnyTabSref(plugin.get().getDescriptor(), fileSystemSrefs)) {
return isPathAllowed(path.toUri(), toURIs(allowedPaths));
}
if (dataSetId != null && ConnectorUtil.hasAnyTabSref(plugin.get().getDescriptor(), uploadSrefs)) {
final Stream<String> uploadPaths = allowedPaths
.map(allowedPath -> allowedPath.endsWith(Path.SEPARATOR) ? allowedPath : allowedPath + Path.SEPARATOR)
.map(allowedPath -> allowedPath + dataSetId + Path.SEPARATOR);
return isPathAllowed(path.toUri(), toURIs(uploadPaths));
}
} else {
return false;
}
}
return true;
}
/**
* Determines if the specified path matches one of the allowed URIs.
*/
private boolean isPathAllowed(@Nonnull final URI path, @Nonnull final List<URI> allowedUris) {
final String scheme = (path.getScheme() != null) ? path.getScheme() : "file";
final String normalPath = path.normalize().getPath();
for (final URI allowedUri : allowedUris) {
final String allowedPath = allowedUri.getPath();
final String allowedScheme = allowedUri.getScheme() != null ? allowedUri.getScheme() : "file";
if (scheme.equals(allowedScheme) && (allowedPath == null || allowedPath.equals(normalPath) || normalPath.startsWith(allowedPath))) {
return true;
}
}
return false;
}
/**
* Converts the specified paths to URIs.
*/
@Nonnull
private List<URI> toURIs(@Nonnull final Stream<String> paths) {
return paths.map(Path::new).map(Path::toUri).collect(Collectors.toList());
}
}
| services/catalog-service/catalog-core/src/main/java/com/thinkbiganalytics/kylo/catalog/file/PathValidator.java | package com.thinkbiganalytics.kylo.catalog.file;
import com.thinkbiganalytics.kylo.catalog.ConnectorPluginManager;
/*-
* #%L
* kylo-catalog-core
* %%
* Copyright (C) 2017 - 2018 ThinkBig Analytics, a Teradata Company
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.kylo.catalog.connector.ConnectorUtil;
import com.thinkbiganalytics.kylo.catalog.datasource.DataSourceUtil;
import com.thinkbiganalytics.kylo.catalog.rest.model.Connector;
import com.thinkbiganalytics.kylo.catalog.rest.model.ConnectorPluginDescriptor;
import com.thinkbiganalytics.kylo.catalog.rest.model.DataSet;
import com.thinkbiganalytics.kylo.catalog.rest.model.DataSource;
import com.thinkbiganalytics.kylo.catalog.spi.ConnectorPlugin;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
/**
* Validates data set paths.
*/
@Component
public class PathValidator {
private static final Pattern FILENAME_REGEX = Pattern.compile("^([^\\./:]{1,2}|[^/:]{3,})$");
@Inject
private ConnectorPluginManager pluginManager;
/**
* Expected connector tab srefs for file systems
*/
@Nonnull
private List<String> fileSystemSrefs = Collections.emptyList();
/**
* Expected connector tab srefs for uploading files
*/
@Nonnull
private List<String> uploadSrefs = Collections.emptyList();
/**
* Sets the connector tab srefs for file systems.
*/
@Value("${catalog.collection.filesystem.sref:#{null}}")
public void setFileSystemSrefs(@Nullable final List<String> sref) {
fileSystemSrefs = (sref != null) ? sref : Collections.singletonList(".browse");
}
/**
* Sets the connector tab srefs for uploading files.
*/
@Value("${catalog.collection.upload.sref:#{null}}")
public void setUploadSrefs(@Nullable final List<String> sref) {
uploadSrefs = (sref != null) ? sref : Collections.singletonList(".upload");
}
/**
* Determines if the specified path is allowed for the specified data set.
*/
public boolean isPathAllowed(@Nonnull final Path path, @Nonnull final DataSet dataSet) {
return isPathAllowed(path, dataSet.getId(), dataSet.getDataSource());
}
/**
* Determines if the specified path is allowed for the specified data source.
*/
public boolean isPathAllowed(@Nonnull final Path path, @Nonnull final DataSource dataSource) {
return isPathAllowed(path, null, dataSource);
}
/**
* Indicates if the specified file name is valid.
*/
public boolean isValidFileName(@Nonnull final String fileName) {
return FILENAME_REGEX.matcher(fileName).matches() && fileName.chars().noneMatch(Character::isIdentifierIgnorable);
}
/**
* Determines if the specified path is allowed for the specified data set and data source.
*/
private boolean isPathAllowed(@Nonnull final Path path, @Nullable final String dataSetId, @Nonnull final DataSource dataSource) {
final Optional<List<String>> dataSourcePaths = DataSourceUtil.getPaths(dataSource);
if (dataSourcePaths.isPresent()) {
final Stream<String> allowedPaths = dataSourcePaths.get().stream();
final String pluginId = dataSource.getConnector().getId();
final Optional<ConnectorPlugin> plugin = this.pluginManager.getPlugin(pluginId);
if (plugin.isPresent()) {
if (ConnectorUtil.hasAnyTabSref(plugin.get().getDescriptor(), fileSystemSrefs)) {
return isPathAllowed(path.toUri(), toURIs(allowedPaths));
}
if (dataSetId != null && ConnectorUtil.hasAnyTabSref(plugin.get().getDescriptor(), uploadSrefs)) {
final Stream<String> uploadPaths = allowedPaths
.map(allowedPath -> allowedPath.endsWith(Path.SEPARATOR) ? allowedPath : allowedPath + Path.SEPARATOR)
.map(allowedPath -> allowedPath + dataSetId + Path.SEPARATOR);
return isPathAllowed(path.toUri(), toURIs(uploadPaths));
}
} else {
return false;
}
}
return true;
}
/**
* Determines if the specified path matches one of the allowed URIs.
*/
private boolean isPathAllowed(@Nonnull final URI path, @Nonnull final List<URI> allowedUris) {
final String scheme = (path.getScheme() != null) ? path.getScheme() : "file";
final String normalPath = path.normalize().getPath();
for (final URI allowedUri : allowedUris) {
final String allowedPath = allowedUri.getPath();
final String allowedScheme = allowedUri.getScheme() != null ? allowedUri.getScheme() : "file";
if (scheme.equals(allowedScheme) && (allowedPath == null || allowedPath.equals(normalPath) || normalPath.startsWith(allowedPath))) {
return true;
}
}
return false;
}
/**
* Converts the specified paths to URIs.
*/
@Nonnull
private List<URI> toURIs(@Nonnull final Stream<String> paths) {
return paths.map(Path::new).map(Path::toUri).collect(Collectors.toList());
}
}
| fix bug using pluginId when testing for valid path on a new datasource
| services/catalog-service/catalog-core/src/main/java/com/thinkbiganalytics/kylo/catalog/file/PathValidator.java | fix bug using pluginId when testing for valid path on a new datasource |
|
Java | apache-2.0 | eed678adc8310f9a10b599b7e6375f236d954c3b | 0 | jollygeorge/camel,dmvolod/camel,atoulme/camel,snadakuduru/camel,FingolfinTEK/camel,dpocock/camel,cunningt/camel,snurmine/camel,punkhorn/camel-upstream,bdecoste/camel,objectiser/camel,isururanawaka/camel,DariusX/camel,coderczp/camel,askannon/camel,tarilabs/camel,gnodet/camel,pkletsko/camel,logzio/camel,acartapanis/camel,lburgazzoli/camel,jarst/camel,tlehoux/camel,jameszkw/camel,stalet/camel,acartapanis/camel,mnki/camel,cunningt/camel,oscerd/camel,sirlatrom/camel,FingolfinTEK/camel,MrCoder/camel,tarilabs/camel,gyc567/camel,rmarting/camel,lowwool/camel,noelo/camel,hqstevenson/camel,yuruki/camel,duro1/camel,chirino/camel,jameszkw/camel,veithen/camel,driseley/camel,lasombra/camel,iweiss/camel,yogamaha/camel,isavin/camel,tdiesler/camel,pkletsko/camel,prashant2402/camel,dpocock/camel,tadayosi/camel,davidwilliams1978/camel,royopa/camel,nikvaessen/camel,eformat/camel,christophd/camel,manuelh9r/camel,CodeSmell/camel,anton-k11/camel,nikhilvibhav/camel,dsimansk/camel,YoshikiHigo/camel,sabre1041/camel,oalles/camel,zregvart/camel,manuelh9r/camel,ramonmaruko/camel,punkhorn/camel-upstream,nikvaessen/camel,mnki/camel,kevinearls/camel,edigrid/camel,ekprayas/camel,acartapanis/camel,jpav/camel,pplatek/camel,driseley/camel,stravag/camel,ge0ffrey/camel,zregvart/camel,neoramon/camel,jollygeorge/camel,haku/camel,manuelh9r/camel,DariusX/camel,adessaigne/camel,ge0ffrey/camel,akhettar/camel,brreitme/camel,onders86/camel,onders86/camel,iweiss/camel,dkhanolkar/camel,pmoerenhout/camel,rparree/camel,akhettar/camel,mgyongyosi/camel,snadakuduru/camel,duro1/camel,jollygeorge/camel,pkletsko/camel,Thopap/camel,drsquidop/camel,royopa/camel,isavin/camel,sabre1041/camel,mzapletal/camel,arnaud-deprez/camel,edigrid/camel,kevinearls/camel,borcsokj/camel,driseley/camel,yuruki/camel,rmarting/camel,qst-jdc-labs/camel,sebi-hgdata/camel,jarst/camel,mnki/camel,ullgren/camel,anoordover/camel,sirlatrom/camel,driseley/camel,ullgren/camel,dpocock/came
l,jonmcewen/camel,pplatek/camel,YoshikiHigo/camel,borcsokj/camel,grange74/camel,nicolaferraro/camel,logzio/camel,anton-k11/camel,JYBESSON/camel,josefkarasek/camel,coderczp/camel,sabre1041/camel,tlehoux/camel,driseley/camel,punkhorn/camel-upstream,grgrzybek/camel,josefkarasek/camel,mzapletal/camel,pmoerenhout/camel,royopa/camel,yury-vashchyla/camel,tdiesler/camel,pplatek/camel,mohanaraosv/camel,sirlatrom/camel,cunningt/camel,mzapletal/camel,NetNow/camel,cunningt/camel,lburgazzoli/apache-camel,RohanHart/camel,joakibj/camel,atoulme/camel,jamesnetherton/camel,davidwilliams1978/camel,cunningt/camel,NetNow/camel,dsimansk/camel,gilfernandes/camel,dkhanolkar/camel,qst-jdc-labs/camel,jameszkw/camel,woj-i/camel,yogamaha/camel,ge0ffrey/camel,partis/camel,dmvolod/camel,joakibj/camel,pmoerenhout/camel,snadakuduru/camel,stalet/camel,akhettar/camel,mzapletal/camel,gilfernandes/camel,prashant2402/camel,snadakuduru/camel,nboukhed/camel,brreitme/camel,mike-kukla/camel,curso007/camel,tdiesler/camel,bdecoste/camel,satishgummadelli/camel,eformat/camel,qst-jdc-labs/camel,prashant2402/camel,chanakaudaya/camel,bhaveshdt/camel,ekprayas/camel,objectiser/camel,mgyongyosi/camel,objectiser/camel,jkorab/camel,askannon/camel,partis/camel,woj-i/camel,dvankleef/camel,lowwool/camel,lburgazzoli/apache-camel,gyc567/camel,oscerd/camel,sverkera/camel,kevinearls/camel,grgrzybek/camel,joakibj/camel,bfitzpat/camel,atoulme/camel,edigrid/camel,borcsokj/camel,veithen/camel,anoordover/camel,qst-jdc-labs/camel,jpav/camel,arnaud-deprez/camel,jamesnetherton/camel,allancth/camel,jollygeorge/camel,apache/camel,jmandawg/camel,adessaigne/camel,prashant2402/camel,erwelch/camel,askannon/camel,jkorab/camel,mohanaraosv/camel,stravag/camel,CodeSmell/camel,rmarting/camel,w4tson/camel,veithen/camel,veithen/camel,yury-vashchyla/camel,ramonmaruko/camel,stalet/camel,jkorab/camel,DariusX/camel,pax95/camel,bhaveshdt/camel,yuruki/camel,jollygeorge/camel,CandleCandle/camel,allancth/camel,davidkarlsen/camel,haku/camel,isururanawaka
/camel,lburgazzoli/apache-camel,gyc567/camel,FingolfinTEK/camel,jamesnetherton/camel,gautric/camel,jmandawg/camel,mike-kukla/camel,bfitzpat/camel,mohanaraosv/camel,ullgren/camel,ramonmaruko/camel,ssharma/camel,borcsokj/camel,askannon/camel,prashant2402/camel,gyc567/camel,johnpoth/camel,gautric/camel,tkopczynski/camel,bhaveshdt/camel,nicolaferraro/camel,davidkarlsen/camel,grange74/camel,neoramon/camel,jarst/camel,rmarting/camel,drsquidop/camel,anoordover/camel,isururanawaka/camel,NickCis/camel,sirlatrom/camel,joakibj/camel,tkopczynski/camel,chanakaudaya/camel,CodeSmell/camel,mike-kukla/camel,mnki/camel,onders86/camel,oalles/camel,maschmid/camel,nboukhed/camel,oalles/camel,anton-k11/camel,salikjan/camel,ramonmaruko/camel,sverkera/camel,akhettar/camel,sabre1041/camel,drsquidop/camel,eformat/camel,maschmid/camel,chanakaudaya/camel,jonmcewen/camel,allancth/camel,atoulme/camel,duro1/camel,veithen/camel,arnaud-deprez/camel,davidkarlsen/camel,apache/camel,snadakuduru/camel,tkopczynski/camel,logzio/camel,tdiesler/camel,oalles/camel,mike-kukla/camel,askannon/camel,rparree/camel,tadayosi/camel,jameszkw/camel,anoordover/camel,bfitzpat/camel,woj-i/camel,lburgazzoli/apache-camel,stravag/camel,tarilabs/camel,bgaudaen/camel,gilfernandes/camel,bdecoste/camel,lburgazzoli/camel,lasombra/camel,christophd/camel,koscejev/camel,gnodet/camel,lasombra/camel,logzio/camel,coderczp/camel,ekprayas/camel,tdiesler/camel,rparree/camel,NickCis/camel,mohanaraosv/camel,davidwilliams1978/camel,partis/camel,MohammedHammam/camel,sverkera/camel,dsimansk/camel,jarst/camel,manuelh9r/camel,lasombra/camel,manuelh9r/camel,jamesnetherton/camel,johnpoth/camel,coderczp/camel,scranton/camel,NetNow/camel,grange74/camel,zregvart/camel,jonmcewen/camel,nboukhed/camel,ge0ffrey/camel,jlpedrosa/camel,duro1/camel,snurmine/camel,hqstevenson/camel,jlpedrosa/camel,zregvart/camel,arnaud-deprez/camel,JYBESSON/camel,johnpoth/camel,hqstevenson/camel,logzio/camel,lasombra/camel,rparree/camel,yury-vashchyla/camel,woj-i/camel,masc
hmid/camel,isavin/camel,stalet/camel,curso007/camel,noelo/camel,yogamaha/camel,jmandawg/camel,trohovsky/camel,snurmine/camel,neoramon/camel,tkopczynski/camel,MohammedHammam/camel,alvinkwekel/camel,Fabryprog/camel,rparree/camel,skinzer/camel,arnaud-deprez/camel,brreitme/camel,NickCis/camel,isavin/camel,ssharma/camel,CandleCandle/camel,hqstevenson/camel,sebi-hgdata/camel,gyc567/camel,trohovsky/camel,satishgummadelli/camel,sverkera/camel,yogamaha/camel,kevinearls/camel,yuruki/camel,grgrzybek/camel,sebi-hgdata/camel,adessaigne/camel,dvankleef/camel,scranton/camel,isavin/camel,dvankleef/camel,tarilabs/camel,rmarting/camel,gilfernandes/camel,nikhilvibhav/camel,woj-i/camel,duro1/camel,royopa/camel,mcollovati/camel,salikjan/camel,anton-k11/camel,nikvaessen/camel,davidwilliams1978/camel,skinzer/camel,Fabryprog/camel,Thopap/camel,pax95/camel,noelo/camel,prashant2402/camel,eformat/camel,Thopap/camel,jonmcewen/camel,mgyongyosi/camel,bgaudaen/camel,pplatek/camel,arnaud-deprez/camel,MrCoder/camel,jamesnetherton/camel,edigrid/camel,rmarting/camel,coderczp/camel,sirlatrom/camel,MohammedHammam/camel,allancth/camel,ramonmaruko/camel,haku/camel,dsimansk/camel,iweiss/camel,satishgummadelli/camel,ekprayas/camel,erwelch/camel,curso007/camel,onders86/camel,mohanaraosv/camel,scranton/camel,davidwilliams1978/camel,YMartsynkevych/camel,noelo/camel,edigrid/camel,bdecoste/camel,ssharma/camel,isururanawaka/camel,tlehoux/camel,YMartsynkevych/camel,DariusX/camel,johnpoth/camel,maschmid/camel,jarst/camel,joakibj/camel,acartapanis/camel,mgyongyosi/camel,mnki/camel,borcsokj/camel,objectiser/camel,dvankleef/camel,isavin/camel,FingolfinTEK/camel,NetNow/camel,ssharma/camel,tarilabs/camel,qst-jdc-labs/camel,nboukhed/camel,onders86/camel,mike-kukla/camel,w4tson/camel,tlehoux/camel,jameszkw/camel,nboukhed/camel,mgyongyosi/camel,alvinkwekel/camel,oscerd/camel,YoshikiHigo/camel,mzapletal/camel,pplatek/camel,brreitme/camel,ssharma/camel,chirino/camel,chanakaudaya/camel,allancth/camel,YMartsynkevych/camel,man
uelh9r/camel,lburgazzoli/apache-camel,christophd/camel,ekprayas/camel,dsimansk/camel,Thopap/camel,sebi-hgdata/camel,lasombra/camel,MrCoder/camel,drsquidop/camel,jpav/camel,FingolfinTEK/camel,RohanHart/camel,pax95/camel,Thopap/camel,hqstevenson/camel,sebi-hgdata/camel,nikhilvibhav/camel,bgaudaen/camel,anoordover/camel,kevinearls/camel,dkhanolkar/camel,grange74/camel,josefkarasek/camel,NickCis/camel,satishgummadelli/camel,bgaudaen/camel,satishgummadelli/camel,mohanaraosv/camel,jamesnetherton/camel,nicolaferraro/camel,eformat/camel,pplatek/camel,adessaigne/camel,yury-vashchyla/camel,josefkarasek/camel,tadayosi/camel,dpocock/camel,brreitme/camel,alvinkwekel/camel,jmandawg/camel,tlehoux/camel,noelo/camel,koscejev/camel,pax95/camel,tadayosi/camel,chirino/camel,mnki/camel,snurmine/camel,dsimansk/camel,edigrid/camel,YMartsynkevych/camel,bhaveshdt/camel,mgyongyosi/camel,grgrzybek/camel,isururanawaka/camel,jmandawg/camel,lowwool/camel,punkhorn/camel-upstream,stalet/camel,RohanHart/camel,scranton/camel,curso007/camel,allancth/camel,jonmcewen/camel,joakibj/camel,CandleCandle/camel,alvinkwekel/camel,logzio/camel,jpav/camel,NickCis/camel,brreitme/camel,chanakaudaya/camel,johnpoth/camel,snadakuduru/camel,jkorab/camel,skinzer/camel,yury-vashchyla/camel,maschmid/camel,apache/camel,oalles/camel,adessaigne/camel,FingolfinTEK/camel,chirino/camel,CandleCandle/camel,dvankleef/camel,logzio/camel,gnodet/camel,erwelch/camel,dpocock/camel,nikvaessen/camel,mcollovati/camel,coderczp/camel,lowwool/camel,kevinearls/camel,davidkarlsen/camel,noelo/camel,snurmine/camel,gilfernandes/camel,mike-kukla/camel,gilfernandes/camel,dmvolod/camel,pmoerenhout/camel,skinzer/camel,erwelch/camel,dkhanolkar/camel,jmandawg/camel,nikhilvibhav/camel,tadayosi/camel,lburgazzoli/camel,acartapanis/camel,anton-k11/camel,yogamaha/camel,satishgummadelli/camel,trohovsky/camel,haku/camel,gautric/camel,veithen/camel,bgaudaen/camel,YMartsynkevych/camel,koscejev/camel,erwelch/camel,sirlatrom/camel,skinzer/camel,bfitzpat/camel,j
korab/camel,jlpedrosa/camel,jlpedrosa/camel,pmoerenhout/camel,pplatek/camel,bfitzpat/camel,pax95/camel,w4tson/camel,grgrzybek/camel,atoulme/camel,duro1/camel,CodeSmell/camel,tadayosi/camel,MrCoder/camel,jarst/camel,apache/camel,christophd/camel,anoordover/camel,apache/camel,MohammedHammam/camel,ge0ffrey/camel,askannon/camel,trohovsky/camel,trohovsky/camel,CandleCandle/camel,oscerd/camel,partis/camel,JYBESSON/camel,drsquidop/camel,jlpedrosa/camel,MohammedHammam/camel,oscerd/camel,pax95/camel,JYBESSON/camel,pmoerenhout/camel,ssharma/camel,dpocock/camel,oscerd/camel,w4tson/camel,cunningt/camel,curso007/camel,MohammedHammam/camel,lburgazzoli/camel,royopa/camel,bgaudaen/camel,RohanHart/camel,haku/camel,oalles/camel,pkletsko/camel,tdiesler/camel,jpav/camel,davidwilliams1978/camel,dmvolod/camel,atoulme/camel,chirino/camel,ge0ffrey/camel,onders86/camel,dkhanolkar/camel,koscejev/camel,maschmid/camel,christophd/camel,christophd/camel,mcollovati/camel,jlpedrosa/camel,lowwool/camel,JYBESSON/camel,hqstevenson/camel,isururanawaka/camel,tlehoux/camel,skinzer/camel,NickCis/camel,adessaigne/camel,gyc567/camel,bhaveshdt/camel,CandleCandle/camel,w4tson/camel,akhettar/camel,trohovsky/camel,chirino/camel,koscejev/camel,gautric/camel,gnodet/camel,borcsokj/camel,woj-i/camel,dmvolod/camel,stalet/camel,stravag/camel,nikvaessen/camel,neoramon/camel,sabre1041/camel,lburgazzoli/camel,bdecoste/camel,partis/camel,jonmcewen/camel,jollygeorge/camel,gautric/camel,yuruki/camel,tkopczynski/camel,dmvolod/camel,lburgazzoli/apache-camel,MrCoder/camel,dvankleef/camel,dkhanolkar/camel,lburgazzoli/camel,iweiss/camel,iweiss/camel,rparree/camel,eformat/camel,nicolaferraro/camel,sverkera/camel,anton-k11/camel,akhettar/camel,apache/camel,scranton/camel,bhaveshdt/camel,pkletsko/camel,grange74/camel,royopa/camel,YMartsynkevych/camel,nikvaessen/camel,MrCoder/camel,snurmine/camel,lowwool/camel,chanakaudaya/camel,ullgren/camel,sebi-hgdata/camel,tkopczynski/camel,Thopap/camel,acartapanis/camel,neoramon/camel,josefka
rasek/camel,johnpoth/camel,stravag/camel,erwelch/camel,YoshikiHigo/camel,mzapletal/camel,NetNow/camel,sverkera/camel,grange74/camel,yuruki/camel,ekprayas/camel,NetNow/camel,bdecoste/camel,sabre1041/camel,jameszkw/camel,YoshikiHigo/camel,josefkarasek/camel,YoshikiHigo/camel,gnodet/camel,nboukhed/camel,Fabryprog/camel,RohanHart/camel,yury-vashchyla/camel,gautric/camel,curso007/camel,haku/camel,grgrzybek/camel,stravag/camel,drsquidop/camel,iweiss/camel,JYBESSON/camel,yogamaha/camel,scranton/camel,jpav/camel,qst-jdc-labs/camel,driseley/camel,RohanHart/camel,tarilabs/camel,koscejev/camel,neoramon/camel,pkletsko/camel,Fabryprog/camel,jkorab/camel,mcollovati/camel,partis/camel,ramonmaruko/camel,bfitzpat/camel,w4tson/camel | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.servletlistener;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.camel.ManagementStatisticsLevel;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.ErrorHandlerBuilderRef;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.properties.PropertiesComponent;
import org.apache.camel.management.DefaultManagementAgent;
import org.apache.camel.management.DefaultManagementLifecycleStrategy;
import org.apache.camel.management.DefaultManagementStrategy;
import org.apache.camel.management.ManagedManagementStrategy;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.RoutesDefinition;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.spi.Registry;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ResourceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link ServletContextListener} which is used to bootstrap
* {@link org.apache.camel.CamelContext} in web applications.
*
* @param <R> the type of the {@link Registry} being {@link #createRegistry() created}
*/
public abstract class CamelServletContextListener<R extends Registry> implements ServletContextListener {
    /**
     * Instance is used for testing purpose only: when the {@code test} init parameter
     * is {@code true}, the created context is exposed here so tests can access it.
     */
    public static ServletCamelContext instance;

    /**
     * Key to store the created {@link org.apache.camel.CamelContext} as an attribute on the {@link javax.servlet.ServletContext}.
     */
    public static final String CAMEL_CONTEXT_KEY = "CamelContext";

    protected static final Logger LOG = LoggerFactory.getLogger(CamelServletContextListener.class);

    // the CamelContext created in contextInitialized and stopped in contextDestroyed
    protected ServletCamelContext camelContext;
    // optional user-provided lifecycle callbacks, resolved from the "CamelContextLifecycle" init parameter
    protected CamelContextLifecycle<R> camelContextLifecycle;
    // whether the "test" init parameter enabled test mode (exposes the context via the static instance field)
    protected boolean test;
    // the registry created by createRegistry(), backing the ServletCamelContext
    protected R registry;
@Override
public void contextInitialized(ServletContextEvent sce) {
LOG.info("CamelContextServletListener initializing ...");
// create jndi and camel context
try {
registry = createRegistry();
camelContext = new ServletCamelContext(registry, sce.getServletContext());
} catch (Exception e) {
throw new RuntimeException("Error creating CamelContext.", e);
}
// get the init parameters
Map<String, Object> map = extractInitParameters(sce);
// special for test parameter
String test = (String) map.remove("test");
if (test != null && "true".equalsIgnoreCase(test)) {
this.test = true;
}
LOG.trace("In test mode? {}", this.test);
// set properties on the camel context from the init parameters
try {
initPropertyPlaceholder(camelContext, map);
initJmx(camelContext, map);
initCamelContext(camelContext, map);
if (!map.isEmpty()) {
IntrospectionSupport.setProperties(camelContext, map);
}
} catch (Exception e) {
throw new RuntimeException("Error setting init parameters on CamelContext.", e);
}
// any custom CamelContextLifecycle
String lifecycle = (String) map.remove("CamelContextLifecycle");
if (lifecycle != null) {
try {
Class<CamelContextLifecycle<R>> clazz = CastUtils.cast(camelContext.getClassResolver().resolveMandatoryClass(lifecycle, CamelContextLifecycle.class));
camelContextLifecycle = camelContext.getInjector().newInstance(clazz);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Error creating CamelContextLifecycle class with name " + lifecycle, e);
}
}
// just log if we could not use all the parameters, as they may be used by others
if (!map.isEmpty()) {
LOG.info("There are {} ServletContext init parameters, unknown to Camel. Maybe they are used by other frameworks? [{}]", map.size(), map);
}
try {
if (camelContextLifecycle != null) {
camelContextLifecycle.beforeAddRoutes(camelContext, registry);
}
} catch (Exception e) {
LOG.error("Error before adding routes to CamelContext.", e);
throw new RuntimeException("Error before adding routes to CamelContext.", e);
}
// get the routes and add to the CamelContext
List<Object> routes = extractRoutes(map);
for (Object route : routes) {
if (route instanceof RouteBuilder) {
try {
camelContext.addRoutes((RoutesBuilder) route);
} catch (Exception e) {
throw new RuntimeException("Error adding route " + route, e);
}
} else if (route instanceof Set) {
// its a set of route builders
for (Object routesBuilder : (Set<?>) route) {
try {
camelContext.addRoutes((RoutesBuilder) routesBuilder);
} catch (Exception e) {
throw new RuntimeException("Error adding route " + routesBuilder, e);
}
}
} else if (route instanceof RoutesDefinition) {
try {
camelContext.addRouteDefinitions(((RoutesDefinition) route).getRoutes());
} catch (Exception e) {
throw new RuntimeException("Error adding route(s) " + route, e);
}
} else if (route instanceof RouteDefinition) {
try {
camelContext.addRouteDefinition((RouteDefinition) route);
} catch (Exception e) {
throw new RuntimeException("Error adding route(s) " + route, e);
}
} else {
throw new IllegalArgumentException("Unsupported route: " + route);
}
}
try {
if (camelContextLifecycle != null) {
camelContextLifecycle.afterAddRoutes(camelContext, registry);
}
} catch (Exception e) {
LOG.error("Error after adding routes to CamelContext.", e);
throw new RuntimeException("Error after adding routes to CamelContext.", e);
}
try {
if (camelContextLifecycle != null) {
camelContextLifecycle.beforeStart(camelContext, registry);
}
camelContext.start();
if (camelContextLifecycle != null) {
camelContextLifecycle.afterStart(camelContext, registry);
}
} catch (Exception e) {
LOG.error("Error starting CamelContext.", e);
throw new RuntimeException("Error starting CamelContext.", e);
}
if (this.test) {
instance = camelContext;
}
// store the CamelContext as an attribute
sce.getServletContext().setAttribute(CAMEL_CONTEXT_KEY, camelContext);
LOG.info("CamelContextServletListener initialized");
}
@Override
public void contextDestroyed(ServletContextEvent sce) {
LOG.info("CamelContextServletListener destroying ...");
if (camelContext != null) {
try {
if (camelContextLifecycle != null) {
camelContextLifecycle.beforeStop(camelContext, registry);
}
camelContext.stop();
if (camelContextLifecycle != null) {
camelContextLifecycle.afterStop(camelContext, registry);
}
} catch (Exception e) {
LOG.warn("Error stopping CamelContext. This exception will be ignored.", e);
}
}
camelContext = null;
registry = null;
instance = null;
// store the CamelContext as an attribute
sce.getServletContext().removeAttribute(CAMEL_CONTEXT_KEY);
LOG.info("CamelContextServletListener destroyed");
}
    /**
     * Creates the {@link Registry} implementation to use.
     *
     * @return the registry that will back the created {@link ServletCamelContext}
     * @throws Exception is thrown if the registry could not be created
     */
    protected abstract R createRegistry() throws Exception;
/**
* Extracts all the init parameters, and will do reference lookup in {@link #createRegistry() registry}
* in case the value starts with a {@code #} sign.
*/
private Map<String, Object> extractInitParameters(ServletContextEvent sce) {
// configure CamelContext with the init parameter
Map<String, Object> map = new LinkedHashMap<String, Object>();
Enumeration<?> names = sce.getServletContext().getInitParameterNames();
while (names.hasMoreElements()) {
String name = (String) names.nextElement();
String value = sce.getServletContext().getInitParameter(name);
if (ObjectHelper.isNotEmpty(value)) {
Object target = value;
if (value.startsWith("#")) {
// a reference lookup in registry
value = value.substring(1);
target = lookupRegistryByName(value);
LOG.debug("Resolved the servlet context's initialization parameter {} to {}", value, target);
}
map.put(name, target);
}
}
return map;
}
/**
* Initializes the property placeholders by registering the {@link PropertiesComponent} with
* the configuration from the given init parameters.
*/
private void initPropertyPlaceholder(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
// setup property placeholder first
Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "propertyPlaceholder.");
if (properties != null && !properties.isEmpty()) {
PropertiesComponent pc = new PropertiesComponent();
IntrospectionSupport.setProperties(pc, properties);
// validate we could set all parameters
if (!properties.isEmpty()) {
throw new IllegalArgumentException("Error setting propertyPlaceholder parameters on CamelContext."
+ " There are " + properties.size() + " unknown parameters. [" + properties + "]");
}
// register the properties component
camelContext.addComponent("properties", pc);
}
}
/**
 * Initializes JMX on {@link ServletCamelContext} with the configuration from the given init parameters.
 * <p>
 * Consumes init parameters prefixed with {@code jmx.}; they are removed from the given map.
 * The {@code jmx.disabled} parameter switches between a plain (non-managed) strategy and a
 * fully managed strategy backed by a {@link DefaultManagementAgent}.
 *
 * @throws IllegalArgumentException if any extracted jmx parameter could not be applied
 */
private void initJmx(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
    // setup jmx
    Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "jmx.");
    if (properties != null && !properties.isEmpty()) {
        String disabled = (String) properties.remove("disabled");
        boolean disableJmx = CamelContextHelper.parseBoolean(camelContext, disabled != null ? disabled : "false");
        if (disableJmx) {
            // disable JMX which is a bit special to do
            LOG.info("JMXAgent disabled");
            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            camelContext.getLifecycleStrategies().clear();
            // no need to add a lifecycle strategy as we do not need one as JMX is disabled
            camelContext.setManagementStrategy(new DefaultManagementStrategy());
        } else {
            LOG.info("JMXAgent enabled");
            DefaultManagementAgent agent = new DefaultManagementAgent(camelContext);
            // apply the remaining jmx.* options onto the agent; applied entries are removed
            IntrospectionSupport.setProperties(agent, properties);
            ManagementStrategy managementStrategy = new ManagedManagementStrategy(camelContext, agent);
            camelContext.setManagementStrategy(managementStrategy);
            // clear the existing lifecycle strategies defined by the DefaultCamelContext constructor
            camelContext.getLifecycleStrategies().clear();
            camelContext.addLifecycleStrategy(new DefaultManagementLifecycleStrategy(camelContext));
            // set additional configuration from agent
            boolean onlyId = agent.getOnlyRegisterProcessorWithCustomId() != null && agent.getOnlyRegisterProcessorWithCustomId();
            camelContext.getManagementStrategy().onlyManageProcessorWithCustomId(onlyId);
            String statisticsLevel = (String) properties.remove("statisticsLevel");
            if (statisticsLevel != null) {
                camelContext.getManagementStrategy().setStatisticsLevel(ManagementStatisticsLevel.valueOf(statisticsLevel));
            }
            // load statistics default to enabled unless explicitly turned off
            String loadStatisticsEnabled = (String) properties.remove("loadStatisticsEnabled");
            Boolean statisticsEnabled = CamelContextHelper.parseBoolean(camelContext, loadStatisticsEnabled != null ? loadStatisticsEnabled : "true");
            if (statisticsEnabled != null) {
                camelContext.getManagementStrategy().setLoadStatisticsEnabled(statisticsEnabled);
            }
        }
        // validate we could set all parameters
        if (!properties.isEmpty()) {
            throw new IllegalArgumentException("Error setting jmx parameters on CamelContext."
                + " There are " + properties.size() + " unknown parameters. [" + properties + "]");
        }
    }
}
/**
 * Applies the supported CamelContext init parameters (messageHistory, streamCache,
 * trace, delayer, handleFault, errorHandlerRef, autoStartup, useMDCLogging,
 * useBreadcrumb, managementNamePattern, threadNamePattern) plus any
 * {@code properties.} prefixed entries to the given context.
 * Each recognized parameter is removed from the map as it is consumed.
 */
private void initCamelContext(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
    // every option is optional; only apply the ones that were provided
    String option = (String) parameters.remove("messageHistory");
    if (option != null) {
        camelContext.setMessageHistory(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("streamCache");
    if (option != null) {
        camelContext.setStreamCaching(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("trace");
    if (option != null) {
        camelContext.setTracing(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("delayer");
    if (option != null) {
        camelContext.setDelayer(CamelContextHelper.parseLong(camelContext, option));
    }
    option = (String) parameters.remove("handleFault");
    if (option != null) {
        camelContext.setHandleFault(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("errorHandlerRef");
    if (option != null) {
        // resolved lazily by reference name
        camelContext.setErrorHandlerBuilder(new ErrorHandlerBuilderRef(option));
    }
    option = (String) parameters.remove("autoStartup");
    if (option != null) {
        camelContext.setAutoStartup(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("useMDCLogging");
    if (option != null) {
        camelContext.setUseMDCLogging(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("useBreadcrumb");
    if (option != null) {
        camelContext.setUseBreadcrumb(CamelContextHelper.parseBoolean(camelContext, option));
    }
    option = (String) parameters.remove("managementNamePattern");
    if (option != null) {
        camelContext.getManagementNameStrategy().setNamePattern(option);
    }
    option = (String) parameters.remove("threadNamePattern");
    if (option != null) {
        camelContext.getExecutorServiceManager().setThreadNamePattern(option);
    }
    // extract any additional "properties." prefixed entries and store them
    // as context properties (stringified)
    Map<String, Object> extra = IntrospectionSupport.extractProperties(parameters, "properties.");
    if (extra != null && !extra.isEmpty()) {
        for (Map.Entry<String, Object> entry : extra.entrySet()) {
            camelContext.getProperties().put(entry.getKey(), "" + entry.getValue());
        }
    }
}
/**
 * Extract the routes from the parameters.
 * <p>
 * Any parameter whose name starts with {@code routeBuilder} (case-insensitive) is
 * interpreted as one or more comma-separated route sources. Each value may be:
 * <ul>
 * <li>a {@code #name} reference resolved from the registry</li>
 * <li>a resource URI (e.g. classpath/file) containing an XML routes definition</li>
 * <li>a {@code packagescan:&lt;package&gt;} directive locating {@link RouteBuilder} implementations</li>
 * <li>a fully qualified {@link RouteBuilder} class name</li>
 * </ul>
 * Consumed parameters are removed from the map.
 *
 * @param map parameters
 * @return a list of routes, which can be of different types. See source code for more details.
 */
private List<Object> extractRoutes(Map<String, Object> map) {
    List<Object> answer = new ArrayList<Object>();
    // remember consumed keys so they can be removed after iteration
    // (removing during the entrySet loop would break the iterator)
    List<String> names = new ArrayList<String>();
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        if (entry.getKey().toLowerCase(Locale.UK).startsWith("routebuilder")) {
            names.add(entry.getKey());
            // we can have multiple values assigned, separated by comma, so create an iterator
            String value = (String) entry.getValue();
            Iterator<Object> it = ObjectHelper.createIterator(value);
            while (it.hasNext()) {
                value = (String) it.next();
                if (ObjectHelper.isNotEmpty(value)) {
                    // trim value before usage, as people can indent the values
                    value = value.trim();
                    Object target = null;
                    if (value.startsWith("#")) {
                        // a reference lookup in the registry
                        value = value.substring(1);
                        target = lookupRegistryByName(value);
                    } else if (ResourceHelper.hasScheme(value)) {
                        // XML resource from classpath or file system
                        InputStream is = null;
                        try {
                            is = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext.getClassResolver(), value);
                            target = camelContext.loadRoutesDefinition(is);
                        } catch (Exception e) {
                            throw new RuntimeException("Error loading routes from resource: " + value, e);
                        } finally {
                            IOHelper.close(is, entry.getKey(), LOG);
                        }
                    } else if (value.startsWith("packagescan:")) {
                        // using package scanning; strip the prefix by its length instead of a magic offset
                        String path = value.substring("packagescan:".length());
                        Set<Class<?>> classes = camelContext.getPackageScanClassResolver().findImplementations(RouteBuilder.class, path);
                        if (!classes.isEmpty()) {
                            Set<RouteBuilder> builders = new LinkedHashSet<RouteBuilder>();
                            target = builders;
                            for (Class<?> clazz : classes) {
                                try {
                                    RouteBuilder route = (RouteBuilder) camelContext.getInjector().newInstance(clazz);
                                    builders.add(route);
                                } catch (Exception e) {
                                    throw new RuntimeException("Error creating RouteBuilder " + clazz, e);
                                }
                            }
                        }
                    } else {
                        // assume its a FQN classname for a RouteBuilder class
                        try {
                            Class<RouteBuilder> clazz = camelContext.getClassResolver().resolveMandatoryClass(value, RouteBuilder.class);
                            target = camelContext.getInjector().newInstance(clazz);
                        } catch (Exception e) {
                            throw new RuntimeException("Error creating RouteBuilder " + value, e);
                        }
                    }
                    if (target != null) {
                        answer.add(target);
                    }
                }
            }
        }
    }
    // after adding the route builders we should remove them from the map
    for (String name : names) {
        map.remove(name);
    }
    return answer;
}
/**
 * Looks up the given named bean in the {@link Registry} created by {@link #createRegistry()}.
 */
private Object lookupRegistryByName(String name) {
    return registry.lookupByName(name);
}
}
| components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelServletContextListener.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.servletlistener;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.camel.ManagementStatisticsLevel;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.ErrorHandlerBuilderRef;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.properties.PropertiesComponent;
import org.apache.camel.management.DefaultManagementAgent;
import org.apache.camel.management.DefaultManagementLifecycleStrategy;
import org.apache.camel.management.DefaultManagementStrategy;
import org.apache.camel.management.ManagedManagementStrategy;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.RoutesDefinition;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.spi.Registry;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ResourceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link ServletContextListener} which is used to bootstrap
* {@link org.apache.camel.CamelContext} in web applications.
*
* @param <R> the type of the {@link Registry} being {@link #createRegistry() created}
*/
public abstract class CamelServletContextListener<R extends Registry> implements ServletContextListener {
/**
* instance is used for testing purpose
*/
public static ServletCamelContext instance;
protected static final Logger LOG = LoggerFactory.getLogger(CamelServletContextListener.class);
protected ServletCamelContext camelContext;
protected CamelContextLifecycle<R> camelContextLifecycle;
protected boolean test;
protected R registry;
/**
 * Bootstraps the {@link ServletCamelContext}: creates the registry and context,
 * applies the servlet init parameters, instantiates any custom
 * {@link CamelContextLifecycle}, adds the configured routes and starts the context.
 * The ordering of these steps is deliberate and must be preserved.
 */
@Override
public void contextInitialized(ServletContextEvent sce) {
    LOG.info("CamelContextServletListener initializing ...");
    // create jndi and camel context
    try {
        registry = createRegistry();
        camelContext = new ServletCamelContext(registry, sce.getServletContext());
    } catch (Exception e) {
        throw new RuntimeException("Error creating CamelContext.", e);
    }
    // get the init parameters
    Map<String, Object> map = extractInitParameters(sce);
    // special for test parameter (exposes the context via the static instance field)
    String test = (String) map.remove("test");
    if (test != null && "true".equalsIgnoreCase(test)) {
        this.test = true;
    }
    LOG.trace("In test mode? {}", this.test);
    // set properties on the camel context from the init parameters
    try {
        initPropertyPlaceholder(camelContext, map);
        initJmx(camelContext, map);
        initCamelContext(camelContext, map);
        // any remaining parameters are applied as bean properties on the context itself
        if (!map.isEmpty()) {
            IntrospectionSupport.setProperties(camelContext, map);
        }
    } catch (Exception e) {
        throw new RuntimeException("Error setting init parameters on CamelContext.", e);
    }
    // any custom CamelContextLifecycle
    String lifecycle = (String) map.remove("CamelContextLifecycle");
    if (lifecycle != null) {
        try {
            Class<CamelContextLifecycle<R>> clazz = CastUtils.cast(camelContext.getClassResolver().resolveMandatoryClass(lifecycle, CamelContextLifecycle.class));
            camelContextLifecycle = camelContext.getInjector().newInstance(clazz);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Error creating CamelContextLifecycle class with name " + lifecycle, e);
        }
    }
    // just log if we could not use all the parameters, as they may be used by others
    // NOTE(review): routeBuilder parameters are still in the map at this point, so they
    // appear in this log message even though extractRoutes consumes them below
    if (!map.isEmpty()) {
        LOG.info("There are {} ServletContext init parameters, unknown to Camel. Maybe they are used by other frameworks? [{}]", map.size(), map);
    }
    try {
        if (camelContextLifecycle != null) {
            camelContextLifecycle.beforeAddRoutes(camelContext, registry);
        }
    } catch (Exception e) {
        LOG.error("Error before adding routes to CamelContext.", e);
        throw new RuntimeException("Error before adding routes to CamelContext.", e);
    }
    // get the routes and add to the CamelContext
    List<Object> routes = extractRoutes(map);
    for (Object route : routes) {
        if (route instanceof RouteBuilder) {
            try {
                camelContext.addRoutes((RoutesBuilder) route);
            } catch (Exception e) {
                throw new RuntimeException("Error adding route " + route, e);
            }
        } else if (route instanceof Set) {
            // its a set of route builders (produced by packagescan)
            for (Object routesBuilder : (Set<?>) route) {
                try {
                    camelContext.addRoutes((RoutesBuilder) routesBuilder);
                } catch (Exception e) {
                    throw new RuntimeException("Error adding route " + routesBuilder, e);
                }
            }
        } else if (route instanceof RoutesDefinition) {
            try {
                camelContext.addRouteDefinitions(((RoutesDefinition) route).getRoutes());
            } catch (Exception e) {
                throw new RuntimeException("Error adding route(s) " + route, e);
            }
        } else if (route instanceof RouteDefinition) {
            try {
                camelContext.addRouteDefinition((RouteDefinition) route);
            } catch (Exception e) {
                throw new RuntimeException("Error adding route(s) " + route, e);
            }
        } else {
            throw new IllegalArgumentException("Unsupported route: " + route);
        }
    }
    try {
        if (camelContextLifecycle != null) {
            camelContextLifecycle.afterAddRoutes(camelContext, registry);
        }
    } catch (Exception e) {
        LOG.error("Error after adding routes to CamelContext.", e);
        throw new RuntimeException("Error after adding routes to CamelContext.", e);
    }
    try {
        if (camelContextLifecycle != null) {
            camelContextLifecycle.beforeStart(camelContext, registry);
        }
        camelContext.start();
        if (camelContextLifecycle != null) {
            camelContextLifecycle.afterStart(camelContext, registry);
        }
    } catch (Exception e) {
        LOG.error("Error starting CamelContext.", e);
        throw new RuntimeException("Error starting CamelContext.", e);
    }
    if (this.test) {
        instance = camelContext;
    }
    LOG.info("CamelContextServletListener initialized");
}
/**
 * Stops the CamelContext (invoking any configured {@link CamelContextLifecycle}
 * callbacks around the stop) and releases all references held by this listener.
 */
@Override
public void contextDestroyed(ServletContextEvent sce) {
    LOG.info("CamelContextServletListener destroying ...");
    if (camelContext != null) {
        try {
            if (camelContextLifecycle != null) {
                camelContextLifecycle.beforeStop(camelContext, registry);
            }
            camelContext.stop();
            if (camelContextLifecycle != null) {
                camelContextLifecycle.afterStop(camelContext, registry);
            }
        } catch (Exception e) {
            // shutdown is best-effort: log and continue so the web app can undeploy cleanly
            LOG.warn("Error stopping CamelContext. This exception will be ignored.", e);
        }
    }
    // null out references so they can be garbage collected on undeploy
    camelContext = null;
    registry = null;
    instance = null;
    LOG.info("CamelContextServletListener destroyed");
}
/**
 * Creates the {@link Registry} implementation to use.
 *
 * @return the registry used for {@code #name} reference lookups by this listener
 * @throws Exception if the registry could not be created
 */
protected abstract R createRegistry() throws Exception;
/**
 * Extracts all the init parameters, and will do reference lookup in {@link #createRegistry() registry}
 * in case the value starts with a {@code #} sign.
 *
 * @param sce the servlet context event carrying the servlet context to read parameters from
 * @return an ordered map of init parameter name to its (possibly registry-resolved) value;
 *         parameters with empty values are skipped
 */
private Map<String, Object> extractInitParameters(ServletContextEvent sce) {
    // configure CamelContext with the init parameter
    // LinkedHashMap preserves the parameter declaration order
    Map<String, Object> map = new LinkedHashMap<String, Object>();
    Enumeration<?> names = sce.getServletContext().getInitParameterNames();
    while (names.hasMoreElements()) {
        String name = (String) names.nextElement();
        String value = sce.getServletContext().getInitParameter(name);
        if (ObjectHelper.isNotEmpty(value)) {
            Object target = value;
            if (value.startsWith("#")) {
                // a reference lookup in registry
                value = value.substring(1);
                target = lookupRegistryByName(value);
                // log the parameter name (not the stripped reference) so the
                // message matches its wording
                LOG.debug("Resolved the servlet context's initialization parameter {} to {}", name, target);
            }
            map.put(name, target);
        }
    }
    return map;
}
/**
 * Initializes the property placeholders by registering the {@link PropertiesComponent} with
 * the configuration from the given init parameters.
 * <p>
 * Only init parameters prefixed with {@code propertyPlaceholder.} are consumed; they are
 * removed from the given map as part of the extraction.
 *
 * @throws IllegalArgumentException if any extracted parameter could not be set on the component
 */
private void initPropertyPlaceholder(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
    // setup property placeholder first
    Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "propertyPlaceholder.");
    if (properties != null && !properties.isEmpty()) {
        PropertiesComponent pc = new PropertiesComponent();
        IntrospectionSupport.setProperties(pc, properties);
        // validate we could set all parameters
        // (setProperties removes every entry it applied, so leftovers are unknown options)
        if (!properties.isEmpty()) {
            throw new IllegalArgumentException("Error setting propertyPlaceholder parameters on CamelContext."
                + " There are " + properties.size() + " unknown parameters. [" + properties + "]");
        }
        // register the properties component
        camelContext.addComponent("properties", pc);
    }
}
/**
 * Initializes JMX on {@link ServletCamelContext} with the configuration from the given init parameters.
 * <p>
 * Consumes init parameters prefixed with {@code jmx.}; they are removed from the given map.
 * The {@code jmx.disabled} parameter switches between a plain (non-managed) strategy and a
 * fully managed strategy backed by a {@link DefaultManagementAgent}.
 *
 * @throws IllegalArgumentException if any extracted jmx parameter could not be applied
 */
private void initJmx(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
    // setup jmx
    Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "jmx.");
    if (properties != null && !properties.isEmpty()) {
        String disabled = (String) properties.remove("disabled");
        boolean disableJmx = CamelContextHelper.parseBoolean(camelContext, disabled != null ? disabled : "false");
        if (disableJmx) {
            // disable JMX which is a bit special to do
            LOG.info("JMXAgent disabled");
            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            camelContext.getLifecycleStrategies().clear();
            // no need to add a lifecycle strategy as we do not need one as JMX is disabled
            camelContext.setManagementStrategy(new DefaultManagementStrategy());
        } else {
            LOG.info("JMXAgent enabled");
            DefaultManagementAgent agent = new DefaultManagementAgent(camelContext);
            // apply the remaining jmx.* options onto the agent; applied entries are removed
            IntrospectionSupport.setProperties(agent, properties);
            ManagementStrategy managementStrategy = new ManagedManagementStrategy(camelContext, agent);
            camelContext.setManagementStrategy(managementStrategy);
            // clear the existing lifecycle strategies defined by the DefaultCamelContext constructor
            camelContext.getLifecycleStrategies().clear();
            camelContext.addLifecycleStrategy(new DefaultManagementLifecycleStrategy(camelContext));
            // set additional configuration from agent
            boolean onlyId = agent.getOnlyRegisterProcessorWithCustomId() != null && agent.getOnlyRegisterProcessorWithCustomId();
            camelContext.getManagementStrategy().onlyManageProcessorWithCustomId(onlyId);
            String statisticsLevel = (String) properties.remove("statisticsLevel");
            if (statisticsLevel != null) {
                camelContext.getManagementStrategy().setStatisticsLevel(ManagementStatisticsLevel.valueOf(statisticsLevel));
            }
            // load statistics default to enabled unless explicitly turned off
            String loadStatisticsEnabled = (String) properties.remove("loadStatisticsEnabled");
            Boolean statisticsEnabled = CamelContextHelper.parseBoolean(camelContext, loadStatisticsEnabled != null ? loadStatisticsEnabled : "true");
            if (statisticsEnabled != null) {
                camelContext.getManagementStrategy().setLoadStatisticsEnabled(statisticsEnabled);
            }
        }
        // validate we could set all parameters
        if (!properties.isEmpty()) {
            throw new IllegalArgumentException("Error setting jmx parameters on CamelContext."
                + " There are " + properties.size() + " unknown parameters. [" + properties + "]");
        }
    }
}
/**
 * Initializes the {@link ServletCamelContext} by setting the supported init parameters.
 * <p>
 * Supported parameters: messageHistory, streamCache, trace, delayer, handleFault,
 * errorHandlerRef, autoStartup, useMDCLogging, useBreadcrumb, managementNamePattern,
 * threadNamePattern, and any {@code properties.} prefixed entries. Each recognized
 * parameter is removed from the map as it is consumed.
 */
private void initCamelContext(ServletCamelContext camelContext, Map<String, Object> parameters) throws Exception {
    // every option below is optional; it is only applied when provided
    String messageHistory = (String) parameters.remove("messageHistory");
    if (messageHistory != null) {
        camelContext.setMessageHistory(CamelContextHelper.parseBoolean(camelContext, messageHistory));
    }
    String streamCache = (String) parameters.remove("streamCache");
    if (streamCache != null) {
        camelContext.setStreamCaching(CamelContextHelper.parseBoolean(camelContext, streamCache));
    }
    String trace = (String) parameters.remove("trace");
    if (trace != null) {
        camelContext.setTracing(CamelContextHelper.parseBoolean(camelContext, trace));
    }
    String delayer = (String) parameters.remove("delayer");
    if (delayer != null) {
        camelContext.setDelayer(CamelContextHelper.parseLong(camelContext, delayer));
    }
    String handleFault = (String) parameters.remove("handleFault");
    if (handleFault != null) {
        camelContext.setHandleFault(CamelContextHelper.parseBoolean(camelContext, handleFault));
    }
    String errorHandlerRef = (String) parameters.remove("errorHandlerRef");
    if (errorHandlerRef != null) {
        // resolved lazily by reference name
        camelContext.setErrorHandlerBuilder(new ErrorHandlerBuilderRef(errorHandlerRef));
    }
    String autoStartup = (String) parameters.remove("autoStartup");
    if (autoStartup != null) {
        camelContext.setAutoStartup(CamelContextHelper.parseBoolean(camelContext, autoStartup));
    }
    String useMDCLogging = (String) parameters.remove("useMDCLogging");
    if (useMDCLogging != null) {
        camelContext.setUseMDCLogging(CamelContextHelper.parseBoolean(camelContext, useMDCLogging));
    }
    String useBreadcrumb = (String) parameters.remove("useBreadcrumb");
    if (useBreadcrumb != null) {
        camelContext.setUseBreadcrumb(CamelContextHelper.parseBoolean(camelContext, useBreadcrumb));
    }
    String managementNamePattern = (String) parameters.remove("managementNamePattern");
    if (managementNamePattern != null) {
        camelContext.getManagementNameStrategy().setNamePattern(managementNamePattern);
    }
    String threadNamePattern = (String) parameters.remove("threadNamePattern");
    if (threadNamePattern != null) {
        camelContext.getExecutorServiceManager().setThreadNamePattern(threadNamePattern);
    }
    // extract any additional properties. prefixes and store them as context properties (stringified)
    Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "properties.");
    if (properties != null && !properties.isEmpty()) {
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            camelContext.getProperties().put(entry.getKey(), "" + entry.getValue());
        }
    }
}
/**
 * Extract the routes from the parameters.
 * <p>
 * Any parameter whose name starts with {@code routeBuilder} (case-insensitive) is
 * interpreted as one or more comma-separated route sources. Each value may be:
 * <ul>
 * <li>a {@code #name} reference resolved from the registry</li>
 * <li>a resource URI (e.g. classpath/file) containing an XML routes definition</li>
 * <li>a {@code packagescan:&lt;package&gt;} directive locating {@link RouteBuilder} implementations</li>
 * <li>a fully qualified {@link RouteBuilder} class name</li>
 * </ul>
 * Consumed parameters are removed from the map.
 *
 * @param map parameters
 * @return a list of routes, which can be of different types. See source code for more details.
 */
private List<Object> extractRoutes(Map<String, Object> map) {
    List<Object> answer = new ArrayList<Object>();
    // remember consumed keys so they can be removed after iteration
    // (removing during the entrySet loop would break the iterator)
    List<String> names = new ArrayList<String>();
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        if (entry.getKey().toLowerCase(Locale.UK).startsWith("routebuilder")) {
            names.add(entry.getKey());
            // we can have multiple values assigned, separated by comma, so create an iterator
            String value = (String) entry.getValue();
            Iterator<Object> it = ObjectHelper.createIterator(value);
            while (it.hasNext()) {
                value = (String) it.next();
                if (ObjectHelper.isNotEmpty(value)) {
                    // trim value before usage, as people can indent the values
                    value = value.trim();
                    Object target = null;
                    if (value.startsWith("#")) {
                        // a reference lookup in the registry
                        value = value.substring(1);
                        target = lookupRegistryByName(value);
                    } else if (ResourceHelper.hasScheme(value)) {
                        // XML resource from classpath or file system
                        InputStream is = null;
                        try {
                            is = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext.getClassResolver(), value);
                            target = camelContext.loadRoutesDefinition(is);
                        } catch (Exception e) {
                            throw new RuntimeException("Error loading routes from resource: " + value, e);
                        } finally {
                            IOHelper.close(is, entry.getKey(), LOG);
                        }
                    } else if (value.startsWith("packagescan:")) {
                        // using package scanning; strip the prefix by its length instead of a magic offset
                        String path = value.substring("packagescan:".length());
                        Set<Class<?>> classes = camelContext.getPackageScanClassResolver().findImplementations(RouteBuilder.class, path);
                        if (!classes.isEmpty()) {
                            Set<RouteBuilder> builders = new LinkedHashSet<RouteBuilder>();
                            target = builders;
                            for (Class<?> clazz : classes) {
                                try {
                                    RouteBuilder route = (RouteBuilder) camelContext.getInjector().newInstance(clazz);
                                    builders.add(route);
                                } catch (Exception e) {
                                    throw new RuntimeException("Error creating RouteBuilder " + clazz, e);
                                }
                            }
                        }
                    } else {
                        // assume its a FQN classname for a RouteBuilder class
                        try {
                            Class<RouteBuilder> clazz = camelContext.getClassResolver().resolveMandatoryClass(value, RouteBuilder.class);
                            target = camelContext.getInjector().newInstance(clazz);
                        } catch (Exception e) {
                            throw new RuntimeException("Error creating RouteBuilder " + value, e);
                        }
                    }
                    if (target != null) {
                        answer.add(target);
                    }
                }
            }
        }
    }
    // after adding the route builders we should remove them from the map
    for (String name : names) {
        map.remove(name);
    }
    return answer;
}
/**
 * Looks up the given named bean in the {@link Registry} created by {@link #createRegistry()}.
 */
private Object lookupRegistryByName(String name) {
    return registry.lookupByName(name);
}
}
| CAMEL-7711: camel-servletlistener - Store created CamelContext on ServletContext attribute
| components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelServletContextListener.java | CAMEL-7711: camel-servletlistener - Store created CamelContext on ServletContext attribute |
|
Java | apache-2.0 | 0dd03ae48c6c385c152dd8f530b0c31a8f6e8f7d | 0 | datanucleus/tests,datanucleus/tests,datanucleus/tests,datanucleus/tests,datanucleus/tests | /**********************************************************************
Copyright (c) 2004 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors :
...
***********************************************************************/
package org.datanucleus.tests.types;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import javax.jdo.FetchPlan;
import javax.jdo.JDOUserException;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.jdo.Transaction;
import org.datanucleus.enhancement.Persistable;
import org.datanucleus.tests.JDOPersistenceTestCase;
import org.jpox.samples.array.ArrayElement;
import org.jpox.samples.array.ArrayHolderInterface;
import org.jpox.samples.array.BigDecimalArray;
import org.jpox.samples.array.BigIntegerArray;
import org.jpox.samples.array.BooleanArray;
import org.jpox.samples.array.BooleanObjectArray;
import org.jpox.samples.array.ByteArray;
import org.jpox.samples.array.ByteObjectArray;
import org.jpox.samples.array.CharArray;
import org.jpox.samples.array.CharObjectArray;
import org.jpox.samples.array.DateArray;
import org.jpox.samples.array.DoubleArray;
import org.jpox.samples.array.DoubleObjectArray;
import org.jpox.samples.array.FloatArray;
import org.jpox.samples.array.FloatObjectArray;
import org.jpox.samples.array.IntArray;
import org.jpox.samples.array.IntObjectArray;
import org.jpox.samples.array.InterfaceArray;
import org.jpox.samples.array.LocaleArray;
import org.jpox.samples.array.LongArray;
import org.jpox.samples.array.LongObjectArray;
import org.jpox.samples.array.ObjectArray;
import org.jpox.samples.array.PersistableArray;
import org.jpox.samples.array.ShortArray;
import org.jpox.samples.array.ShortObjectArray;
import org.jpox.samples.array.StringArray;
import org.jpox.samples.interfaces.Rectangle;
import org.jpox.samples.interfaces.Shape;
/**
* Test case to test use of Arrays.
* @version $Revision: 1.5 $
**/
public class ArrayTest extends JDOPersistenceTestCase
{
protected Transaction tx;
protected PersistenceManager pm;
/**
 * @param name the name of the test case to run
 */
public ArrayTest(String name)
{
    super(name);
}
// ---------------------------- Serialised tests --------------------------------------
/**
 * Test for boolean[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testBooleanArray()
{
    boolean[] elements = new boolean[] {true, false, true, false};
    BooleanArray holder = new BooleanArray(elements, elements);
    performArrayTest(holder, boolean[].class, elements, elements, 0.0);
    // null arrays should be stored and retrieved as null
    holder = new BooleanArray(null, null);
    performArrayTest(holder, boolean[].class, null, null, 0.0);
}
/**
 * Test for byte[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testByteArray()
{
    byte[] elements = new byte[] {1, 0, 1, 1};
    ByteArray holder = new ByteArray(elements, elements);
    performArrayTest(holder, byte[].class, elements, elements, 0.0);
    holder = new ByteArray(null, null);
    performArrayTest(holder, byte[].class, null, null, 0.0);
}
/**
 * Test for char[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testCharArray()
{
    char[] elements = new char[] {'A', 'B', 'C'};
    CharArray holder = new CharArray(elements, elements);
    performArrayTest(holder, char[].class, elements, elements, 0.0);
    holder = new CharArray(null, null);
    performArrayTest(holder, char[].class, null, null, 0.0);
}
/**
 * Test for double[] array, stored serialised or in a join table.
 * Uses a small delta for floating point comparison.
 */
public void testDoubleArray()
{
    double[] elements = new double[] {12.34567, 23.45678, 1.00, -299.89};
    DoubleArray holder = new DoubleArray(elements, elements);
    performArrayTest(holder, double[].class, elements, elements, 0.00001);
    holder = new DoubleArray(null, null);
    performArrayTest(holder, double[].class, null, null, 0.0);
}
/**
 * Test for float[] array, stored serialised or in a join table.
 * Uses a small delta for floating point comparison.
 */
public void testFloatArray()
{
    float[] elements = new float[] {(float)12.34, (float)34.5};
    FloatArray holder = new FloatArray(elements, elements);
    performArrayTest(holder, float[].class, elements, elements, 0.00001);
    holder = new FloatArray(null, null);
    performArrayTest(holder, float[].class, null, null, 0.0);
}
/**
 * Test for int[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testIntArray()
{
    int[] elements = new int[] {2001, 4001, 6004, 4000};
    IntArray holder = new IntArray(elements, elements);
    performArrayTest(holder, int[].class, elements, elements, 0.0);
    holder = new IntArray(null, null);
    performArrayTest(holder, int[].class, null, null, 0.0);
}
/**
 * Test for long[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testLongArray()
{
    long[] elements = new long[] {123456789, 432156789, 1};
    LongArray holder = new LongArray(elements, elements);
    performArrayTest(holder, long[].class, elements, elements, 0.0);
    holder = new LongArray(null, null);
    performArrayTest(holder, long[].class, null, null, 0.0);
}
/**
 * Test for short[] array, stored serialised or in a join table.
 * Also verifies that null arrays round-trip as null.
 */
public void testShortArray()
{
    short[] elements = new short[] {123, 24, 1};
    ShortArray holder = new ShortArray(elements, elements);
    performArrayTest(holder, short[].class, elements, elements, 0.0);
    holder = new ShortArray(null, null);
    performArrayTest(holder, short[].class, null, null, 0.0);
}
/**
* Test for Boolean[] array, stored serialised or in a join table.
*/
public void testBooleanObjectArray()
{
Boolean[] elements = new Boolean[] {new Boolean(true), new Boolean(false), new Boolean(true), new Boolean(false)};
BooleanObjectArray holder = new BooleanObjectArray(elements, elements);
performArrayTest(holder, Boolean[].class, elements, elements, 0.0);
holder = new BooleanObjectArray(null, null);
performArrayTest(holder, Boolean[].class, null, null, 0.0);
}
/**
* Test for Byte[] array, stored serialised or in a join table.
*/
public void testByteObjectArray()
{
Byte[] elements = new Byte[] {new Byte("1"), new Byte("0"), new Byte("1"), new Byte("1")};
ByteObjectArray holder = new ByteObjectArray(elements, elements);
performArrayTest(holder, Byte[].class, elements, elements, 0.0);
holder = new ByteObjectArray(null, null);
performArrayTest(holder, Byte[].class, null, null, 0.0);
}
/**
* Test for Character[] array, stored serialised or in a join table.
*/
public void testCharObjectArray()
{
Character[] elements = new Character[] {new Character('A'), new Character('B'), new Character('C')};
CharObjectArray holder = new CharObjectArray(elements, elements);
performArrayTest(holder, Character[].class, elements, elements, 0.0);
holder = new CharObjectArray(null, null);
performArrayTest(holder, Character[].class, null, null, 0.0);
}
/**
* Test for Double[] array, stored serialised or in a join table.
*/
public void testDoubleObjectArray()
{
Double[] elements = new Double[] {new Double(12.34567), new Double(23.45678), new Double(1.00), new Double(-299.89)};
DoubleObjectArray holder = new DoubleObjectArray(elements, elements);
performArrayTest(holder, Double[].class, elements, elements, 0.00001);
holder = new DoubleObjectArray(null, null);
performArrayTest(holder, Double[].class, null, null, 0.0);
}
/**
 * Test for Float[] array, stored serialised or in a join table.
 * Uses a comparison tolerance of 0.00001 since values go through a DB round-trip.
 */
public void testFloatObjectArray()
{
    // Float.valueOf replaces the deprecated Float(double) constructor;
    // 12.34f is the same float value new Float(12.34) produced by narrowing.
    Float[] elements = new Float[] {Float.valueOf(12.34f), Float.valueOf(34.5f)};
    FloatObjectArray holder = new FloatObjectArray(elements, elements);
    performArrayTest(holder, Float[].class, elements, elements, 0.00001);

    // A holder with null arrays must also persist and read back as null
    holder = new FloatObjectArray(null, null);
    performArrayTest(holder, Float[].class, null, null, 0.0);
}
/**
 * Test for Integer[] array, stored serialised or in a join table.
 */
public void testIntObjectArray()
{
    // Integer.valueOf replaces the deprecated Integer(int) constructor
    Integer[] elements = new Integer[] {Integer.valueOf(2001), Integer.valueOf(4001), Integer.valueOf(6004), Integer.valueOf(4000)};
    IntObjectArray holder = new IntObjectArray(elements, elements);
    performArrayTest(holder, Integer[].class, elements, elements, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new IntObjectArray(null, null);
    performArrayTest(holder, Integer[].class, null, null, 0.0);
}
/**
 * Test for Long[] array, stored serialised or in a join table.
 */
public void testLongObjectArray()
{
    // Long.valueOf replaces the deprecated Long(long) constructor
    Long[] elements = new Long[] {Long.valueOf(123456789), Long.valueOf(432156789), Long.valueOf(1)};
    LongObjectArray holder = new LongObjectArray(elements, elements);
    performArrayTest(holder, Long[].class, elements, elements, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new LongObjectArray(null, null);
    performArrayTest(holder, Long[].class, null, null, 0.0);
}
/**
 * Test for Short[] array, stored serialised or in a join table.
 */
public void testShortObjectArray()
{
    // Short.valueOf(String) parses identically to the deprecated Short(String)
    // constructor but returns cached instances.
    Short[] elements = new Short[] {Short.valueOf("123"), Short.valueOf("24"), Short.valueOf("1")};
    ShortObjectArray holder = new ShortObjectArray(elements, elements);
    performArrayTest(holder, Short[].class, elements, elements, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new ShortObjectArray(null, null);
    performArrayTest(holder, Short[].class, null, null, 0.0);
}
/**
 * Test for BigDecimal[] array, stored serialised or in a join table.
 * Values are deliberately built with the BigDecimal(double) constructor and
 * compared with a tolerance, since the exact binary expansion of each double
 * is what gets persisted.
 */
public void testBigDecimalArray()
{
    BigDecimal[] values = new BigDecimal[4];
    values[0] = new BigDecimal(12.34567);
    values[1] = new BigDecimal(23.45678);
    values[2] = new BigDecimal(1.00);
    values[3] = new BigDecimal(-299.89);
    BigDecimalArray container = new BigDecimalArray(values, values);
    performArrayTest(container, BigDecimal[].class, values, values, 0.00001);

    // A holder with null arrays must also persist and read back as null
    container = new BigDecimalArray(null, null);
    performArrayTest(container, BigDecimal[].class, null, null, 0.0);
}
/**
 * Test for BigInteger[] array, stored serialised or in a join table.
 */
public void testBigIntegerArray()
{
    // BigInteger.valueOf(long) yields the same values as parsing the decimal strings
    BigInteger[] values = new BigInteger[] {BigInteger.valueOf(12), BigInteger.valueOf(23), BigInteger.valueOf(1), BigInteger.valueOf(-299)};
    BigIntegerArray container = new BigIntegerArray(values, values);
    performArrayTest(container, BigInteger[].class, values, values, 0.0);

    // A holder with null arrays must also persist and read back as null
    container = new BigIntegerArray(null, null);
    performArrayTest(container, BigInteger[].class, null, null, 0.0);
}
/**
 * Test for Date[] array, stored serialised or in a join table.
 */
public void testDateArray()
{
    // Build the dates from their epoch-millisecond values
    long[] millis = {1000, 10000000, 20000000};
    Date[] dates = new Date[millis.length];
    for (int i = 0; i < millis.length; i++)
    {
        dates[i] = new Date(millis[i]);
    }
    DateArray container = new DateArray(dates, dates);
    performArrayTest(container, Date[].class, dates, dates, 0.0);

    // A holder with null arrays must also persist and read back as null
    container = new DateArray(null, null);
    performArrayTest(container, Date[].class, null, null, 0.0);
}
/**
 * Test for Locale[] array, stored serialised or in a join table.
 */
public void testLocaleArray()
{
    Locale[] locales = {Locale.ENGLISH, Locale.JAPANESE, Locale.GERMAN};
    LocaleArray container = new LocaleArray(locales, locales);
    performArrayTest(container, Locale[].class, locales, locales, 0.0);

    // A holder with null arrays must also persist and read back as null
    container = new LocaleArray(null, null);
    performArrayTest(container, Locale[].class, null, null, 0.0);
}
/**
 * Test for String[] array, stored serialised or in a join table.
 */
public void testStringArray()
{
    String[] values = {"First string", "Second string", "Third string that is much longer"};
    StringArray container = new StringArray(values, values);
    performArrayTest(container, String[].class, values, values, 0.0);

    // A holder with null arrays must also persist and read back as null
    container = new StringArray(null, null);
    performArrayTest(container, String[].class, null, null, 0.0);
}
/**
 * Test for String[] array, stored serialised or in a join table, including some nulls.
 */
public void testStringArrayWithNulls()
{
    // Middle element is null to exercise null-element persistence
    String[] values = {"First string", null, "Third string that is much longer"};
    StringArray container = new StringArray(values, values);
    performArrayTest(container, String[].class, values, values, 0.0);

    // A holder with null arrays must also persist and read back as null
    container = new StringArray(null, null);
    performArrayTest(container, String[].class, null, null, 0.0);
}
/**
 * Test for a field of type Interface[] stored serialised or in a join table.
 * The expected arrays are clones of the originals, since the originals become
 * managed on persist.
 */
public void testInterfaceArray()
{
    Shape[] shapes1 = new Shape[] {new Rectangle(1, 25.0, 20.0), new Rectangle(2, 35.0, 10.0)};
    Shape[] shapes2 = new Shape[] {new Rectangle(3, 25.0, 20.0), new Rectangle(4, 35.0, 10.0)};
    InterfaceArray holder = new InterfaceArray(shapes1, shapes2);

    // Clone each element for the expected results
    Shape[] expectedShapes1 = new Shape[shapes1.length];
    Shape[] expectedShapes2 = new Shape[shapes2.length];
    for (int i = 0; i < shapes1.length; i++)
    {
        expectedShapes1[i] = (Shape)((Rectangle)shapes1[i]).clone();
        expectedShapes2[i] = (Shape)((Rectangle)shapes2[i]).clone();
    }
    performArrayTest(holder, Shape[].class, expectedShapes1, expectedShapes2, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new InterfaceArray(null, null);
    performArrayTest(holder, Shape[].class, null, null, 0.0);

    clean(Rectangle.class);
}
/**
 * Test for a field of type PC[] stored serialised or in a join table.
 * The expected arrays are clones of the originals, since the originals become
 * managed on persist.
 */
public void testPersistableArray()
{
    ArrayElement[] elements1 = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        new ArrayElement("102", "Second element"),
        new ArrayElement("103", "Third element")
    };
    ArrayElement[] elements2 = new ArrayElement[] {
        new ArrayElement("104", "Fourth element"),
        new ArrayElement("105", "Fifth element"),
        new ArrayElement("106", "Sixth element")
    };
    PersistableArray holder = new PersistableArray(elements1, elements2);

    // Clone each element for the expected results
    ArrayElement[] expectedElements1 = new ArrayElement[elements1.length];
    ArrayElement[] expectedElements2 = new ArrayElement[elements2.length];
    for (int i = 0; i < elements1.length; i++)
    {
        expectedElements1[i] = (ArrayElement)elements1[i].clone();
        expectedElements2[i] = (ArrayElement)elements2[i].clone();
    }
    performArrayTest(holder, ArrayElement[].class, expectedElements1, expectedElements2, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new PersistableArray(null, null);
    performArrayTest(holder, ArrayElement[].class, null, null, 0.0);
}
/**
 * Test for a field of type PC[] stored in a join table, and having nulls.
 * Middle element of each array is null; expected arrays mirror that.
 */
public void testPersistableArrayWithNulls()
{
    ArrayElement[] elements1 = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        null,
        new ArrayElement("103", "Third element")
    };
    ArrayElement[] elements2 = new ArrayElement[] {
        new ArrayElement("104", "Fourth element"),
        null,
        new ArrayElement("106", "Sixth element")
    };
    PersistableArray holder = new PersistableArray(elements1, elements2);

    // Clone each non-null element for the expected results, preserving the nulls
    ArrayElement[] expectedElements1 = new ArrayElement[elements1.length];
    ArrayElement[] expectedElements2 = new ArrayElement[elements2.length];
    for (int i = 0; i < elements1.length; i++)
    {
        expectedElements1[i] = (elements1[i] != null) ? (ArrayElement)elements1[i].clone() : null;
        expectedElements2[i] = (elements2[i] != null) ? (ArrayElement)elements2[i].clone() : null;
    }
    performArrayTest(holder, ArrayElement[].class, expectedElements1, expectedElements2, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new PersistableArray(null, null);
    performArrayTest(holder, ArrayElement[].class, null, null, 0.0);
}
/**
 * Test for a field of type Object[] (as PCs) stored.
 * The expected arrays are clones of the originals, since the originals become
 * managed on persist.
 */
public void testObjectArray()
{
    ArrayElement[] elements1 = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        new ArrayElement("102", "Second element"),
        new ArrayElement("103", "Third element")
    };
    ArrayElement[] elements2 = new ArrayElement[] {
        new ArrayElement("104", "Fourth element"),
        new ArrayElement("105", "Fifth element"),
        new ArrayElement("106", "Sixth element")
    };
    ObjectArray holder = new ObjectArray(elements1, elements2);

    // Clone each element for the expected results
    ArrayElement[] expectedElements1 = new ArrayElement[elements1.length];
    ArrayElement[] expectedElements2 = new ArrayElement[elements2.length];
    for (int i = 0; i < elements1.length; i++)
    {
        expectedElements1[i] = (ArrayElement)elements1[i].clone();
        expectedElements2[i] = (ArrayElement)elements2[i].clone();
    }
    performArrayTest(holder, ArrayElement[].class, expectedElements1, expectedElements2, 0.0);

    // A holder with null arrays must also persist and read back as null
    holder = new ObjectArray(null, null);
    performArrayTest(holder, ArrayElement[].class, null, null, 0.0);
}
/**
* Test for a field of type PC[] stored using a foreign-key.
*/
/*public void testForeignKeyPCArray()
{
org.jpox.samples.array.foreignkey.Product[] products = new org.jpox.samples.array.foreignkey.Product[3];
products[0] = new org.jpox.samples.array.foreignkey.Product("100", "Toaster", "New toaster", "http://www.jpox.org", "GBP", 17.5, 17.5, 17.5, 0.0, 1);
products[1] = new org.jpox.samples.array.foreignkey.Product("101", "Kettle", "Kettle", "http://www.jpox.org", "GBP", 10.0, 10.0, 10.0, 0.0, 1);
products[2] = new org.jpox.samples.array.foreignkey.Product("102", "Microwave", "Microwave oven", "http://www.jpox.org", "GBP", 65.0, 65.0, 65.0, 0.0, 1);
MyPCArrayFK holder = new MyPCArrayFK(products);
org.jpox.samples.array.foreignkey.Product[] expectedProducts = new org.jpox.samples.array.foreignkey.Product[3];
expectedProducts[0] = (org.jpox.samples.array.foreignkey.Product)products[0].clone();
expectedProducts[1] = (org.jpox.samples.array.foreignkey.Product)products[1].clone();
expectedProducts[2] = (org.jpox.samples.array.foreignkey.Product)products[2].clone();
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, expectedProducts);
holder = new MyPCArrayFK(null);
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, null);
}*/
// ---------------------------- Array querying tests --------------------------------------
/**
 * Test for a field of type int[] stored in a join table.
 * Runs the query tests (size/length/contains filters) against the persisted holder.
 */
public void testJoinTableIntQueryArray()
{
int[] elements = new int[] {2001, 4001, 6004, 4000};
IntArray holder = new IntArray(elements, elements);
// Persist the holder and query it; performArrayQueryTest cleans out its data on exit
performArrayQueryTest(holder, int[].class, elements);
// NOTE(review): this re-uses the same holder instance but asserts that both arrays
// read back as null — presumably it relies on the preceding clean() having removed
// the join-table rows. The other JoinTable*QueryArray tests omit this step; confirm
// this second call is intentional.
performArrayTest(holder, int[].class, null, null, 0.0);
}
/**
 * Test for a field of type String[] stored in a join table.
 */
public void testJoinTableStringQueryArray()
{
    String[] values = {"First string", "Second string", "Third string that is much longer"};
    StringArray container = new StringArray(values, values);
    performArrayQueryTest(container, String[].class, values);
}
/**
 * Test for a field of type PC[] stored using a join table.
 * The expected array holds clones of the first array's elements, since the
 * originals become managed on persist.
 */
public void testJoinTablePCQueryArray()
{
    ArrayElement[] elements1 = new ArrayElement[] {
        new ArrayElement("100", "First"),
        new ArrayElement("101", "Second"),
        new ArrayElement("102", "Third")
    };
    ArrayElement[] elements2 = new ArrayElement[] {
        new ArrayElement("103", "Fourth"),
        new ArrayElement("104", "Fifth"),
        new ArrayElement("105", "Sixth")
    };
    PersistableArray holder = new PersistableArray(elements1, elements2);

    // Clone the first array's elements for the expected results
    ArrayElement[] expectedElements1 = new ArrayElement[elements1.length];
    for (int i = 0; i < elements1.length; i++)
    {
        expectedElements1[i] = (ArrayElement)elements1[i].clone();
    }
    performArrayQueryTest(holder, ArrayElement[].class, expectedElements1);
}
/**
 * Test for a field of type Interface[] stored using a join table.
 * The expected array holds clones of the first array's shapes, since the
 * originals become managed on persist.
 */
public void testJoinTableInterfaceQueryArray()
{
    Shape[] shapes1 = new Shape[] {new Rectangle(1, 25.0, 20.0), new Rectangle(2, 35.0, 10.0)};
    Shape[] shapes2 = new Shape[] {new Rectangle(3, 25.0, 20.0), new Rectangle(4, 35.0, 10.0)};
    InterfaceArray holder = new InterfaceArray(shapes1, shapes2);

    // Clone the first array's shapes for the expected results
    Shape[] expectedShapes1 = new Shape[shapes1.length];
    for (int i = 0; i < shapes1.length; i++)
    {
        expectedShapes1[i] = (Shape)((Rectangle)shapes1[i]).clone();
    }
    performArrayQueryTest(holder, Shape[].class, expectedShapes1);

    clean(Rectangle.class);
}
/**
* Test for a field of type PC[] stored using a foreign-key.
*/
/*public void testForeignKeyPCQueryArray()
{
org.jpox.samples.array.foreignkey.Product[] products = new org.jpox.samples.array.foreignkey.Product[3];
products[0] = new org.jpox.samples.array.foreignkey.Product("100", "Toaster", "New toaster", "http://www.jpox.org", "GBP", 17.5, 17.5, 17.5, 0.0, 1);
products[1] = new org.jpox.samples.array.foreignkey.Product("101", "Kettle", "Kettle", "http://www.jpox.org", "GBP", 10.0, 10.0, 10.0, 0.0, 1);
products[2] = new org.jpox.samples.array.foreignkey.Product("102", "Microwave", "Microwave oven", "http://www.jpox.org", "GBP", 65.0, 65.0, 65.0, 0.0, 1);
MyPCArrayFK holder = new MyPCArrayFK(products);
org.jpox.samples.array.foreignkey.Product[] expectedProducts = new org.jpox.samples.array.foreignkey.Product[3];
expectedProducts[0] = (org.jpox.samples.array.foreignkey.Product)products[0].clone();
expectedProducts[1] = (org.jpox.samples.array.foreignkey.Product)products[1].clone();
expectedProducts[2] = (org.jpox.samples.array.foreignkey.Product)products[2].clone();
performArrayQueryTest(holder, org.jpox.samples.array.foreignkey.Product[].class, expectedProducts);
holder = new MyPCArrayFK(null);
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, null);
}*/
// -------------------------------- Utilities ------------------------------------------
/**
 * Method to perform a test of an array type.
 * Persists the holder, retrieves it in a fresh PersistenceManager, and verifies
 * that both of its arrays match the expected contents, element types and order.
 * Float/Double/BigDecimal elements are compared with the supplied tolerance
 * (now applied consistently to BOTH arrays; previously only array 2 used it);
 * all other element types are compared with equals(). The holder is also
 * detached with FetchPlan.ALL as a basic detach check, and all data is cleaned
 * out in the finally block.
 * @param arrayHolder The container object holding the array
 * @param arrayType The type of the array
 * @param expectedArray1 The array elements that we expect for array 1.
 * @param expectedArray2 The array elements that we expect for array 2.
 * @param rounding Permitted difference when comparing Float/Double/BigDecimal elements
 */
public void performArrayTest(Object arrayHolder, Class arrayType,
        Object expectedArray1, Object expectedArray2, double rounding)
{
    try
    {
        // Persist the container
        PersistenceManager pm = pmf.getPersistenceManager();
        Transaction tx = pm.currentTransaction();
        try
        {
            tx.begin();
            pm.makePersistent(arrayHolder);
            tx.commit();
        }
        catch (JDOUserException e)
        {
            LOG.error(">> Exception thrown in test", e);
            fail("Exception thrown while creating object with array of type " + arrayType + " : " + e.getMessage());
        }
        finally
        {
            if (tx.isActive())
            {
                tx.rollback();
            }
            pm.close();
        }

        // Retrieve the holder in a fresh PM and validate both of its arrays
        pm = pmf.getPersistenceManager();
        tx = pm.currentTransaction();
        try
        {
            tx.begin();

            Query q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            List c = (List)q.execute();
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
            Iterator iter = c.iterator();
            while (iter.hasNext())
            {
                ArrayHolderInterface theArrayHolder = (ArrayHolderInterface)iter.next();

                // Access the arrays reflectively via the common holder interface
                Object theArray1 = getHolderArray(theArrayHolder, "getArray1", arrayHolder);
                Object theArray2 = getHolderArray(theArrayHolder, "getArray2", arrayHolder);

                // Compare both arrays against expectations with the same tolerance rules
                compareArrayWithExpected(arrayType, "1", theArray1, expectedArray1, rounding);
                compareArrayWithExpected(arrayType, "2", theArray2, expectedArray2, rounding);

                // Detach the holder with its array(s) - test of detaching
                pm.getFetchPlan().addGroup(FetchPlan.ALL);
                pm.detachCopy(theArrayHolder);
            }

            tx.commit();
        }
        catch (JDOUserException e)
        {
            LOG.error(">> Exception thrown in test", e);
            fail("Exception thrown while querying objects with array of type " + arrayType.getName() + " : " + e.getMessage());
        }
        finally
        {
            if (tx.isActive())
            {
                tx.rollback();
            }
            pm.close();
        }
    }
    finally
    {
        // Clean out our data, including persistable elements if the component type is PC
        clean(arrayHolder.getClass());
        Class arrayElementType = arrayType.getComponentType();
        if (Persistable.class.isAssignableFrom(arrayElementType))
        {
            clean(arrayElementType);
        }
    }
}

/**
 * Retrieves one of the holder's arrays via the named no-arg accessor of
 * ArrayHolderInterface, failing the test if reflection throws.
 * @param theArrayHolder The retrieved holder instance
 * @param accessorName "getArray1" or "getArray2"
 * @param arrayHolder The originally-persisted holder (used only in the failure message)
 * @return The array returned by the accessor (may be null)
 */
private Object getHolderArray(ArrayHolderInterface theArrayHolder, String accessorName, Object arrayHolder)
{
    try
    {
        Method accessor = ArrayHolderInterface.class.getMethod(accessorName, new Class[] {});
        return accessor.invoke(theArrayHolder, new Object[] {});
    }
    catch (Exception e)
    {
        LOG.error("Exception thrown in test", e);
        fail("Failed to get the array(s) from the holder " + arrayHolder.getClass().getName());
        return null; // unreachable - fail() throws
    }
}

/**
 * Compares a retrieved array against its expected contents, element by element.
 * Float/Double/BigDecimal elements are compared as doubles within the given
 * tolerance; all other types with equals(). Fails when exactly one of the two
 * arrays is null; both null counts as success.
 * @param arrayType The declared array type (used in assertion messages)
 * @param label "1" or "2", identifying which holder array is being checked
 * @param theArray The actual array read back from the datastore (may be null)
 * @param expectedArray The expected contents (may be null)
 * @param rounding Permitted difference for floating-point comparisons
 */
private void compareArrayWithExpected(Class arrayType, String label, Object theArray, Object expectedArray, double rounding)
{
    if (theArray != null && expectedArray != null)
    {
        assertEquals("Number of items in the " + arrayType.getName() + " (" + label + ") was incorrect",
            Array.getLength(expectedArray), Array.getLength(theArray));
        for (int i=0;i<Array.getLength(expectedArray);i++)
        {
            Object expValue = Array.get(expectedArray, i);
            Object actValue = Array.get(theArray, i);
            String expType = (expValue != null ? expValue.getClass().getName() : null);
            String actType = (actValue != null ? actValue.getClass().getName() : null);
            assertEquals("Item " + i + " in " + arrayType.getName() + " (" + label + ") is of wrong type",
                expType, actType);
            if (expValue instanceof Float || expValue instanceof Double || expValue instanceof BigDecimal)
            {
                // Floating-point types may lose precision in the datastore round-trip,
                // so compare their double values within the permitted tolerance
                double expected = ((Number)expValue).doubleValue();
                double actual = ((Number)actValue).doubleValue();
                assertTrue("Item " + i + " in " + arrayType.getName() + " (" + label + ") was wrong (" + expValue + ")",
                    (expected <= (actual+rounding)) && (expected >= (actual-rounding)));
            }
            else
            {
                assertEquals("Item " + i + " in " + arrayType.getName() + " (" + label + ") was wrong",
                    expValue, actValue);
            }
        }
    }
    else if (theArray == null && expectedArray != null)
    {
        fail("No array items retrieved for (" + label + ") yet should have had " + Array.getLength(expectedArray));
    }
    else if (theArray != null && expectedArray == null)
    {
        fail("Array items returned for (" + label + ") yet should have been null");
    }
    // else both null : success
}
/**
 * Method to perform a test of an array type.
 * Persists the holder and, inside the same transaction, runs a series of JDOQL
 * queries against its second array ("array2" of ArrayHolderInterface, stored in
 * a join table) using size(), length and contains() filters, verifying each
 * returns exactly the one persisted holder. All data is cleaned out on exit.
 * @param arrayHolder The container object holding the array
 * @param arrayType The type of the array
 * @param expectedArray The array elements that we expect.
 */
public void performArrayQueryTest(Object arrayHolder, Class arrayType, Object expectedArray)
{
try
{
// Persist the container
PersistenceManager pm = pmf.getPersistenceManager();
Transaction tx = pm.currentTransaction();
try
{
tx.begin();
pm.makePersistent(arrayHolder);
// Flush so the queries below can see the just-persisted holder in this txn
pm.flush();
// Makes use of "array2" in ArrayHolderInterface as the array that is queryable (join table storage)
// Query 1: size() filter matching the expected element count
Query q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
q.setFilter("this.array2.size() == :p");
Collection c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
// Query 2: "length" field syntax should behave the same as size()
q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
q.setFilter("this.array2.length == :p");
c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
// Query 3: NOTE(review) - verbatim repeat of query 1; possibly a deliberate
// re-compilation check but looks redundant - confirm intent
q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
q.setFilter("this.array2.size() == :p");
c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
// Query 4: contains() with the second element of the holder's array2
q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
q.setFilter("this.array2.contains(:p)");
c = (Collection) q.execute(Array.get(((ArrayHolderInterface)arrayHolder).getArray2(),1));
assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
// Query 5: size() of 0 matches nothing, so the result must be empty
q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
q.setFilter("this.array2.size() == :p");
c = (Collection) q.execute(new Integer(0));
assertEquals(c.size(), 0);
tx.commit();
}
catch (JDOUserException e)
{
LOG.error(">> Exception thrown in test", e);
fail("Exception thrown while creating object with array of type " + arrayType + " : " + e.getMessage());
}
finally
{
if (tx.isActive())
{
tx.rollback();
}
pm.close();
}
}
finally
{
// Clean out our data, including persistable elements if the component type is PC
clean(arrayHolder.getClass());
Class arrayElementType = arrayType.getComponentType();
if (Persistable.class.isAssignableFrom(arrayElementType))
{
clean(arrayElementType);
}
}
}
} | jdo/general/src/test/org/datanucleus/tests/types/ArrayTest.java | /**********************************************************************
Copyright (c) 2004 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors :
...
***********************************************************************/
package org.datanucleus.tests.types;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import javax.jdo.FetchPlan;
import javax.jdo.JDOUserException;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.jdo.Transaction;
import org.datanucleus.enhancement.Persistable;
import org.datanucleus.tests.JDOPersistenceTestCase;
import org.jpox.samples.array.ArrayElement;
import org.jpox.samples.array.ArrayHolderInterface;
import org.jpox.samples.array.BigDecimalArray;
import org.jpox.samples.array.BigIntegerArray;
import org.jpox.samples.array.BooleanArray;
import org.jpox.samples.array.BooleanObjectArray;
import org.jpox.samples.array.ByteArray;
import org.jpox.samples.array.ByteObjectArray;
import org.jpox.samples.array.CharArray;
import org.jpox.samples.array.CharObjectArray;
import org.jpox.samples.array.DateArray;
import org.jpox.samples.array.DoubleArray;
import org.jpox.samples.array.DoubleObjectArray;
import org.jpox.samples.array.FloatArray;
import org.jpox.samples.array.FloatObjectArray;
import org.jpox.samples.array.IntArray;
import org.jpox.samples.array.IntObjectArray;
import org.jpox.samples.array.InterfaceArray;
import org.jpox.samples.array.LocaleArray;
import org.jpox.samples.array.LongArray;
import org.jpox.samples.array.LongObjectArray;
import org.jpox.samples.array.ObjectArray;
import org.jpox.samples.array.PersistableArray;
import org.jpox.samples.array.ShortArray;
import org.jpox.samples.array.ShortObjectArray;
import org.jpox.samples.array.StringArray;
import org.jpox.samples.interfaces.Rectangle;
import org.jpox.samples.interfaces.Shape;
/**
* Test case to test use of Arrays.
* @version $Revision: 1.5 $
**/
public class ArrayTest extends JDOPersistenceTestCase
{
protected Transaction tx;
protected PersistenceManager pm;
public ArrayTest(String name)
{
super(name);
}
// ---------------------------- Serialised tests --------------------------------------
/**
* Test for boolean[] array, stored serialised or in a join table.
*/
public void testBooleanArray()
{
boolean[] elements = new boolean[] {true, false, true, false};
BooleanArray holder = new BooleanArray(elements, elements);
performArrayTest(holder, boolean[].class, elements, elements, 0.0);
holder = new BooleanArray(null, null);
performArrayTest(holder, boolean[].class, null, null, 0.0);
}
/**
* Test for byte[] array, stored serialised or in a join table.
*/
public void testByteArray()
{
byte[] elements = new byte[] {1, 0, 1, 1};
ByteArray holder = new ByteArray(elements, elements);
performArrayTest(holder, byte[].class, elements, elements, 0.0);
holder = new ByteArray(null, null);
performArrayTest(holder, byte[].class, null, null, 0.0);
}
/**
* Test for char[] array, stored serialised or in a join table.
*/
public void testCharArray()
{
char[] elements = new char[] {'A', 'B', 'C'};
CharArray holder = new CharArray(elements, elements);
performArrayTest(holder, char[].class, elements, elements, 0.0);
holder = new CharArray(null, null);
performArrayTest(holder, char[].class, null, null, 0.0);
}
/**
* Test for double[] array, stored serialised or in a join table.
*/
public void testDoubleArray()
{
double[] elements = new double[] {12.34567, 23.45678, 1.00, -299.89};
DoubleArray holder = new DoubleArray(elements, elements);
performArrayTest(holder, double[].class, elements, elements, 0.00001);
holder = new DoubleArray(null, null);
performArrayTest(holder, double[].class, null, null, 0.0);
}
/**
* Test for float[] array, stored serialised or in a join table.
*/
public void testFloatArray()
{
float[] elements = new float[] {(float)12.34, (float)34.5};
FloatArray holder = new FloatArray(elements, elements);
performArrayTest(holder, float[].class, elements, elements, 0.00001);
holder = new FloatArray(null, null);
performArrayTest(holder, float[].class, null, null, 0.0);
}
/**
* Test for int[] array, stored serialised or in a join table.
*/
public void testIntArray()
{
int[] elements = new int[] {2001, 4001, 6004, 4000};
IntArray holder = new IntArray(elements, elements);
performArrayTest(holder, int[].class, elements, elements, 0.0);
holder = new IntArray(null, null);
performArrayTest(holder, int[].class, null, null, 0.0);
}
/**
* Test for long[] array, stored serialised or in a join table.
*/
public void testLongArray()
{
long[] elements = new long[] {123456789, 432156789, 1};
LongArray holder = new LongArray(elements, elements);
performArrayTest(holder, long[].class, elements, elements, 0.0);
holder = new LongArray(null, null);
performArrayTest(holder, long[].class, null, null, 0.0);
}
/**
* Test for short[] array, stored serialised or in a join table.
*/
public void testShortArray()
{
short[] elements = new short[] {123, 24, 1};
ShortArray holder = new ShortArray(elements, elements);
performArrayTest(holder, short[].class, elements, elements, 0.0);
holder = new ShortArray(null, null);
performArrayTest(holder, short[].class, null, null, 0.0);
}
/**
* Test for Boolean[] array, stored serialised or in a join table.
*/
public void testBooleanObjectArray()
{
Boolean[] elements = new Boolean[] {new Boolean(true), new Boolean(false), new Boolean(true), new Boolean(false)};
BooleanObjectArray holder = new BooleanObjectArray(elements, elements);
performArrayTest(holder, Boolean[].class, elements, elements, 0.0);
holder = new BooleanObjectArray(null, null);
performArrayTest(holder, Boolean[].class, null, null, 0.0);
}
/**
* Test for Byte[] array, stored serialised or in a join table.
*/
public void testByteObjectArray()
{
Byte[] elements = new Byte[] {new Byte("1"), new Byte("0"), new Byte("1"), new Byte("1")};
ByteObjectArray holder = new ByteObjectArray(elements, elements);
performArrayTest(holder, Byte[].class, elements, elements, 0.0);
holder = new ByteObjectArray(null, null);
performArrayTest(holder, Byte[].class, null, null, 0.0);
}
/**
* Test for Character[] array, stored serialised or in a join table.
*/
public void testCharObjectArray()
{
Character[] elements = new Character[] {new Character('A'), new Character('B'), new Character('C')};
CharObjectArray holder = new CharObjectArray(elements, elements);
performArrayTest(holder, Character[].class, elements, elements, 0.0);
holder = new CharObjectArray(null, null);
performArrayTest(holder, Character[].class, null, null, 0.0);
}
/**
* Test for Double[] array, stored serialised or in a join table.
*/
public void testDoubleObjectArray()
{
Double[] elements = new Double[] {new Double(12.34567), new Double(23.45678), new Double(1.00), new Double(-299.89)};
DoubleObjectArray holder = new DoubleObjectArray(elements, elements);
performArrayTest(holder, Double[].class, elements, elements, 0.00001);
holder = new DoubleObjectArray(null, null);
performArrayTest(holder, Double[].class, null, null, 0.0);
}
/**
* Test for Float[] array, stored serialised or in a join table.
*/
public void testFloatObjectArray()
{
Float[] elements = new Float[] {new Float(12.34), new Float(34.5)};
FloatObjectArray holder = new FloatObjectArray(elements, elements);
performArrayTest(holder, Float[].class, elements, elements, 0.00001);
holder = new FloatObjectArray(null, null);
performArrayTest(holder, Float[].class, null, null, 0.0);
}
/**
* Test for Integer[] array, stored serialised or in a join table.
*/
public void testIntObjectArray()
{
Integer[] elements = new Integer[] {new Integer(2001), new Integer(4001), new Integer(6004), new Integer(4000)};
IntObjectArray holder = new IntObjectArray(elements, elements);
performArrayTest(holder, Integer[].class, elements, elements, 0.0);
holder = new IntObjectArray(null, null);
performArrayTest(holder, Integer[].class, null, null, 0.0);
}
/**
* Test for Long[] array, stored serialised or in a join table.
*/
public void testLongObjectArray()
{
Long[] elements = new Long[] {new Long(123456789), new Long(432156789), new Long(1)};
LongObjectArray holder = new LongObjectArray(elements, elements);
performArrayTest(holder, Long[].class, elements, elements, 0.0);
holder = new LongObjectArray(null, null);
performArrayTest(holder, Long[].class, null, null, 0.0);
}
/**
* Test for Short[] array, stored serialised or in a join table.
*/
public void testShortObjectArray()
{
Short[] elements = new Short[] {new Short("123"), new Short("24"), new Short("1")};
ShortObjectArray holder = new ShortObjectArray(elements, elements);
performArrayTest(holder, Short[].class, elements, elements, 0.0);
holder = new ShortObjectArray(null, null);
performArrayTest(holder, Short[].class, null, null, 0.0);
}
/**
* Test for BigDecimal[] array, stored serialised or in a join table.
*/
public void testBigDecimalArray()
{
BigDecimal[] elements = new BigDecimal[] {new BigDecimal(12.34567), new BigDecimal(23.45678),
new BigDecimal(1.00), new BigDecimal(-299.89)};
BigDecimalArray holder = new BigDecimalArray(elements, elements);
performArrayTest(holder, BigDecimal[].class, elements, elements, 0.00001);
holder = new BigDecimalArray(null, null);
performArrayTest(holder, BigDecimal[].class, null, null, 0.0);
}
/**
 * Test for BigInteger[] array, stored serialised or in a join table.
 */
public void testBigIntegerArray()
{
    // Populate the elements one at a time, including a negative value.
    BigInteger[] values = new BigInteger[4];
    values[0] = new BigInteger("12");
    values[1] = new BigInteger("23");
    values[2] = new BigInteger("1");
    values[3] = new BigInteger("-299");
    BigIntegerArray container = new BigIntegerArray(values, values);
    performArrayTest(container, BigInteger[].class, values, values, 0.0);
    container = new BigIntegerArray(null, null);
    performArrayTest(container, BigInteger[].class, null, null, 0.0);
}
/**
 * Test for Date[] array, stored serialised or in a join table.
 */
public void testDateArray()
{
    // Fixed epoch offsets keep the expected values deterministic.
    Date[] dates = new Date[] {new Date(1000), new Date(10000000), new Date(20000000)};
    DateArray arrayHolder = new DateArray(dates, dates);
    performArrayTest(arrayHolder, Date[].class, dates, dates, 0.0);
    // Null arrays must round-trip unchanged.
    arrayHolder = new DateArray(null, null);
    performArrayTest(arrayHolder, Date[].class, null, null, 0.0);
}
/**
 * Test for Locale[] array, stored serialised or in a join table.
 */
public void testLocaleArray()
{
    Locale[] locales = new Locale[] {Locale.ENGLISH, Locale.JAPANESE, Locale.GERMAN};
    LocaleArray arrayHolder = new LocaleArray(locales, locales);
    performArrayTest(arrayHolder, Locale[].class, locales, locales, 0.0);
    // Null arrays must round-trip unchanged.
    arrayHolder = new LocaleArray(null, null);
    performArrayTest(arrayHolder, Locale[].class, null, null, 0.0);
}
/**
 * Test for String[] array, stored serialised or in a join table.
 */
public void testStringArray()
{
    // Strings of varying length exercise the column sizing.
    String[] strings = new String[3];
    strings[0] = "First string";
    strings[1] = "Second string";
    strings[2] = "Third string that is much longer";
    StringArray arrayHolder = new StringArray(strings, strings);
    performArrayTest(arrayHolder, String[].class, strings, strings, 0.0);
    arrayHolder = new StringArray(null, null);
    performArrayTest(arrayHolder, String[].class, null, null, 0.0);
}
/**
 * Test for String[] array, stored serialised or in a join table, including some nulls.
 */
public void testStringArrayWithNulls()
{
    // Middle element is null to check null-element handling in the array.
    String[] strings = new String[3];
    strings[0] = "First string";
    strings[1] = null;
    strings[2] = "Third string that is much longer";
    StringArray arrayHolder = new StringArray(strings, strings);
    performArrayTest(arrayHolder, String[].class, strings, strings, 0.0);
    arrayHolder = new StringArray(null, null);
    performArrayTest(arrayHolder, String[].class, null, null, 0.0);
}
/**
 * Test for a field of type Interface[] stored serialised or in a join table.
 */
public void testInterfaceArray()
{
    // Rectangles act as the concrete Shape implementations.
    Shape[] stored = new Shape[] {new Rectangle(1, 25.0, 20.0), new Rectangle(2, 35.0, 10.0)};
    InterfaceArray arrayHolder = new InterfaceArray(stored, stored);
    // Clone the elements so the expected values are independent of persistence.
    Shape[] expected = new Shape[] {(Shape)((Rectangle)stored[0]).clone(), (Shape)((Rectangle)stored[1]).clone()};
    performArrayTest(arrayHolder, Shape[].class, expected, expected, 0.0);
    arrayHolder = new InterfaceArray(null, null);
    performArrayTest(arrayHolder, Shape[].class, null, null, 0.0);
    clean(Rectangle.class);
}
/**
 * Test for a field of type PC[] stored serialised or in a join table.
 */
public void testPersistableArray()
{
    ArrayElement[] stored = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        new ArrayElement("102", "Second element"),
        new ArrayElement("103", "Third element")};
    PersistableArray arrayHolder = new PersistableArray(stored, stored);
    // Expected values are clones so persistence cannot mutate them.
    ArrayElement[] expected = new ArrayElement[stored.length];
    for (int i = 0; i < stored.length; i++)
    {
        expected[i] = (ArrayElement)stored[i].clone();
    }
    performArrayTest(arrayHolder, ArrayElement[].class, expected, expected, 0.0);
    arrayHolder = new PersistableArray(null, null);
    performArrayTest(arrayHolder, ArrayElement[].class, null, null, 0.0);
}
/**
 * Test for a field of type PC[] stored in a join table, and having nulls.
 */
public void testPersistableArrayWithNulls()
{
    // Middle element is null to check null-element handling in the join table.
    ArrayElement[] stored = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        null,
        new ArrayElement("103", "Third element")};
    PersistableArray arrayHolder = new PersistableArray(stored, stored);
    // Clone non-null elements for the expected values; keep nulls as nulls.
    ArrayElement[] expected = new ArrayElement[stored.length];
    for (int i = 0; i < stored.length; i++)
    {
        expected[i] = (stored[i] != null) ? (ArrayElement)stored[i].clone() : null;
    }
    performArrayTest(arrayHolder, ArrayElement[].class, expected, expected, 0.0);
    arrayHolder = new PersistableArray(null, null);
    performArrayTest(arrayHolder, ArrayElement[].class, null, null, 0.0);
}
/**
 * Test for a field of type Object[] (as PCs) stored.
 */
public void testObjectArray()
{
    ArrayElement[] stored = new ArrayElement[] {
        new ArrayElement("101", "First element"),
        new ArrayElement("102", "Second element"),
        new ArrayElement("103", "Third element")};
    ObjectArray arrayHolder = new ObjectArray(stored, stored);
    // Clone the elements so the expected values are unaffected by persistence.
    ArrayElement[] expected = new ArrayElement[stored.length];
    for (int i = 0; i < stored.length; i++)
    {
        expected[i] = (ArrayElement)stored[i].clone();
    }
    performArrayTest(arrayHolder, ArrayElement[].class, expected, expected, 0.0);
    arrayHolder = new ObjectArray(null, null);
    performArrayTest(arrayHolder, ArrayElement[].class, null, null, 0.0);
}
/**
* Test for a field of type PC[] stored using a foreign-key.
*/
/*public void testForeignKeyPCArray()
{
org.jpox.samples.array.foreignkey.Product[] products = new org.jpox.samples.array.foreignkey.Product[3];
products[0] = new org.jpox.samples.array.foreignkey.Product("100", "Toaster", "New toaster", "http://www.jpox.org", "GBP", 17.5, 17.5, 17.5, 0.0, 1);
products[1] = new org.jpox.samples.array.foreignkey.Product("101", "Kettle", "Kettle", "http://www.jpox.org", "GBP", 10.0, 10.0, 10.0, 0.0, 1);
products[2] = new org.jpox.samples.array.foreignkey.Product("102", "Microwave", "Microwave oven", "http://www.jpox.org", "GBP", 65.0, 65.0, 65.0, 0.0, 1);
MyPCArrayFK holder = new MyPCArrayFK(products);
org.jpox.samples.array.foreignkey.Product[] expectedProducts = new org.jpox.samples.array.foreignkey.Product[3];
expectedProducts[0] = (org.jpox.samples.array.foreignkey.Product)products[0].clone();
expectedProducts[1] = (org.jpox.samples.array.foreignkey.Product)products[1].clone();
expectedProducts[2] = (org.jpox.samples.array.foreignkey.Product)products[2].clone();
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, expectedProducts);
holder = new MyPCArrayFK(null);
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, null);
}*/
// ---------------------------- Array querying tests --------------------------------------
/**
 * Test for a field of type int[] stored in a join table.
 * Runs the standard join-table query checks against the "array2" field.
 */
public void testJoinTableIntQueryArray()
{
    int[] elements = new int[] {2001, 4001, 6004, 4000};
    IntArray holder = new IntArray(elements, elements);
    performArrayQueryTest(holder, int[].class, elements);
    // Removed the stray trailing performArrayTest(holder, int[].class, null, null, 0.0):
    // performArrayQueryTest() already deletes the holder in its finally block, so
    // re-persisting the same instance - while expecting its non-null arrays to read
    // back as null - could never pass, and no sibling query test re-uses its holder.
}
/**
 * Test for a field of type String[] stored in a join table.
 */
public void testJoinTableStringQueryArray()
{
    // Query checks are performed against the join-table backed "array2" field.
    String[] strings = new String[] {"First string", "Second string", "Third string that is much longer"};
    StringArray arrayHolder = new StringArray(strings, strings);
    performArrayQueryTest(arrayHolder, String[].class, strings);
}
/**
 * Test for a field of type PC[] stored using a join table.
 */
public void testJoinTablePCQueryArray()
{
    ArrayElement[] stored = new ArrayElement[] {
        new ArrayElement("100", "First"),
        new ArrayElement("101", "Second"),
        new ArrayElement("102", "Third")};
    PersistableArray arrayHolder = new PersistableArray(stored, stored);
    // Expected values are clones so persistence cannot mutate them.
    ArrayElement[] expected = new ArrayElement[stored.length];
    for (int i = 0; i < stored.length; i++)
    {
        expected[i] = (ArrayElement)stored[i].clone();
    }
    performArrayQueryTest(arrayHolder, ArrayElement[].class, expected);
}
/**
 * Test for a field of type Interface[] stored using a join table.
 */
public void testJoinTableInterfaceQueryArray()
{
    // Rectangles act as the concrete Shape implementations.
    Shape[] stored = new Shape[] {new Rectangle(1, 25.0, 20.0), new Rectangle(2, 35.0, 10.0)};
    InterfaceArray arrayHolder = new InterfaceArray(stored, stored);
    // Clone the elements so the expected values are independent of persistence.
    Shape[] expected = new Shape[] {(Shape)((Rectangle)stored[0]).clone(), (Shape)((Rectangle)stored[1]).clone()};
    performArrayQueryTest(arrayHolder, Shape[].class, expected);
    clean(Rectangle.class);
}
/**
* Test for a field of type PC[] stored using a foreign-key.
*/
/*public void testForeignKeyPCQueryArray()
{
org.jpox.samples.array.foreignkey.Product[] products = new org.jpox.samples.array.foreignkey.Product[3];
products[0] = new org.jpox.samples.array.foreignkey.Product("100", "Toaster", "New toaster", "http://www.jpox.org", "GBP", 17.5, 17.5, 17.5, 0.0, 1);
products[1] = new org.jpox.samples.array.foreignkey.Product("101", "Kettle", "Kettle", "http://www.jpox.org", "GBP", 10.0, 10.0, 10.0, 0.0, 1);
products[2] = new org.jpox.samples.array.foreignkey.Product("102", "Microwave", "Microwave oven", "http://www.jpox.org", "GBP", 65.0, 65.0, 65.0, 0.0, 1);
MyPCArrayFK holder = new MyPCArrayFK(products);
org.jpox.samples.array.foreignkey.Product[] expectedProducts = new org.jpox.samples.array.foreignkey.Product[3];
expectedProducts[0] = (org.jpox.samples.array.foreignkey.Product)products[0].clone();
expectedProducts[1] = (org.jpox.samples.array.foreignkey.Product)products[1].clone();
expectedProducts[2] = (org.jpox.samples.array.foreignkey.Product)products[2].clone();
performArrayQueryTest(holder, org.jpox.samples.array.foreignkey.Product[].class, expectedProducts);
holder = new MyPCArrayFK(null);
performArrayTest(holder, org.jpox.samples.array.foreignkey.Product[].class, null);
}*/
// -------------------------------- Utilities ------------------------------------------
/**
 * Method to perform a test of an array type.
 * Persists the holder, re-reads it with a fresh PersistenceManager and compares
 * both arrays (accessed reflectively through ArrayHolderInterface) against the
 * expected values, then detaches the holder and finally cleans out all data.
 * Array 1 is compared exactly; array 2 allows a tolerance for floating-point
 * element types (Float/Double/BigDecimal).
 * @param arrayHolder The container object holding the array
 * @param arrayType The type of the array
 * @param expectedArray1 The array elements that we expect for array 1.
 * @param expectedArray2 The array elements that we expect for array 2.
 * @param rounding Tolerance applied when comparing Float/Double/BigDecimal elements of array 2.
 */
public void performArrayTest(Object arrayHolder, Class arrayType,
    Object expectedArray1, Object expectedArray2, double rounding)
{
    try
    {
        // Persist the container
        PersistenceManager pm = pmf.getPersistenceManager();
        Transaction tx = pm.currentTransaction();
        try
        {
            tx.begin();
            pm.makePersistent(arrayHolder);
            tx.commit();
        }
        catch (JDOUserException e)
        {
            LOG.error(">> Exception thrown in test", e);
            fail("Exception thrown while creating object with array of type " + arrayType + " : " + e.getMessage());
        }
        finally
        {
            if (tx.isActive())
            {
                tx.rollback();
            }
            pm.close();
        }

        // Re-read with a new PM so the values come back from the datastore.
        pm = pmf.getPersistenceManager();
        tx = pm.currentTransaction();
        try
        {
            tx.begin();

            // Retrieve the holder
            Query q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            List c = (List)q.execute();
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());
            Iterator iter = c.iterator();
            while (iter.hasNext())
            {
                ArrayHolderInterface theArrayHolder = (ArrayHolderInterface)iter.next();
                Object theArray1 = null;
                Object theArray2 = null;
                try
                {
                    // Access the arrays reflectively since each holder class
                    // declares its own concrete array field type.
                    Class[] argClasses = new Class[] {};
                    Object[] argParams = new Object[] {};
                    Method getArray1Method = ArrayHolderInterface.class.getMethod("getArray1", argClasses);
                    theArray1 = getArray1Method.invoke(theArrayHolder, argParams);
                    Method getArray2Method = ArrayHolderInterface.class.getMethod("getArray2", argClasses);
                    theArray2 = getArray2Method.invoke(theArrayHolder, argParams);
                }
                catch (Exception e)
                {
                    LOG.error("Exception thrown in test", e);
                    fail("Failed to get the array(s) from the holder " + arrayHolder.getClass().getName());
                }

                // Compare the array elements - 1 (exact comparison, null-safe types)
                if (theArray1 != null && expectedArray1 != null)
                {
                    assertEquals("Number of items in the " + arrayType.getName() + " (1) was incorrect",
                        Array.getLength(expectedArray1), Array.getLength(theArray1));
                    for (int i=0;i<Array.getLength(expectedArray1);i++)
                    {
                        Object expValue = Array.get(expectedArray1, i);
                        Object actValue = Array.get(theArray1, i);
                        String expType = (expValue != null ? expValue.getClass().getName() : null);
                        String actType = (actValue != null ? actValue.getClass().getName() : null);
                        assertEquals("Item " + i + " in " + arrayType.getName() + " (1) is of wrong type",
                            expType, actType);
                        assertEquals("Item " + i + " in " + arrayType.getName() + " (1) was wrong",
                            expValue, actValue);
                    }
                }
                else if (theArray1 == null && expectedArray1 != null)
                {
                    fail("No array items retrieved for (1) yet should have had " + Array.getLength(expectedArray1));
                }
                else if (theArray1 == null && expectedArray1 == null)
                {
                    // Success
                }
                else if (theArray1 != null && expectedArray1 == null)
                {
                    fail("Array items returned for (1) yet should have been null");
                }

                // Compare the array elements - 2 (floating-point types use the tolerance)
                if (theArray2 != null && expectedArray2 != null)
                {
                    assertEquals("Number of items in the " + arrayType.getName() + " (2) was incorrect",
                        Array.getLength(expectedArray2), Array.getLength(theArray2));
                    for (int i=0;i<Array.getLength(expectedArray2);i++)
                    {
                        Object expValue = Array.get(expectedArray2, i);
                        Object actValue = Array.get(theArray2, i);
                        String expType = (expValue != null ? expValue.getClass().getName() : null);
                        String actType = (actValue != null ? actValue.getClass().getName() : null);
                        assertEquals("Item " + i + " in " + arrayType.getName() + " (2) is of wrong type",
                            expType, actType);
                        if (expValue instanceof Float || expValue instanceof Double ||
                            expValue instanceof BigDecimal)
                        {
                            // Compare numerically with the supplied rounding
                            // tolerance instead of exact equality.
                            double expected=0;
                            double actual=0;
                            if (expValue instanceof Float)
                            {
                                expected = ((Float)expValue).doubleValue();
                                actual = ((Float)actValue).doubleValue();
                            }
                            else if (expValue instanceof Double)
                            {
                                expected = ((Double)expValue).doubleValue();
                                actual = ((Double)actValue).doubleValue();
                            }
                            else if (expValue instanceof BigDecimal)
                            {
                                expected = ((BigDecimal)expValue).doubleValue();
                                actual = ((BigDecimal)actValue).doubleValue();
                            }
                            assertTrue("Item " + i + " in " + arrayType.getName() + " (2) was wrong (" +
                                Array.get(expectedArray2, i) +")",
                                (expected <= (actual+rounding)) && (expected >= (actual-rounding))
                                );
                        }
                        else
                        {
                            assertEquals("Item " + i + " in " + arrayType.getName() + " (2) was wrong",
                                expValue, actValue);
                        }
                    }
                }
                else if (theArray2 == null && expectedArray2 != null)
                {
                    fail("No array items retrieved for (2) yet should have had " + Array.getLength(expectedArray2));
                }
                else if (theArray2 == null && expectedArray2 == null)
                {
                    // Success
                }
                else if (theArray2 != null && expectedArray2 == null)
                {
                    fail("Array items returned for (2) yet should have been null");
                }

                // Detach the holder with its array(s) - test of detaching
                pm.getFetchPlan().addGroup(FetchPlan.ALL);
                pm.detachCopy(theArrayHolder);
            }

            tx.commit();
        }
        catch (JDOUserException e)
        {
            LOG.error(">> Exception thrown in test", e);
            fail("Exception thrown while querying objects with array of type " + arrayType.getName() + " : " + e.getMessage());
        }
        finally
        {
            if (tx.isActive())
            {
                tx.rollback();
            }
            pm.close();
        }
    }
    finally
    {
        // Clean out our data
        clean(arrayHolder.getClass());
        // Persistable array elements have their own table that must also be cleared.
        Class arrayElementType = arrayType.getComponentType();
        if (Persistable.class.isAssignableFrom(arrayElementType))
        {
            clean(arrayElementType);
        }
    }
}
/**
 * Method to perform a test of an array type.
 * Persists the holder and runs a series of JDOQL filters against "array2"
 * (size(), length, contains()) checking the number of matching holders,
 * then cleans out the persisted data.
 * @param arrayHolder The container object holding the array
 * @param arrayType The type of the array
 * @param expectedArray The array elements that we expect.
 */
public void performArrayQueryTest(Object arrayHolder, Class arrayType, Object expectedArray)
{
    try
    {
        // Persist the container
        PersistenceManager pm = pmf.getPersistenceManager();
        Transaction tx = pm.currentTransaction();
        try
        {
            tx.begin();
            pm.makePersistent(arrayHolder);
            // Flush so the array rows are visible to queries within this txn.
            pm.flush();

            // Makes use of "array2" in ArrayHolderInterface as the array that is queryable (join table storage)
            Query q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            q.setFilter("this.array2.size() == :p");
            Collection c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());

            // "length" should behave the same as "size()" on an array field.
            q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            q.setFilter("this.array2.length == :p");
            c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());

            // NOTE(review): this repeats the first size() query verbatim -
            // presumably exercising re-execution of an identical query; confirm.
            q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            q.setFilter("this.array2.size() == :p");
            c = (Collection) q.execute(new Integer(Array.getLength(expectedArray)));
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());

            // contains() with the second element of the persisted array must match.
            q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            q.setFilter("this.array2.contains(:p)");
            c = (Collection) q.execute(Array.get(((ArrayHolderInterface)arrayHolder).getArray2(),1));
            assertEquals("Number of " + arrayHolder.getClass().getName() + " objects retrieved was incorrect", 1, c.size());

            // size() == 0 must match nothing since the array is populated.
            q = pm.newQuery(pm.getExtent(arrayHolder.getClass(), true));
            q.setFilter("this.array2.size() == :p");
            c = (Collection) q.execute(new Integer(0));
            assertEquals(c.size(), 0);

            tx.commit();
        }
        catch (JDOUserException e)
        {
            LOG.error(">> Exception thrown in test", e);
            fail("Exception thrown while creating object with array of type " + arrayType + " : " + e.getMessage());
        }
        finally
        {
            if (tx.isActive())
            {
                tx.rollback();
            }
            pm.close();
        }
    }
    finally
    {
        // Clean out our data
        clean(arrayHolder.getClass());
        // Persistable array elements have their own table that must also be cleared.
        Class arrayElementType = arrayType.getComponentType();
        if (Persistable.class.isAssignableFrom(arrayElementType))
        {
            clean(arrayElementType);
        }
    }
}
} | Fix test to not use an element in multiple ways | jdo/general/src/test/org/datanucleus/tests/types/ArrayTest.java | Fix test to not use an element in multiple ways |
|
Java | apache-2.0 | 48493e8883a9432f22899478c072d00a91ee4054 | 0 | milanperera/product-mdm,dilee/product-mdm,Malintha/product-emm,madhawap/product-emm,milanperera/product-mdm,madhawap/product-emm,wso2/product-mdm,madhawap/product-emm,wso2/product-mdm,dilee/product-mdm,wso2/product-mdm,dilee/product-mdm,dilee/product-mdm,Kamidu/product-mdm,milanperera/product-mdm,Malintha/product-emm,Kamidu/product-mdm,dilee/product-emm,dilee/product-emm,Kamidu/product-mdm,Malintha/product-emm,wso2/product-mdm,madhawap/product-emm,milanperera/product-mdm,dilee/product-emm,Kamidu/product-mdm,dilee/product-emm,Malintha/product-emm | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.emm.agent.services.location.impl;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import com.google.gson.Gson;
import org.wso2.emm.agent.R;
import org.wso2.emm.agent.services.location.LocationService;
import org.wso2.emm.agent.utils.Preference;
/**
 * This class holds the function implementations of the location service.
 * Registers for network and/or GPS location updates and caches the latest
 * {@link Location} both in memory and, serialised as JSON, in shared preferences.
 */
public class LocationServiceImpl extends Service implements LocationListener, LocationService {

    // Most recently observed location (may be null until a fix is obtained).
    private Location location;
    private LocationManager locationManager;
    // volatile is required for the double-checked locking in getInstance() to be
    // safe under the Java Memory Model (previously this field was non-volatile,
    // which allowed unsafe publication of a partially constructed instance).
    private static volatile LocationServiceImpl serviceInstance;
    private Context context;
    // Minimum movement (metres) between update callbacks.
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10;
    // Minimum interval (milliseconds) between update callbacks.
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60;
    private static final String TAG = LocationServiceImpl.class.getSimpleName();

    private LocationServiceImpl() {}

    private LocationServiceImpl(Context context) {
        this.context = context;
        locationManager = (LocationManager) context.getSystemService(LOCATION_SERVICE);
        class LooperThread extends Thread {
            public Handler mHandler;

            public void run() {
                // Ensure a Looper exists before registering for location updates.
                if (Looper.myLooper() == null) {
                    Looper.prepare();
                }
                LocationServiceImpl.this.setLocation();
                mHandler = new Handler() {
                    public void handleMessage(Message msg) {
                        Log.e(TAG, "No network/GPS Switched off." + msg);
                    }
                };
            }
        }
        // NOTE(review): run() (not start()) executes synchronously on the calling
        // thread - presumably intentional so the location is primed before the
        // constructor returns; confirm before changing to start().
        new LooperThread().run();
    }

    /**
     * Lazily creates and returns the singleton instance.
     * Double-checked locking; correct now that serviceInstance is volatile.
     */
    public static LocationServiceImpl getInstance(Context context) {
        if (serviceInstance == null) {
            synchronized (LocationServiceImpl.class) {
                if (serviceInstance == null) {
                    serviceInstance = new LocationServiceImpl(context);
                }
            }
        }
        return serviceInstance;
    }

    /**
     * In this method, it gets the latest location updates from gps/ network.
     * The network provider is tried first; GPS is used as a fallback when no
     * network fix is available. Each retrieved fix is cached in preferences.
     */
    private void setLocation() {
        if (locationManager != null) {
            try {
                // Runtime location permissions are mandatory from API 23 onwards.
                if (Build.VERSION.SDK_INT >= 23 &&
                        ContextCompat.checkSelfPermission(context, android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED &&
                        ContextCompat.checkSelfPermission(context, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
                    return;
                }
                locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER,
                                                       MIN_TIME_BW_UPDATES,
                                                       MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                // (Redundant re-checks of locationManager removed: it is assigned
                // once in the constructor and never cleared.)
                location = locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
                if (location != null) {
                    persistLocation(location);
                }
                if (location == null) {
                    // Fall back to GPS when the network provider had no fix.
                    locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
                                                           MIN_TIME_BW_UPDATES,
                                                           MIN_DISTANCE_CHANGE_FOR_UPDATES,
                                                           this);
                    location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
                    if (location != null) {
                        persistLocation(location);
                    }
                }
            } catch (RuntimeException e) {
                Log.e(TAG, "No network/GPS Switched off.", e);
            }
        }
    }

    /**
     * Serialises the given location to JSON and stores it in shared preferences.
     */
    private void persistLocation(Location location) {
        Preference.putString(context, context.getResources().getString(R.string.shared_pref_location),
                             new Gson().toJson(location));
    }

    /** @return the location currently held in memory (may be null). */
    public Location getLastKnownLocation() {
        return location;
    }

    @Override
    public Location getLocation() {
        // Fall back to the JSON-serialised copy in shared preferences when no
        // in-memory fix is available.
        if (location == null) {
            location = new Gson().fromJson(Preference.getString(context, context.getResources().getString(
                    R.string.shared_pref_location)), Location.class);
        }
        return location;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onLocationChanged(Location location) {
        // NOTE(review): only the preference cache is refreshed here; the in-memory
        // 'location' field is left untouched - confirm that is intended.
        if (location != null) {
            persistLocation(location);
        }
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public void onProviderEnabled(String provider) {
        // Recreate the singleton so updates are re-registered against the newly
        // enabled provider.
        // NOTE(review): the null-then-getInstance sequence is not atomic; a
        // concurrent getInstance() caller may race with it - confirm acceptable.
        serviceInstance = null;
        serviceInstance = getInstance(context);
    }

    @Override
    public void onProviderDisabled(String provider) {
    }
}
| modules/mobile-agents/android/client/client/src/main/java/org/wso2/emm/agent/services/location/impl/LocationServiceImpl.java | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.emm.agent.services.location.impl;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import com.google.gson.Gson;
import org.wso2.emm.agent.R;
import org.wso2.emm.agent.services.location.LocationService;
import org.wso2.emm.agent.utils.Preference;
/**
 * This class holds the function implementations of the location service.
 * Registers for network/GPS location updates (only when the provider is
 * enabled) and caches the most recent {@link Location} in memory and,
 * serialised as JSON, in shared preferences.
 */
public class LocationServiceImpl extends Service implements LocationListener, LocationService {

    // Most recently observed location (may be null until a fix is obtained).
    private Location location;
    private LocationManager locationManager;
    // NOTE(review): accessed via double-checked locking in getInstance() but not
    // declared volatile - the idiom is unsafe under the Java Memory Model.
    private static LocationServiceImpl serviceInstance;
    private Context context;
    // Minimum movement (metres) between update callbacks.
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10;
    // Minimum interval (milliseconds) between update callbacks.
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60;
    private static final String TAG = LocationServiceImpl.class.getSimpleName();

    private LocationServiceImpl() {}

    private LocationServiceImpl(Context context) {
        this.context = context;
        locationManager = (LocationManager) context.getSystemService(LOCATION_SERVICE);
        class LooperThread extends Thread {
            public Handler mHandler;

            public void run() {
                // Ensure a Looper exists before registering for location updates.
                if (Looper.myLooper() == null) {
                    Looper.prepare();
                }
                LocationServiceImpl.this.setLocation();
                mHandler = new Handler() {
                    public void handleMessage(Message msg) {
                        Log.e(TAG, "No network/GPS Switched off." + msg);
                    }
                };
            }
        }
        // NOTE(review): run() (not start()) executes synchronously on the calling
        // thread - presumably intentional so the location is primed before the
        // constructor returns; confirm before changing to start().
        new LooperThread().run();
    }

    /** Lazily creates and returns the singleton instance. */
    public static LocationServiceImpl getInstance(Context context) {
        if (serviceInstance == null) {
            synchronized (LocationServiceImpl.class) {
                if (serviceInstance == null) {
                    serviceInstance = new LocationServiceImpl(context);
                }
            }
        }
        return serviceInstance;
    }

    /**
     * In this method, it gets the latest location updates from gps/ network.
     * The network provider is tried first; GPS is used as a fallback when no
     * network fix is available. Updates are only requested from providers that
     * are currently enabled. Each retrieved fix is cached in preferences.
     */
    private void setLocation() {
        if (locationManager != null) {
            try {
                // Runtime location permissions are mandatory from API 23 onwards.
                if (Build.VERSION.SDK_INT >= 23 &&
                        ContextCompat.checkSelfPermission(context, android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED &&
                        ContextCompat.checkSelfPermission(context, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
                    return;
                }
                boolean isGpsEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
                boolean isNetworkEnabled = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
                if (isNetworkEnabled) {
                    locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER,
                            MIN_TIME_BW_UPDATES,
                            MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                    if (locationManager != null) {
                        location = locationManager.getLastKnownLocation(
                                LocationManager.NETWORK_PROVIDER);
                        if (location != null) {
                            // Cache the fix as JSON in shared preferences.
                            Preference.putString(context, context.getResources().getString(R.string.shared_pref_location),
                                    new Gson().toJson(location));
                        }
                    }
                }
                if (isGpsEnabled) {
                    // Only fall back to GPS when the network provider had no fix.
                    if (location == null) {
                        locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
                                MIN_TIME_BW_UPDATES,
                                MIN_DISTANCE_CHANGE_FOR_UPDATES,
                                this);
                        if (locationManager != null) {
                            location = locationManager.getLastKnownLocation(
                                    LocationManager.GPS_PROVIDER);
                            if (location != null) {
                                Preference.putString(context, context.getResources().getString(R.string.shared_pref_location),
                                        new Gson().toJson(location));
                            }
                        }
                    }
                }
            } catch (RuntimeException e) {
                Log.e(TAG, "No network/GPS Switched off.", e);
            }
        }
    }

    /** @return the location currently held in memory (may be null). */
    public Location getLastKnownLocation() {
        return location;
    }

    @Override
    public Location getLocation() {
        // Fall back to the JSON-serialised copy in shared preferences when no
        // in-memory fix is available.
        if (location == null) {
            location = new Gson().fromJson(Preference.getString(context, context.getResources().getString(
                    R.string.shared_pref_location)), Location.class);
        }
        return location;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onLocationChanged(Location location) {
        // NOTE(review): only the preference cache is refreshed here; the in-memory
        // 'location' field is left untouched - confirm that is intended.
        if (location != null) {
            Preference.putString(context, context.getResources().getString(R.string.shared_pref_location),
                    new Gson().toJson(location));
        }
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public void onProviderEnabled(String provider) {
    }

    @Override
    public void onProviderDisabled(String provider) {
    }
}
| EMM-1768: Removing provider enabled checks before subscribing for location updates
| modules/mobile-agents/android/client/client/src/main/java/org/wso2/emm/agent/services/location/impl/LocationServiceImpl.java | EMM-1768: Removing provider enabled checks before subscribing for location updates |
|
Java | apache-2.0 | d85eb8a4bb4a713ebd806fc444083a6de5c5c5ea | 0 | real-logic/Aeron,real-logic/Aeron,mikeb01/Aeron,mikeb01/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,EvilMcJerkface/Aeron,mikeb01/Aeron,real-logic/Aeron,mikeb01/Aeron,galderz/Aeron,galderz/Aeron,galderz/Aeron,EvilMcJerkface/Aeron,EvilMcJerkface/Aeron,galderz/Aeron | /*
* Copyright 2014-2018 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.codecs.SourceLocation;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.service.RecordingLog;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.exceptions.TimeoutException;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.logbuffer.Header;
import io.aeron.status.ReadableCounter;
import org.agrona.CloseHelper;
import org.agrona.DirectBuffer;
import org.agrona.MutableDirectBuffer;
import org.agrona.collections.ArrayListUtil;
import org.agrona.collections.Long2ObjectHashMap;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static io.aeron.ChannelUri.SPY_QUALIFIER;
import static io.aeron.CommonContext.ENDPOINT_PARAM_NAME;
import static io.aeron.CommonContext.NULL_SESSION_ID;
import static io.aeron.CommonContext.UDP_MEDIA;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.cluster.ClusterMember.NULL_MEMBER_ID;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.SESSION_TIMEOUT_MSG;
import static io.aeron.cluster.ConsensusModule.SNAPSHOT_TYPE_ID;
class SequencerAgent implements Agent, ServiceControlListener, MemberStatusListener
{
    // True while replaying the recovery plan on start-up; suppresses ack-driven transitions.
    private boolean isRecovering;

    // Cluster membership identity and election bookkeeping.
    private final int memberId;
    private int votedForMemberId;
    private int serviceAckCount = 0;
    private int logSessionId = CommonContext.NULL_SESSION_ID;

    // Timeouts/intervals converted from ns in the context to ms for the epoch clock.
    private final long sessionTimeoutMs;
    private final long leaderHeartbeatIntervalMs;
    private final long leaderHeartbeatTimeoutMs;
    private final long serviceHeartbeatTimeoutMs;

    // Log/term progress tracking.
    private long nextSessionId = 1;
    private long termBaseLogPosition = 0;
    private long leadershipTermId = -1;
    private long lastRecordingPosition = 0;
    private long timeOfLastLogUpdateMs = 0;
    private long followerCommitPosition = 0;
    private ReadableCounter logRecordingPosition;
    private Counter commitPosition;

    // Module state machine and current cluster role.
    private ConsensusModule.State state = ConsensusModule.State.INIT;
    private Cluster.Role role;
    private ClusterMember[] clusterMembers;
    private ClusterMember leaderMember;
    private final ClusterMember thisMember;
    private long[] rankedPositions;
    private final Counter clusterRoleCounter;
    private final ClusterMarkFile markFile;

    // Aeron client plumbing and clocks.
    private final AgentInvoker aeronClientInvoker;
    private final EpochClock epochClock;
    private final CachedEpochClock cachedEpochClock = new CachedEpochClock();
    private final Counter moduleState;
    private final Counter controlToggle;
    private final TimerService timerService;

    // Adapters/publishers for service control, ingress/egress, log and member status channels.
    private final ServiceControlAdapter serviceControlAdapter;
    private final ServiceControlPublisher serviceControlPublisher;
    private final IngressAdapter ingressAdapter;
    private final EgressPublisher egressPublisher;
    private final LogPublisher logPublisher;
    private LogAdapter logAdapter;
    private final MemberStatusAdapter memberStatusAdapter;
    private final MemberStatusPublisher memberStatusPublisher = new MemberStatusPublisher();

    // Client session tracking: established sessions by id, plus pending/rejected connects.
    private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
    private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
    private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
    private final Authenticator authenticator;
    private final SessionProxy sessionProxy;

    private final Aeron aeron;
    private AeronArchive archive;
    private final ConsensusModule.Context ctx;
    private final MutableDirectBuffer tempBuffer;
    private final Counter[] serviceHeartbeats;
    private final IdleStrategy idleStrategy;
    private final RecordingLog recordingLog;
    private RecordingLog.RecoveryPlan recoveryPlan;
    private UnsafeBuffer recoveryPlanBuffer;
    private RecordingCatchUp recordingCatchUp;
    /**
     * Construct the agent, wiring up all channels from the context: member status,
     * ingress, archive control and service control. Publications/subscriptions are
     * created eagerly here; the archive connection itself is deferred to onStart().
     */
    SequencerAgent(
        final ConsensusModule.Context ctx, final EgressPublisher egressPublisher, final LogPublisher logPublisher)
    {
        this.ctx = ctx;
        this.aeron = ctx.aeron();
        this.epochClock = ctx.epochClock();
        // All configured durations are ns; the agent works in ms against the epoch clock.
        this.sessionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.sessionTimeoutNs());
        this.leaderHeartbeatIntervalMs = TimeUnit.NANOSECONDS.toMillis(ctx.leaderHeartbeatIntervalNs());
        this.leaderHeartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.leaderHeartbeatTimeoutNs());
        this.serviceHeartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.serviceHeartbeatTimeoutNs());
        this.egressPublisher = egressPublisher;
        this.moduleState = ctx.moduleStateCounter();
        this.controlToggle = ctx.controlToggleCounter();
        this.logPublisher = logPublisher;
        this.idleStrategy = ctx.idleStrategy();
        this.timerService = new TimerService(this);
        this.clusterMembers = ClusterMember.parse(ctx.clusterMembers());
        this.sessionProxy = new SessionProxy(egressPublisher);
        this.memberId = ctx.clusterMemberId();
        this.votedForMemberId = ctx.appointedLeaderId();
        this.clusterRoleCounter = ctx.clusterNodeCounter();
        this.markFile = ctx.clusterMarkFile();
        this.recordingLog = ctx.recordingLog();
        this.tempBuffer = ctx.tempBuffer();
        this.serviceHeartbeats = ctx.serviceHeartbeatCounters();

        aeronClientInvoker = aeron.conductorAgentInvoker();
        aeronClientInvoker.invoke();

        rankedPositions = new long[ClusterMember.quorumThreshold(clusterMembers.length)];
        role(Cluster.Role.FOLLOWER);

        thisMember = clusterMembers[memberId];
        // Member status channel: bind to this member's endpoint, then connect to peers.
        final ChannelUri memberStatusUri = ChannelUri.parse(ctx.memberStatusChannel());
        memberStatusUri.put(ENDPOINT_PARAM_NAME, thisMember.memberFacingEndpoint());

        final int statusStreamId = ctx.memberStatusStreamId();
        memberStatusAdapter = new MemberStatusAdapter(
            aeron.addSubscription(memberStatusUri.toString(), statusStreamId), this);

        ClusterMember.addMemberStatusPublications(clusterMembers, thisMember, memberStatusUri, statusStreamId, aeron);

        // Ingress channel: default the endpoint to this member's client-facing endpoint.
        final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
        if (!ingressUri.containsKey(ENDPOINT_PARAM_NAME))
        {
            ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.clientFacingEndpoint());
        }

        ingressAdapter = new IngressAdapter(
            aeron.addSubscription(ingressUri.toString(), ctx.ingressStreamId()), this, ctx.invalidRequestCounter());

        // Point archive control requests at this member's archive endpoint.
        final ChannelUri archiveUri = ChannelUri.parse(ctx.archiveContext().controlRequestChannel());
        ClusterMember.checkArchiveEndpoint(thisMember, archiveUri);
        archiveUri.put(ENDPOINT_PARAM_NAME, thisMember.archiveEndpoint());
        ctx.archiveContext().controlRequestChannel(archiveUri.toString());

        serviceControlAdapter = new ServiceControlAdapter(
            aeron.addSubscription(ctx.serviceControlChannel(), ctx.serviceControlStreamId()), this);
        serviceControlPublisher = new ServiceControlPublisher(
            aeron.addPublication(ctx.serviceControlChannel(), ctx.serviceControlStreamId()));

        authenticator = ctx.authenticatorSupplier().newAuthenticator(ctx);
    }
    /**
     * Release resources on agent close. The archive connection is always closed;
     * sessions, adapters and publications are only closed individually when this
     * module does not own the Aeron client — presumably closing an owned client
     * releases them all at once (NOTE(review): confirm ownership semantics).
     */
    public void onClose()
    {
        CloseHelper.close(archive);

        if (!ctx.ownsAeronClient())
        {
            for (final ClusterSession session : sessionByIdMap.values())
            {
                session.close();
            }

            CloseHelper.close(memberStatusAdapter);
            ClusterMember.closeMemberPublications(clusterMembers);
            logPublisher.disconnect();
            CloseHelper.close(ingressAdapter);
            CloseHelper.close(serviceControlPublisher);
            CloseHelper.close(serviceControlAdapter);
        }
    }
    /**
     * Start-up sequence: connect to the archive, build and encode the recovery plan,
     * then replay snapshot and log terms under a RecoveryState counter before
     * establishing leadership and becoming ACTIVE (unless recovery left us SUSPENDED).
     */
    public void onStart()
    {
        archive = AeronArchive.connect(ctx.archiveContext());
        recoveryPlan = recordingLog.createRecoveryPlan(archive);

        // Keep an encoded copy so the plan can be served to peers on query (onRecoveryPlanQuery).
        recoveryPlanBuffer = new UnsafeBuffer(new byte[recoveryPlan.encodedLength()]);
        recoveryPlan.encode(recoveryPlanBuffer, 0);

        try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
        {
            isRecovering = true;
            if (null != recoveryPlan.snapshotStep)
            {
                recoverFromSnapshot(recoveryPlan.snapshotStep, archive);
            }

            awaitServiceAcks();

            if (recoveryPlan.termSteps.size() > 0)
            {
                recoverFromLog(recoveryPlan.termSteps, archive);
            }

            isRecovering = false;
        }

        if (ConsensusModule.State.SUSPENDED != state)
        {
            state(ConsensusModule.State.ACTIVE);
        }

        establishLeader();

        final long nowMs = epochClock.time();
        cachedEpochClock.update(nowMs);
        timeOfLastLogUpdateMs = nowMs;
    }
    /**
     * Main duty cycle. Polls ingress when leader, the replicated log when follower,
     * always polls member status and updates positions. Housekeeping (slowTickCycle)
     * runs at most once per epoch-clock millisecond tick.
     */
    public int doWork()
    {
        int workCount = 0;
        boolean isSlowTickCycle = false;

        final long nowMs = epochClock.time();
        if (cachedEpochClock.time() != nowMs)
        {
            cachedEpochClock.update(nowMs);
            isSlowTickCycle = true;
        }

        if (Cluster.Role.LEADER == role && ConsensusModule.State.ACTIVE == state)
        {
            workCount += ingressAdapter.poll();
        }
        else if (Cluster.Role.FOLLOWER == role &&
            (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state))
        {
            // Followers consume the log only up to the leader's last commit position.
            workCount += logAdapter.poll(followerCommitPosition);
        }

        workCount += memberStatusAdapter.poll();
        workCount += updateMemberPosition(nowMs);

        if (isSlowTickCycle)
        {
            workCount += slowTickCycle(nowMs);
        }

        return workCount;
    }
public String roleName()
{
return "sequencer";
}
public void onServiceAck(
final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
validateServiceAck(logPosition, leadershipTermId, serviceId, action);
if (++serviceAckCount == ctx.serviceCount())
{
if (isRecovering)
{
return;
}
final long termPosition = currentTermPosition();
switch (action)
{
case SNAPSHOT:
final long nowNs = cachedEpochClock.time();
takeSnapshot(nowNs, termPosition);
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
for (final ClusterSession session : sessionByIdMap.values())
{
session.timeOfLastActivityMs(nowNs);
}
break;
case SHUTDOWN:
takeSnapshot(cachedEpochClock.time(), termPosition);
recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
case ABORT:
recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
}
}
else if (serviceAckCount > ctx.serviceCount())
{
throw new IllegalStateException("Service count exceeded: " + serviceAckCount);
}
}
public void onSessionConnect(
final long correlationId,
final int responseStreamId,
final String responseChannel,
final byte[] encodedCredentials)
{
final long nowMs = cachedEpochClock.time();
final long sessionId = nextSessionId++;
final ClusterSession session = new ClusterSession(sessionId, responseStreamId, responseChannel);
session.connect(aeron);
session.lastActivity(nowMs, correlationId);
if (pendingSessions.size() + sessionByIdMap.size() < ctx.maxConcurrentSessions())
{
authenticator.onConnectRequest(sessionId, encodedCredentials, nowMs);
pendingSessions.add(session);
}
else
{
rejectedSessions.add(session);
}
}
    /**
     * Handle a close request from a client. The session is closed eagerly and the
     * close is appended to the log; the session is only removed from the map once
     * the append succeeds (a failed append leaves it for a later retry path).
     */
    public void onSessionClose(final long clusterSessionId)
    {
        final ClusterSession session = sessionByIdMap.get(clusterSessionId);
        if (null != session)
        {
            session.closeReason(CloseReason.CLIENT_ACTION);
            session.close();

            if (appendClosedSession(session, cachedEpochClock.time()))
            {
                sessionByIdMap.remove(clusterSessionId);
            }
        }
    }
    /**
     * Handle an ingress message from a client session.
     *
     * @return CONTINUE when the message was appended (or the session is unknown/closed
     *         and the message is dropped); ABORT to retry delivery when the log append
     *         fails due to back pressure.
     */
    public ControlledFragmentAssembler.Action onSessionMessage(
        final DirectBuffer buffer,
        final int offset,
        final int length,
        final long clusterSessionId,
        final long correlationId)
    {
        final ClusterSession session = sessionByIdMap.get(clusterSessionId);
        if (null == session || session.state() == CLOSED)
        {
            return ControlledFragmentHandler.Action.CONTINUE;
        }

        final long nowMs = cachedEpochClock.time();
        if (session.state() == OPEN && logPublisher.appendMessage(buffer, offset, length, nowMs))
        {
            session.lastActivity(nowMs, correlationId);
            return ControlledFragmentHandler.Action.CONTINUE;
        }

        return ControlledFragmentHandler.Action.ABORT;
    }
public void onSessionKeepAlive(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.timeOfLastActivityMs(cachedEpochClock.time());
}
}
public void onChallengeResponse(
final long correlationId, final long clusterSessionId, final byte[] encodedCredentials)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs);
break;
}
}
}
public boolean onTimerEvent(final long correlationId, final long nowMs)
{
return Cluster.Role.LEADER != role || logPublisher.appendTimerEvent(correlationId, nowMs);
}
    // Delegate timer scheduling from a service to the timer service.
    public void onScheduleTimer(final long correlationId, final long deadlineMs)
    {
        timerService.scheduleTimer(correlationId, deadlineMs);
    }
    // Delegate timer cancellation from a service to the timer service.
    public void onCancelTimer(final long correlationId)
    {
        timerService.cancelTimer(correlationId);
    }
    /**
     * Handle a close request originating from a clustered service. Only the leader
     * appends the close to the log; the map entry is removed once the append succeeds.
     */
    public void onServiceCloseSession(final long clusterSessionId)
    {
        final ClusterSession session = sessionByIdMap.get(clusterSessionId);
        if (null != session)
        {
            session.closeReason(CloseReason.SERVICE_ACTION);
            session.close();

            if (Cluster.Role.LEADER == role && appendClosedSession(session, cachedEpochClock.time()))
            {
                sessionByIdMap.remove(clusterSessionId);
            }
        }
    }
    /**
     * Handle a vote request from a candidate. A positive vote is cast only when this
     * node is a follower, the candidate's term matches, its base log position matches
     * our recovery plan, and its term position is at least as advanced as ours.
     * The vote is recorded in the recording log before being sent.
     */
    public void onRequestVote(
        final long candidateTermId, final long lastBaseLogPosition, final long lastTermPosition, final int candidateId)
    {
        if (Cluster.Role.FOLLOWER == role &&
            candidateTermId == leadershipTermId &&
            lastBaseLogPosition == recoveryPlan.lastTermBaseLogPosition)
        {
            if (lastTermPosition >= recoveryPlan.lastTermPositionAppended)
            {
                votedForMemberId = candidateId;

                final long logPosition = lastBaseLogPosition + lastTermPosition;
                // Persist the vote before sending it so it survives a restart.
                recordingLog.appendTerm(leadershipTermId, logPosition, epochClock.time(), votedForMemberId);
                sendVote(candidateTermId, candidateId, true);
                return;
            }
        }

        sendVote(candidateTermId, candidateId, false);
    }
    /**
     * Handle the leader's announcement of a new leadership term. Records the log
     * session id and, if the leader's appended position is ahead of ours, starts a
     * recording catch-up to close the gap.
     *
     * NOTE(review): leaderMemberId is accepted but unused here; the catch-up targets
     * votedForMemberId instead — confirm these are always the same member.
     */
    public void onNewLeadershipTerm(
        final long lastBaseLogPosition,
        final long lastTermPosition,
        final long leadershipTermId,
        final int leaderMemberId,
        final int logSessionId)
    {
        if (leadershipTermId == this.leadershipTermId)
        {
            this.logSessionId = logSessionId;

            if (recoveryPlan.lastTermPositionAppended < lastTermPosition && null == recordingCatchUp)
            {
                recordingCatchUp = ctx.recordingCatchUpSupplier().catchUp(
                    archive,
                    memberStatusPublisher,
                    clusterMembers,
                    votedForMemberId,
                    memberId,
                    recoveryPlan,
                    ctx);
            }
        }
    }
    /**
     * Handle a vote cast by a follower during this node's candidacy. Positive votes
     * are tallied on the member records; a negative vote is currently fatal.
     */
    public void onVote(
        final long candidateTermId, final int candidateMemberId, final int followerMemberId, final boolean vote)
    {
        if (Cluster.Role.CANDIDATE == role && candidateTermId == leadershipTermId && candidateMemberId == memberId)
        {
            if (vote)
            {
                clusterMembers[followerMemberId].votedForId(candidateMemberId);
            }
            else
            {
                // TODO: Have to deal with failed candidacy
                throw new IllegalStateException("rejected vote from: " + followerMemberId);
            }
        }
    }
public void onAppendedPosition(final long termPosition, final long leadershipTermId, final int followerMemberId)
{
if (leadershipTermId == this.leadershipTermId)
{
clusterMembers[followerMemberId].termPosition(termPosition);
}
}
public void onCommitPosition(final long termPosition, final long leadershipTermId, final int leaderMemberId)
{
if (leadershipTermId == this.leadershipTermId)
{
timeOfLastLogUpdateMs = cachedEpochClock.time();
followerCommitPosition = termPosition;
}
}
    /**
     * Forward a recovery-plan query response to the in-progress recording catch-up,
     * if one is active; otherwise the response is dropped.
     */
    public void onQueryResponse(
        final long correlationId,
        final int requestMemberId,
        final int responseMemberId,
        final DirectBuffer data,
        final int offset,
        final int length)
    {
        if (null != recordingCatchUp)
        {
            recordingCatchUp.onLeaderRecoveryPlan(
                correlationId, requestMemberId, responseMemberId, data, offset, length);
        }
    }
    /**
     * Serve this node's encoded recovery plan to a requesting member, but only when
     * this node is the addressed leader.
     */
    public void onRecoveryPlanQuery(final long correlationId, final int leaderMemberId, final int requestMemberId)
    {
        if (leaderMemberId == memberId)
        {
            memberStatusPublisher.queryResponse(
                clusterMembers[requestMemberId].publication(),
                correlationId,
                requestMemberId,
                memberId,
                recoveryPlanBuffer,
                0,
                recoveryPlanBuffer.capacity());
        }
    }
    // Transition the module state and publish it via the state counter.
    void state(final ConsensusModule.State state)
    {
        this.state = state;
        moduleState.set(state.code());
    }

    // Transition the cluster role; setOrdered gives visibility to observers.
    void role(final Cluster.Role role)
    {
        this.role = role;
        clusterRoleCounter.setOrdered(role.code());
    }

    Cluster.Role role()
    {
        return role;
    }

    // Injected once recording of the log has started (see createPositionCounters).
    void logRecordingPositionCounter(final ReadableCounter logRecordingPosition)
    {
        this.logRecordingPosition = logRecordingPosition;
    }

    void commitPositionCounter(final Counter commitPosition)
    {
        this.commitPosition = commitPosition;
    }
    /**
     * Replay handler for a session message read back from the log: advances the
     * cached clock and refreshes the session's activity.
     *
     * NOTE(review): assumes the session id is always present in sessionByIdMap
     * during replay — a missing entry would NPE here; confirm replay ordering
     * guarantees a prior session-open event.
     */
    @SuppressWarnings("unused")
    void onReplaySessionMessage(
        final long correlationId,
        final long clusterSessionId,
        final long timestamp,
        final DirectBuffer buffer,
        final int offset,
        final int length,
        final Header header)
    {
        cachedEpochClock.update(timestamp);
        sessionByIdMap.get(clusterSessionId).lastActivity(timestamp, correlationId);
    }
    // Replay handler for a timer event: only the log timestamp matters here.
    void onReplayTimerEvent(@SuppressWarnings("unused") final long correlationId, final long timestamp)
    {
        cachedEpochClock.update(timestamp);
    }
void onReplaySessionOpen(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
cachedEpochClock.update(timestamp);
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadSession(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.closeReason(closeReason);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
if (CloseReason.NULL_VAL != closeReason)
{
session.close();
}
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
    /**
     * Replay handler for a session-close event: removes and closes the session.
     *
     * NOTE(review): assumes the session id is present in sessionByIdMap — a missing
     * entry would NPE; confirm replay ordering guarantees a prior open event.
     */
    @SuppressWarnings("unused")
    void onReplaySessionClose(
        final long correlationId, final long clusterSessionId, final long timestamp, final CloseReason closeReason)
    {
        cachedEpochClock.update(timestamp);
        sessionByIdMap.remove(clusterSessionId).close();
    }
    /**
     * Replay handler for a cluster action from the log. SUSPEND/RESUME are applied
     * unconditionally; SNAPSHOT/SHUTDOWN/ABORT only take effect outside recovery
     * (during recovery these actions were already completed in a previous run).
     */
    @SuppressWarnings("unused")
    void onReplayClusterAction(
        final long logPosition, final long leadershipTermId, final long timestamp, final ClusterAction action)
    {
        cachedEpochClock.update(timestamp);
        final long termPosition = logPosition - termBaseLogPosition;

        switch (action)
        {
            case SUSPEND:
                state(ConsensusModule.State.SUSPENDED);
                break;

            case RESUME:
                state(ConsensusModule.State.ACTIVE);
                break;

            case SNAPSHOT:
                if (!isRecovering)
                {
                    serviceAckCount = 0;
                    state(ConsensusModule.State.SNAPSHOT);
                }
                break;

            case SHUTDOWN:
                if (!isRecovering)
                {
                    serviceAckCount = 0;
                    state(ConsensusModule.State.SHUTDOWN);
                }
                break;

            case ABORT:
                if (!isRecovering)
                {
                    serviceAckCount = 0;
                    state(ConsensusModule.State.ABORT);
                }
                break;
        }
    }
    // Restore the session id allocator from a loaded snapshot.
    void onReloadState(final long nextSessionId)
    {
        this.nextSessionId = nextSessionId;
    }
    /**
     * Replay the portion of the log fetched by a completed recording catch-up so the
     * services process it, then commit the resulting term position. The leadership
     * term id is temporarily switched to the replayed term and restored afterwards.
     */
    void catchupLog(final RecordingCatchUp recordingCatchUp)
    {
        final long fromPosition = recordingCatchUp.fromPosition();
        final long targetPosition = recordingCatchUp.targetPosition();
        final long length = targetPosition - fromPosition;

        final int lastStepIndex = recoveryPlan.termSteps.size() - 1;
        final RecordingLog.ReplayStep lastStep = recoveryPlan.termSteps.get(lastStepIndex);
        final RecordingLog.Entry entry = lastStep.entry;

        final long originalLeadershipTermId = leadershipTermId;
        termBaseLogPosition = entry.termBaseLogPosition;
        leadershipTermId = entry.leadershipTermId;

        try (Counter counter = CommitPos.allocate(aeron, tempBuffer, leadershipTermId, termBaseLogPosition, length))
        {
            final int streamId = ctx.replayStreamId();
            final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());
            // Session id distinguishes this replay from the per-step replays in recoverFromLog.
            final int logSessionId = lastStepIndex + 1;
            channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
            final String channel = channelUri.toString();

            try (Subscription subscription = aeron.addSubscription(channel, streamId))
            {
                serviceAckCount = 0;
                logAdapter = null;
                // Tell the services to join the replayed log, then wait for them before replaying.
                serviceControlPublisher.joinLog(leadershipTermId, counter.id(), logSessionId, streamId, true, channel);
                awaitServiceAcks();

                final int replaySessionId = (int)archive.startReplay(
                    recordingCatchUp.recordingIdToExtend(), fromPosition, length, channel, streamId);

                final Image image = awaitImage(replaySessionId, subscription);
                replayTerm(image, targetPosition, counter);

                final long termPosition = image.position();
                recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
                termBaseLogPosition = entry.termBaseLogPosition + termPosition;
            }
        }

        leadershipTermId = originalLeadershipTermId;
    }
    /**
     * Housekeeping performed at most once per millisecond tick: mark-file liveness,
     * service heartbeat checks, client conductor invocation, service control polling,
     * leader-only session/timer management and archive error checks.
     */
    private int slowTickCycle(final long nowMs)
    {
        int workCount = 0;

        markFile.updateActivityTimestamp(nowMs);
        checkServiceHeartbeats(nowMs);
        workCount += aeronClientInvoker.invoke();
        workCount += serviceControlAdapter.poll();

        if (Cluster.Role.LEADER == role)
        {
            workCount += checkControlToggle(nowMs);

            if (ConsensusModule.State.ACTIVE == state)
            {
                workCount += processPendingSessions(pendingSessions, nowMs);
                workCount += checkSessions(sessionByIdMap, nowMs);
                workCount += processRejectedSessions(rejectedSessions, nowMs);
                workCount += timerService.poll(nowMs);
            }
        }

        if (null != archive)
        {
            // Surfaces async errors from the archive control channel.
            archive.checkForErrorResponse();
        }

        return workCount;
    }
private void checkServiceHeartbeats(final long nowMs)
{
final long heartbeatThreshold = nowMs - serviceHeartbeatTimeoutMs;
for (final Counter serviceHeartbeat : serviceHeartbeats)
{
if (serviceHeartbeat.get() < heartbeatThreshold)
{
ctx.errorHandler().onError(new TimeoutException("no heartbeat from clustered service"));
ctx.terminationHook().run();
}
}
}
private int checkControlToggle(final long nowMs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND, nowMs))
{
state(ConsensusModule.State.SUSPENDED);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME, nowMs))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT, nowMs))
{
state(ConsensusModule.State.SNAPSHOT);
}
break;
case SHUTDOWN:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SHUTDOWN, nowMs))
{
state(ConsensusModule.State.SHUTDOWN);
}
break;
case ABORT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.ABORT, nowMs))
{
state(ConsensusModule.State.ABORT);
}
break;
default:
return 0;
}
return 1;
}
    /**
     * Send a vote to a candidate, spinning with the idle strategy until the
     * publication accepts it (vote delivery is required for election progress).
     */
    private void sendVote(final long candidateTermId, final int candidateId, final boolean vote)
    {
        idleStrategy.reset();
        while (!memberStatusPublisher.placeVote(
            clusterMembers[candidateId].publication(), candidateTermId, candidateId, memberId, vote))
        {
            idle();
        }
    }
    /**
     * Append a cluster action to the log. The recorded position includes the
     * encoded length of the action message itself so it reflects the position
     * after the append.
     *
     * @return true when appended; false when back pressured.
     */
    private boolean appendAction(final ClusterAction action, final long nowMs)
    {
        final long position = termBaseLogPosition +
            logPublisher.position() +
            MessageHeaderEncoder.ENCODED_LENGTH +
            ClusterActionRequestEncoder.BLOCK_LENGTH;

        return logPublisher.appendClusterAction(action, leadershipTermId, position, nowMs);
    }
    /**
     * Drive pending sessions through the authentication state machine. Iterates in
     * reverse so fastUnorderedRemove (swap with last) does not skip elements.
     * Outcomes: AUTHENTICATED sessions are promoted to the session map, REJECTED
     * ones move to the rejected list, and stale ones are timed out and closed.
     */
    private int processPendingSessions(final ArrayList<ClusterSession> pendingSessions, final long nowMs)
    {
        int workCount = 0;

        for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
        {
            final ClusterSession session = pendingSessions.get(i);

            if (session.state() == INIT || session.state() == CONNECTED)
            {
                if (session.isResponsePublicationConnected())
                {
                    session.state(CONNECTED);
                    authenticator.onProcessConnectedSession(sessionProxy.session(session), nowMs);
                }
            }

            // Note: re-check state each step — the authenticator callbacks above may
            // have advanced it within this same pass.
            if (session.state() == CHALLENGED)
            {
                if (session.isResponsePublicationConnected())
                {
                    authenticator.onProcessChallengedSession(sessionProxy.session(session), nowMs);
                }
            }

            if (session.state() == AUTHENTICATED)
            {
                ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
                session.timeOfLastActivityMs(nowMs);
                sessionByIdMap.put(session.id(), session);
                appendConnectedSession(session, nowMs);

                workCount += 1;
            }
            else if (session.state() == REJECTED)
            {
                ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
                rejectedSessions.add(session);
            }
            else if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
            {
                ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
                session.close();
            }
        }

        return workCount;
    }
    /**
     * Notify and close rejected sessions. Each session gets an ERROR event with the
     * session-limit detail, or AUTHENTICATION_REJECTED when the authenticator
     * rejected it. The session is closed once the event is delivered, or dropped
     * after the session timeout if delivery keeps failing.
     */
    private int processRejectedSessions(final ArrayList<ClusterSession> rejectedSessions, final long nowMs)
    {
        int workCount = 0;

        for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
        {
            final ClusterSession session = rejectedSessions.get(i);
            String detail = ConsensusModule.Configuration.SESSION_LIMIT_MSG;
            EventCode eventCode = EventCode.ERROR;

            if (session.state() == REJECTED)
            {
                detail = ConsensusModule.Configuration.SESSION_REJECTED_MSG;
                eventCode = EventCode.AUTHENTICATION_REJECTED;
            }

            if (egressPublisher.sendEvent(session, eventCode, detail) ||
                nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
            {
                ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
                session.close();
                workCount++;
            }
        }

        return workCount;
    }
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowMs)
{
int workCount = 0;
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
final ClusterSession.State state = session.state();
if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
switch (state)
{
case OPEN:
egressPublisher.sendEvent(session, EventCode.ERROR, SESSION_TIMEOUT_MSG);
session.closeReason(CloseReason.TIMEOUT);
session.close();
if (appendClosedSession(session, nowMs))
{
i.remove();
}
break;
case CLOSED:
if (appendClosedSession(session, nowMs))
{
session.close();
i.remove();
}
break;
default:
session.close();
i.remove();
}
workCount += 1;
}
else if (state == CONNECTED)
{
appendConnectedSession(session, nowMs);
workCount += 1;
}
}
return workCount;
}
private void appendConnectedSession(final ClusterSession session, final long nowMs)
{
final long resultingPosition = logPublisher.appendConnectedSession(session, nowMs);
if (resultingPosition > 0)
{
session.open(resultingPosition);
}
}
private boolean appendClosedSession(final ClusterSession session, final long nowMs)
{
if (logPublisher.appendClosedSession(session, nowMs))
{
session.close();
return true;
}
return false;
}
    /**
     * Establish the leader for the new term. Three paths: a single-member cluster
     * elects itself immediately; a node configured as the appointed leader runs a
     * candidacy and awaits quorum votes; any other node blocks until it has voted
     * for someone. Finally becomeLeader()/becomeFollower() is chosen from the vote.
     */
    private void establishLeader()
    {
        leadershipTermId++;

        if (clusterMembers.length == 1)
        {
            // Trivial cluster: no election needed.
            votedForMemberId = memberId;
            leaderMember = thisMember;
            recordingLog.appendTerm(leadershipTermId, termBaseLogPosition, epochClock.time(), votedForMemberId);
        }
        else if (ctx.appointedLeaderId() != NULL_MEMBER_ID)
        {
            if (ctx.appointedLeaderId() == memberId)
            {
                role(Cluster.Role.CANDIDATE);
                ClusterMember.becomeCandidate(clusterMembers, memberId);
                votedForMemberId = memberId;
                // Persist our own vote before soliciting the others.
                recordingLog.appendTerm(leadershipTermId, termBaseLogPosition, epochClock.time(), votedForMemberId);

                requestVotes(
                    clusterMembers, recoveryPlan.lastTermBaseLogPosition, recoveryPlan.lastTermPositionAppended);

                do
                {
                    idle(memberStatusAdapter.poll());
                }
                while (ClusterMember.awaitingVotes(clusterMembers));

                leaderMember = thisMember;
            }
            else
            {
                // Block until onRequestVote sets votedForMemberId for this term.
                votedForMemberId = NULL_MEMBER_ID;
                while (NULL_MEMBER_ID == votedForMemberId)
                {
                    idle(memberStatusAdapter.poll());
                }
            }
        }

        if (memberId == votedForMemberId)
        {
            becomeLeader();
        }
        else
        {
            becomeFollower();
        }
    }
    /**
     * Send a vote request to every other member, spinning with the idle strategy
     * until each publication accepts the request.
     */
    private void requestVotes(
        final ClusterMember[] clusterMembers, final long lastLogPosition, final long lastTermPosition)
    {
        idleStrategy.reset();
        for (final ClusterMember member : clusterMembers)
        {
            if (member != thisMember)
            {
                while (!memberStatusPublisher.requestVote(
                    member.publication(), leadershipTermId, lastLogPosition, lastTermPosition, memberId))
                {
                    idle();
                }
            }
        }
    }
    /**
     * Assume leadership: create the log publication (with multi-destination-cast to
     * followers when no explicit endpoint is configured on a UDP channel), start
     * recording the log, wait for services and followers to join, then reconnect
     * any surviving client sessions.
     */
    private void becomeLeader()
    {
        updateMemberDetails(votedForMemberId);
        role(Cluster.Role.LEADER);

        final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
        final Publication publication = aeron.addExclusivePublication(ctx.logChannel(), ctx.logStreamId());
        if (!channelUri.containsKey(CommonContext.ENDPOINT_PARAM_NAME) && UDP_MEDIA.equals(channelUri.media()))
        {
            // No fixed endpoint: add each follower's log endpoint as an MDC destination.
            final ChannelUriStringBuilder builder = new ChannelUriStringBuilder().media(UDP_MEDIA);
            for (final ClusterMember member : clusterMembers)
            {
                if (member != thisMember)
                {
                    final String destination = builder.endpoint(member.logEndpoint()).build();
                    publication.addDestination(destination);
                }
            }
        }

        logAdapter = null;
        logPublisher.connect(publication);
        logSessionId = publication.sessionId();

        channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
        final String recordingChannel = channelUri.toString();
        archive.startRecording(recordingChannel, ctx.logStreamId(), SourceLocation.LOCAL);

        createPositionCounters();

        final long recordingId = RecordingPos.getRecordingId(aeron.countersReader(), logRecordingPosition.counterId());
        recordingLog.commitLeadershipRecordingId(leadershipTermId, recordingId);

        awaitServicesReady(channelUri, true);
        awaitFollowersReady();

        final long nowMs = epochClock.time();
        for (final ClusterSession session : sessionByIdMap.values())
        {
            if (session.state() != CLOSED)
            {
                session.connect(aeron);
                session.timeOfLastActivityMs(nowMs);
            }
        }
    }
    /**
     * Assume the follower role: wait for the leader's log session id, catch up any
     * missing recording, start recording the incoming log locally, attach a
     * LogAdapter to the leader's log image, then signal readiness to the leader.
     */
    private void becomeFollower()
    {
        leaderMember = clusterMembers[votedForMemberId];
        updateMemberDetails(votedForMemberId);
        role(Cluster.Role.FOLLOWER);

        followerCommitPosition = 0;
        logSessionId = NULL_SESSION_ID;

        awaitLogSessionIdFromLeader();
        awaitCatchUp();

        final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
        channelUri.put(CommonContext.ENDPOINT_PARAM_NAME, thisMember.logEndpoint());
        channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
        final String logChannel = channelUri.toString();
        final int streamId = ctx.logStreamId();
        archive.startRecording(logChannel, streamId, SourceLocation.REMOTE);

        final Image image = awaitImage(logSessionId, aeron.addSubscription(logChannel, streamId));
        logAdapter = new LogAdapter(image, this);

        createPositionCounters();

        final long recordingId = RecordingPos.getRecordingId(aeron.countersReader(), logRecordingPosition.counterId());
        recordingLog.commitLeadershipRecordingId(leadershipTermId, recordingId);

        awaitServicesReady(channelUri, false);
        notifyLeaderThatFollowerIsReady();
    }
    // Block until onNewLeadershipTerm delivers the leader's log session id.
    private void awaitLogSessionIdFromLeader()
    {
        while (NULL_SESSION_ID == logSessionId)
        {
            idle(memberStatusAdapter.poll());
        }
    }
    /**
     * If a recording catch-up was started (leader is ahead of us), drive it to
     * completion, replay the caught-up section through the services, and clear it.
     */
    private void awaitCatchUp()
    {
        if (null != recordingCatchUp)
        {
            do
            {
                idle(memberStatusAdapter.poll() + recordingCatchUp.doWork());
            }
            while (!recordingCatchUp.isCaughtUp());

            recordingCatchUp.close();
            catchupLog(recordingCatchUp);
            recordingCatchUp = null;
        }
    }
    /**
     * Report this follower's appended position to the leader, spinning until the
     * publication accepts it, then reset the recording-position watermark.
     */
    private void notifyLeaderThatFollowerIsReady()
    {
        idleStrategy.reset();
        final Publication publication = leaderMember.publication();
        while (!memberStatusPublisher.appendedPosition(publication, followerCommitPosition, leadershipTermId, memberId))
        {
            idle();
        }

        lastRecordingPosition = 0;
    }
    /**
     * Block until every follower has reported an appended position, re-announcing
     * the new leadership term at the heartbeat interval so late joiners hear it.
     */
    private void awaitFollowersReady()
    {
        ClusterMember.resetTermPositions(clusterMembers, NULL_POSITION);
        clusterMembers[memberId].termPosition(0);

        do
        {
            final long nowMs = epochClock.time();
            if (nowMs > (timeOfLastLogUpdateMs + leaderHeartbeatIntervalMs))
            {
                timeOfLastLogUpdateMs = nowMs;

                for (final ClusterMember member : clusterMembers)
                {
                    if (member != thisMember)
                    {
                        memberStatusPublisher.newLeadershipTerm(
                            member.publication(),
                            recoveryPlan.lastTermBaseLogPosition,
                            recoveryPlan.lastTermPositionAppended,
                            leadershipTermId,
                            memberId,
                            logSessionId);
                    }
                }
            }

            idle(memberStatusAdapter.poll());
        }
        while (!ClusterMember.hasReachedPosition(clusterMembers, 0));
    }
    /**
     * Create the counters tracking log progress: the recording position published
     * by the archive for the log session, and a fresh commit-position counter for
     * the current term.
     */
    private void createPositionCounters()
    {
        final CountersReader counters = aeron.countersReader();
        final int recordingCounterId = awaitRecordingCounter(counters, logSessionId);

        logRecordingPosition = new ReadableCounter(counters, recordingCounterId);

        commitPosition = CommitPos.allocate(
            aeron, tempBuffer, leadershipTermId, termBaseLogPosition, CommitPos.NULL_VALUE);
    }
    /**
     * Tell the clustered services to join the live log and wait for their acks.
     * The leader hands services a spy channel on UDP so they read the local
     * publication rather than the network.
     */
    private void awaitServicesReady(final ChannelUri channelUri, final boolean isLeader)
    {
        serviceAckCount = 0;
        final String channel = isLeader && UDP_MEDIA.equals(channelUri.media()) ?
            channelUri.prefix(SPY_QUALIFIER).toString() : channelUri.toString();
        serviceControlPublisher.joinLog(
            leadershipTermId, commitPosition.id(), logSessionId, ctx.logStreamId(), false, channel);

        awaitServiceAcks();
    }
private void updateMemberDetails(final int leaderMemberId)
{
for (final ClusterMember clusterMember : clusterMembers)
{
clusterMember.isLeader(clusterMember.id() == leaderMemberId);
}
updateClusterMemberDetails(clusterMembers);
}
    /**
     * Replay the latest snapshot recording from the archive and feed it through the
     * SnapshotLoader to rebuild consensus-module state (sessions, timers, clock).
     *
     * @throws IllegalStateException if the snapshot's recording is unknown to the
     *         archive or the replay image closes before the snapshot completes.
     */
    private void recoverFromSnapshot(final RecordingLog.ReplayStep snapshotStep, final AeronArchive archive)
    {
        final RecordingLog.Entry snapshot = snapshotStep.entry;

        cachedEpochClock.update(snapshot.timestamp);
        termBaseLogPosition = snapshot.termBaseLogPosition + snapshot.termPosition;
        leadershipTermId = snapshot.leadershipTermId;

        final long recordingId = snapshot.recordingId;
        final RecordingExtent recordingExtent = new RecordingExtent();
        if (0 == archive.listRecording(recordingId, recordingExtent))
        {
            throw new IllegalStateException("unknown recordingId: " + recordingId);
        }

        final String channel = ctx.replayChannel();
        final int streamId = ctx.replayStreamId();

        final long length = recordingExtent.stopPosition - recordingExtent.startPosition;
        final int sessionId = (int)archive.startReplay(recordingId, 0, length, channel, streamId);
        final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);

        try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
        {
            final Image image = awaitImage(sessionId, subscription);
            final SnapshotLoader snapshotLoader = new SnapshotLoader(image, this);

            while (true)
            {
                final int fragments = snapshotLoader.poll();
                if (fragments == 0)
                {
                    if (snapshotLoader.isDone())
                    {
                        break;
                    }

                    if (image.isClosed())
                    {
                        throw new IllegalStateException("snapshot ended unexpectedly");
                    }
                }

                idle(fragments);
            }
        }
    }
private Image awaitImage(final int sessionId, final Subscription subscription)
{
idleStrategy.reset();
Image image;
while ((image = subscription.imageBySessionId(sessionId)) == null)
{
idle();
}
return image;
}
/**
 * Recover state by replaying each leadership term from the archived log.
 * For every step the services are joined to a replay log subscription, the
 * term is replayed to its stop position, and service ACKs are awaited before
 * advancing to the next term.
 *
 * @param steps   ordered replay steps, one per leadership term.
 * @param archive archive from which the term recordings are replayed.
 */
private void recoverFromLog(final List<RecordingLog.ReplayStep> steps, final AeronArchive archive)
{
    final int streamId = ctx.replayStreamId();
    final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());

    for (int i = 0, size = steps.size(); i < size; i++)
    {
        final RecordingLog.ReplayStep step = steps.get(i);
        final RecordingLog.Entry entry = step.entry;
        final long startPosition = step.recordingStartPosition;
        final long stopPosition = step.recordingStopPosition;
        final long length = stopPosition - startPosition;

        termBaseLogPosition = entry.termBaseLogPosition;
        leadershipTermId = entry.leadershipTermId;

        // The step index doubles as the session id so each term's replay is
        // distinguishable on the shared replay channel.
        channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(i));
        final String channel = channelUri.toString();
        final long recordingId = entry.recordingId;

        // Commit position counter scoped to this term bounds service consumption.
        try (Counter counter = CommitPos.allocate(aeron, tempBuffer, leadershipTermId, termBaseLogPosition, length))
        {
            serviceAckCount = 0;
            logAdapter = null;

            if (length > 0)
            {
                try (Subscription subscription = aeron.addSubscription(channel, streamId))
                {
                    // Instruct services to join this replayed log, then wait for their ACKs.
                    serviceControlPublisher.joinLog(leadershipTermId, counter.id(), i, streamId, true, channel);
                    awaitServiceAcks();

                    final Image image = awaitImage(
                        (int)archive.startReplay(recordingId, startPosition, length, channel, streamId),
                        subscription);

                    serviceAckCount = 0;
                    replayTerm(image, stopPosition, counter);
                    awaitServiceAcks();

                    final long termPosition = image.position();
                    // If the replay progressed past what the recording log committed,
                    // update the log so the term's recorded position is extended.
                    if (step.entry.termPosition < termPosition)
                    {
                        recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
                    }

                    termBaseLogPosition = entry.termBaseLogPosition + termPosition;
                }
            }
            else
            {
                // Empty term: nothing to replay but services must still ACK the step.
                awaitServiceAcks();
            }
        }
    }
}
/**
 * Allocate the recovery state counter which advertises the recovery plan
 * (snapshot position, if any, and the number of terms to replay) to services.
 *
 * @param plan recovery plan derived from the recording log.
 * @return the allocated recovery state counter.
 */
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
    final int termCount = plan.termSteps.size();
    final RecordingLog.ReplayStep snapshotStep = plan.snapshotStep;

    if (null == snapshotStep)
    {
        // No snapshot: recovery starts from the log with no snapshot position.
        return RecoveryState.allocate(aeron, tempBuffer, leadershipTermId, NULL_POSITION, 0, termCount);
    }

    final RecordingLog.Entry snapshot = snapshotStep.entry;

    return RecoveryState.allocate(
        aeron, tempBuffer, snapshot.leadershipTermId, snapshot.termPosition, snapshot.timestamp, termCount);
}
/**
 * Poll the service control channel until every clustered service has ACKed.
 */
private void awaitServiceAcks()
{
    final int serviceCount = ctx.serviceCount();

    while (true)
    {
        final int fragmentsRead = serviceControlAdapter.poll();
        if (serviceAckCount >= serviceCount)
        {
            return;
        }

        idle(fragmentsRead);
    }
}
/**
 * Validate that a service ACK matches the module's current log position,
 * leadership term, and that the action is valid for the current state.
 *
 * @throws IllegalStateException if the ACK does not match the expected state.
 */
private void validateServiceAck(
    final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
    final long expectedLogPosition = termBaseLogPosition + currentTermPosition();

    final boolean positionMismatch = logPosition != expectedLogPosition;
    final boolean termMismatch = leadershipTermId != this.leadershipTermId;
    if (positionMismatch || termMismatch)
    {
        throw new IllegalStateException("invalid log state:" +
            " serviceId=" + serviceId +
            ", logPosition=" + logPosition + " current is " + expectedLogPosition +
            ", leadershipTermId=" + leadershipTermId + " current is " + this.leadershipTermId);
    }

    if (!state.isValid(action))
    {
        throw new IllegalStateException("invalid service ACK for state " + state + ", action " + action);
    }
}
/**
 * Position within the current term: taken from the log adapter when consuming
 * a log (replay/follower), otherwise from the log publisher (leader).
 */
private long currentTermPosition()
{
    if (null != logAdapter)
    {
        return logAdapter.position();
    }

    return logPublisher.position();
}
/**
 * Publish the comma separated client-facing endpoint list, leader first,
 * followed by the remaining members in array order.
 * NOTE(review): if no member is flagged as leader this silently treats
 * member 0 as leader - confirm that is intended.
 */
private void updateClusterMemberDetails(final ClusterMember[] members)
{
    int leaderIndex = 0;
    while (leaderIndex < members.length - 1 && !members[leaderIndex].isLeader())
    {
        leaderIndex++;
    }
    if (!members[leaderIndex].isLeader())
    {
        leaderIndex = 0;
    }

    final StringBuilder detail = new StringBuilder(100);
    detail.append(members[leaderIndex].clientFacingEndpoint());

    for (int i = 0, length = members.length; i < length; i++)
    {
        if (i != leaderIndex)
        {
            detail.append(',').append(members[i].clientFacingEndpoint());
        }
    }

    sessionProxy.memberEndpointsDetail(detail.toString());
}
/**
 * Exchange position updates with the other cluster members.
 * Leader: compute and disseminate the quorum commit position (which also
 * serves as the leader heartbeat). Follower: report the appended position to
 * the leader and fail if the leader's heartbeat times out.
 *
 * @param nowMs current cached cluster time in milliseconds.
 * @return 1 if work was done in this duty cycle, otherwise 0.
 * @throws AgentTerminationException if a follower sees no leader heartbeat in time.
 */
private int updateMemberPosition(final long nowMs)
{
    int workCount = 0;

    if (Cluster.Role.LEADER == role)
    {
        thisMember.termPosition(logRecordingPosition.get());

        // Commit position is the position reached by a quorum of members.
        final long position = ClusterMember.quorumPosition(clusterMembers, rankedPositions);

        // Publish on progress, or periodically so followers do not time the leader out.
        if (position > commitPosition.getWeak() || nowMs >= (timeOfLastLogUpdateMs + leaderHeartbeatIntervalMs))
        {
            for (final ClusterMember member : clusterMembers)
            {
                if (member != thisMember)
                {
                    final Publication publication = member.publication();
                    memberStatusPublisher.commitPosition(publication, position, leadershipTermId, memberId);
                }
            }

            commitPosition.setOrdered(position);
            timeOfLastLogUpdateMs = nowMs;
            workCount = 1;
        }
    }
    else if (Cluster.Role.FOLLOWER == role)
    {
        final long recordingPosition = logRecordingPosition.get();
        if (recordingPosition != lastRecordingPosition)
        {
            final Publication publication = leaderMember.publication();
            // lastRecordingPosition only advances on a successful publish so a
            // failed offer is retried on the next cycle.
            if (memberStatusPublisher.appendedPosition(
                publication, recordingPosition, leadershipTermId, memberId))
            {
                lastRecordingPosition = recordingPosition;
            }

            workCount = 1;
        }

        commitPosition.proposeMaxOrdered(logAdapter.position());

        // timeOfLastLogUpdateMs is refreshed by onCommitPosition; silence means the leader is gone.
        if (nowMs >= (timeOfLastLogUpdateMs + leaderHeartbeatTimeoutMs))
        {
            throw new AgentTerminationException("no heartbeat from cluster leader");
        }
    }

    return workCount;
}
/**
 * Idle for one cycle with no work done: check for interruption, drive the
 * Aeron client conductor so its work still progresses, then apply the idle strategy.
 */
private void idle()
{
    checkInterruptedStatus();
    aeronClientInvoker.invoke();
    idleStrategy.idle();
}
/**
 * Idle proportionally to the work done this cycle: check for interruption,
 * drive the Aeron client conductor, then let the idle strategy decide based
 * on {@code workCount} (a non-zero count typically resets back-off).
 *
 * @param workCount amount of work done in the preceding cycle.
 */
private void idle(final int workCount)
{
    checkInterruptedStatus();
    aeronClientInvoker.invoke();
    idleStrategy.idle(workCount);
}
/**
 * Fail fast if the agent thread has been interrupted, e.g. during shutdown,
 * so blocking await loops do not spin forever.
 *
 * @throws RuntimeException if the current thread's interrupt flag is set.
 */
private static void checkInterruptedStatus()
{
    if (!Thread.currentThread().isInterrupted())
    {
        return;
    }

    throw new RuntimeException("unexpected interrupt");
}
/**
 * Record a snapshot of the consensus module state to the archive and append
 * the resulting entry to the recording log.
 *
 * @param timestampMs  cluster time of the snapshot in milliseconds.
 * @param termPosition position within the current term at which the snapshot is taken.
 */
private void takeSnapshot(final long timestampMs, final long termPosition)
{
    final String channel = ctx.snapshotChannel();
    final int streamId = ctx.snapshotStreamId();

    try (Publication publication = archive.addRecordedExclusivePublication(channel, streamId))
    {
        try
        {
            final CountersReader counters = aeron.countersReader();
            // The archive signals recording readiness via a RecordingPos counter.
            final int counterId = awaitRecordingCounter(counters, publication.sessionId());
            final long recordingId = RecordingPos.getRecordingId(counters, counterId);

            snapshotState(publication, termBaseLogPosition + termPosition, leadershipTermId);
            // Make sure the archive has persisted everything that was published.
            awaitRecordingComplete(recordingId, publication.position(), counters, counterId);

            recordingLog.appendSnapshot(
                recordingId, leadershipTermId, termBaseLogPosition, termPosition, timestampMs);
        }
        finally
        {
            // Always stop the recording, even if the snapshot failed part way.
            archive.stopRecording(publication);
        }

        ctx.snapshotCounter().incrementOrdered();
    }
}
/**
 * Block until the archive recording position reaches {@code completePosition}.
 * Uses do/while so the agent idles (and services the Aeron client) at least
 * once before re-checking.
 *
 * @throws IllegalStateException if the recording stops before completion.
 */
private void awaitRecordingComplete(
    final long recordingId, final long completePosition, final CountersReader counters, final int counterId)
{
    idleStrategy.reset();
    do
    {
        idle();

        if (!RecordingPos.isActive(counters, counterId, recordingId))
        {
            throw new IllegalStateException("recording has stopped unexpectedly: " + recordingId);
        }
    }
    while (counters.getCounterValue(counterId) < completePosition);
}
/**
 * Wait for the archive to publish a recording position counter for the
 * publication session.
 *
 * @param counters  counters reader to search.
 * @param sessionId session id of the recorded publication.
 * @return the id of the recording position counter.
 */
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
    idleStrategy.reset();

    while (true)
    {
        final int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
        if (CountersReader.NULL_COUNTER_ID != counterId)
        {
            return counterId;
        }

        idle();
    }
}
/**
 * Write the consensus module state (open sessions, pending timers and the
 * session id sequencer) to the snapshot publication between begin/end markers.
 *
 * @param publication      recorded publication to write the snapshot to.
 * @param logPosition      log position the snapshot represents.
 * @param leadershipTermId term in which the snapshot is taken.
 */
private void snapshotState(final Publication publication, final long logPosition, final long leadershipTermId)
{
    final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
        publication, idleStrategy, aeronClientInvoker);

    snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);

    // Only OPEN sessions are persisted; closed or pending sessions are excluded.
    for (final ClusterSession session : sessionByIdMap.values())
    {
        if (session.state() == OPEN)
        {
            snapshotTaker.snapshotSession(session);
        }
    }

    aeronClientInvoker.invoke();

    timerService.snapshot(snapshotTaker);
    snapshotTaker.sequencerState(nextSessionId);

    snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
}
/**
 * Replay a single term's log image up to {@code finalTermPosition}, publishing
 * progress via the {@code replayPosition} counter and servicing timers and
 * service control while doing so.
 *
 * @throws IllegalStateException if the image closes without end-of-stream.
 */
private void replayTerm(final Image image, final long finalTermPosition, final Counter replayPosition)
{
    logAdapter = new LogAdapter(image, this);

    while (true)
    {
        int workCount = logAdapter.poll(finalTermPosition);
        if (workCount == 0)
        {
            if (image.isClosed())
            {
                if (!image.isEndOfStream())
                {
                    throw new IllegalStateException("unexpected close of image when replaying");
                }

                // NOTE(review): breaking here skips the replayPosition.setOrdered()
                // below for the final poll - confirm callers re-read image.position()
                // (recoverFromLog does) rather than relying on this counter.
                break;
            }
        }

        replayPosition.setOrdered(image.position());

        workCount += serviceControlAdapter.poll();
        workCount += timerService.poll(cachedEpochClock.time());

        idle(workCount);
    }
}
}
| aeron-cluster/src/main/java/io/aeron/cluster/SequencerAgent.java | /*
* Copyright 2014-2018 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.codecs.SourceLocation;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.service.RecordingLog;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.exceptions.TimeoutException;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.logbuffer.Header;
import io.aeron.status.ReadableCounter;
import org.agrona.CloseHelper;
import org.agrona.DirectBuffer;
import org.agrona.MutableDirectBuffer;
import org.agrona.collections.ArrayListUtil;
import org.agrona.collections.Long2ObjectHashMap;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static io.aeron.ChannelUri.SPY_QUALIFIER;
import static io.aeron.CommonContext.ENDPOINT_PARAM_NAME;
import static io.aeron.CommonContext.NULL_SESSION_ID;
import static io.aeron.CommonContext.UDP_MEDIA;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.cluster.ClusterMember.NULL_MEMBER_ID;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.SESSION_TIMEOUT_MSG;
import static io.aeron.cluster.ConsensusModule.SNAPSHOT_TYPE_ID;
class SequencerAgent implements Agent, ServiceControlListener, MemberStatusListener
{
private boolean isRecovering;
private final int memberId;
private int votedForMemberId;
private int serviceAckCount = 0;
private int logSessionId = CommonContext.NULL_SESSION_ID;
private final long sessionTimeoutMs;
private final long leaderHeartbeatIntervalMs;
private final long leaderHeartbeatTimeoutMs;
private final long serviceHeartbeatTimeoutMs;
private long nextSessionId = 1;
private long termBaseLogPosition = 0;
private long leadershipTermId = -1;
private long lastRecordingPosition = 0;
private long timeOfLastLogUpdateMs = 0;
private long followerCommitPosition = 0;
private ReadableCounter logRecordingPosition;
private Counter commitPosition;
private ConsensusModule.State state = ConsensusModule.State.INIT;
private Cluster.Role role;
private ClusterMember[] clusterMembers;
private ClusterMember leaderMember;
private final ClusterMember thisMember;
private long[] rankedPositions;
private final Counter clusterRoleCounter;
private final ClusterMarkFile markFile;
private final AgentInvoker aeronClientInvoker;
private final EpochClock epochClock;
private final CachedEpochClock cachedEpochClock = new CachedEpochClock();
private final Counter moduleState;
private final Counter controlToggle;
private final TimerService timerService;
private final ServiceControlAdapter serviceControlAdapter;
private final ServiceControlPublisher serviceControlPublisher;
private final IngressAdapter ingressAdapter;
private final EgressPublisher egressPublisher;
private final LogPublisher logPublisher;
private LogAdapter logAdapter;
private final MemberStatusAdapter memberStatusAdapter;
private final MemberStatusPublisher memberStatusPublisher = new MemberStatusPublisher();
private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
private final Authenticator authenticator;
private final SessionProxy sessionProxy;
private final Aeron aeron;
private AeronArchive archive;
private final ConsensusModule.Context ctx;
private final MutableDirectBuffer tempBuffer;
private final Counter[] serviceHeartbeats;
private final IdleStrategy idleStrategy;
private final RecordingLog recordingLog;
private RecordingLog.RecoveryPlan recoveryPlan;
private UnsafeBuffer recoveryPlanBuffer;
private RecordingCatchUp recordingCatchUp;
SequencerAgent(
final ConsensusModule.Context ctx, final EgressPublisher egressPublisher, final LogPublisher logPublisher)
{
this.ctx = ctx;
this.aeron = ctx.aeron();
this.epochClock = ctx.epochClock();
this.sessionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.sessionTimeoutNs());
this.leaderHeartbeatIntervalMs = TimeUnit.NANOSECONDS.toMillis(ctx.leaderHeartbeatIntervalNs());
this.leaderHeartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.leaderHeartbeatTimeoutNs());
this.serviceHeartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.serviceHeartbeatTimeoutNs());
this.egressPublisher = egressPublisher;
this.moduleState = ctx.moduleStateCounter();
this.controlToggle = ctx.controlToggleCounter();
this.logPublisher = logPublisher;
this.idleStrategy = ctx.idleStrategy();
this.timerService = new TimerService(this);
this.clusterMembers = ClusterMember.parse(ctx.clusterMembers());
this.sessionProxy = new SessionProxy(egressPublisher);
this.memberId = ctx.clusterMemberId();
this.votedForMemberId = ctx.appointedLeaderId();
this.clusterRoleCounter = ctx.clusterNodeCounter();
this.markFile = ctx.clusterMarkFile();
this.recordingLog = ctx.recordingLog();
this.tempBuffer = ctx.tempBuffer();
this.serviceHeartbeats = ctx.serviceHeartbeatCounters();
aeronClientInvoker = aeron.conductorAgentInvoker();
aeronClientInvoker.invoke();
rankedPositions = new long[ClusterMember.quorumThreshold(clusterMembers.length)];
role(Cluster.Role.FOLLOWER);
thisMember = clusterMembers[memberId];
final ChannelUri memberStatusUri = ChannelUri.parse(ctx.memberStatusChannel());
memberStatusUri.put(ENDPOINT_PARAM_NAME, thisMember.memberFacingEndpoint());
final int statusStreamId = ctx.memberStatusStreamId();
memberStatusAdapter = new MemberStatusAdapter(
aeron.addSubscription(memberStatusUri.toString(), statusStreamId), this);
ClusterMember.addMemberStatusPublications(clusterMembers, thisMember, memberStatusUri, statusStreamId, aeron);
final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
if (!ingressUri.containsKey(ENDPOINT_PARAM_NAME))
{
ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.clientFacingEndpoint());
}
ingressAdapter = new IngressAdapter(
aeron.addSubscription(ingressUri.toString(), ctx.ingressStreamId()), this, ctx.invalidRequestCounter());
final ChannelUri archiveUri = ChannelUri.parse(ctx.archiveContext().controlRequestChannel());
ClusterMember.checkArchiveEndpoint(thisMember, archiveUri);
archiveUri.put(ENDPOINT_PARAM_NAME, thisMember.archiveEndpoint());
ctx.archiveContext().controlRequestChannel(archiveUri.toString());
serviceControlAdapter = new ServiceControlAdapter(
aeron.addSubscription(ctx.serviceControlChannel(), ctx.serviceControlStreamId()), this);
serviceControlPublisher = new ServiceControlPublisher(
aeron.addPublication(ctx.serviceControlChannel(), ctx.serviceControlStreamId()));
authenticator = ctx.authenticatorSupplier().newAuthenticator(ctx);
}
public void onClose()
{
CloseHelper.close(archive);
if (!ctx.ownsAeronClient())
{
for (final ClusterSession session : sessionByIdMap.values())
{
session.close();
}
CloseHelper.close(memberStatusAdapter);
ClusterMember.closeMemberPublications(clusterMembers);
logPublisher.disconnect();
CloseHelper.close(ingressAdapter);
CloseHelper.close(serviceControlPublisher);
CloseHelper.close(serviceControlAdapter);
}
}
public void onStart()
{
archive = AeronArchive.connect(ctx.archiveContext());
recoveryPlan = recordingLog.createRecoveryPlan(archive);
recoveryPlanBuffer = new UnsafeBuffer(new byte[recoveryPlan.encodedLength()]);
recoveryPlan.encode(recoveryPlanBuffer, 0);
try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
{
isRecovering = true;
if (null != recoveryPlan.snapshotStep)
{
recoverFromSnapshot(recoveryPlan.snapshotStep, archive);
}
awaitServiceAcks();
if (recoveryPlan.termSteps.size() > 0)
{
recoverFromLog(recoveryPlan.termSteps, archive);
}
isRecovering = false;
}
if (ConsensusModule.State.SUSPENDED != state)
{
state(ConsensusModule.State.ACTIVE);
}
establishLeader();
final long nowMs = epochClock.time();
cachedEpochClock.update(nowMs);
timeOfLastLogUpdateMs = nowMs;
}
/**
 * {@inheritDoc}
 * Duty cycle: poll ingress (leader) or the replicated log (follower),
 * exchange member status messages, update positions and, once per millisecond
 * tick, run the slower housekeeping cycle.
 *
 * @return the amount of work done this cycle.
 */
public int doWork()
{
    int workCount = 0;
    boolean isSlowTickCycle = false;

    // Cache the clock so the many time reads within a cycle are cheap and consistent.
    final long nowMs = epochClock.time();
    if (cachedEpochClock.time() != nowMs)
    {
        cachedEpochClock.update(nowMs);
        isSlowTickCycle = true;
    }

    if (Cluster.Role.LEADER == role && ConsensusModule.State.ACTIVE == state)
    {
        workCount += ingressAdapter.poll();
    }
    else if (Cluster.Role.FOLLOWER == role &&
        (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state))
    {
        // Followers consume the log only up to the position the leader has committed.
        workCount += logAdapter.poll(followerCommitPosition);
    }

    workCount += memberStatusAdapter.poll();
    workCount += updateMemberPosition(nowMs);

    if (isSlowTickCycle)
    {
        workCount += slowTickCycle(nowMs);
    }

    return workCount;
}
/**
 * {@inheritDoc}
 */
public String roleName()
{
    return "sequencer";
}
/**
 * Handle an ACK from a clustered service for a requested {@link ClusterAction}.
 * When the final service ACKs, the pending action (snapshot/shutdown/abort) is
 * completed; ACKs received during recovery only count towards the tally.
 *
 * @param logPosition      log position the service has reached.
 * @param leadershipTermId term the service believes is current.
 * @param serviceId        id of the acknowledging service.
 * @param action           action being acknowledged.
 * @throws IllegalStateException if the ACK is invalid for the current state or
 *                               more ACKs arrive than there are configured services.
 */
public void onServiceAck(
    final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
    validateServiceAck(logPosition, leadershipTermId, serviceId, action);

    if (++serviceAckCount == ctx.serviceCount())
    {
        if (isRecovering)
        {
            return;
        }

        final long termPosition = currentTermPosition();
        switch (action)
        {
            case SNAPSHOT:
                // Renamed from nowNs: cachedEpochClock.time() is in milliseconds.
                final long nowMs = cachedEpochClock.time();
                takeSnapshot(nowMs, termPosition);
                state(ConsensusModule.State.ACTIVE);
                ClusterControl.ToggleState.reset(controlToggle);
                // Snapshotting stalls the duty cycle; refresh session activity so
                // clients are not timed out for the pause.
                for (final ClusterSession session : sessionByIdMap.values())
                {
                    session.timeOfLastActivityMs(nowMs);
                }
                break;

            case SHUTDOWN:
                takeSnapshot(cachedEpochClock.time(), termPosition);
                recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
                state(ConsensusModule.State.CLOSED);
                ctx.terminationHook().run();
                break;

            case ABORT:
                // Abort terminates without taking a snapshot.
                recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
                state(ConsensusModule.State.CLOSED);
                ctx.terminationHook().run();
                break;
        }
    }
    else if (serviceAckCount > ctx.serviceCount())
    {
        throw new IllegalStateException("Service count exceeded: " + serviceAckCount);
    }
}
public void onSessionConnect(
final long correlationId,
final int responseStreamId,
final String responseChannel,
final byte[] encodedCredentials)
{
final long nowMs = cachedEpochClock.time();
final long sessionId = nextSessionId++;
final ClusterSession session = new ClusterSession(sessionId, responseStreamId, responseChannel);
session.connect(aeron);
session.lastActivity(nowMs, correlationId);
if (pendingSessions.size() + sessionByIdMap.size() < ctx.maxConcurrentSessions())
{
authenticator.onConnectRequest(sessionId, encodedCredentials, nowMs);
pendingSessions.add(session);
}
else
{
rejectedSessions.add(session);
}
}
/**
 * Handle a close request from a client: record the close reason, release the
 * session's resources and, once the close event is appended to the log,
 * remove the session from the active map.
 *
 * @param clusterSessionId id of the session being closed.
 */
public void onSessionClose(final long clusterSessionId)
{
    final ClusterSession session = sessionByIdMap.get(clusterSessionId);
    if (null == session)
    {
        return;
    }

    session.closeReason(CloseReason.CLIENT_ACTION);
    session.close();

    if (appendClosedSession(session, cachedEpochClock.time()))
    {
        sessionByIdMap.remove(clusterSessionId);
    }
}
/**
 * Handle an ingress message from a client session. Messages for unknown or
 * CLOSED sessions are dropped (CONTINUE); if the log append is back-pressured
 * the fragment is ABORTed so it will be redelivered.
 *
 * @return CONTINUE when consumed or dropped, ABORT to retry the fragment.
 */
public ControlledFragmentAssembler.Action onSessionMessage(
    final DirectBuffer buffer,
    final int offset,
    final int length,
    final long clusterSessionId,
    final long correlationId)
{
    final ClusterSession session = sessionByIdMap.get(clusterSessionId);
    if (null == session || session.state() == CLOSED)
    {
        return ControlledFragmentHandler.Action.CONTINUE;
    }

    final long nowMs = cachedEpochClock.time();
    if (session.state() == OPEN && logPublisher.appendMessage(buffer, offset, length, nowMs))
    {
        // Successful append also refreshes the session's activity for timeout purposes.
        session.lastActivity(nowMs, correlationId);
        return ControlledFragmentHandler.Action.CONTINUE;
    }

    return ControlledFragmentHandler.Action.ABORT;
}
/**
 * Refresh a session's last-activity time so it is not timed out.
 *
 * @param clusterSessionId id of the session sending the keep-alive.
 */
public void onSessionKeepAlive(final long clusterSessionId)
{
    final ClusterSession session = sessionByIdMap.get(clusterSessionId);
    if (null == session)
    {
        return;
    }

    session.timeOfLastActivityMs(cachedEpochClock.time());
}
public void onChallengeResponse(
final long correlationId, final long clusterSessionId, final byte[] encodedCredentials)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs);
break;
}
}
}
/**
 * Handle a timer expiry. Only the leader sequences the expiry into the log;
 * followers treat it as handled.
 *
 * @return true if handled (or not leader), false if the log append must be retried.
 */
public boolean onTimerEvent(final long correlationId, final long nowMs)
{
    if (Cluster.Role.LEADER != role)
    {
        return true;
    }

    return logPublisher.appendTimerEvent(correlationId, nowMs);
}
/**
 * Schedule a timer with the timer service on behalf of a service.
 *
 * @param correlationId id used to correlate the expiry with the request.
 * @param deadlineMs    deadline for expiry in milliseconds.
 */
public void onScheduleTimer(final long correlationId, final long deadlineMs)
{
    timerService.scheduleTimer(correlationId, deadlineMs);
}
/**
 * Cancel a previously scheduled timer.
 *
 * @param correlationId id the timer was scheduled with.
 */
public void onCancelTimer(final long correlationId)
{
    timerService.cancelTimer(correlationId);
}
public void onServiceCloseSession(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.closeReason(CloseReason.SERVICE_ACTION);
session.close();
if (Cluster.Role.LEADER == role && appendClosedSession(session, cachedEpochClock.time()))
{
sessionByIdMap.remove(clusterSessionId);
}
}
}
public void onRequestVote(
final long candidateTermId, final long lastBaseLogPosition, final long lastTermPosition, final int candidateId)
{
if (Cluster.Role.FOLLOWER == role &&
candidateTermId == leadershipTermId &&
lastBaseLogPosition == recoveryPlan.lastTermBaseLogPosition)
{
if (lastTermPosition >= recoveryPlan.lastTermPositionAppended)
{
votedForMemberId = candidateId;
final long logPosition = lastBaseLogPosition + lastTermPosition;
recordingLog.appendTerm(leadershipTermId, logPosition, epochClock.time(), votedForMemberId);
sendVote(candidateTermId, candidateId, true);
return;
}
}
sendVote(candidateTermId, candidateId, false);
}
public void onNewLeadershipTerm(
final long lastBaseLogPosition,
final long lastTermPosition,
final long leadershipTermId,
final int leaderMemberId,
final int logSessionId)
{
if (leadershipTermId == this.leadershipTermId)
{
this.logSessionId = logSessionId;
if (recoveryPlan.lastTermPositionAppended < lastTermPosition && null == recordingCatchUp)
{
recordingCatchUp = ctx.recordingCatchUpSupplier().catchUp(
archive,
memberStatusPublisher,
clusterMembers,
votedForMemberId,
memberId,
recoveryPlan,
ctx);
}
}
}
public void onVote(
final long candidateTermId, final int candidateMemberId, final int followerMemberId, final boolean vote)
{
if (Cluster.Role.CANDIDATE == role && candidateTermId == leadershipTermId && candidateMemberId == memberId)
{
if (vote)
{
clusterMembers[followerMemberId].votedForId(candidateMemberId);
}
else
{
// TODO: Have to deal with failed candidacy
throw new IllegalStateException("rejected vote from: " + followerMemberId);
}
}
}
/**
 * Record the position a follower has appended to, used for the quorum
 * position calculation. Messages for a stale term are ignored.
 */
public void onAppendedPosition(final long termPosition, final long leadershipTermId, final int followerMemberId)
{
    if (this.leadershipTermId != leadershipTermId)
    {
        return;
    }

    clusterMembers[followerMemberId].termPosition(termPosition);
}
/**
 * Record the leader's commit position. This message also doubles as the
 * leader heartbeat, so the last-update time is refreshed. Stale terms ignored.
 */
public void onCommitPosition(final long termPosition, final long leadershipTermId, final int leaderMemberId)
{
    if (this.leadershipTermId != leadershipTermId)
    {
        return;
    }

    timeOfLastLogUpdateMs = cachedEpochClock.time();
    followerCommitPosition = termPosition;
}
public void onQueryResponse(
final long correlationId,
final int requestMemberId,
final int responseMemberId,
final DirectBuffer data,
final int offset,
final int length)
{
if (null != recordingCatchUp)
{
recordingCatchUp.onLeaderRecoveryPlan(
correlationId, requestMemberId, responseMemberId, data, offset, length);
}
}
public void onRecoveryPlanQuery(final long correlationId, final int leaderMemberId, final int requestMemberId)
{
if (leaderMemberId == memberId)
{
memberStatusPublisher.queryResponse(
clusterMembers[requestMemberId].publication(),
correlationId,
requestMemberId,
memberId,
recoveryPlanBuffer,
0,
recoveryPlanBuffer.capacity());
}
}
void state(final ConsensusModule.State state)
{
this.state = state;
moduleState.set(state.code());
}
void role(final Cluster.Role role)
{
this.role = role;
clusterRoleCounter.setOrdered(role.code());
}
Cluster.Role role()
{
return role;
}
void logRecordingPositionCounter(final ReadableCounter logRecordingPosition)
{
this.logRecordingPosition = logRecordingPosition;
}
void commitPositionCounter(final Counter commitPosition)
{
this.commitPosition = commitPosition;
}
@SuppressWarnings("unused")
void onReplaySessionMessage(
final long correlationId,
final long clusterSessionId,
final long timestamp,
final DirectBuffer buffer,
final int offset,
final int length,
final Header header)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.get(clusterSessionId).lastActivity(timestamp, correlationId);
}
void onReplayTimerEvent(@SuppressWarnings("unused") final long correlationId, final long timestamp)
{
cachedEpochClock.update(timestamp);
}
void onReplaySessionOpen(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
cachedEpochClock.update(timestamp);
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadSession(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.closeReason(closeReason);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
if (CloseReason.NULL_VAL != closeReason)
{
session.close();
}
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
@SuppressWarnings("unused")
void onReplaySessionClose(
final long correlationId, final long clusterSessionId, final long timestamp, final CloseReason closeReason)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.remove(clusterSessionId).close();
}
@SuppressWarnings("unused")
void onReplayClusterAction(
final long logPosition, final long leadershipTermId, final long timestamp, final ClusterAction action)
{
cachedEpochClock.update(timestamp);
final long termPosition = logPosition - termBaseLogPosition;
switch (action)
{
case SUSPEND:
state(ConsensusModule.State.SUSPENDED);
break;
case RESUME:
state(ConsensusModule.State.ACTIVE);
break;
case SNAPSHOT:
if (!isRecovering)
{
serviceAckCount = 0;
state(ConsensusModule.State.SNAPSHOT);
}
break;
case SHUTDOWN:
if (!isRecovering)
{
serviceAckCount = 0;
state(ConsensusModule.State.SHUTDOWN);
}
break;
case ABORT:
if (!isRecovering)
{
serviceAckCount = 0;
state(ConsensusModule.State.ABORT);
}
break;
}
}
void onReloadState(final long nextSessionId)
{
this.nextSessionId = nextSessionId;
}
void catchupLog(final RecordingCatchUp recordingCatchUp)
{
final long fromPosition = recordingCatchUp.fromPosition();
final long targetPosition = recordingCatchUp.targetPosition();
final long length = targetPosition - fromPosition;
final int lastStepIndex = recoveryPlan.termSteps.size() - 1;
final RecordingLog.ReplayStep lastStep = recoveryPlan.termSteps.get(lastStepIndex);
final RecordingLog.Entry entry = lastStep.entry;
final long originalLeadershipTermId = leadershipTermId;
termBaseLogPosition = entry.termBaseLogPosition;
leadershipTermId = entry.leadershipTermId;
try (Counter counter = CommitPos.allocate(aeron, tempBuffer, leadershipTermId, termBaseLogPosition, length))
{
final int streamId = ctx.replayStreamId();
final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());
final int logSessionId = lastStepIndex + 1;
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
final String channel = channelUri.toString();
try (Subscription subscription = aeron.addSubscription(channel, streamId))
{
serviceAckCount = 0;
logAdapter = null;
serviceControlPublisher.joinLog(leadershipTermId, counter.id(), logSessionId, streamId, true, channel);
awaitServiceAcks();
final int replaySessionId = (int)archive.startReplay(
recordingCatchUp.recordingIdToExtend(), fromPosition, length, channel, streamId);
final Image image = awaitImage(replaySessionId, subscription);
replayTerm(image, targetPosition, counter);
final long termPosition = image.position();
recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
termBaseLogPosition = entry.termBaseLogPosition + termPosition;
}
}
leadershipTermId = originalLeadershipTermId;
}
private int slowTickCycle(final long nowMs)
{
int workCount = 0;
markFile.updateActivityTimestamp(nowMs);
checkServiceHeartbeats(nowMs);
workCount += aeronClientInvoker.invoke();
workCount += serviceControlAdapter.poll();
if (Cluster.Role.LEADER == role)
{
workCount += checkControlToggle(nowMs);
if (ConsensusModule.State.ACTIVE == state)
{
workCount += processPendingSessions(pendingSessions, nowMs);
workCount += checkSessions(sessionByIdMap, nowMs);
workCount += processRejectedSessions(rejectedSessions, nowMs);
workCount += timerService.poll(nowMs);
}
}
if (null != archive)
{
archive.checkForErrorResponse();
}
return workCount;
}
/**
 * Terminate the node if any clustered service has stopped heartbeating
 * within the configured timeout.
 *
 * @param nowMs current cluster time in milliseconds.
 */
private void checkServiceHeartbeats(final long nowMs)
{
    final long deadline = nowMs - serviceHeartbeatTimeoutMs;

    for (int i = 0, length = serviceHeartbeats.length; i < length; i++)
    {
        if (serviceHeartbeats[i].get() < deadline)
        {
            ctx.errorHandler().onError(new TimeoutException("no heartbeat from clustered service"));
            ctx.terminationHook().run();
        }
    }
}
private int checkControlToggle(final long nowMs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND, nowMs))
{
state(ConsensusModule.State.SUSPENDED);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME, nowMs))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT, nowMs))
{
state(ConsensusModule.State.SNAPSHOT);
}
break;
case SHUTDOWN:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SHUTDOWN, nowMs))
{
state(ConsensusModule.State.SHUTDOWN);
}
break;
case ABORT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.ABORT, nowMs))
{
state(ConsensusModule.State.ABORT);
}
break;
default:
return 0;
}
return 1;
}
/**
 * Offer a vote to the candidate's member status channel, retrying with
 * idling until the offer succeeds.
 */
private void sendVote(final long candidateTermId, final int candidateId, final boolean vote)
{
    final Publication publication = clusterMembers[candidateId].publication();

    idleStrategy.reset();
    while (!memberStatusPublisher.placeVote(publication, candidateTermId, candidateId, memberId, vote))
    {
        idle();
    }
}
/**
 * Append a cluster action to the log. The log position carried with the
 * action is the position after the action message itself, hence the message
 * header and block length are added to the current position.
 *
 * @return true if the append succeeded, false if back-pressured.
 */
private boolean appendAction(final ClusterAction action, final long nowMs)
{
    final long position = termBaseLogPosition +
        logPublisher.position() +
        MessageHeaderEncoder.ENCODED_LENGTH +
        ClusterActionRequestEncoder.BLOCK_LENGTH;

    return logPublisher.appendClusterAction(action, leadershipTermId, position, nowMs);
}
/**
 * Progress pending sessions through the connect/authenticate lifecycle. Sessions that
 * become AUTHENTICATED are moved to the active session map and their connect event is
 * appended to the log; REJECTED sessions move to the rejected list; sessions that have
 * been inactive past the session timeout are closed and dropped.
 *
 * @param pendingSessions sessions awaiting connection/authentication.
 * @param nowMs           current time in milliseconds.
 * @return count of sessions activated this invocation, contributing to duty-cycle work.
 */
private int processPendingSessions(final ArrayList<ClusterSession> pendingSessions, final long nowMs)
{
    int workCount = 0;
    // Iterate backwards so fastUnorderedRemove (swap with last element) does not
    // disturb indices that are still to be visited; lastIndex tracks the shrinking tail.
    for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
    {
        final ClusterSession session = pendingSessions.get(i);
        if (session.state() == INIT || session.state() == CONNECTED)
        {
            // Authentication is only progressed once the response channel is up.
            if (session.isResponsePublicationConnected())
            {
                session.state(CONNECTED);
                authenticator.onProcessConnectedSession(sessionProxy.session(session), nowMs);
            }
        }
        // Re-read state: the authenticator callbacks above may have advanced it.
        if (session.state() == CHALLENGED)
        {
            if (session.isResponsePublicationConnected())
            {
                authenticator.onProcessChallengedSession(sessionProxy.session(session), nowMs);
            }
        }
        if (session.state() == AUTHENTICATED)
        {
            ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
            session.timeOfLastActivityMs(nowMs);
            sessionByIdMap.put(session.id(), session);
            appendConnectedSession(session, nowMs);
            workCount += 1;
        }
        else if (session.state() == REJECTED)
        {
            // Rejection events are delivered to the client by processRejectedSessions().
            ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
            rejectedSessions.add(session);
        }
        else if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
        {
            // Timed out before completing authentication; drop without logging an event.
            ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
            session.close();
        }
    }
    return workCount;
}
/**
 * Notify clients of sessions that could not be opened and close those sessions. A
 * session is removed once the event has been sent, or unconditionally after the
 * session timeout expires so a dead client cannot pin the list.
 *
 * @param rejectedSessions sessions rejected by the authenticator or by session limits.
 * @param nowMs            current time in milliseconds.
 * @return count of sessions finalised this invocation.
 */
private int processRejectedSessions(final ArrayList<ClusterSession> rejectedSessions, final long nowMs)
{
    int workCount = 0;

    // Iterate backwards so the swap-with-last removal does not skip elements.
    for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
    {
        final ClusterSession session = rejectedSessions.get(i);
        final boolean authenticationRejected = session.state() == REJECTED;

        final String detail = authenticationRejected ?
            ConsensusModule.Configuration.SESSION_REJECTED_MSG :
            ConsensusModule.Configuration.SESSION_LIMIT_MSG;
        final EventCode eventCode =
            authenticationRejected ? EventCode.AUTHENTICATION_REJECTED : EventCode.ERROR;

        final boolean eventSent = egressPublisher.sendEvent(session, eventCode, detail);
        final boolean timedOut = nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs);
        if (eventSent || timedOut)
        {
            ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
            session.close();
            workCount++;
        }
    }

    return workCount;
}
/**
 * Check active sessions for timeout, and append connect events for sessions that have
 * reached the CONNECTED state. Timed-out OPEN sessions are notified, closed, and logged;
 * already CLOSED sessions are retained until their close event has been appended.
 *
 * @param sessionByIdMap active sessions keyed by session id.
 * @param nowMs          current time in milliseconds.
 * @return count of sessions acted upon this invocation.
 */
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowMs)
{
    int workCount = 0;
    // Use an explicit iterator so entries can be removed while traversing the map.
    for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
    {
        final ClusterSession session = i.next();
        final ClusterSession.State state = session.state();
        if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
        {
            switch (state)
            {
                case OPEN:
                    // Tell the client why it is being dropped before closing. The session
                    // is only removed once the close event has made it into the log.
                    egressPublisher.sendEvent(session, EventCode.ERROR, SESSION_TIMEOUT_MSG);
                    session.closeReason(CloseReason.TIMEOUT);
                    session.close();
                    if (appendClosedSession(session, nowMs))
                    {
                        i.remove();
                    }
                    break;
                case CLOSED:
                    // appendClosedSession() closes the session itself on success, so no
                    // additional session.close() is needed here (it was redundant).
                    if (appendClosedSession(session, nowMs))
                    {
                        i.remove();
                    }
                    break;
                default:
                    // Never fully opened in the log; just discard it.
                    session.close();
                    i.remove();
            }
            workCount += 1;
        }
        else if (state == CONNECTED)
        {
            appendConnectedSession(session, nowMs);
            workCount += 1;
        }
    }
    return workCount;
}
/**
 * Append a session-connected event to the log and, if the append succeeded, mark the
 * session open at the resulting log position.
 *
 * @param session the session which has connected.
 * @param nowMs   current time in milliseconds used as the event timestamp.
 */
private void appendConnectedSession(final ClusterSession session, final long nowMs)
{
    final long position = logPublisher.appendConnectedSession(session, nowMs);
    if (position <= 0)
    {
        return; // Append failed; the caller will retry on a later duty cycle.
    }

    session.open(position);
}
/**
 * Append a session-closed event to the log, closing the session when the append
 * succeeds.
 *
 * @param session the session being closed.
 * @param nowMs   current time in milliseconds used as the event timestamp.
 * @return true if the event was appended (and the session closed), false to retry later.
 */
private boolean appendClosedSession(final ClusterSession session, final long nowMs)
{
    final boolean appended = logPublisher.appendClosedSession(session, nowMs);
    if (appended)
    {
        session.close();
    }

    return appended;
}
/**
 * Establish the leader for a new leadership term, then transition this member to the
 * leader or follower role accordingly. This method blocks (idling) until leadership
 * has been resolved.
 */
private void establishLeader()
{
    leadershipTermId++;
    if (clusterMembers.length == 1)
    {
        // Single-node cluster: this member is trivially the leader.
        votedForMemberId = memberId;
        leaderMember = thisMember;
        recordingLog.appendTerm(leadershipTermId, termBaseLogPosition, epochClock.time(), votedForMemberId);
    }
    else if (ctx.appointedLeaderId() != NULL_MEMBER_ID)
    {
        if (ctx.appointedLeaderId() == memberId)
        {
            // This member is the appointed leader: stand as candidate and block until
            // all other members have responded with their votes.
            role(Cluster.Role.CANDIDATE);
            ClusterMember.becomeCandidate(clusterMembers, memberId);
            votedForMemberId = memberId;
            recordingLog.appendTerm(leadershipTermId, termBaseLogPosition, epochClock.time(), votedForMemberId);
            requestVotes(
                clusterMembers, recoveryPlan.lastTermBaseLogPosition, recoveryPlan.lastTermPositionAppended);
            do
            {
                idle(memberStatusAdapter.poll());
            }
            while (ClusterMember.awaitingVotes(clusterMembers));
            leaderMember = thisMember;
        }
        else
        {
            // Not the appointed leader: wait until a vote request arrives and sets
            // votedForMemberId (presumably via the member status message handler —
            // NOTE(review): confirm which handler assigns it).
            votedForMemberId = NULL_MEMBER_ID;
            while (NULL_MEMBER_ID == votedForMemberId)
            {
                idle(memberStatusAdapter.poll());
            }
        }
    }
    // NOTE(review): with multiple members and no appointed leader, neither branch above
    // runs and votedForMemberId keeps its prior value — confirm this path is intended.
    if (memberId == votedForMemberId)
    {
        becomeLeader();
    }
    else
    {
        becomeFollower();
    }
}
/**
 * Request a vote from every other cluster member for the current leadership term,
 * retrying each send with back off until it is placed on the member's publication.
 *
 * @param clusterMembers   all members of the cluster, including this one.
 * @param lastLogPosition  base log position of the last recorded term.
 * @param lastTermPosition last position appended within that term.
 */
private void requestVotes(
    final ClusterMember[] clusterMembers, final long lastLogPosition, final long lastTermPosition)
{
    idleStrategy.reset();
    for (final ClusterMember member : clusterMembers)
    {
        if (member == thisMember)
        {
            continue; // No need to ask ourselves.
        }

        final Publication publication = member.publication();
        while (!memberStatusPublisher.requestVote(
            publication, leadershipTermId, lastLogPosition, lastTermPosition, memberId))
        {
            idle();
        }
    }
}
/**
 * Transition this member to the leader role: set up the log publication (with
 * per-follower destinations when using multi-destination-cast), start recording the
 * log, and wait for services and followers to be ready before reconnecting sessions.
 * This method blocks (idling) until the cluster is ready.
 */
private void becomeLeader()
{
    updateMemberDetails(votedForMemberId);
    role(Cluster.Role.LEADER);
    final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
    final Publication publication = aeron.addExclusivePublication(ctx.logChannel(), ctx.logStreamId());
    // If the configured log channel has no explicit endpoint then use manual
    // multi-destination-cast, adding each follower's log endpoint as a destination.
    if (!channelUri.containsKey(CommonContext.ENDPOINT_PARAM_NAME) && UDP_MEDIA.equals(channelUri.media()))
    {
        final ChannelUriStringBuilder builder = new ChannelUriStringBuilder().media(UDP_MEDIA);
        for (final ClusterMember member : clusterMembers)
        {
            if (member != thisMember)
            {
                final String destination = builder.endpoint(member.logEndpoint()).build();
                publication.addDestination(destination);
            }
        }
    }
    // The leader publishes the log rather than consuming it, so no log adapter.
    logAdapter = null;
    logPublisher.connect(publication);
    logSessionId = publication.sessionId();
    // Pin the recording subscription to this publication's session id.
    channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
    final String recordingChannel = channelUri.toString();
    archive.startRecording(recordingChannel, ctx.logStreamId(), SourceLocation.LOCAL);
    createPositionCounters();
    final long recordingId = RecordingPos.getRecordingId(aeron.countersReader(), logRecordingPosition.counterId());
    recordingLog.commitLeadershipRecordingId(leadershipTermId, recordingId);
    awaitServicesReady(channelUri, true);
    awaitFollowersReady();
    // Cluster is up: reconnect all sessions that survived the transition.
    final long nowMs = epochClock.time();
    for (final ClusterSession session : sessionByIdMap.values())
    {
        if (session.state() != CLOSED)
        {
            session.connect(aeron);
            session.timeOfLastActivityMs(nowMs);
        }
    }
}
/**
 * Transition this member to the follower role: learn the log session id from the
 * leader, catch up any missed recording, then subscribe to and record the leader's
 * log before signalling readiness. This method blocks (idling) until ready.
 */
private void becomeFollower()
{
    leaderMember = clusterMembers[votedForMemberId];
    updateMemberDetails(votedForMemberId);
    role(Cluster.Role.FOLLOWER);
    followerCommitPosition = 0;
    logSessionId = NULL_SESSION_ID;
    awaitLogSessionIdFromLeader();
    awaitCatchUp();
    // Subscribe to the leader's log on this member's own endpoint, pinned to the
    // leader's publication session id so only that log image is consumed.
    final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
    channelUri.put(CommonContext.ENDPOINT_PARAM_NAME, thisMember.logEndpoint());
    channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
    final String logChannel = channelUri.toString();
    final int streamId = ctx.logStreamId();
    // Record the remote log locally, then consume it via the log adapter.
    archive.startRecording(logChannel, streamId, SourceLocation.REMOTE);
    final Image image = awaitImage(logSessionId, aeron.addSubscription(logChannel, streamId));
    logAdapter = new LogAdapter(image, this);
    createPositionCounters();
    final long recordingId = RecordingPos.getRecordingId(aeron.countersReader(), logRecordingPosition.counterId());
    recordingLog.commitLeadershipRecordingId(leadershipTermId, recordingId);
    awaitServicesReady(channelUri, false);
    notifyLeaderThatFollowerIsReady();
}
/**
 * Block, polling member status messages, until the leader has communicated the log
 * session id (which a status message handler assigns to {@code logSessionId}).
 */
private void awaitLogSessionIdFromLeader()
{
    for (;;)
    {
        if (NULL_SESSION_ID != logSessionId)
        {
            return;
        }
        idle(memberStatusAdapter.poll());
    }
}
/**
 * If a recording catch-up is in progress, block until it completes, then apply the
 * caught-up log and release the catch-up resources. A no-op when no catch-up is set.
 */
private void awaitCatchUp()
{
    if (null != recordingCatchUp)
    {
        // Keep servicing status messages while the catch-up transfers the recording.
        do
        {
            idle(memberStatusAdapter.poll() + recordingCatchUp.doWork());
        }
        while (!recordingCatchUp.isCaughtUp());
        // Close before replaying so the transferred recording is complete and sealed.
        recordingCatchUp.close();
        catchupLog(recordingCatchUp);
        recordingCatchUp = null;
    }
}
/**
 * Tell the leader this follower is ready by reporting its commit position, retrying
 * with back off until the message is placed, then reset the last reported recording
 * position so regular position updates start from scratch.
 */
private void notifyLeaderThatFollowerIsReady()
{
    final Publication publication = leaderMember.publication();

    idleStrategy.reset();
    for (;;)
    {
        if (memberStatusPublisher.appendedPosition(publication, followerCommitPosition, leadershipTermId, memberId))
        {
            break;
        }
        idle();
    }

    lastRecordingPosition = 0;
}
/**
 * As leader, block until every follower has signalled readiness for the new term by
 * reporting a term position. While waiting, periodically re-announce the new
 * leadership term so followers that missed it can join.
 */
private void awaitFollowersReady()
{
    // Clear all term positions; this member is trivially at position 0 already.
    ClusterMember.resetTermPositions(clusterMembers, NULL_POSITION);
    clusterMembers[memberId].termPosition(0);
    final long lastBaseLogPosition = recoveryPlan.lastTermBaseLogPosition;
    final long lastTermPosition = recoveryPlan.lastTermPositionAppended;
    do
    {
        final long nowMs = epochClock.time();
        // Re-send the term announcement at the heartbeat interval so it is not lost.
        if (nowMs > (timeOfLastLogUpdateMs + leaderHeartbeatIntervalMs))
        {
            timeOfLastLogUpdateMs = nowMs;
            for (final ClusterMember member : clusterMembers)
            {
                if (member != thisMember)
                {
                    memberStatusPublisher.newLeadershipTerm(
                        member.publication(),
                        lastBaseLogPosition,
                        lastTermPosition,
                        leadershipTermId,
                        memberId,
                        logSessionId);
                }
            }
        }
        idle(memberStatusAdapter.poll());
    }
    // Position 0 is reached once every member has reported in for the new term.
    while (!ClusterMember.hasReachedPosition(clusterMembers, 0));
}
/**
 * Create the counters used to track log progress for the current term: the recording
 * position counter for the log session, and a freshly allocated commit position
 * counter starting from {@link CommitPos#NULL_VALUE}.
 */
private void createPositionCounters()
{
    final CountersReader counters = aeron.countersReader();

    logRecordingPosition = new ReadableCounter(counters, awaitRecordingCounter(counters, logSessionId));
    commitPosition = CommitPos.allocate(
        aeron, tempBuffer, leadershipTermId, termBaseLogPosition, CommitPos.NULL_VALUE);
}
/**
 * Instruct the clustered services to join the log and block until all of them have
 * acknowledged. On a UDP leader the services consume a spy of the local publication
 * rather than a network subscription.
 *
 * @param channelUri the log channel being joined.
 * @param isLeader   true when this member is the leader for the term.
 */
private void awaitServicesReady(final ChannelUri channelUri, final boolean isLeader)
{
    serviceAckCount = 0;

    final String channel;
    if (isLeader && UDP_MEDIA.equals(channelUri.media()))
    {
        channel = channelUri.prefix(SPY_QUALIFIER).toString();
    }
    else
    {
        channel = channelUri.toString();
    }

    serviceControlPublisher.joinLog(
        leadershipTermId, commitPosition.id(), logSessionId, ctx.logStreamId(), false, channel);
    awaitServiceAcks();
}
/**
 * Mark which cluster member is the leader and refresh the endpoint details exposed
 * to client sessions.
 *
 * @param leaderMemberId member id of the current leader.
 */
private void updateMemberDetails(final int leaderMemberId)
{
    for (final ClusterMember member : clusterMembers)
    {
        member.isLeader(leaderMemberId == member.id());
    }

    updateClusterMemberDetails(clusterMembers);
}
/**
 * Restore consensus module state from a snapshot by replaying its recording from the
 * archive and feeding it through a {@link SnapshotLoader}. Blocks until the snapshot
 * has been fully loaded.
 *
 * @param snapshotStep replay step describing the snapshot entry to load.
 * @param archive      archive client from which to replay the snapshot recording.
 * @throws IllegalStateException if the recording is unknown or the replay image
 *         closes before the snapshot end marker is seen.
 */
private void recoverFromSnapshot(final RecordingLog.ReplayStep snapshotStep, final AeronArchive archive)
{
    final RecordingLog.Entry snapshot = snapshotStep.entry;
    // Restore the clock and log-position state captured when the snapshot was taken.
    cachedEpochClock.update(snapshot.timestamp);
    termBaseLogPosition = snapshot.termBaseLogPosition + snapshot.termPosition;
    leadershipTermId = snapshot.leadershipTermId;
    final long recordingId = snapshot.recordingId;
    final RecordingExtent recordingExtent = new RecordingExtent();
    // listRecording returns the number of descriptors found; 0 means it is unknown.
    if (0 == archive.listRecording(recordingId, recordingExtent))
    {
        throw new IllegalStateException("unknown recordingId: " + recordingId);
    }
    final String channel = ctx.replayChannel();
    final int streamId = ctx.replayStreamId();
    final long length = recordingExtent.stopPosition - recordingExtent.startPosition;
    // startReplay returns a session id (narrowed to int) used to find the replay image.
    final int sessionId = (int)archive.startReplay(recordingId, 0, length, channel, streamId);
    final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);
    try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
    {
        final Image image = awaitImage(sessionId, subscription);
        final SnapshotLoader snapshotLoader = new SnapshotLoader(image, this);
        while (true)
        {
            final int fragments = snapshotLoader.poll();
            if (fragments == 0)
            {
                // Loader signals done on the snapshot end marker; a closed image
                // before that means the replay was truncated.
                if (snapshotLoader.isDone())
                {
                    break;
                }
                if (image.isClosed())
                {
                    throw new IllegalStateException("snapshot ended unexpectedly");
                }
            }
            idle(fragments);
        }
    }
}
/**
 * Block, idling, until the image for a given session id becomes available on a
 * subscription.
 *
 * @param sessionId    session id of the publication whose image is awaited.
 * @param subscription subscription on which the image will arrive.
 * @return the available image.
 */
private Image awaitImage(final int sessionId, final Subscription subscription)
{
    idleStrategy.reset();
    for (;;)
    {
        final Image image = subscription.imageBySessionId(sessionId);
        if (null != image)
        {
            return image;
        }
        idle();
    }
}
/**
 * Recover state by replaying each recorded leadership term in order, having the
 * services join and acknowledge each replayed term. Blocks until all steps are done.
 *
 * @param steps   ordered replay steps, one per recorded term.
 * @param archive archive client from which term recordings are replayed.
 */
private void recoverFromLog(final List<RecordingLog.ReplayStep> steps, final AeronArchive archive)
{
    final int streamId = ctx.replayStreamId();
    final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());
    for (int i = 0, size = steps.size(); i < size; i++)
    {
        final RecordingLog.ReplayStep step = steps.get(i);
        final RecordingLog.Entry entry = step.entry;
        final long startPosition = step.recordingStartPosition;
        final long stopPosition = step.recordingStopPosition;
        final long length = stopPosition - startPosition;
        termBaseLogPosition = entry.termBaseLogPosition;
        leadershipTermId = entry.leadershipTermId;
        // The step index doubles as the replay session id so each term's image is distinct.
        channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(i));
        final String channel = channelUri.toString();
        final long recordingId = entry.recordingId;
        // Per-term commit position counter, scoped to this replay via try-with-resources.
        try (Counter counter = CommitPos.allocate(aeron, tempBuffer, leadershipTermId, termBaseLogPosition, length))
        {
            serviceAckCount = 0;
            logAdapter = null;
            if (length > 0)
            {
                try (Subscription subscription = aeron.addSubscription(channel, streamId))
                {
                    // Services must join (and ACK) before the replay is pumped through.
                    serviceControlPublisher.joinLog(leadershipTermId, counter.id(), i, streamId, true, channel);
                    awaitServiceAcks();
                    final Image image = awaitImage(
                        (int)archive.startReplay(recordingId, startPosition, length, channel, streamId),
                        subscription);
                    serviceAckCount = 0;
                    replayTerm(image, stopPosition, counter);
                    // Second round of ACKs confirms services have consumed the whole term.
                    awaitServiceAcks();
                    final long termPosition = image.position();
                    // Record progress if the replay advanced beyond the logged term position.
                    if (step.entry.termPosition < termPosition)
                    {
                        recordingLog.commitLeadershipTermPosition(leadershipTermId, termPosition);
                    }
                    termBaseLogPosition = entry.termBaseLogPosition + termPosition;
                }
            }
            else
            {
                // Empty term: nothing to replay but services still acknowledge it.
                awaitServiceAcks();
            }
        }
    }
}
/**
 * Allocate the recovery state counter that advertises the recovery plan: either the
 * snapshot being loaded plus the number of terms to replay, or just the term count
 * when no snapshot exists.
 *
 * @param plan the recovery plan derived from the recording log.
 * @return the allocated recovery state counter (owned by the caller).
 */
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
    final int termCount = plan.termSteps.size();
    final RecordingLog.ReplayStep snapshotStep = plan.snapshotStep;

    if (null == snapshotStep)
    {
        // No snapshot: recovery starts from nothing but may still replay terms.
        return RecoveryState.allocate(aeron, tempBuffer, leadershipTermId, NULL_POSITION, 0, termCount);
    }

    final RecordingLog.Entry snapshot = snapshotStep.entry;
    return RecoveryState.allocate(
        aeron, tempBuffer, snapshot.leadershipTermId, snapshot.termPosition, snapshot.timestamp, termCount);
}
/**
 * Block, polling the service control channel, until every clustered service has
 * acknowledged (ACK handlers increment {@code serviceAckCount}).
 */
private void awaitServiceAcks()
{
    final int expectedAckCount = ctx.serviceCount();
    for (;;)
    {
        final int fragmentsRead = serviceControlAdapter.poll();
        if (serviceAckCount >= expectedAckCount)
        {
            return;
        }
        idle(fragmentsRead);
    }
}
/**
 * Validate that a service ACK matches the current log position, leadership term, and
 * module state.
 *
 * @param logPosition      log position reported by the service.
 * @param leadershipTermId leadership term reported by the service.
 * @param serviceId        id of the acking service, for diagnostics.
 * @param action           action the ACK is for.
 * @throws IllegalStateException if the ACK does not match the current log state or
 *         the action is not valid for the current module state.
 */
private void validateServiceAck(
    final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
    final long expectedLogPosition = termBaseLogPosition + currentTermPosition();
    final boolean positionMatches = logPosition == expectedLogPosition;
    final boolean termMatches = leadershipTermId == this.leadershipTermId;

    if (!positionMatches || !termMatches)
    {
        throw new IllegalStateException("invalid log state:" +
            " serviceId=" + serviceId +
            ", logPosition=" + logPosition + " current is " + expectedLogPosition +
            ", leadershipTermId=" + leadershipTermId + " current is " + this.leadershipTermId);
    }

    if (!state.isValid(action))
    {
        throw new IllegalStateException("invalid service ACK for state " + state + ", action " + action);
    }
}
/**
 * Position within the current term: taken from the log adapter when consuming the
 * log (follower/replay), otherwise from the log publisher (leader).
 *
 * @return the current term position in bytes.
 */
private long currentTermPosition()
{
    if (null == logAdapter)
    {
        return logPublisher.position();
    }

    return logAdapter.position();
}
/**
 * Publish the comma-separated list of client-facing member endpoints to the session
 * proxy, with the leader's endpoint listed first.
 *
 * @param members all members of the cluster.
 */
private void updateClusterMemberDetails(final ClusterMember[] members)
{
    // Locate the leader; defaults to index 0 if none is flagged.
    int leaderIndex = 0;
    for (int i = 0; i < members.length; i++)
    {
        if (members[i].isLeader())
        {
            leaderIndex = i;
            break;
        }
    }

    // Leader endpoint first, then the rest in member order.
    final StringBuilder endpointsDetail = new StringBuilder(100);
    endpointsDetail.append(members[leaderIndex].clientFacingEndpoint());
    for (int i = 0; i < members.length; i++)
    {
        if (leaderIndex != i)
        {
            endpointsDetail.append(',').append(members[i].clientFacingEndpoint());
        }
    }

    sessionProxy.memberEndpointsDetail(endpointsDetail.toString());
}
/**
 * Progress log position bookkeeping for this member's role. Leaders compute the
 * quorum position and publish commit-position updates (doubling as heartbeats);
 * followers report their appended recording position and check for leader liveness.
 *
 * @param nowMs current time in milliseconds.
 * @return 1 if work was done, 0 otherwise.
 * @throws AgentTerminationException if a follower misses the leader heartbeat timeout.
 */
private int updateMemberPosition(final long nowMs)
{
    int workCount = 0;
    if (Cluster.Role.LEADER == role)
    {
        thisMember.termPosition(logRecordingPosition.get());
        // Quorum position is the highest position a majority of members have reached.
        final long position = ClusterMember.quorumPosition(clusterMembers, rankedPositions);
        // Publish either on progress or at the heartbeat interval so followers see liveness.
        if (position > commitPosition.getWeak() || nowMs >= (timeOfLastLogUpdateMs + leaderHeartbeatIntervalMs))
        {
            for (final ClusterMember member : clusterMembers)
            {
                if (member != thisMember)
                {
                    final Publication publication = member.publication();
                    memberStatusPublisher.commitPosition(publication, position, leadershipTermId, memberId);
                }
            }
            commitPosition.setOrdered(position);
            timeOfLastLogUpdateMs = nowMs;
            workCount = 1;
        }
    }
    else if (Cluster.Role.FOLLOWER == role)
    {
        final long recordingPosition = logRecordingPosition.get();
        if (recordingPosition != lastRecordingPosition)
        {
            final Publication publication = leaderMember.publication();
            // Only latch the position once the report has actually been placed;
            // otherwise it is re-sent on the next duty cycle.
            if (memberStatusPublisher.appendedPosition(
                publication, recordingPosition, leadershipTermId, memberId))
            {
                lastRecordingPosition = recordingPosition;
            }
            workCount = 1;
        }
        commitPosition.proposeMaxOrdered(logAdapter.position());
        // timeOfLastLogUpdateMs is refreshed by incoming leader messages; silence
        // beyond the timeout means the leader is presumed dead.
        if (nowMs >= (timeOfLastLogUpdateMs + leaderHeartbeatTimeoutMs))
        {
            throw new AgentTerminationException("no heartbeat from cluster leader");
        }
    }
    return workCount;
}
/**
 * Idle the duty cycle with no work done: check for interruption, service the Aeron
 * client conductor, then apply the idle strategy.
 */
private void idle()
{
    checkInterruptedStatus();
    aeronClientInvoker.invoke();
    idleStrategy.idle();
}
/**
 * Idle the duty cycle based on work done: check for interruption, service the Aeron
 * client conductor, then apply the idle strategy (no back off when workCount > 0).
 *
 * @param workCount amount of work done in the last cycle.
 */
private void idle(final int workCount)
{
    checkInterruptedStatus();
    aeronClientInvoker.invoke();
    idleStrategy.idle(workCount);
}
/**
 * Abort a blocking wait if this thread has been interrupted, e.g. during shutdown.
 *
 * @throws RuntimeException when the thread's interrupt flag is set.
 */
private static void checkInterruptedStatus()
{
    if (!Thread.currentThread().isInterrupted())
    {
        return;
    }

    throw new RuntimeException("unexpected interrupt");
}
/**
 * Take a snapshot of consensus module state onto a recorded publication, wait for the
 * recording to capture it completely, then register the snapshot in the recording log.
 *
 * @param timestampMs  timestamp to record against the snapshot entry.
 * @param termPosition position within the current term at which the snapshot is taken.
 */
private void takeSnapshot(final long timestampMs, final long termPosition)
{
    final String channel = ctx.snapshotChannel();
    final int streamId = ctx.snapshotStreamId();
    try (Publication publication = archive.addRecordedExclusivePublication(channel, streamId))
    {
        try
        {
            final CountersReader counters = aeron.countersReader();
            final int counterId = awaitRecordingCounter(counters, publication.sessionId());
            final long recordingId = RecordingPos.getRecordingId(counters, counterId);
            snapshotState(publication, termBaseLogPosition + termPosition, leadershipTermId);
            // Block until the archive has recorded everything that was published.
            awaitRecordingComplete(recordingId, publication.position(), counters, counterId);
            recordingLog.appendSnapshot(
                recordingId, leadershipTermId, termBaseLogPosition, termPosition, timestampMs);
        }
        finally
        {
            // Always stop the recording, even if snapshotting failed part way.
            archive.stopRecording(publication);
        }
        ctx.snapshotCounter().incrementOrdered();
    }
}
/**
 * Block until the archive recording position reaches the given complete position,
 * failing fast if the recording stops before then.
 *
 * @param recordingId      id of the recording being awaited.
 * @param completePosition position the recording must reach.
 * @param counters         counters reader holding the recording position counter.
 * @param counterId        id of the recording position counter.
 * @throws IllegalStateException if the recording becomes inactive before completion.
 */
private void awaitRecordingComplete(
    final long recordingId, final long completePosition, final CountersReader counters, final int counterId)
{
    idleStrategy.reset();
    for (;;)
    {
        idle();

        if (!RecordingPos.isActive(counters, counterId, recordingId))
        {
            throw new IllegalStateException("recording has stopped unexpectedly: " + recordingId);
        }

        if (counters.getCounterValue(counterId) >= completePosition)
        {
            return;
        }
    }
}
/**
 * Block until the recording position counter for a publication session appears.
 *
 * @param counters  counters reader to search.
 * @param sessionId session id of the recorded publication.
 * @return id of the recording position counter.
 */
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
    idleStrategy.reset();
    for (;;)
    {
        final int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
        if (CountersReader.NULL_COUNTER_ID != counterId)
        {
            return counterId;
        }
        idle();
    }
}
/**
 * Write consensus module state to a snapshot publication: begin marker, all open
 * sessions, pending timers, the session id sequencer, then the end marker. Marker
 * ordering is what allows the loader to detect a complete snapshot.
 *
 * @param publication      publication the snapshot is written to.
 * @param logPosition      absolute log position the snapshot represents.
 * @param leadershipTermId term in which the snapshot is taken.
 */
private void snapshotState(final Publication publication, final long logPosition, final long leadershipTermId)
{
    final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
        publication, idleStrategy, aeronClientInvoker);
    snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
    // Only OPEN sessions are part of durable state; pending/closed ones are not.
    for (final ClusterSession session : sessionByIdMap.values())
    {
        if (session.state() == OPEN)
        {
            snapshotTaker.snapshotSession(session);
        }
    }
    // Keep the Aeron client serviced between the potentially long snapshot phases.
    aeronClientInvoker.invoke();
    timerService.snapshot(snapshotTaker);
    snapshotTaker.sequencerState(nextSessionId);
    snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
}
/**
 * Replay a single term's log image up to its final position, feeding fragments to
 * this agent, publishing the replay position, and servicing timers and service
 * control messages along the way. Blocks until the image ends.
 *
 * @param image             replay image of the recorded term.
 * @param finalTermPosition position at which the replay should stop polling.
 * @param replayPosition    counter updated with the image position as replay progresses.
 * @throws IllegalStateException if the image closes without reaching end of stream.
 */
private void replayTerm(final Image image, final long finalTermPosition, final Counter replayPosition)
{
    logAdapter = new LogAdapter(image, this);
    while (true)
    {
        int workCount = logAdapter.poll(finalTermPosition);
        if (workCount == 0)
        {
            if (image.isClosed())
            {
                // A close without end-of-stream means the replay was cut short.
                if (!image.isEndOfStream())
                {
                    throw new IllegalStateException("unexpected close of image when replaying");
                }
                break;
            }
        }
        // Publish progress and service auxiliary channels even on empty polls.
        replayPosition.setOrdered(image.position());
        workCount += serviceControlAdapter.poll();
        workCount += timerService.poll(cachedEpochClock.time());
        idle(workCount);
    }
}
}
| [Java] Simplify for extraction.
| aeron-cluster/src/main/java/io/aeron/cluster/SequencerAgent.java | [Java] Simplify for extraction. |
|
Java | apache-2.0 | 2327a639e8c00831c1c15be86dc9b413c45bfffe | 0 | flbrino/pentaho-kettle,ivanpogodin/pentaho-kettle,pavel-sakun/pentaho-kettle,stevewillcock/pentaho-kettle,matthewtckr/pentaho-kettle,GauravAshara/pentaho-kettle,ddiroma/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,graimundo/pentaho-kettle,bmorrise/pentaho-kettle,mattyb149/pentaho-kettle,mattyb149/pentaho-kettle,brosander/pentaho-kettle,alina-ipatina/pentaho-kettle,emartin-pentaho/pentaho-kettle,HiromuHota/pentaho-kettle,tmcsantos/pentaho-kettle,dkincade/pentaho-kettle,gretchiemoran/pentaho-kettle,ivanpogodin/pentaho-kettle,matrix-stone/pentaho-kettle,Advent51/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,pymjer/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,GauravAshara/pentaho-kettle,jbrant/pentaho-kettle,stepanovdg/pentaho-kettle,sajeetharan/pentaho-kettle,akhayrutdinov/pentaho-kettle,tmcsantos/pentaho-kettle,pavel-sakun/pentaho-kettle,brosander/pentaho-kettle,ViswesvarSekar/pentaho-kettle,brosander/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,wseyler/pentaho-kettle,GauravAshara/pentaho-kettle,denisprotopopov/pentaho-kettle,nantunes/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,e-cuellar/pentaho-kettle,yshakhau/pentaho-kettle,airy-ict/pentaho-kettle,birdtsai/pentaho-kettle,ma459006574/pentaho-kettle,nicoben/pentaho-kettle,marcoslarsen/pentaho-kettle,pedrofvteixeira/pentaho-kettle,pavel-sakun/pentaho-kettle,mdamour1976/pentaho-kettle,ccaspanello/pentaho-kettle,rfellows/pentaho-kettle,nantunes/pentaho-kettle,tmcsantos/pentaho-kettle,brosander/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,pentaho/pentaho-kettle,lgrill-pentaho/pentaho-kettle,matrix-stone/pentaho-kettle,mkambol/pentaho-kettle,eayoungs/pentaho-kettle,nicoben/pentaho-kettle,roboguy/pentaho-kettle,stevewillcock/pentaho-kettle,hudak/pentaho-kettle,mattyb149/pentaho-kettle,ViswesvarSekar/pentaho-kettle,codek/pentaho-kettle,EcoleKeine/pentaho-kettle,IvanNikolaych
uk/pentaho-kettle,pedrofvteixeira/pentaho-kettle,mbatchelor/pentaho-kettle,stevewillcock/pentaho-kettle,skofra0/pentaho-kettle,nicoben/pentaho-kettle,denisprotopopov/pentaho-kettle,YuryBY/pentaho-kettle,emartin-pentaho/pentaho-kettle,cjsonger/pentaho-kettle,pavel-sakun/pentaho-kettle,codek/pentaho-kettle,wseyler/pentaho-kettle,zlcnju/kettle,pminutillo/pentaho-kettle,ddiroma/pentaho-kettle,stepanovdg/pentaho-kettle,alina-ipatina/pentaho-kettle,airy-ict/pentaho-kettle,pentaho/pentaho-kettle,ccaspanello/pentaho-kettle,zlcnju/kettle,cjsonger/pentaho-kettle,YuryBY/pentaho-kettle,roboguy/pentaho-kettle,nicoben/pentaho-kettle,aminmkhan/pentaho-kettle,matthewtckr/pentaho-kettle,mdamour1976/pentaho-kettle,skofra0/pentaho-kettle,SergeyTravin/pentaho-kettle,e-cuellar/pentaho-kettle,ccaspanello/pentaho-kettle,flbrino/pentaho-kettle,ViswesvarSekar/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,jbrant/pentaho-kettle,rmansoor/pentaho-kettle,e-cuellar/pentaho-kettle,skofra0/pentaho-kettle,ma459006574/pentaho-kettle,Advent51/pentaho-kettle,rmansoor/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,pminutillo/pentaho-kettle,sajeetharan/pentaho-kettle,kurtwalker/pentaho-kettle,alina-ipatina/pentaho-kettle,gretchiemoran/pentaho-kettle,sajeetharan/pentaho-kettle,DFieldFL/pentaho-kettle,rmansoor/pentaho-kettle,dkincade/pentaho-kettle,tkafalas/pentaho-kettle,rmansoor/pentaho-kettle,jbrant/pentaho-kettle,mattyb149/pentaho-kettle,nanata1115/pentaho-kettle,nantunes/pentaho-kettle,akhayrutdinov/pentaho-kettle,pminutillo/pentaho-kettle,ccaspanello/pentaho-kettle,mbatchelor/pentaho-kettle,hudak/pentaho-kettle,birdtsai/pentaho-kettle,matrix-stone/pentaho-kettle,pminutillo/pentaho-kettle,GauravAshara/pentaho-kettle,kurtwalker/pentaho-kettle,stepanovdg/pentaho-kettle,MikhailHubanau/pentaho-kettle,wseyler/pentaho-kettle,denisprotopopov/pentaho-kettle,HiromuHota/pentaho-kettle,drndos/pentaho-kettle,kurtwalker/pentaho-kettle,lgrill-pentaho/pentaho-kettle,gretchiemoran/
pentaho-kettle,jbrant/pentaho-kettle,DFieldFL/pentaho-kettle,bmorrise/pentaho-kettle,bmorrise/pentaho-kettle,ma459006574/pentaho-kettle,mkambol/pentaho-kettle,HiromuHota/pentaho-kettle,eayoungs/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,stepanovdg/pentaho-kettle,nanata1115/pentaho-kettle,andrei-viaryshka/pentaho-kettle,tkafalas/pentaho-kettle,pentaho/pentaho-kettle,DFieldFL/pentaho-kettle,pymjer/pentaho-kettle,flbrino/pentaho-kettle,graimundo/pentaho-kettle,YuryBY/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,andrei-viaryshka/pentaho-kettle,akhayrutdinov/pentaho-kettle,yshakhau/pentaho-kettle,mbatchelor/pentaho-kettle,codek/pentaho-kettle,pedrofvteixeira/pentaho-kettle,gretchiemoran/pentaho-kettle,aminmkhan/pentaho-kettle,ivanpogodin/pentaho-kettle,kurtwalker/pentaho-kettle,airy-ict/pentaho-kettle,ivanpogodin/pentaho-kettle,CapeSepias/pentaho-kettle,eayoungs/pentaho-kettle,aminmkhan/pentaho-kettle,rfellows/pentaho-kettle,cjsonger/pentaho-kettle,cjsonger/pentaho-kettle,hudak/pentaho-kettle,mdamour1976/pentaho-kettle,tmcsantos/pentaho-kettle,nanata1115/pentaho-kettle,CapeSepias/pentaho-kettle,wseyler/pentaho-kettle,marcoslarsen/pentaho-kettle,matthewtckr/pentaho-kettle,birdtsai/pentaho-kettle,drndos/pentaho-kettle,EcoleKeine/pentaho-kettle,e-cuellar/pentaho-kettle,alina-ipatina/pentaho-kettle,andrei-viaryshka/pentaho-kettle,graimundo/pentaho-kettle,pentaho/pentaho-kettle,dkincade/pentaho-kettle,DFieldFL/pentaho-kettle,codek/pentaho-kettle,roboguy/pentaho-kettle,lgrill-pentaho/pentaho-kettle,ddiroma/pentaho-kettle,pedrofvteixeira/pentaho-kettle,sajeetharan/pentaho-kettle,EcoleKeine/pentaho-kettle,yshakhau/pentaho-kettle,yshakhau/pentaho-kettle,mbatchelor/pentaho-kettle,skofra0/pentaho-kettle,EcoleKeine/pentaho-kettle,MikhailHubanau/pentaho-kettle,zlcnju/kettle,flbrino/pentaho-kettle,SergeyTravin/pentaho-kettle,birdtsai/pentaho-kettle,tkafalas/pentaho-kettle,denisprotopopov/pentaho-kettle,akhayrutdinov/pentaho-kettle,rfellows/p
entaho-kettle,TatsianaKasiankova/pentaho-kettle,emartin-pentaho/pentaho-kettle,drndos/pentaho-kettle,hudak/pentaho-kettle,mkambol/pentaho-kettle,YuryBY/pentaho-kettle,eayoungs/pentaho-kettle,matrix-stone/pentaho-kettle,mkambol/pentaho-kettle,SergeyTravin/pentaho-kettle,aminmkhan/pentaho-kettle,airy-ict/pentaho-kettle,ma459006574/pentaho-kettle,emartin-pentaho/pentaho-kettle,Advent51/pentaho-kettle,roboguy/pentaho-kettle,Advent51/pentaho-kettle,marcoslarsen/pentaho-kettle,zlcnju/kettle,pymjer/pentaho-kettle,drndos/pentaho-kettle,HiromuHota/pentaho-kettle,nanata1115/pentaho-kettle,MikhailHubanau/pentaho-kettle,CapeSepias/pentaho-kettle,bmorrise/pentaho-kettle,matthewtckr/pentaho-kettle,CapeSepias/pentaho-kettle,pymjer/pentaho-kettle,ViswesvarSekar/pentaho-kettle,lgrill-pentaho/pentaho-kettle,ddiroma/pentaho-kettle,nantunes/pentaho-kettle,marcoslarsen/pentaho-kettle,SergeyTravin/pentaho-kettle,stevewillcock/pentaho-kettle,tkafalas/pentaho-kettle,graimundo/pentaho-kettle,dkincade/pentaho-kettle,mdamour1976/pentaho-kettle | /* Copyright (c) 2007 Pentaho Corporation. All rights reserved.
* This software was developed by Pentaho Corporation and is provided under the terms
* of the GNU Lesser General Public License, Version 2.1. You may not use
* this file except in compliance with the license. If you need a copy of the license,
* please go to http://www.gnu.org/licenses/lgpl-2.1.txt. The Original Code is Pentaho
* Data Integration. The Initial Developer is Pentaho Corporation.
*
* Software distributed under the GNU Lesser Public License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to
* the license for the specific language governing your rights and limitations.*/
/*
* Created on 18-mei-2003
*
*/
package org.pentaho.di.ui.trans.steps.zipfile;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.Group;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.zipfile.ZipFileMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
public class ZipFileDialog extends BaseStepDialog implements StepDialogInterface
{
// Class handle used to resolve i18n message bundles for this dialog.
private static Class<?> PKG = ZipFileMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

// Source filename field selector (combo over incoming stream fields) and its layout data.
private Label wlSourceFileNameField;
private CCombo wSourceFileNameField;
private FormData fdlSourceFileNameField, fdSourceFileNameField;

// Target (zip) filename field selector and its layout data.
private Label wlTargetFileNameField;
private CCombo wTargetFileNameField;
private FormData fdlTargetFileNameField, fdTargetFileNameField;

// Checkbox: add the target filename to the result filenames list.
private Button wAddResult;
private FormData fdAddResult,fdlAddResult;
private Label wlAddResult;

// Checkbox: overwrite the target zip entry if it already exists.
private Button wOverwriteZipEntry;
private FormData fdOverwriteTarget,fdlOverwriteTarget;
private Label wlOverwriteTarget;

// Checkbox: create the target's parent folder when missing.
private Button wCreateParentFolder;
private FormData fdCreateParentFolder,fdlCreateParentFolder;
private Label wlCreateParentFolder;

// Checkbox: keep the source folder structure inside the zip.
private Button wKeepFolders;
private FormData fdKeepFolders,fdlKeepFolders;
private Label wlKeepFolders;

// Group box containing the settings checkboxes above.
private Group wSettingsGroup;
private FormData fdSettingsGroup;

// The step metadata this dialog edits.
private ZipFileMeta input;

// Base folder field selector, used when stripping paths relative to a base folder.
private Label wlBaseFolderField;
private CCombo wBaseFolderField;
private FormData fdlBaseFolderField, fdBaseFolderField;

// Post-zip operation selector (e.g. move/delete the source file).
private Label wlOperation;
private CCombo wOperation;
private FormData fdlOperation;
private FormData fdOperation;

// Destination folder field selector for the move operation.
private Label wlMoveToFolderField;
private CCombo wMoveToFolderField;
private FormData fdlMoveToFolderField, fdMoveToFolderField;

// Lazily-populated guard so the previous-step field names are fetched only once.
private boolean gotPreviousFields=false;
/**
 * Construct the dialog for editing a Zip File step.
 *
 * @param parent    parent shell for this dialog.
 * @param in        the step metadata to edit; must be a {@link ZipFileMeta}.
 * @param transMeta metadata of the transformation containing the step.
 * @param sname     the step name.
 */
public ZipFileDialog(Shell parent, Object in, TransMeta transMeta, String sname)
{
    super(parent, (BaseStepMeta)in, transMeta, sname);
    input=(ZipFileMeta)in;
}
/**
 * Builds and lays out the dialog, fills the widgets from the step metadata,
 * then runs the SWT event loop until the user presses OK or Cancel.
 *
 * @return the step name when the user pressed OK, or {@code null} when the
 *         dialog was cancelled
 */
public String open()
{
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
props.setLook(shell);
setShellImage(shell, input);
// Any edit in a text widget marks the step as changed.
ModifyListener lsMod = new ModifyListener()
{
public void modifyText(ModifyEvent e)
{
input.setChanged();
}
};
// Any checkbox toggle marks the step as changed.
SelectionAdapter lsSel = new SelectionAdapter() {
public void widgetSelected(SelectionEvent arg0) {
input.setChanged();
}
};
// Remember the incoming changed-flag so Cancel can restore it.
changed = input.hasChanged();
FormLayout formLayout = new FormLayout ();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG, "ZipFileDialog.Shell.Title")); //$NON-NLS-1$
int middle = props.getMiddlePct();
int margin=Const.MARGIN;
// Stepname line
wlStepname=new Label(shell, SWT.RIGHT);
wlStepname.setText(BaseMessages.getString(PKG, "ZipFileDialog.Stepname.Label")); //$NON-NLS-1$
props.setLook(wlStepname);
fdlStepname=new FormData();
fdlStepname.left = new FormAttachment(0, 0);
fdlStepname.right= new FormAttachment(middle, -margin);
fdlStepname.top = new FormAttachment(0, margin);
wlStepname.setLayoutData(fdlStepname);
wStepname=new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wStepname.setText(stepname);
props.setLook(wStepname);
wStepname.addModifyListener(lsMod);
fdStepname=new FormData();
fdStepname.left = new FormAttachment(middle, 0);
fdStepname.top = new FormAttachment(0, margin);
fdStepname.right= new FormAttachment(100, 0);
wStepname.setLayoutData(fdStepname);
/////////////////////////////////
// START OF Settings GROUP //
/////////////////////////////////
wSettingsGroup = new Group(shell, SWT.SHADOW_NONE);
props.setLook(wSettingsGroup);
wSettingsGroup.setText(BaseMessages.getString(PKG, "ZipFileDialog.wSettingsGroup.Label"));
FormLayout settingGroupLayout = new FormLayout();
settingGroupLayout.marginWidth = 10;
settingGroupLayout.marginHeight = 10;
wSettingsGroup.setLayout(settingGroupLayout);
// Create target parent folder?
wlCreateParentFolder=new Label(wSettingsGroup, SWT.RIGHT);
wlCreateParentFolder.setText(BaseMessages.getString(PKG, "ZipFileDialog.CreateParentFolder.Label"));
props.setLook(wlCreateParentFolder);
fdlCreateParentFolder=new FormData();
fdlCreateParentFolder.left = new FormAttachment(0, 0);
fdlCreateParentFolder.top = new FormAttachment(wStepname, margin);
fdlCreateParentFolder.right= new FormAttachment(middle, -margin);
wlCreateParentFolder.setLayoutData(fdlCreateParentFolder);
wCreateParentFolder=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wCreateParentFolder);
wCreateParentFolder.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.CreateParentFolder.Tooltip"));
fdCreateParentFolder=new FormData();
fdCreateParentFolder.left = new FormAttachment(middle, 0);
fdCreateParentFolder.top = new FormAttachment(wStepname, margin);
wCreateParentFolder.setLayoutData(fdCreateParentFolder);
wCreateParentFolder.addSelectionListener(lsSel);
// Overwrite target file?
wlOverwriteTarget=new Label(wSettingsGroup, SWT.RIGHT);
wlOverwriteTarget.setText(BaseMessages.getString(PKG, "ZipFileDialog.OverwriteTarget.Label"));
props.setLook(wlOverwriteTarget);
fdlOverwriteTarget=new FormData();
fdlOverwriteTarget.left = new FormAttachment(0, 0);
fdlOverwriteTarget.top = new FormAttachment(wCreateParentFolder, margin);
fdlOverwriteTarget.right= new FormAttachment(middle, -margin);
wlOverwriteTarget.setLayoutData(fdlOverwriteTarget);
wOverwriteZipEntry=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wOverwriteZipEntry);
wOverwriteZipEntry.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.OverwriteTarget.Tooltip"));
fdOverwriteTarget=new FormData();
fdOverwriteTarget.left = new FormAttachment(middle, 0);
fdOverwriteTarget.top = new FormAttachment(wCreateParentFolder, margin);
wOverwriteZipEntry.setLayoutData(fdOverwriteTarget);
wOverwriteZipEntry.addSelectionListener(lsSel);
// Add Target filename to result filenames?
wlAddResult=new Label(wSettingsGroup, SWT.RIGHT);
wlAddResult.setText(BaseMessages.getString(PKG, "ZipFileDialog.AddResult.Label"));
props.setLook(wlAddResult);
fdlAddResult=new FormData();
fdlAddResult.left = new FormAttachment(0, 0);
fdlAddResult.top = new FormAttachment(wOverwriteZipEntry, margin);
fdlAddResult.right= new FormAttachment(middle, -margin);
wlAddResult.setLayoutData(fdlAddResult);
wAddResult=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wAddResult);
wAddResult.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.AddResult.Tooltip"));
fdAddResult=new FormData();
fdAddResult.left = new FormAttachment(middle, 0);
fdAddResult.top = new FormAttachment(wOverwriteZipEntry, margin);
wAddResult.setLayoutData(fdAddResult);
wAddResult.addSelectionListener(lsSel);
fdSettingsGroup = new FormData();
fdSettingsGroup.left = new FormAttachment(0, margin);
fdSettingsGroup.top = new FormAttachment(wStepname, margin);
fdSettingsGroup.right = new FormAttachment(100, -margin);
wSettingsGroup.setLayoutData(fdSettingsGroup);
/////////////////////////////////
// END OF Settings Fields GROUP //
/////////////////////////////////
// SourceFileNameField field
wlSourceFileNameField=new Label(shell, SWT.RIGHT);
wlSourceFileNameField.setText(BaseMessages.getString(PKG, "ZipFileDialog.SourceFileNameField.Label")); //$NON-NLS-1$
props.setLook(wlSourceFileNameField);
fdlSourceFileNameField=new FormData();
fdlSourceFileNameField.left = new FormAttachment(0, 0);
fdlSourceFileNameField.right= new FormAttachment(middle, -margin);
fdlSourceFileNameField.top = new FormAttachment(wSettingsGroup, 2*margin);
wlSourceFileNameField.setLayoutData(fdlSourceFileNameField);
wSourceFileNameField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
props.setLook(wSourceFileNameField);
wSourceFileNameField.setEditable(true);
wSourceFileNameField.addModifyListener(lsMod);
fdSourceFileNameField=new FormData();
fdSourceFileNameField.left = new FormAttachment(middle, 0);
fdSourceFileNameField.top = new FormAttachment(wSettingsGroup, 2*margin);
fdSourceFileNameField.right= new FormAttachment(100, -margin);
wSourceFileNameField.setLayoutData(fdSourceFileNameField);
// Lazily load the previous step's field names when the combo gains focus.
wSourceFileNameField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// TargetFileNameField field
wlTargetFileNameField=new Label(shell, SWT.RIGHT);
wlTargetFileNameField.setText(BaseMessages.getString(PKG, "ZipFileDialog.TargetFileNameField.Label")); //$NON-NLS-1$
props.setLook(wlTargetFileNameField);
fdlTargetFileNameField=new FormData();
fdlTargetFileNameField.left = new FormAttachment(0, 0);
fdlTargetFileNameField.right= new FormAttachment(middle, -margin);
fdlTargetFileNameField.top = new FormAttachment(wSourceFileNameField, margin);
wlTargetFileNameField.setLayoutData(fdlTargetFileNameField);
wTargetFileNameField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wTargetFileNameField.setEditable(true);
props.setLook(wTargetFileNameField);
wTargetFileNameField.addModifyListener(lsMod);
fdTargetFileNameField=new FormData();
fdTargetFileNameField.left = new FormAttachment(middle, 0);
fdTargetFileNameField.top = new FormAttachment(wSourceFileNameField, margin);
fdTargetFileNameField.right= new FormAttachment(100, -margin);
wTargetFileNameField.setLayoutData(fdTargetFileNameField);
wTargetFileNameField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// Keep source folder structure?
wlKeepFolders=new Label(shell, SWT.RIGHT);
wlKeepFolders.setText(BaseMessages.getString(PKG, "ZipFileDialog.KeepFolders.Label"));
props.setLook(wlKeepFolders);
fdlKeepFolders=new FormData();
fdlKeepFolders.left = new FormAttachment(0, 0);
fdlKeepFolders.top = new FormAttachment(wTargetFileNameField, margin);
fdlKeepFolders.right= new FormAttachment(middle, -margin);
wlKeepFolders.setLayoutData(fdlKeepFolders);
wKeepFolders=new Button(shell, SWT.CHECK );
props.setLook(wKeepFolders);
wKeepFolders.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.KeepFolders.Tooltip"));
fdKeepFolders=new FormData();
fdKeepFolders.left = new FormAttachment(middle, 0);
fdKeepFolders.top = new FormAttachment(wTargetFileNameField, margin);
wKeepFolders.setLayoutData(fdKeepFolders);
wKeepFolders.addSelectionListener(lsSel);
// Toggling "keep folders" enables/disables the base-folder combo below.
wKeepFolders.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent arg0) {
keepFolder();
}
});
// BaseFolderField field
wlBaseFolderField=new Label(shell, SWT.RIGHT);
wlBaseFolderField.setText(BaseMessages.getString(PKG, "ZipFileDialog.BaseFolderField.Label")); //$NON-NLS-1$
props.setLook(wlBaseFolderField);
fdlBaseFolderField=new FormData();
fdlBaseFolderField.left = new FormAttachment(0, 0);
fdlBaseFolderField.right= new FormAttachment(middle, -margin);
fdlBaseFolderField.top = new FormAttachment(wKeepFolders, margin);
wlBaseFolderField.setLayoutData(fdlBaseFolderField);
wBaseFolderField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wBaseFolderField.setEditable(true);
props.setLook(wBaseFolderField);
wBaseFolderField.addModifyListener(lsMod);
fdBaseFolderField=new FormData();
fdBaseFolderField.left = new FormAttachment(middle, 0);
fdBaseFolderField.top = new FormAttachment(wKeepFolders, margin);
fdBaseFolderField.right= new FormAttachment(100, -margin);
wBaseFolderField.setLayoutData(fdBaseFolderField);
wBaseFolderField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// Operation
wlOperation=new Label(shell, SWT.RIGHT);
wlOperation.setText(BaseMessages.getString(PKG, "ZipFileDialog.Operation.Label")); //$NON-NLS-1$
props.setLook(wlOperation);
fdlOperation=new FormData();
fdlOperation.left = new FormAttachment(0, 0);
fdlOperation.right= new FormAttachment(middle, -margin);
fdlOperation.top = new FormAttachment(wBaseFolderField, margin);
wlOperation.setLayoutData(fdlOperation);
wOperation=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
props.setLook(wOperation);
wOperation.addModifyListener(lsMod);
fdOperation=new FormData();
fdOperation.left = new FormAttachment(middle, 0);
fdOperation.top = new FormAttachment(wBaseFolderField, margin);
fdOperation.right= new FormAttachment(100, -margin);
wOperation.setLayoutData(fdOperation);
wOperation.setItems(ZipFileMeta.operationTypeDesc);
// Selecting an operation enables/disables the "move to folder" combo.
wOperation.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
updateOperation();
}
});
// MoveToFolderField field
wlMoveToFolderField=new Label(shell, SWT.RIGHT);
wlMoveToFolderField.setText(BaseMessages.getString(PKG, "ZipFileDialog.MoveToFolderField.Label")); //$NON-NLS-1$
props.setLook(wlMoveToFolderField);
fdlMoveToFolderField=new FormData();
fdlMoveToFolderField.left = new FormAttachment(0, 0);
fdlMoveToFolderField.right= new FormAttachment(middle, -margin);
fdlMoveToFolderField.top = new FormAttachment(wOperation, margin);
wlMoveToFolderField.setLayoutData(fdlMoveToFolderField);
wMoveToFolderField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wMoveToFolderField.setEditable(true);
props.setLook(wMoveToFolderField);
wMoveToFolderField.addModifyListener(lsMod);
fdMoveToFolderField=new FormData();
fdMoveToFolderField.left = new FormAttachment(middle, 0);
fdMoveToFolderField.top = new FormAttachment(wOperation, margin);
fdMoveToFolderField.right= new FormAttachment(100, -margin);
wMoveToFolderField.setLayoutData(fdMoveToFolderField);
wMoveToFolderField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// THE BUTTONS
wOK=new Button(shell, SWT.PUSH);
wOK.setText(BaseMessages.getString(PKG, "System.Button.OK")); //$NON-NLS-1$
wCancel=new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); //$NON-NLS-1$
setButtonPositions(new Button[] { wOK, wCancel }, margin, wMoveToFolderField);
// Add listeners
lsOK = new Listener() { public void handleEvent(Event e) { ok(); } };
lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } };
wOK.addListener (SWT.Selection, lsOK );
wCancel.addListener(SWT.Selection, lsCancel);
lsDef=new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { ok(); } };
wStepname.addSelectionListener( lsDef );
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { cancel(); } } );
// Set the shell size, based upon previous time...
setSize();
getData();
keepFolder();
updateOperation();
input.setChanged(changed);
shell.open();
// Standard SWT modal event loop: block until the shell is disposed.
while (!shell.isDisposed())
{
if (!display.readAndDispatch()) display.sleep();
}
return stepname;
}
/**
 * Copies the step metadata into the dialog widgets.
 * Null metadata values leave the corresponding widget untouched.
 */
public void getData()
{
    if (log.isDebug()) {
        log.logDebug(toString(), BaseMessages.getString(PKG, "ZipFileDialog.Log.GettingKeyInfo")); //$NON-NLS-1$
    }
    final String baseFolder = input.getBaseFolderField();
    if (baseFolder != null) {
        wBaseFolderField.setText(baseFolder);
    }
    final String sourceField = input.getDynamicSourceFileNameField();
    if (sourceField != null) {
        wSourceFileNameField.setText(sourceField);
    }
    final String targetField = input.getDynamicTargetFileNameField();
    if (targetField != null) {
        wTargetFileNameField.setText(targetField);
    }
    wOperation.setText(ZipFileMeta.getOperationTypeDesc(input.getOperationType()));
    final String moveToFolder = input.getMoveToFolderField();
    if (moveToFolder != null) {
        wMoveToFolderField.setText(moveToFolder);
    }
    wAddResult.setSelection(input.isaddTargetFileNametoResult());
    wOverwriteZipEntry.setSelection(input.isOverwriteZipEntry());
    wCreateParentFolder.setSelection(input.isCreateParentFolder());
    wKeepFolders.setSelection(input.isKeepSouceFolder());
    // Pre-select the step name so the user can type over it immediately.
    wStepname.selectAll();
}
/**
 * Discards all edits: restores the original changed-flag on the metadata
 * and closes the dialog without returning a step name.
 */
private void cancel()
{
    // A null step name tells the caller of open() that the user cancelled.
    stepname = null;
    input.setChanged(changed);
    dispose();
}
/**
 * Validates the dialog, copies the widget values back into the step
 * metadata, and closes the dialog returning the step name.
 */
private void ok()
{
    // Refuse to accept the dialog while the step has no name.
    if (Const.isEmpty(wStepname.getText())) {
        final MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setMessage("Please give a name to this step!");
        mb.setText("Error");
        mb.open();
        return;
    }
    // Push the widget values back into the metadata object.
    input.setBaseFolderField(wBaseFolderField.getText());
    input.setDynamicSourceFileNameField(wSourceFileNameField.getText());
    input.setDynamicTargetFileNameField(wTargetFileNameField.getText());
    input.setaddTargetFileNametoResult(wAddResult.getSelection());
    input.setOverwriteZipEntry(wOverwriteZipEntry.getSelection());
    input.setCreateParentFolder(wCreateParentFolder.getSelection());
    input.setKeepSouceFolder(wKeepFolders.getSelection());
    input.setOperationType(ZipFileMeta.getOperationTypeByDesc(wOperation.getText()));
    input.setMoveToFolderField(wMoveToFolderField.getText());
    // The step name is the value returned by open().
    stepname = wStepname.getText();
    dispose();
}
/**
 * Enables the base-folder label and combo only while "keep source folders"
 * is checked; the base folder is meaningless otherwise.
 */
private void keepFolder() {
    final boolean keepFolders = wKeepFolders.getSelection();
    wlBaseFolderField.setEnabled(keepFolders);
    wBaseFolderField.setEnabled(keepFolders);
}
/**
 * Lazily populates the source/target/base-folder combos with the field
 * names of the previous step. Runs only once per dialog session; the
 * current combo texts are preserved across the reload.
 */
private void get()
{
    if (gotPreviousFields) {
        return; // already loaded for this dialog session
    }
    gotPreviousFields = true;
    // Remember current selections so repopulating does not wipe them out.
    final String source = wSourceFileNameField.getText();
    final String target = wTargetFileNameField.getText();
    final String base = wBaseFolderField.getText();
    try
    {
        wSourceFileNameField.removeAll();
        wTargetFileNameField.removeAll();
        wBaseFolderField.removeAll();
        final RowMetaInterface r = transMeta.getPrevStepFields(stepname);
        if (r != null)
        {
            final String[] fields = r.getFieldNames();
            wSourceFileNameField.setItems(fields);
            wTargetFileNameField.setItems(fields);
            wBaseFolderField.setItems(fields);
        }
    }
    catch (KettleException ke)
    {
        new ErrorDialog(shell, BaseMessages.getString(PKG, "ZipFileDialog.FailedToGetFields.DialogTitle"), BaseMessages.getString(PKG, "ZipFileDialog.FailedToGetFields.DialogMessage"), ke); //$NON-NLS-1$ //$NON-NLS-2$
    }
    finally
    {
        // Restore whatever the user had typed/selected before the reload.
        if (source != null) wSourceFileNameField.setText(source);
        if (target != null) wTargetFileNameField.setText(target);
        if (base != null) wBaseFolderField.setText(base);
    }
}
/**
 * Enables the "move to folder" label and combo only when the selected
 * operation is MOVE; the target folder is meaningless for other operations.
 */
private void updateOperation()
{
    // Resolve the selected operation once so the label and the field can
    // never disagree (the original computed it twice).
    final boolean isMove =
        ZipFileMeta.getOperationTypeByDesc(wOperation.getText()) == ZipFileMeta.OPERATION_TYPE_MOVE;
    wlMoveToFolderField.setEnabled(isMove);
    wMoveToFolderField.setEnabled(isMove);
}
}
| src-ui/org/pentaho/di/ui/trans/steps/zipfile/ZipFileDialog.java | /* Copyright (c) 2007 Pentaho Corporation. All rights reserved.
* This software was developed by Pentaho Corporation and is provided under the terms
* of the GNU Lesser General Public License, Version 2.1. You may not use
* this file except in compliance with the license. If you need a copy of the license,
* please go to http://www.gnu.org/licenses/lgpl-2.1.txt. The Original Code is Pentaho
* Data Integration. The Initial Developer is Pentaho Corporation.
*
* Software distributed under the GNU Lesser Public License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to
* the license for the specific language governing your rights and limitations.*/
/*
* Created on 18-mei-2003
*
*/
package org.pentaho.di.ui.trans.steps.zipfile;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.Group;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.zipfile.ZipFileMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
public class ZipFileDialog extends BaseStepDialog implements StepDialogInterface
{
private static Class<?> PKG = ZipFileMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
private Label wlSourceFileNameField;
private CCombo wSourceFileNameField;
private FormData fdlSourceFileNameField, fdSourceFileNameField;
private Label wlTargetFileNameField;
private CCombo wTargetFileNameField;
private FormData fdlTargetFileNameField, fdTargetFileNameField;
private Button wAddResult;
private FormData fdAddResult,fdlAddResult;
private Label wlAddResult;
private Button wOverwriteZipEntry;
private FormData fdOverwriteTarget,fdlOverwriteTarget;
private Label wlOverwriteTarget;
private Button wCreateParentFolder;
private FormData fdCreateParentFolder,fdlCreateParentFolder;
private Label wlCreateParentFolder;
private Button wKeepFolders;
private FormData fdKeepFolders,fdlKeepFolders;
private Label wlKeepFolders;
private Group wSettingsGroup;
private FormData fdSettingsGroup;
private ZipFileMeta input;
private Label wlBaseFolderField;
private CCombo wBaseFolderField;
private FormData fdlBaseFolderField, fdBaseFolderField;
private Label wlOperation;
private CCombo wOperation;
private FormData fdlOperation;
private FormData fdOperation;
private Label wlMoveToFolderField;
private CCombo wMoveToFolderField;
private FormData fdlMoveToFolderField, fdMoveToFolderField;
private boolean gotPreviousFields=false;
/**
 * Standard Kettle step-dialog constructor.
 *
 * @param parent    parent shell for this dialog
 * @param in        the step metadata (must be a {@link ZipFileMeta})
 * @param transMeta the transformation the step belongs to
 * @param sname     the current step name
 */
public ZipFileDialog(Shell parent, Object in, TransMeta transMeta, String sname)
{
super(parent, (BaseStepMeta)in, transMeta, sname);
// Keep a typed reference so the rest of the dialog can avoid casts.
input=(ZipFileMeta)in;
}
/**
 * Builds and lays out the dialog, fills the widgets from the step metadata,
 * then runs the SWT event loop until the user presses OK or Cancel.
 *
 * @return the step name when the user pressed OK, or {@code null} when the
 *         dialog was cancelled
 */
public String open()
{
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
props.setLook(shell);
setShellImage(shell, input);
// Any edit in a text widget marks the step as changed.
ModifyListener lsMod = new ModifyListener()
{
public void modifyText(ModifyEvent e)
{
input.setChanged();
}
};
// Any checkbox toggle marks the step as changed.
SelectionAdapter lsSel = new SelectionAdapter() {
public void widgetSelected(SelectionEvent arg0) {
input.setChanged();
}
};
// Remember the incoming changed-flag so Cancel can restore it.
changed = input.hasChanged();
FormLayout formLayout = new FormLayout ();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG, "ZipFileDialog.Shell.Title")); //$NON-NLS-1$
int middle = props.getMiddlePct();
int margin=Const.MARGIN;
// Stepname line
wlStepname=new Label(shell, SWT.RIGHT);
wlStepname.setText(BaseMessages.getString(PKG, "ZipFileDialog.Stepname.Label")); //$NON-NLS-1$
props.setLook(wlStepname);
fdlStepname=new FormData();
fdlStepname.left = new FormAttachment(0, 0);
fdlStepname.right= new FormAttachment(middle, -margin);
fdlStepname.top = new FormAttachment(0, margin);
wlStepname.setLayoutData(fdlStepname);
wStepname=new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wStepname.setText(stepname);
props.setLook(wStepname);
wStepname.addModifyListener(lsMod);
fdStepname=new FormData();
fdStepname.left = new FormAttachment(middle, 0);
fdStepname.top = new FormAttachment(0, margin);
fdStepname.right= new FormAttachment(100, 0);
wStepname.setLayoutData(fdStepname);
/////////////////////////////////
// START OF Settings GROUP //
/////////////////////////////////
wSettingsGroup = new Group(shell, SWT.SHADOW_NONE);
props.setLook(wSettingsGroup);
wSettingsGroup.setText(BaseMessages.getString(PKG, "ZipFileDialog.wSettingsGroup.Label"));
FormLayout settingGroupLayout = new FormLayout();
settingGroupLayout.marginWidth = 10;
settingGroupLayout.marginHeight = 10;
wSettingsGroup.setLayout(settingGroupLayout);
// Create target parent folder?
wlCreateParentFolder=new Label(wSettingsGroup, SWT.RIGHT);
wlCreateParentFolder.setText(BaseMessages.getString(PKG, "ZipFileDialog.CreateParentFolder.Label"));
props.setLook(wlCreateParentFolder);
fdlCreateParentFolder=new FormData();
fdlCreateParentFolder.left = new FormAttachment(0, 0);
fdlCreateParentFolder.top = new FormAttachment(wStepname, margin);
fdlCreateParentFolder.right= new FormAttachment(middle, -margin);
wlCreateParentFolder.setLayoutData(fdlCreateParentFolder);
wCreateParentFolder=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wCreateParentFolder);
wCreateParentFolder.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.CreateParentFolder.Tooltip"));
fdCreateParentFolder=new FormData();
fdCreateParentFolder.left = new FormAttachment(middle, 0);
fdCreateParentFolder.top = new FormAttachment(wStepname, margin);
wCreateParentFolder.setLayoutData(fdCreateParentFolder);
wCreateParentFolder.addSelectionListener(lsSel);
// Overwrite target file?
wlOverwriteTarget=new Label(wSettingsGroup, SWT.RIGHT);
wlOverwriteTarget.setText(BaseMessages.getString(PKG, "ZipFileDialog.OverwriteTarget.Label"));
props.setLook(wlOverwriteTarget);
fdlOverwriteTarget=new FormData();
fdlOverwriteTarget.left = new FormAttachment(0, 0);
fdlOverwriteTarget.top = new FormAttachment(wCreateParentFolder, margin);
fdlOverwriteTarget.right= new FormAttachment(middle, -margin);
wlOverwriteTarget.setLayoutData(fdlOverwriteTarget);
wOverwriteZipEntry=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wOverwriteZipEntry);
wOverwriteZipEntry.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.OverwriteTarget.Tooltip"));
fdOverwriteTarget=new FormData();
fdOverwriteTarget.left = new FormAttachment(middle, 0);
fdOverwriteTarget.top = new FormAttachment(wCreateParentFolder, margin);
wOverwriteZipEntry.setLayoutData(fdOverwriteTarget);
wOverwriteZipEntry.addSelectionListener(lsSel);
// Add Target filename to result filenames?
wlAddResult=new Label(wSettingsGroup, SWT.RIGHT);
wlAddResult.setText(BaseMessages.getString(PKG, "ZipFileDialog.AddResult.Label"));
props.setLook(wlAddResult);
fdlAddResult=new FormData();
fdlAddResult.left = new FormAttachment(0, 0);
fdlAddResult.top = new FormAttachment(wOverwriteZipEntry, margin);
fdlAddResult.right= new FormAttachment(middle, -margin);
wlAddResult.setLayoutData(fdlAddResult);
wAddResult=new Button(wSettingsGroup, SWT.CHECK );
props.setLook(wAddResult);
wAddResult.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.AddResult.Tooltip"));
fdAddResult=new FormData();
fdAddResult.left = new FormAttachment(middle, 0);
fdAddResult.top = new FormAttachment(wOverwriteZipEntry, margin);
wAddResult.setLayoutData(fdAddResult);
wAddResult.addSelectionListener(lsSel);
fdSettingsGroup = new FormData();
fdSettingsGroup.left = new FormAttachment(0, margin);
fdSettingsGroup.top = new FormAttachment(wStepname, margin);
fdSettingsGroup.right = new FormAttachment(100, -margin);
wSettingsGroup.setLayoutData(fdSettingsGroup);
/////////////////////////////////
// END OF Settings Fields GROUP //
/////////////////////////////////
// SourceFileNameField field
wlSourceFileNameField=new Label(shell, SWT.RIGHT);
wlSourceFileNameField.setText(BaseMessages.getString(PKG, "ZipFileDialog.SourceFileNameField.Label")); //$NON-NLS-1$
props.setLook(wlSourceFileNameField);
fdlSourceFileNameField=new FormData();
fdlSourceFileNameField.left = new FormAttachment(0, 0);
fdlSourceFileNameField.right= new FormAttachment(middle, -margin);
fdlSourceFileNameField.top = new FormAttachment(wSettingsGroup, 2*margin);
wlSourceFileNameField.setLayoutData(fdlSourceFileNameField);
wSourceFileNameField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
props.setLook(wSourceFileNameField);
wSourceFileNameField.setEditable(true);
wSourceFileNameField.addModifyListener(lsMod);
fdSourceFileNameField=new FormData();
fdSourceFileNameField.left = new FormAttachment(middle, 0);
fdSourceFileNameField.top = new FormAttachment(wSettingsGroup, 2*margin);
fdSourceFileNameField.right= new FormAttachment(100, -margin);
wSourceFileNameField.setLayoutData(fdSourceFileNameField);
// Lazily load the previous step's field names when the combo gains focus.
wSourceFileNameField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// TargetFileNameField field
wlTargetFileNameField=new Label(shell, SWT.RIGHT);
wlTargetFileNameField.setText(BaseMessages.getString(PKG, "ZipFileDialog.TargetFileNameField.Label")); //$NON-NLS-1$
props.setLook(wlTargetFileNameField);
fdlTargetFileNameField=new FormData();
fdlTargetFileNameField.left = new FormAttachment(0, 0);
fdlTargetFileNameField.right= new FormAttachment(middle, -margin);
fdlTargetFileNameField.top = new FormAttachment(wSourceFileNameField, margin);
wlTargetFileNameField.setLayoutData(fdlTargetFileNameField);
wTargetFileNameField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wTargetFileNameField.setEditable(true);
props.setLook(wTargetFileNameField);
wTargetFileNameField.addModifyListener(lsMod);
fdTargetFileNameField=new FormData();
fdTargetFileNameField.left = new FormAttachment(middle, 0);
fdTargetFileNameField.top = new FormAttachment(wSourceFileNameField, margin);
fdTargetFileNameField.right= new FormAttachment(100, -margin);
wTargetFileNameField.setLayoutData(fdTargetFileNameField);
wTargetFileNameField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// Keep source folder structure?
wlKeepFolders=new Label(shell, SWT.RIGHT);
wlKeepFolders.setText(BaseMessages.getString(PKG, "ZipFileDialog.KeepFolders.Label"));
props.setLook(wlKeepFolders);
fdlKeepFolders=new FormData();
fdlKeepFolders.left = new FormAttachment(0, 0);
fdlKeepFolders.top = new FormAttachment(wTargetFileNameField, margin);
fdlKeepFolders.right= new FormAttachment(middle, -margin);
wlKeepFolders.setLayoutData(fdlKeepFolders);
wKeepFolders=new Button(shell, SWT.CHECK );
props.setLook(wKeepFolders);
wKeepFolders.setToolTipText(BaseMessages.getString(PKG, "ZipFileDialog.KeepFolders.Tooltip"));
fdKeepFolders=new FormData();
fdKeepFolders.left = new FormAttachment(middle, 0);
fdKeepFolders.top = new FormAttachment(wTargetFileNameField, margin);
wKeepFolders.setLayoutData(fdKeepFolders);
wKeepFolders.addSelectionListener(lsSel);
// Toggling "keep folders" enables/disables the base-folder combo below.
wKeepFolders.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent arg0) {
keepFolder();
}
});
// BaseFolderField field
wlBaseFolderField=new Label(shell, SWT.RIGHT);
wlBaseFolderField.setText(BaseMessages.getString(PKG, "ZipFileDialog.BaseFolderField.Label")); //$NON-NLS-1$
props.setLook(wlBaseFolderField);
fdlBaseFolderField=new FormData();
fdlBaseFolderField.left = new FormAttachment(0, 0);
fdlBaseFolderField.right= new FormAttachment(middle, -margin);
fdlBaseFolderField.top = new FormAttachment(wKeepFolders, margin);
wlBaseFolderField.setLayoutData(fdlBaseFolderField);
wBaseFolderField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wBaseFolderField.setEditable(true);
props.setLook(wBaseFolderField);
wBaseFolderField.addModifyListener(lsMod);
fdBaseFolderField=new FormData();
fdBaseFolderField.left = new FormAttachment(middle, 0);
fdBaseFolderField.top = new FormAttachment(wKeepFolders, margin);
fdBaseFolderField.right= new FormAttachment(100, -margin);
wBaseFolderField.setLayoutData(fdBaseFolderField);
wBaseFolderField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// Operation
wlOperation=new Label(shell, SWT.RIGHT);
wlOperation.setText(BaseMessages.getString(PKG, "ZipFileDialog.Operation.Label")); //$NON-NLS-1$
props.setLook(wlOperation);
fdlOperation=new FormData();
fdlOperation.left = new FormAttachment(0, 0);
fdlOperation.right= new FormAttachment(middle, -margin);
fdlOperation.top = new FormAttachment(wBaseFolderField, margin);
wlOperation.setLayoutData(fdlOperation);
wOperation=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
props.setLook(wOperation);
wOperation.addModifyListener(lsMod);
fdOperation=new FormData();
fdOperation.left = new FormAttachment(middle, 0);
fdOperation.top = new FormAttachment(wBaseFolderField, margin);
fdOperation.right= new FormAttachment(100, -margin);
wOperation.setLayoutData(fdOperation);
wOperation.setItems(ZipFileMeta.operationTypeDesc);
// Selecting an operation enables/disables the "move to folder" combo.
wOperation.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
updateOperation();
}
});
// MoveToFolderField field
wlMoveToFolderField=new Label(shell, SWT.RIGHT);
wlMoveToFolderField.setText(BaseMessages.getString(PKG, "ZipFileDialog.MoveToFolderField.Label")); //$NON-NLS-1$
props.setLook(wlMoveToFolderField);
fdlMoveToFolderField=new FormData();
fdlMoveToFolderField.left = new FormAttachment(0, 0);
fdlMoveToFolderField.right= new FormAttachment(middle, -margin);
fdlMoveToFolderField.top = new FormAttachment(wOperation, margin);
wlMoveToFolderField.setLayoutData(fdlMoveToFolderField);
wMoveToFolderField=new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
wMoveToFolderField.setEditable(true);
props.setLook(wMoveToFolderField);
wMoveToFolderField.addModifyListener(lsMod);
fdMoveToFolderField=new FormData();
fdMoveToFolderField.left = new FormAttachment(middle, 0);
fdMoveToFolderField.top = new FormAttachment(wOperation, margin);
fdMoveToFolderField.right= new FormAttachment(100, -margin);
wMoveToFolderField.setLayoutData(fdMoveToFolderField);
wMoveToFolderField.addFocusListener(new FocusListener()
{
public void focusLost(org.eclipse.swt.events.FocusEvent e)
{
}
public void focusGained(org.eclipse.swt.events.FocusEvent e)
{
get();
}
}
);
// THE BUTTONS
wOK=new Button(shell, SWT.PUSH);
wOK.setText(BaseMessages.getString(PKG, "System.Button.OK")); //$NON-NLS-1$
wCancel=new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); //$NON-NLS-1$
setButtonPositions(new Button[] { wOK, wCancel }, margin, wMoveToFolderField);
// Add listeners
lsOK = new Listener() { public void handleEvent(Event e) { ok(); } };
lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } };
wOK.addListener (SWT.Selection, lsOK );
wCancel.addListener(SWT.Selection, lsCancel);
lsDef=new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { ok(); } };
wStepname.addSelectionListener( lsDef );
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { cancel(); } } );
// Set the shell size, based upon previous time...
setSize();
getData();
keepFolder();
updateOperation();
input.setChanged(changed);
shell.open();
// Standard SWT modal event loop: block until the shell is disposed.
while (!shell.isDisposed())
{
if (!display.readAndDispatch()) display.sleep();
}
return stepname;
}
/**
 * Copy information from the meta-data input to the dialog fields.
 * Null metadata values leave the corresponding widget untouched.
 */
public void getData()
{
if(log.isDebug()) log.logDebug(toString(), BaseMessages.getString(PKG, "ZipFileDialog.Log.GettingKeyInfo")); //$NON-NLS-1$
if (input.getBaseFolderField() !=null) wBaseFolderField.setText(input.getBaseFolderField());
if (input.getDynamicSourceFileNameField() !=null) wSourceFileNameField.setText(input.getDynamicSourceFileNameField());
if (input.getDynamicTargetFileNameField() !=null) wTargetFileNameField.setText(input.getDynamicTargetFileNameField());
wOperation.setText(ZipFileMeta.getOperationTypeDesc(input.getOperationType()));
if (input.getMoveToFolderField() !=null) wMoveToFolderField.setText(input.getMoveToFolderField());
wAddResult.setSelection(input.isaddTargetFileNametoResult());
wOverwriteZipEntry.setSelection(input.isOverwriteZipEntry());
wCreateParentFolder.setSelection(input.isCreateParentFolder());
wKeepFolders.setSelection(input.isKeepSouceFolder());
// Pre-select the step name so the user can type over it immediately.
wStepname.selectAll();
}
/**
 * Discards all edits: restores the original changed-flag on the metadata
 * and closes the dialog without returning a step name.
 */
private void cancel()
{
// A null step name tells the caller of open() that the user cancelled.
stepname=null;
input.setChanged(changed);
dispose();
}
/**
 * Validate the dialog, copy all widget values back into the step meta-data
 * and close, returning the (non-empty) step name to the caller via open().
 */
private void ok()
{
    if (Const.isEmpty(wStepname.getText())) {
        // A step must always carry a name. Use the standard internationalized
        // system messages instead of the previous hard-coded (and mis-encoded)
        // French literals, consistent with the i18n style of the rest of this dialog.
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setText(BaseMessages.getString(PKG, "System.StepJobEntryNameMissing.Title")); //$NON-NLS-1$
        mb.setMessage(BaseMessages.getString(PKG, "System.JobEntryNameMissing.Msg")); //$NON-NLS-1$
        mb.open();
        return;
    }

    // Push the widget state into the meta-data.
    input.setBaseFolderField(wBaseFolderField.getText());
    input.setDynamicSourceFileNameField(wSourceFileNameField.getText());
    input.setDynamicTargetFileNameField(wTargetFileNameField.getText());
    input.setaddTargetFileNametoResult(wAddResult.getSelection());
    input.setOverwriteZipEntry(wOverwriteZipEntry.getSelection());
    input.setCreateParentFolder(wCreateParentFolder.getSelection());
    input.setKeepSouceFolder(wKeepFolders.getSelection());
    input.setOperationType(ZipFileMeta.getOperationTypeByDesc(wOperation.getText()));
    input.setMoveToFolderField(wMoveToFolderField.getText());

    stepname = wStepname.getText(); // return value
    dispose();
}
/** Enable the base-folder widgets only while source folder hierarchies are kept. */
private void keepFolder() {
    final boolean keepFolders = wKeepFolders.getSelection();
    wlBaseFolderField.setEnabled(keepFolders);
    wBaseFolderField.setEnabled(keepFolders);
}
/**
 * Populate the three field combo boxes with the field names of the previous
 * step. Runs only once; the current selections are preserved across the refresh.
 */
private void get()
{
    if (gotPreviousFields) {
        return;
    }
    gotPreviousFields = true;

    // Remember the current selections so they survive the repopulation below.
    final String previousSource = wSourceFileNameField.getText();
    final String previousTarget = wTargetFileNameField.getText();
    final String previousBase = wBaseFolderField.getText();
    try {
        wSourceFileNameField.removeAll();
        wTargetFileNameField.removeAll();
        wBaseFolderField.removeAll();
        RowMetaInterface row = transMeta.getPrevStepFields(stepname);
        if (row != null) {
            final String[] fieldNames = row.getFieldNames();
            wSourceFileNameField.setItems(fieldNames);
            wTargetFileNameField.setItems(fieldNames);
            wBaseFolderField.setItems(fieldNames);
        }
    } catch (KettleException ke) {
        new ErrorDialog(shell,
            BaseMessages.getString(PKG, "ZipFileDialog.FailedToGetFields.DialogTitle"), //$NON-NLS-1$
            BaseMessages.getString(PKG, "ZipFileDialog.FailedToGetFields.DialogMessage"), ke); //$NON-NLS-1$
    } finally {
        // Restore the previous selections, keeping the defensive null checks
        // from the original implementation.
        if (previousSource != null) wSourceFileNameField.setText(previousSource);
        if (previousTarget != null) wTargetFileNameField.setText(previousTarget);
        if (previousBase != null) wBaseFolderField.setText(previousBase);
    }
}
/**
 * Enable the "move to folder" widgets only when the MOVE operation is selected.
 */
private void updateOperation()
{
    // Resolve the operation type once instead of parsing the combo text twice.
    final boolean isMove =
        ZipFileMeta.getOperationTypeByDesc(wOperation.getText()) == ZipFileMeta.OPERATION_TYPE_MOVE;
    wlMoveToFolderField.setEnabled(isMove);
    wMoveToFolderField.setEnabled(isMove);
}
}
| code compile fix
git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@15806 5fb7f6ec-07c1-534a-b4ca-9155e429e800
| src-ui/org/pentaho/di/ui/trans/steps/zipfile/ZipFileDialog.java | code compile fix |
|
Java | apache-2.0 | 2e3e43e1ac3bcbee3e507691060ab31573c81066 | 0 | client-side/throttle | /*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package engineering.clientside.throttle;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ThreadLocalRandom;
import static engineering.clientside.throttle.NanoThrottle.ONE_SECOND_NANOS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static junit.framework.TestCase.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* The following tests were adapted directly from com.google.common.util.concurrent.RateLimiterTest.
* Changes were made to test the non-burst behavior of Throttle, to ensure the rate limit is not
* exceeded over the period of one second.
*
* @author Dimitris Andreou - Original RateLimiterTest author
* @author James P Edwards
*/
public class ThrottleTest {

  // Maximum tolerated scheduling jitter, in seconds, for the timed assertions
  // below. The first timed call of a test gets a slightly larger allowance.
  private static final double FIRST_DELTA = 0.007; // 7ms
  private static final double SECOND_DELTA = 0.006; // 6ms

  @BeforeClass
  public static void warmup() {
    // Load and initialize the Throttle classes up front so class-loading cost
    // does not skew the timing-sensitive tests in this suite.
    Throttle.create(100.0);
  }

  @Test
  public void testReserve() throws InterruptedException {
    // 5 permits per second; the first permit is free, each subsequent permit
    // must wait ~1/5 second.
    final NanoThrottle throttle = new NanoThrottle.GoldFish(5.0, 1.0);
    long sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.0, sleep / ONE_SECOND_NANOS, 0.0);
    sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.20, sleep / ONE_SECOND_NANOS, FIRST_DELTA);
    sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.20, sleep / ONE_SECOND_NANOS, SECOND_DELTA);
  }

  @Test
  public void testAcquire() throws InterruptedException {
    final Throttle throttle = Throttle.create(5.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.20, throttle.acquireUnchecked(), FIRST_DELTA);
    assertEquals(0.20, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireWeights() throws InterruptedException {
    final Throttle throttle = Throttle.create(20.0);
    // The wait observed by each acquire reflects the weight of the previous one.
    assertEquals(0.00, throttle.acquireUnchecked(1), FIRST_DELTA);
    assertEquals(0.05, throttle.acquire(1), SECOND_DELTA);
    assertEquals(0.05, throttle.acquire(2), SECOND_DELTA);
    assertEquals(0.10, throttle.acquire(4), SECOND_DELTA);
    assertEquals(0.20, throttle.acquire(8), SECOND_DELTA);
    assertEquals(0.40, throttle.acquire(1), SECOND_DELTA);
  }

  @Test
  public void testAcquireWithWait() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    // Sleeping one full period makes the next permit immediately available,
    // but no extra burst credit accumulates beyond that.
    Thread.sleep(20);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireWithDoubleWait() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    // Even after sleeping two full periods only a single permit is free.
    Thread.sleep(40);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testManyPermits() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.02, throttle.acquire(), FIRST_DELTA);
    assertEquals(0.02, throttle.acquire(3), SECOND_DELTA);
    assertEquals(0.06, throttle.acquire(), SECOND_DELTA);
    assertEquals(0.02, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireAndUpdate() throws InterruptedException {
    final Throttle throttle = Throttle.create(10.0);
    assertEquals(0.0, throttle.acquire(1), 0.0);
    assertEquals(0.10, throttle.acquire(1), FIRST_DELTA);
    // Changing the rate mid-stream applies to permits not yet paid for.
    throttle.setRate(20.0);
    assertEquals(0.10, throttle.acquire(1), SECOND_DELTA);
    assertEquals(0.05, throttle.acquire(2), SECOND_DELTA);
    assertEquals(0.10, throttle.acquire(4), SECOND_DELTA);
    assertEquals(0.20, throttle.acquire(1), SECOND_DELTA);
  }

  @Test
  public void testTryAcquire_noWaitAllowed() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire());
    assertFalse(throttle.tryAcquireUnchecked(0, SECONDS));
    assertFalse(throttle.tryAcquire(0, SECONDS));
    Thread.sleep(10);
    assertFalse(throttle.tryAcquire(0, SECONDS));
  }

  @Test
  public void testTryAcquire_someWaitAllowed() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(0, SECONDS));
    assertTrue(throttle.tryAcquire(20, MILLISECONDS));
    assertFalse(throttle.tryAcquire(10, MILLISECONDS));
    Thread.sleep(10);
    assertTrue(throttle.tryAcquire(10, MILLISECONDS));
  }

  @Test
  public void testTryAcquire_overflow() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(0, MICROSECONDS));
    Thread.sleep(10);
    // A huge timeout must not overflow the internal nanosecond arithmetic.
    assertTrue(throttle.tryAcquire(Long.MAX_VALUE, MICROSECONDS));
  }

  @Test
  public void testTryAcquire_negative() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(5, 0, SECONDS));
    Thread.sleep(90);
    // Negative timeouts are treated as zero wait.
    assertFalse(throttle.tryAcquire(1, Long.MIN_VALUE, SECONDS));
    Thread.sleep(10);
    assertTrue(throttle.tryAcquire(1, -1, SECONDS));
  }

  @Test
  public void testImmediateTryAcquire() throws InterruptedException {
    final Throttle throttle = Throttle.create(1.0);
    assertTrue("Unable to acquire initial permit", throttle.tryAcquire());
    assertFalse("Capable of acquiring secondary permit", throttle.tryAcquire());
  }

  @Test
  public void testDoubleMinValueCanAcquireExactlyOnce() throws InterruptedException {
    // At the smallest representable rate only one permit should ever be granted
    // within the lifetime of this test.
    final Throttle throttle = Throttle.create(Double.MIN_VALUE);
    assertTrue("Unable to acquire initial permit", throttle.tryAcquire());
    assertFalse("Capable of acquiring an additional permit", throttle.tryAcquire());
    Thread.sleep(10);
    assertFalse("Capable of acquiring an additional permit after sleeping", throttle.tryAcquire());
  }

  @Test
  public void testSimpleRateUpdate() {
    final Throttle throttle = Throttle.create(5.0);
    assertEquals(5.0, throttle.getRate(), 0.0);
    throttle.setRate(10.0);
    assertEquals(10.0, throttle.getRate(), 0.0);
    // Non-positive rates are rejected.
    try {
      throttle.setRate(0.0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.setRate(-10.0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testAcquireParameterValidation() throws InterruptedException {
    final Throttle throttle = Throttle.create(999);
    // Permit counts must be strictly positive for every acquire/tryAcquire variant.
    try {
      throttle.acquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.acquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquireUnchecked(0, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(-1, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testIllegalConstructorArgs() throws InterruptedException {
    // Rates must be positive finite numbers, both at construction and via setRate.
    try {
      Throttle.create(Double.POSITIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(Double.NEGATIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(Double.NaN);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(-.0000001);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      final Throttle throttle = Throttle.create(1.0);
      throttle.setRate(Double.POSITIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      final Throttle throttle = Throttle.create(1.0);
      throttle.setRate(Double.NaN);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testInterruptUnchecked() throws InterruptedException {
    final Throttle throttle = Throttle.create(1);
    throttle.acquireUnchecked(10);

    // Exceptions thrown inside the worker thread are invisible to JUnit, so the
    // observed Throwable is handed back to the main thread via a CompletableFuture
    // and asserted on here.
    final CompletableFuture<Throwable> futureEx = new CompletableFuture<>();
    Thread thread = new Thread(() -> {
      try {
        throttle.acquireUnchecked();
        futureEx.complete(null);
      } catch (CompletionException ex) {
        futureEx.complete(ex.getCause());
      }
    });
    thread.start();
    thread.interrupt();
    thread.join();
    assertFalse(throttle.tryAcquire());
    // The interrupt must surface as an InterruptedException cause.
    assertEquals(InterruptedException.class, futureEx.join().getClass());

    final CompletableFuture<Throwable> futureEx2 = new CompletableFuture<>();
    thread = new Thread(() -> {
      try {
        throttle.tryAcquireUnchecked(20, SECONDS);
        futureEx2.complete(null);
      } catch (CompletionException ex) {
        futureEx2.complete(ex.getCause());
      }
    });
    thread.start();
    thread.interrupt();
    thread.join();
    assertFalse(throttle.tryAcquire());
    assertEquals(InterruptedException.class, futureEx2.join().getClass());
  }

  @Test
  public void testMax() throws InterruptedException {
    final Throttle throttle = Throttle.create(Double.MAX_VALUE);
    assertEquals(0.0, throttle.acquire(Integer.MAX_VALUE / 4), 0.0);
    assertEquals(0.0, throttle.acquire(Integer.MAX_VALUE / 2), 0.0);
    assertEquals(0.0, throttle.acquireUnchecked(Integer.MAX_VALUE), 0.0);
    throttle.setRate(20.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.05, throttle.acquire(), SECOND_DELTA);
    throttle.setRate(Double.MAX_VALUE);
    assertEquals(0.05, throttle.acquire(), FIRST_DELTA);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
  }

  @Test
  public void testWeNeverGetABurstMoreThanOneSec() throws InterruptedException {
    final Throttle throttle = Throttle.create(100.0);
    final int[] rates = {10000, 100, 1000000, 1000, 100};
    for (final int oneSecWorthOfWork : rates) {
      throttle.setRate(oneSecWorthOfWork);
      final int oneHundredMillisWorthOfWork = (int) (oneSecWorthOfWork / 10.0);
      // 100ms worth of permits must take ~100ms to acquire, regardless of rate changes.
      long durationMillis = measureTotalTimeMillis(throttle, oneHundredMillisWorthOfWork);
      assertEquals(100.0, durationMillis, 15.0);
      durationMillis = measureTotalTimeMillis(throttle, oneHundredMillisWorthOfWork);
      assertEquals(100.0, durationMillis, 15.0);
    }
  }

  // Acquires the given number of permits in randomly-sized chunks and returns
  // the elapsed wall-clock time in milliseconds.
  private static long measureTotalTimeMillis(final Throttle throttle, int permits)
      throws InterruptedException {
    final Random random = ThreadLocalRandom.current();
    final long startTime = System.nanoTime();
    while (permits > 0) {
      final int nextPermitsToAcquire = Math.max(1, random.nextInt(permits));
      permits -= nextPermitsToAcquire;
      throttle.acquire(nextPermitsToAcquire);
    }
    throttle.acquire(1); // to repay for any pending debt
    return NANOSECONDS.toMillis(System.nanoTime() - startTime);
  }

  @Test
  public void testToString() {
    final Throttle throttle = Throttle.create(100.0);
    assertEquals("Throttle{rate=100.0}", throttle.toString());
  }
}
| src/test/java/engineering/clientside/throttle/ThrottleTest.java | /*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package engineering.clientside.throttle;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ThreadLocalRandom;
import static engineering.clientside.throttle.NanoThrottle.ONE_SECOND_NANOS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static junit.framework.TestCase.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* The following tests were adapted directly from com.google.common.util.concurrent.RateLimiterTest.
* Changes were made to test the non-burst behavior of Throttle, to ensure the rate limit is not
* exceeded over the period of one second.
*
* @author Dimitris Andreou - Original RateLimiterTest author
* @author James P Edwards
*/
public class ThrottleTest {

  // Maximum tolerated scheduling jitter, in seconds, for the timed assertions
  // below. The first timed call of a test gets a slightly larger allowance.
  private static final double FIRST_DELTA = 0.007; // 7ms
  private static final double SECOND_DELTA = 0.006; // 6ms

  @BeforeClass
  public static void warmup() {
    // Load and initialize the Throttle classes up front so class-loading cost
    // does not skew the timing-sensitive tests in this suite.
    Throttle.create(100.0);
  }

  @Test
  public void testReserve() throws InterruptedException {
    // 5 permits per second; the first permit is free, each subsequent permit
    // must wait ~1/5 second.
    final NanoThrottle throttle = new NanoThrottle.GoldFish(5.0, 1.0);
    long sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.0, sleep / ONE_SECOND_NANOS, 0.0);
    sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.20, sleep / ONE_SECOND_NANOS, FIRST_DELTA);
    sleep = throttle.reserve(1);
    NANOSECONDS.sleep(sleep);
    assertEquals(0.20, sleep / ONE_SECOND_NANOS, SECOND_DELTA);
  }

  @Test
  public void testAcquire() throws InterruptedException {
    final Throttle throttle = Throttle.create(5.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.20, throttle.acquireUnchecked(), FIRST_DELTA);
    assertEquals(0.20, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireWeights() throws InterruptedException {
    final Throttle throttle = Throttle.create(20.0);
    // The wait observed by each acquire reflects the weight of the previous one.
    assertEquals(0.00, throttle.acquireUnchecked(1), FIRST_DELTA);
    assertEquals(0.05, throttle.acquire(1), SECOND_DELTA);
    assertEquals(0.05, throttle.acquire(2), SECOND_DELTA);
    assertEquals(0.10, throttle.acquire(4), SECOND_DELTA);
    assertEquals(0.20, throttle.acquire(8), SECOND_DELTA);
    assertEquals(0.40, throttle.acquire(1), SECOND_DELTA);
  }

  @Test
  public void testAcquireWithWait() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    // Sleeping one full period makes the next permit immediately available,
    // but no extra burst credit accumulates beyond that.
    Thread.sleep(20);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireWithDoubleWait() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    // Even after sleeping two full periods only a single permit is free.
    Thread.sleep(40);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
    assertEquals(0.020, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testManyPermits() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.02, throttle.acquire(), FIRST_DELTA);
    assertEquals(0.02, throttle.acquire(3), SECOND_DELTA);
    assertEquals(0.06, throttle.acquire(), SECOND_DELTA);
    assertEquals(0.02, throttle.acquire(), SECOND_DELTA);
  }

  @Test
  public void testAcquireAndUpdate() throws InterruptedException {
    final Throttle throttle = Throttle.create(10.0);
    assertEquals(0.0, throttle.acquire(1), 0.0);
    assertEquals(0.10, throttle.acquire(1), FIRST_DELTA);
    // Changing the rate mid-stream applies to permits not yet paid for.
    throttle.setRate(20.0);
    assertEquals(0.10, throttle.acquire(1), SECOND_DELTA);
    assertEquals(0.05, throttle.acquire(2), SECOND_DELTA);
    assertEquals(0.10, throttle.acquire(4), SECOND_DELTA);
    assertEquals(0.20, throttle.acquire(1), SECOND_DELTA);
  }

  @Test
  public void testTryAcquire_noWaitAllowed() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire());
    assertFalse(throttle.tryAcquireUnchecked(0, SECONDS));
    assertFalse(throttle.tryAcquire(0, SECONDS));
    Thread.sleep(10);
    assertFalse(throttle.tryAcquire(0, SECONDS));
  }

  @Test
  public void testTryAcquire_someWaitAllowed() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(0, SECONDS));
    assertTrue(throttle.tryAcquire(20, MILLISECONDS));
    assertFalse(throttle.tryAcquire(10, MILLISECONDS));
    Thread.sleep(10);
    assertTrue(throttle.tryAcquire(10, MILLISECONDS));
  }

  @Test
  public void testTryAcquire_overflow() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(0, MICROSECONDS));
    Thread.sleep(10);
    // A huge timeout must not overflow the internal nanosecond arithmetic.
    assertTrue(throttle.tryAcquire(Long.MAX_VALUE, MICROSECONDS));
  }

  @Test
  public void testTryAcquire_negative() throws InterruptedException {
    final Throttle throttle = Throttle.create(50.0);
    assertTrue(throttle.tryAcquire(5, 0, SECONDS));
    Thread.sleep(90);
    // Negative timeouts are treated as zero wait.
    assertFalse(throttle.tryAcquire(1, Long.MIN_VALUE, SECONDS));
    Thread.sleep(10);
    assertTrue(throttle.tryAcquire(1, -1, SECONDS));
  }

  @Test
  public void testImmediateTryAcquire() throws InterruptedException {
    final Throttle throttle = Throttle.create(1.0);
    assertTrue("Unable to acquire initial permit", throttle.tryAcquire());
    assertFalse("Capable of acquiring secondary permit", throttle.tryAcquire());
  }

  @Test
  public void testDoubleMinValueCanAcquireExactlyOnce() throws InterruptedException {
    // At the smallest representable rate only one permit should ever be granted
    // within the lifetime of this test.
    final Throttle throttle = Throttle.create(Double.MIN_VALUE);
    assertTrue("Unable to acquire initial permit", throttle.tryAcquire());
    assertFalse("Capable of acquiring an additional permit", throttle.tryAcquire());
    Thread.sleep(10);
    assertFalse("Capable of acquiring an additional permit after sleeping", throttle.tryAcquire());
  }

  @Test
  public void testSimpleRateUpdate() {
    final Throttle throttle = Throttle.create(5.0);
    assertEquals(5.0, throttle.getRate(), 0.0);
    throttle.setRate(10.0);
    assertEquals(10.0, throttle.getRate(), 0.0);
    // Non-positive rates are rejected.
    try {
      throttle.setRate(0.0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.setRate(-10.0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testAcquireParameterValidation() throws InterruptedException {
    final Throttle throttle = Throttle.create(999);
    // Permit counts must be strictly positive for every acquire/tryAcquire variant.
    try {
      throttle.acquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.acquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquireUnchecked(0, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      throttle.tryAcquire(-1, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testIllegalConstructorArgs() throws InterruptedException {
    // Rates must be positive finite numbers, both at construction and via setRate.
    try {
      Throttle.create(Double.POSITIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(Double.NEGATIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(Double.NaN);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      Throttle.create(-.0000001);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      final Throttle throttle = Throttle.create(1.0);
      throttle.setRate(Double.POSITIVE_INFINITY);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      final Throttle throttle = Throttle.create(1.0);
      throttle.setRate(Double.NaN);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test
  public void testInterruptUnchecked() throws InterruptedException {
    final Throttle throttle = Throttle.create(1);
    // Schedule enough debt that the calls in the worker threads below must block,
    // yet little enough that the 20-second timeout variant still has to wait
    // (otherwise it would return false immediately and never observe the interrupt).
    throttle.acquireUnchecked(10);

    // Assertions must not run on the spawned thread: an AssertionError thrown there
    // is swallowed by the default uncaught-exception behavior and the test would
    // pass regardless. Hand the observed Throwable back to the main thread instead.
    final CompletableFuture<Throwable> futureEx = new CompletableFuture<>();
    Thread thread = new Thread(() -> {
      try {
        throttle.acquireUnchecked();
        futureEx.complete(null);
      } catch (CompletionException ex) {
        futureEx.complete(ex.getCause());
      }
    });
    thread.start();
    thread.interrupt();
    thread.join();
    assertFalse(throttle.tryAcquire());
    // The interrupt must surface as an InterruptedException cause.
    assertEquals(InterruptedException.class, futureEx.join().getClass());

    final CompletableFuture<Throwable> futureEx2 = new CompletableFuture<>();
    thread = new Thread(() -> {
      try {
        throttle.tryAcquireUnchecked(20, SECONDS);
        futureEx2.complete(null);
      } catch (CompletionException ex) {
        futureEx2.complete(ex.getCause());
      }
    });
    thread.start();
    thread.interrupt();
    thread.join();
    assertFalse(throttle.tryAcquire());
    assertEquals(InterruptedException.class, futureEx2.join().getClass());
  }

  @Test
  public void testMax() throws InterruptedException {
    final Throttle throttle = Throttle.create(Double.MAX_VALUE);
    assertEquals(0.0, throttle.acquire(Integer.MAX_VALUE / 4), 0.0);
    assertEquals(0.0, throttle.acquire(Integer.MAX_VALUE / 2), 0.0);
    assertEquals(0.0, throttle.acquireUnchecked(Integer.MAX_VALUE), 0.0);
    throttle.setRate(20.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.05, throttle.acquire(), SECOND_DELTA);
    throttle.setRate(Double.MAX_VALUE);
    assertEquals(0.05, throttle.acquire(), FIRST_DELTA);
    assertEquals(0.0, throttle.acquire(), 0.0);
    assertEquals(0.0, throttle.acquire(), 0.0);
  }

  @Test
  public void testWeNeverGetABurstMoreThanOneSec() throws InterruptedException {
    final Throttle throttle = Throttle.create(100.0);
    final int[] rates = {10000, 100, 1000000, 1000, 100};
    for (final int oneSecWorthOfWork : rates) {
      throttle.setRate(oneSecWorthOfWork);
      final int oneHundredMillisWorthOfWork = (int) (oneSecWorthOfWork / 10.0);
      // 100ms worth of permits must take ~100ms to acquire, regardless of rate changes.
      long durationMillis = measureTotalTimeMillis(throttle, oneHundredMillisWorthOfWork);
      assertEquals(100.0, durationMillis, 15.0);
      durationMillis = measureTotalTimeMillis(throttle, oneHundredMillisWorthOfWork);
      assertEquals(100.0, durationMillis, 15.0);
    }
  }

  // Acquires the given number of permits in randomly-sized chunks and returns
  // the elapsed wall-clock time in milliseconds.
  private static long measureTotalTimeMillis(final Throttle throttle, int permits)
      throws InterruptedException {
    final Random random = ThreadLocalRandom.current();
    final long startTime = System.nanoTime();
    while (permits > 0) {
      final int nextPermitsToAcquire = Math.max(1, random.nextInt(permits));
      permits -= nextPermitsToAcquire;
      throttle.acquire(nextPermitsToAcquire);
    }
    throttle.acquire(1); // to repay for any pending debt
    return NANOSECONDS.toMillis(System.nanoTime() - startTime);
  }

  @Test
  public void testToString() {
    final Throttle throttle = Throttle.create(100.0);
    assertEquals("Throttle{rate=100.0}", throttle.toString());
  }
}
| Fix testInterruptUnchecked
| src/test/java/engineering/clientside/throttle/ThrottleTest.java | Fix testInterruptUnchecked |
|
Java | apache-2.0 | ba21813fc749e53a9a914a0da02beaee47f371dd | 0 | lexs/webimageloader | package com.webimageloader.loader;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLStreamHandler;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import android.os.Process;
import android.text.TextUtils;
import android.util.Log;
import com.webimageloader.util.Android;
import com.webimageloader.util.PriorityThreadFactory;
public class NetworkLoader extends BackgroundLoader {
    private static final String TAG = "NetworkLoader";

    /** Fallback freshness lifetime applied when the server sends no expiration header. */
    private static final long DEFAULT_MAX_AGE = 3 * 24 * 60 * 60 * 1000; // Three days

    private Map<String, URLStreamHandler> streamHandlers;
    private int connectTimeout;
    private int readTimeout;

    /**
     * @param streamHandlers maps a protocol name (e.g. "http") to the handler used to
     *                       open connections for it; stored as an unmodifiable view
     * @param connectionTimeout connect timeout in milliseconds; values <= 0 keep the platform default
     * @param readTimeout read timeout in milliseconds; values <= 0 keep the platform default
     */
    public NetworkLoader(Map<String, URLStreamHandler> streamHandlers, int connectionTimeout, int readTimeout) {
        this.streamHandlers = Collections.unmodifiableMap(streamHandlers);
        this.connectTimeout = connectionTimeout;
        this.readTimeout = readTimeout;
    }

    @Override
    protected ExecutorService createExecutor() {
        return Executors.newFixedThreadPool(2, new PriorityThreadFactory("Network", Process.THREAD_PRIORITY_BACKGROUND));
    }

    /**
     * Fetches the request over HTTP. If cached metadata is available the request is made
     * conditional; on HTTP 304 only the refreshed metadata is delivered, otherwise the
     * response stream is handed to the listener.
     */
    @Override
    protected void loadInBackground(LoaderRequest request, Iterator<Loader> chain, Listener listener) throws Exception {
        String url = request.getUrl();

        disableConnectionReuseIfNecessary();

        String protocol = getProtocol(url);
        URLStreamHandler streamHandler = getURLStreamHandler(protocol);

        HttpURLConnection urlConnection = (HttpURLConnection) new URL(null, url, streamHandler).openConnection();
        if (connectTimeout > 0) {
            urlConnection.setConnectTimeout(connectTimeout);
        }
        if (readTimeout > 0) {
            urlConnection.setReadTimeout(readTimeout);
        }

        Metadata metadata = request.getMetadata();
        if (metadata != null) {
            // We have cached information available, make the request conditional.
            long modifiedSince = metadata.getLastModified();
            if (modifiedSince != 0) {
                urlConnection.setIfModifiedSince(modifiedSince);
            }

            String etag = metadata.getEtag();
            if (!TextUtils.isEmpty(etag)) {
                urlConnection.addRequestProperty("If-None-Match", etag);
            }
        }

        String contentType = urlConnection.getContentType();
        long lastModified = urlConnection.getLastModified();
        // TODO: Use cache-control: max-age instead
        long expires = urlConnection.getExpiration();
        String etag = urlConnection.getHeaderField("ETag");

        if (expires == 0) {
            // No expiration provided by the server, fall back to our default.
            expires = System.currentTimeMillis() + DEFAULT_MAX_AGE;
        }

        // Update metadata from the response headers. It is always non-null from here on,
        // so the response code alone decides how to proceed (a 304 can only occur when
        // the conditional headers above were sent).
        metadata = new Metadata(contentType, lastModified, expires, etag);

        if (urlConnection.getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) {
            Log.v(TAG, request + " was not modified since last fetch");

            listener.onNotModified(metadata);
        } else {
            InputStream is = urlConnection.getInputStream();
            Log.v(TAG, "Loaded " + request + " from network");
            try {
                listener.onStreamLoaded(is, metadata);
            } finally {
                is.close();
            }
        }
    }

    /**
     * Workaround for bug pre-Froyo, see here for more info:
     * http://android-developers.blogspot.com/2011/09/androids-http-clients.html
     */
    private static void disableConnectionReuseIfNecessary() {
        // HTTP connection reuse which was buggy pre-froyo
        if (!Android.isAPI(8)) {
            System.setProperty("http.keepAlive", "false");
        }
    }

    /** Returns the scheme part of the url (the text before the first ':'), or null if absent. */
    private static String getProtocol(String url) {
        int i = url.indexOf(':');
        return i == -1 ? null : url.substring(0, i);
    }

    /** Looks up the registered handler for the protocol; null means the default handler is used. */
    private URLStreamHandler getURLStreamHandler(String protocol) {
        return streamHandlers.get(protocol);
    }
}
| webimageloader/src/com/webimageloader/loader/NetworkLoader.java | package com.webimageloader.loader;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLStreamHandler;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import android.os.Process;
import android.text.TextUtils;
import android.util.Log;
import com.webimageloader.util.Android;
import com.webimageloader.util.PriorityThreadFactory;
public class NetworkLoader extends BackgroundLoader {
    private static final String TAG = "NetworkLoader";

    /**
     * Fallback freshness lifetime applied when the server sends no expiration header.
     * Without this, a missing Expires header left expires == 0 and every cached entry
     * was treated as immediately stale.
     */
    private static final long DEFAULT_MAX_AGE = 3 * 24 * 60 * 60 * 1000; // Three days

    private Map<String, URLStreamHandler> streamHandlers;
    private int connectTimeout;
    private int readTimeout;

    /**
     * @param streamHandlers maps a protocol name (e.g. "http") to the handler used to
     *                       open connections for it; stored as an unmodifiable view
     * @param connectionTimeout connect timeout in milliseconds; values <= 0 keep the platform default
     * @param readTimeout read timeout in milliseconds; values <= 0 keep the platform default
     */
    public NetworkLoader(Map<String, URLStreamHandler> streamHandlers, int connectionTimeout, int readTimeout) {
        this.streamHandlers = Collections.unmodifiableMap(streamHandlers);
        this.connectTimeout = connectionTimeout;
        this.readTimeout = readTimeout;
    }

    @Override
    protected ExecutorService createExecutor() {
        return Executors.newFixedThreadPool(2, new PriorityThreadFactory("Network", Process.THREAD_PRIORITY_BACKGROUND));
    }

    /**
     * Fetches the request over HTTP. If cached metadata is available the request is made
     * conditional; on HTTP 304 only the refreshed metadata is delivered, otherwise the
     * response stream is handed to the listener.
     */
    @Override
    protected void loadInBackground(LoaderRequest request, Iterator<Loader> chain, Listener listener) throws Exception {
        String url = request.getUrl();

        disableConnectionReuseIfNecessary();

        String protocol = getProtocol(url);
        URLStreamHandler streamHandler = getURLStreamHandler(protocol);

        HttpURLConnection urlConnection = (HttpURLConnection) new URL(null, url, streamHandler).openConnection();
        if (connectTimeout > 0) {
            urlConnection.setConnectTimeout(connectTimeout);
        }
        if (readTimeout > 0) {
            urlConnection.setReadTimeout(readTimeout);
        }

        Metadata metadata = request.getMetadata();
        if (metadata != null) {
            // We have cached information available, make the request conditional.
            long modifiedSince = metadata.getLastModified();
            if (modifiedSince != 0) {
                urlConnection.setIfModifiedSince(modifiedSince);
            }

            String etag = metadata.getEtag();
            if (!TextUtils.isEmpty(etag)) {
                urlConnection.addRequestProperty("If-None-Match", etag);
            }
        }

        String contentType = urlConnection.getContentType();
        long lastModified = urlConnection.getLastModified();
        // TODO: Use cache-control: max-age instead
        long expires = urlConnection.getExpiration();
        String etag = urlConnection.getHeaderField("ETag");

        if (expires == 0) {
            // No expiration provided by the server, fall back to our default.
            expires = System.currentTimeMillis() + DEFAULT_MAX_AGE;
        }

        // Update metadata from the response headers. It is always non-null from here on,
        // so the response code alone decides how to proceed (a 304 can only occur when
        // the conditional headers above were sent).
        metadata = new Metadata(contentType, lastModified, expires, etag);

        if (urlConnection.getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) {
            Log.v(TAG, request + " was not modified since last fetch");

            listener.onNotModified(metadata);
        } else {
            InputStream is = urlConnection.getInputStream();
            Log.v(TAG, "Loaded " + request + " from network");
            try {
                listener.onStreamLoaded(is, metadata);
            } finally {
                is.close();
            }
        }
    }

    /**
     * Workaround for bug pre-Froyo, see here for more info:
     * http://android-developers.blogspot.com/2011/09/androids-http-clients.html
     */
    private static void disableConnectionReuseIfNecessary() {
        // HTTP connection reuse which was buggy pre-froyo
        if (!Android.isAPI(8)) {
            System.setProperty("http.keepAlive", "false");
        }
    }

    /** Returns the scheme part of the url (the text before the first ':'), or null if absent. */
    private static String getProtocol(String url) {
        int i = url.indexOf(':');
        return i == -1 ? null : url.substring(0, i);
    }

    /** Looks up the registered handler for the protocol; null means the default handler is used. */
    private URLStreamHandler getURLStreamHandler(String protocol) {
        return streamHandlers.get(protocol);
    }
}
| Set default expiration if missing
| webimageloader/src/com/webimageloader/loader/NetworkLoader.java | Set default expiration if missing |
|
Java | apache-2.0 | 956320c4790863325c6090ccf93c83940e75cdc9 | 0 | Im-dex/intellij-scala,JetBrains/intellij-scala,triplequote/intellij-scala,igrocki/intellij-scala,whorbowicz/intellij-scala,jeantil/intellij-scala,katejim/intellij-scala,jastice/intellij-scala,triplequote/intellij-scala,ilinum/intellij-scala,ilinum/intellij-scala,loskutov/intellij-scala,JetBrains/intellij-scala,jeantil/intellij-scala,double-y/translation-idea-plugin,ilinum/intellij-scala,jastice/intellij-scala,Im-dex/intellij-scala,loskutov/intellij-scala,jastice/intellij-scala,advancedxy/intellij-scala,advancedxy/intellij-scala,whorbowicz/intellij-scala,whorbowicz/intellij-scala,katejim/intellij-scala,igrocki/intellij-scala,advancedxy/intellij-scala,igrocki/intellij-scala,ghik/intellij-scala,ghik/intellij-scala,loskutov/intellij-scala,jeantil/intellij-scala,triplequote/intellij-scala,ghik/intellij-scala,jastice/intellij-scala,katejim/intellij-scala,Im-dex/intellij-scala,double-y/translation-idea-plugin,double-y/translation-idea-plugin | package org.jetbrains.plugins.scala.debugger;
import com.intellij.debugger.NoDataException;
import com.intellij.debugger.PositionManager;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.CompoundPositionManager;
import com.intellij.debugger.engine.DebugProcess;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.jdi.VirtualMachineProxy;
import com.intellij.debugger.requests.ClassPrepareRequestor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.impl.DirectoryIndex;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Processor;
import com.intellij.util.Query;
import com.intellij.util.containers.HashSet;
import com.sun.jdi.AbsentInformationException;
import com.sun.jdi.ClassNotPreparedException;
import com.sun.jdi.Location;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.request.ClassPrepareRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.scala.ScalaLoader;
import org.jetbrains.plugins.scala.caches.ScalaCachesManager;
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement;
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile;
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScCaseClauses;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScForStatement;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScBlockExpr;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScExtendsBlock;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTrait;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Scala-aware {@link PositionManager}: maps between JVM debugger
 * {@link Location}s and Scala source positions, accounting for scalac name
 * mangling (objects compile to {@code Name$}, trait method bodies to
 * {@code Name$class}, closures to synthetic {@code Outer$...} classes).
 *
 * @author ilyas
 */
public class ScalaPositionManager implements PositionManager {
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.PositionManagerImpl");

  // Debug process this manager serves; assigned once in the constructor.
  private final DebugProcess myDebugProcess;

  public ScalaPositionManager(DebugProcess debugProcess) {
    myDebugProcess = debugProcess;
  }

  public DebugProcess getDebugProcess() {
    return myDebugProcess;
  }

  /**
   * Returns the bytecode locations inside {@code type} for the given source
   * line, or throws {@link NoDataException} when the type has no code on
   * that line (the debugger then asks the next PositionManager).
   */
  @NotNull
  public List<Location> locationsOfLine(ReferenceType type,
                                        SourcePosition position) throws NoDataException {
    try {
      // SourcePosition lines are 0-based; JDI lines are 1-based.
      int line = position.getLine() + 1;
      List<Location> locations = getDebugProcess().getVirtualMachineProxy().versionHigher("1.4") ?
          type.locationsOfLine(DebugProcessImpl.JAVA_STRATUM, null, line) : type.locationsOfLine(line);
      if (locations == null || locations.isEmpty()) throw new NoDataException();
      return locations;
    }
    catch (AbsentInformationException e) {
      // No debug info compiled in — report "no data" rather than failing.
      throw new NoDataException();
    }
  }

  /**
   * Finds the innermost PSI element at {@code position} that the compiler
   * emits as a separate JVM class: a type definition, a for-statement or
   * function expression (closure), an anonymous-class extends block, or a
   * case-clause block. Returns null for non-Scala files; may return null
   * when no such parent exists.
   */
  private ScalaPsiElement findReferenceTypeSourceImage(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof ScalaFile)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    if (element == null) return null;
    while (true) {
      if (element == null) break;
      if (element instanceof ScForStatement || element instanceof ScTypeDefinition || element instanceof ScFunctionExpr)
        break;
      if (element instanceof ScExtendsBlock && ((ScExtendsBlock) element).isAnonymousClass()) break;
      if (element instanceof ScCaseClauses && element.getParent() instanceof ScBlockExpr) break;
      element = element.getParent();
    }
    return (ScalaPsiElement) element;
  }

  // Nearest enclosing class/object/trait definition, or null outside Scala files.
  private ScTypeDefinition findEnclosingTypeDefinition(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof ScalaFile)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    if (element == null) return null;
    return PsiTreeUtil.getParentOfType(element, ScTypeDefinition.class);
  }

  /**
   * Applies scalac name mangling: objects compile to "Name$", trait
   * implementation classes to "Name$class"; plain classes keep their name.
   */
  private static String getSpecificName(String name, Class<? extends PsiClass> clazzClass) {
    if (ScObject.class.isAssignableFrom(clazzClass)) return name + "$";
    if (ScTrait.class.isAssignableFrom(clazzClass)) return name + "$class";
    return name;
  }

  /**
   * Registers a class-prepare request for the JVM class(es) that may contain
   * code for {@code position}. Closures are matched with an "Outer$*"
   * wildcard pattern, since their synthetic names are not predictable.
   */
  public ClassPrepareRequest createPrepareRequest(final ClassPrepareRequestor requestor, final SourcePosition position) throws NoDataException {
    // PSI access must happen inside a read action.
    ScalaPsiElement sourceImage = ApplicationManager.getApplication().runReadAction(new Computable<ScalaPsiElement>() {
      public ScalaPsiElement compute() {
        return findReferenceTypeSourceImage(position);
      }
    });
    String qName = null;
    if (sourceImage instanceof ScTypeDefinition) {
      qName = getSpecificName(((ScTypeDefinition) sourceImage).getQualifiedNameForDebugger(), ((ScTypeDefinition) sourceImage).getClass());
    } else if (sourceImage instanceof ScFunctionExpr ||
        sourceImage instanceof ScForStatement ||
        sourceImage instanceof ScExtendsBlock ||
        sourceImage instanceof ScCaseClauses && sourceImage.getParent() instanceof ScBlockExpr) {
      ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
      if (typeDefinition != null) {
        final String fqn = typeDefinition.getQualifiedNameForDebugger();
        // Wildcard: match every synthetic closure class of the enclosing type.
        qName = fqn + "$*";
      }
    }
    // Enclosing closure not found
    if (qName == null) {
      ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
      if (typeDefinition != null) {
        qName = getSpecificName(typeDefinition.getQualifiedNameForDebugger(), typeDefinition.getClass());
      }
      if (qName == null) throw new NoDataException();
    }
    ClassPrepareRequestor waitRequestor = new MyClassPrepareRequestor(position, requestor);
    return myDebugProcess.getRequestsManager().createClassPrepareRequest(waitRequestor, qName);
  }

  /**
   * Converts a JVM {@link Location} back into a source position, or throws
   * {@link NoDataException} when the source file or line cannot be resolved.
   */
  public SourcePosition getSourcePosition(final Location location) throws NoDataException {
    if (location == null) throw new NoDataException();
    PsiFile psiFile = getPsiFileByLocation(getDebugProcess().getProject(), location);
    if (psiFile == null) throw new NoDataException();
    int lineNumber = calcLineIndex(location);
    if (lineNumber < 0) throw new NoDataException();
    return SourcePosition.createFromLine(psiFile, lineNumber);
  }

  // 0-based source line for the location, or -1 when line info is unavailable.
  private int calcLineIndex(Location location) {
    LOG.assertTrue(myDebugProcess != null);
    if (location == null) return -1;
    try {
      return location.lineNumber() - 1;
    }
    catch (InternalError e) {
      // JDI may throw InternalError for absent line info.
      return -1;
    }
  }

  /**
   * Resolves the Scala source file declaring the location's class: first via
   * the FQN short-name cache (through the navigation element, so library
   * classes resolve to attached sources rather than decompiled stubs), then
   * by scanning the package directories for a matching *.scala file.
   */
  @Nullable
  private PsiFile getPsiFileByLocation(final Project project, final Location location) {
    if (location == null) return null;
    final ReferenceType refType = location.declaringType();
    if (refType == null) return null;
    final String originalQName = refType.name().replace('/', '.');
    // Strip everything from the first '$' to get the top-level class name.
    int dollar = originalQName.indexOf('$');
    final String qName = dollar >= 0 ? originalQName.substring(0, dollar) : originalQName;
    final GlobalSearchScope searchScope = myDebugProcess.getSearchScope();
    final PsiClass[] classes = ScalaCachesManager.getInstance(project).getNamesCache().getClassesByFQName(qName, searchScope);
    // Use the cache hit only when it is unambiguous.
    PsiClass clazz = classes.length == 1 ? classes[0] : null;
    if (clazz != null && clazz.isValid()) {
      return clazz.getNavigationElement().getContainingFile();
    }
    // Fallback: look for <SimpleName>.<scala-ext> in the package's directories.
    DirectoryIndex directoryIndex = DirectoryIndex.getInstance(project);
    int dotIndex = qName.lastIndexOf(".");
    String packageName = dotIndex > 0 ? qName.substring(0, dotIndex) : "";
    Query<VirtualFile> query = directoryIndex.getDirectoriesByPackageName(packageName, true);
    String fileNameWithoutExtension = dotIndex > 0 ? qName.substring(dotIndex + 1) : qName;
    final Set<String> fileNames = new HashSet<String>();
    for (final String extention : ScalaLoader.SCALA_EXTENSIONS) {
      fileNames.add(fileNameWithoutExtension + "." + extention);
    }
    final Ref<PsiFile> result = new Ref<PsiFile>();
    query.forEach(new Processor<VirtualFile>() {
      public boolean process(VirtualFile vDir) {
        for (final String fileName : fileNames) {
          VirtualFile vFile = vDir.findChild(fileName);
          if (vFile != null) {
            PsiFile psiFile = PsiManager.getInstance(project).findFile(vFile);
            if (psiFile instanceof ScalaFile) {
              result.set(psiFile);
              // Returning false stops the directory query on the first match.
              return false;
            }
          }
        }
        return true;
      }
    });
    return result.get();
  }

  /**
   * Returns every already-loaded JVM class that may contain code for
   * {@code position}: exact lookup for type definitions, otherwise a scan of
   * all VM classes whose name starts with the enclosing type's name and that
   * have code on the requested line. Throws when nothing matches.
   */
  @NotNull
  public List<ReferenceType> getAllClasses(final SourcePosition position) throws NoDataException {
    List<ReferenceType> result = ApplicationManager.getApplication().runReadAction(new Computable<List<ReferenceType>>() {
      public List<ReferenceType> compute() {
        ScalaPsiElement sourceImage = findReferenceTypeSourceImage(position);
        if (sourceImage instanceof ScTypeDefinition) {
          ScTypeDefinition definition = (ScTypeDefinition) sourceImage;
          String qName = getSpecificName(definition.getQualifiedNameForDebugger(), definition.getClass());
          if (qName != null) return myDebugProcess.getVirtualMachineProxy().classesByName(qName);
        } else {
          final ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
          String enclosingName = null;
          if (typeDefinition != null) {
            enclosingName = typeDefinition.getQualifiedNameForDebugger();
          }
          if (enclosingName != null) {
            // Closure classes: filter all loaded classes by name prefix + line.
            final List<ReferenceType> outers = myDebugProcess.getVirtualMachineProxy().allClasses();
            final List<ReferenceType> result = new ArrayList<ReferenceType>(outers.size());
            for (ReferenceType outer : outers) {
              if (outer.name().startsWith(enclosingName)) {
                try {
                  if (outer.locationsOfLine(position.getLine() + 1).size() > 0) {
                    result.add(outer);
                  }
                } catch (AbsentInformationException ignore) {
                } catch (ClassNotPreparedException ignore) {
                }
              }
            }
            return result;
          }
        }
        return Collections.emptyList();
      }
    });
    if (result == null || result.isEmpty()) throw new NoDataException();
    return result;
  }

  //todo: this is possibly redundant method. (Copy paste from Java/Groovy)
  /**
   * Depth-first search through {@code fromClass} and its nested types for
   * the class whose first code location maps back to {@code toFind}.
   */
  @Deprecated
  @Nullable
  private ReferenceType findNested(ReferenceType fromClass, final ScalaPsiElement toFind, SourcePosition classPosition) {
    final VirtualMachineProxy vmProxy = myDebugProcess.getVirtualMachineProxy();
    if (fromClass.isPrepared()) {
      final List<ReferenceType> nestedTypes = vmProxy.nestedTypes(fromClass);
      for (ReferenceType nested : nestedTypes) {
        final ReferenceType found = findNested(nested, toFind, classPosition);
        if (found != null) {
          return found;
        }
      }
      try {
        final int lineNumber = classPosition.getLine() + 1;
        if (fromClass.locationsOfLine(lineNumber).size() > 0) {
          return fromClass;
        }
        //noinspection LoopStatementThatDoesntLoop
        for (Location location : fromClass.allLineLocations()) {
          final SourcePosition candidateFirstPosition = SourcePosition.createFromLine(toFind.getContainingFile(), location.lineNumber() - 1);
          if (toFind.equals(findReferenceTypeSourceImage(candidateFirstPosition))) {
            return fromClass;
          }
          break; // check only the first location
        }
      }
      catch (AbsentInformationException ignored) {
      }
    }
    return null;
  }

  /**
   * Delegates a class-prepare event to the wrapped requestor, but only when
   * the prepared class actually has code at the requested source position.
   */
  private static class MyClassPrepareRequestor implements ClassPrepareRequestor {
    private final SourcePosition position;
    private final ClassPrepareRequestor requestor;

    public MyClassPrepareRequestor(SourcePosition position, ClassPrepareRequestor requestor) {
      this.position = position;
      this.requestor = requestor;
    }

    public void processClassPrepare(DebugProcess debuggerProcess, ReferenceType referenceType) {
      final CompoundPositionManager positionManager = ((DebugProcessImpl) debuggerProcess).getPositionManager();
      if (positionManager.locationsOfLine(referenceType, position).size() > 0) {
        requestor.processClassPrepare(debuggerProcess, referenceType);
      } else {
        // No direct line match — accept if the position resolves to this class.
        final List<ReferenceType> positionClasses = positionManager.getAllClasses(position);
        if (positionClasses.contains(referenceType)) {
          requestor.processClassPrepare(debuggerProcess, referenceType);
        }
      }
    }
  }
}
| src/org/jetbrains/plugins/scala/debugger/ScalaPositionManager.java | package org.jetbrains.plugins.scala.debugger;
import com.intellij.debugger.NoDataException;
import com.intellij.debugger.PositionManager;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.CompoundPositionManager;
import com.intellij.debugger.engine.DebugProcess;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.jdi.VirtualMachineProxy;
import com.intellij.debugger.requests.ClassPrepareRequestor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.impl.DirectoryIndex;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Processor;
import com.intellij.util.Query;
import com.intellij.util.containers.HashSet;
import com.sun.jdi.AbsentInformationException;
import com.sun.jdi.ClassNotPreparedException;
import com.sun.jdi.Location;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.request.ClassPrepareRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.scala.ScalaLoader;
import org.jetbrains.plugins.scala.caches.ScalaCachesManager;
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement;
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile;
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScCaseClauses;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScForStatement;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr;
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScBlockExpr;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScExtendsBlock;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTrait;
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Scala-aware {@link PositionManager}: maps between JVM debugger
 * {@link Location}s and Scala source positions, accounting for scalac name
 * mangling (objects compile to {@code Name$}, trait method bodies to
 * {@code Name$class}, closures to synthetic {@code Outer$...} classes).
 *
 * @author ilyas
 */
public class ScalaPositionManager implements PositionManager {
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.PositionManagerImpl");

  // Debug process this manager serves; assigned once in the constructor.
  private final DebugProcess myDebugProcess;

  public ScalaPositionManager(DebugProcess debugProcess) {
    myDebugProcess = debugProcess;
  }

  public DebugProcess getDebugProcess() {
    return myDebugProcess;
  }

  /**
   * Returns the bytecode locations inside {@code type} for the given source
   * line, or throws {@link NoDataException} when the type has no code on
   * that line (the debugger then asks the next PositionManager).
   */
  @NotNull
  public List<Location> locationsOfLine(ReferenceType type,
                                        SourcePosition position) throws NoDataException {
    try {
      // SourcePosition lines are 0-based; JDI lines are 1-based.
      int line = position.getLine() + 1;
      List<Location> locations = getDebugProcess().getVirtualMachineProxy().versionHigher("1.4") ?
          type.locationsOfLine(DebugProcessImpl.JAVA_STRATUM, null, line) : type.locationsOfLine(line);
      if (locations == null || locations.isEmpty()) throw new NoDataException();
      return locations;
    }
    catch (AbsentInformationException e) {
      // No debug info compiled in — report "no data" rather than failing.
      throw new NoDataException();
    }
  }

  /**
   * Finds the innermost PSI element at {@code position} that the compiler
   * emits as a separate JVM class: a type definition, a for-statement or
   * function expression (closure), an anonymous-class extends block, or a
   * case-clause block. Returns null for non-Scala files; may return null
   * when no such parent exists.
   */
  private ScalaPsiElement findReferenceTypeSourceImage(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof ScalaFile)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    if (element == null) return null;
    while (true) {
      if (element == null) break;
      if (element instanceof ScForStatement || element instanceof ScTypeDefinition || element instanceof ScFunctionExpr)
        break;
      if (element instanceof ScExtendsBlock && ((ScExtendsBlock) element).isAnonymousClass()) break;
      if (element instanceof ScCaseClauses && element.getParent() instanceof ScBlockExpr) break;
      element = element.getParent();
    }
    return (ScalaPsiElement) element;
  }

  // Nearest enclosing class/object/trait definition, or null outside Scala files.
  private ScTypeDefinition findEnclosingTypeDefinition(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof ScalaFile)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    if (element == null) return null;
    return PsiTreeUtil.getParentOfType(element, ScTypeDefinition.class);
  }

  /**
   * Applies scalac name mangling: objects compile to "Name$", trait
   * implementation classes to "Name$class"; plain classes keep their name.
   */
  private static String getSpecificName(String name, Class<? extends PsiClass> clazzClass) {
    if (ScObject.class.isAssignableFrom(clazzClass)) return name + "$";
    if (ScTrait.class.isAssignableFrom(clazzClass)) return name + "$class";
    return name;
  }

  /**
   * Registers a class-prepare request for the JVM class(es) that may contain
   * code for {@code position}. Closures are matched with an "Outer$*"
   * wildcard pattern, since their synthetic names are not predictable.
   */
  public ClassPrepareRequest createPrepareRequest(final ClassPrepareRequestor requestor, final SourcePosition position) throws NoDataException {
    // PSI access must happen inside a read action.
    ScalaPsiElement sourceImage = ApplicationManager.getApplication().runReadAction(new Computable<ScalaPsiElement>() {
      public ScalaPsiElement compute() {
        return findReferenceTypeSourceImage(position);
      }
    });
    String qName = null;
    if (sourceImage instanceof ScTypeDefinition) {
      qName = getSpecificName(((ScTypeDefinition) sourceImage).getQualifiedNameForDebugger(), ((ScTypeDefinition) sourceImage).getClass());
    } else if (sourceImage instanceof ScFunctionExpr ||
        sourceImage instanceof ScForStatement ||
        sourceImage instanceof ScExtendsBlock ||
        sourceImage instanceof ScCaseClauses && sourceImage.getParent() instanceof ScBlockExpr) {
      ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
      if (typeDefinition != null) {
        final String fqn = typeDefinition.getQualifiedNameForDebugger();
        // Wildcard: match every synthetic closure class of the enclosing type.
        qName = fqn + "$*";
      }
    }
    // Enclosing closure not found
    if (qName == null) {
      ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
      if (typeDefinition != null) {
        qName = getSpecificName(typeDefinition.getQualifiedNameForDebugger(), typeDefinition.getClass());
      }
      if (qName == null) throw new NoDataException();
    }
    ClassPrepareRequestor waitRequestor = new MyClassPrepareRequestor(position, requestor);
    return myDebugProcess.getRequestsManager().createClassPrepareRequest(waitRequestor, qName);
  }

  /**
   * Converts a JVM {@link Location} back into a source position, or throws
   * {@link NoDataException} when the source file or line cannot be resolved.
   */
  public SourcePosition getSourcePosition(final Location location) throws NoDataException {
    if (location == null) throw new NoDataException();
    PsiFile psiFile = getPsiFileByLocation(getDebugProcess().getProject(), location);
    if (psiFile == null) throw new NoDataException();
    int lineNumber = calcLineIndex(location);
    if (lineNumber < 0) throw new NoDataException();
    return SourcePosition.createFromLine(psiFile, lineNumber);
  }

  // 0-based source line for the location, or -1 when line info is unavailable.
  private int calcLineIndex(Location location) {
    LOG.assertTrue(myDebugProcess != null);
    if (location == null) return -1;
    try {
      return location.lineNumber() - 1;
    }
    catch (InternalError e) {
      // JDI may throw InternalError for absent line info.
      return -1;
    }
  }

  /**
   * Resolves the Scala source file declaring the location's class: first via
   * the FQN short-name cache, then by scanning the package directories for a
   * matching *.scala file.
   */
  @Nullable
  private PsiFile getPsiFileByLocation(final Project project, final Location location) {
    if (location == null) return null;
    final ReferenceType refType = location.declaringType();
    if (refType == null) return null;
    final String originalQName = refType.name().replace('/', '.');
    // Strip everything from the first '$' to get the top-level class name.
    int dollar = originalQName.indexOf('$');
    final String qName = dollar >= 0 ? originalQName.substring(0, dollar) : originalQName;
    final GlobalSearchScope searchScope = myDebugProcess.getSearchScope();
    final PsiClass[] classes = ScalaCachesManager.getInstance(project).getNamesCache().getClassesByFQName(qName, searchScope);
    // Use the cache hit only when it is unambiguous.
    PsiClass clazz = classes.length == 1 ? classes[0] : null;
    // NOTE(review): returns the class's own containing file; for library
    // classes this looks like the decompiled stub rather than any attached
    // sources (clazz.getNavigationElement() would resolve those) — confirm.
    if (clazz != null && clazz.isValid()) return clazz.getContainingFile();
    // Fallback: look for <SimpleName>.<scala-ext> in the package's directories.
    DirectoryIndex directoryIndex = DirectoryIndex.getInstance(project);
    int dotIndex = qName.lastIndexOf(".");
    String packageName = dotIndex > 0 ? qName.substring(0, dotIndex) : "";
    Query<VirtualFile> query = directoryIndex.getDirectoriesByPackageName(packageName, true);
    String fileNameWithoutExtension = dotIndex > 0 ? qName.substring(dotIndex + 1) : qName;
    final Set<String> fileNames = new HashSet<String>();
    for (final String extention : ScalaLoader.SCALA_EXTENSIONS) {
      fileNames.add(fileNameWithoutExtension + "." + extention);
    }
    final Ref<PsiFile> result = new Ref<PsiFile>();
    query.forEach(new Processor<VirtualFile>() {
      public boolean process(VirtualFile vDir) {
        for (final String fileName : fileNames) {
          VirtualFile vFile = vDir.findChild(fileName);
          if (vFile != null) {
            PsiFile psiFile = PsiManager.getInstance(project).findFile(vFile);
            if (psiFile instanceof ScalaFile) {
              result.set(psiFile);
              // Returning false stops the directory query on the first match.
              return false;
            }
          }
        }
        return true;
      }
    });
    return result.get();
  }

  /**
   * Returns every already-loaded JVM class that may contain code for
   * {@code position}: exact lookup for type definitions, otherwise a scan of
   * all VM classes whose name starts with the enclosing type's name and that
   * have code on the requested line. Throws when nothing matches.
   */
  @NotNull
  public List<ReferenceType> getAllClasses(final SourcePosition position) throws NoDataException {
    List<ReferenceType> result = ApplicationManager.getApplication().runReadAction(new Computable<List<ReferenceType>>() {
      public List<ReferenceType> compute() {
        ScalaPsiElement sourceImage = findReferenceTypeSourceImage(position);
        if (sourceImage instanceof ScTypeDefinition) {
          ScTypeDefinition definition = (ScTypeDefinition) sourceImage;
          String qName = getSpecificName(definition.getQualifiedNameForDebugger(), definition.getClass());
          if (qName != null) return myDebugProcess.getVirtualMachineProxy().classesByName(qName);
        } else {
          final ScTypeDefinition typeDefinition = findEnclosingTypeDefinition(position);
          String enclosingName = null;
          if (typeDefinition != null) {
            enclosingName = typeDefinition.getQualifiedNameForDebugger();
          }
          if (enclosingName != null) {
            // Closure classes: filter all loaded classes by name prefix + line.
            final List<ReferenceType> outers = myDebugProcess.getVirtualMachineProxy().allClasses();
            final List<ReferenceType> result = new ArrayList<ReferenceType>(outers.size());
            for (ReferenceType outer : outers) {
              if (outer.name().startsWith(enclosingName)) {
                try {
                  if (outer.locationsOfLine(position.getLine() + 1).size() > 0) {
                    result.add(outer);
                  }
                } catch (AbsentInformationException ignore) {
                } catch (ClassNotPreparedException ignore) {
                }
              }
            }
            return result;
          }
        }
        return Collections.emptyList();
      }
    });
    if (result == null || result.isEmpty()) throw new NoDataException();
    return result;
  }

  //todo: this is possibly redundant method. (Copy paste from Java/Groovy)
  /**
   * Depth-first search through {@code fromClass} and its nested types for
   * the class whose first code location maps back to {@code toFind}.
   */
  @Deprecated
  @Nullable
  private ReferenceType findNested(ReferenceType fromClass, final ScalaPsiElement toFind, SourcePosition classPosition) {
    final VirtualMachineProxy vmProxy = myDebugProcess.getVirtualMachineProxy();
    if (fromClass.isPrepared()) {
      final List<ReferenceType> nestedTypes = vmProxy.nestedTypes(fromClass);
      for (ReferenceType nested : nestedTypes) {
        final ReferenceType found = findNested(nested, toFind, classPosition);
        if (found != null) {
          return found;
        }
      }
      try {
        final int lineNumber = classPosition.getLine() + 1;
        if (fromClass.locationsOfLine(lineNumber).size() > 0) {
          return fromClass;
        }
        //noinspection LoopStatementThatDoesntLoop
        for (Location location : fromClass.allLineLocations()) {
          final SourcePosition candidateFirstPosition = SourcePosition.createFromLine(toFind.getContainingFile(), location.lineNumber() - 1);
          if (toFind.equals(findReferenceTypeSourceImage(candidateFirstPosition))) {
            return fromClass;
          }
          break; // check only the first location
        }
      }
      catch (AbsentInformationException ignored) {
      }
    }
    return null;
  }

  /**
   * Delegates a class-prepare event to the wrapped requestor, but only when
   * the prepared class actually has code at the requested source position.
   */
  private static class MyClassPrepareRequestor implements ClassPrepareRequestor {
    private final SourcePosition position;
    private final ClassPrepareRequestor requestor;

    public MyClassPrepareRequestor(SourcePosition position, ClassPrepareRequestor requestor) {
      this.position = position;
      this.requestor = requestor;
    }

    public void processClassPrepare(DebugProcess debuggerProcess, ReferenceType referenceType) {
      final CompoundPositionManager positionManager = ((DebugProcessImpl) debuggerProcess).getPositionManager();
      if (positionManager.locationsOfLine(referenceType, position).size() > 0) {
        requestor.processClassPrepare(debuggerProcess, referenceType);
      } else {
        // No direct line match — accept if the position resolves to this class.
        final List<ReferenceType> positionClasses = positionManager.getAllClasses(position);
        if (positionClasses.contains(referenceType)) {
          requestor.processClassPrepare(debuggerProcess, referenceType);
        }
      }
    }
  }
}
| Breakpoints in third party jars now appear in the linked source, not in the decompiled class.
| src/org/jetbrains/plugins/scala/debugger/ScalaPositionManager.java | Breakpoints in third party jars now appear in the linked source, not in the decompiled class. |
|
Java | bsd-3-clause | 2f09ac5c3ed704990660c81f6b694f396058bb48 | 0 | krzyk/rultor,joansmith/rultor,linlihai/rultor,maurezen/rultor,linlihai/rultor,dalifreire/rultor,joansmith/rultor,krzyk/rultor,krzyk/rultor,joansmith/rultor,joansmith/rultor,dalifreire/rultor,dalifreire/rultor,pecko/rultor,pecko/rultor,maurezen/rultor,maurezen/rultor,dalifreire/rultor,maurezen/rultor,pecko/rultor,dalifreire/rultor,pecko/rultor,linlihai/rultor,joansmith/rultor,linlihai/rultor,linlihai/rultor,pecko/rultor,krzyk/rultor,krzyk/rultor,maurezen/rultor | /**
* Copyright (c) 2009-2013, rultor.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the rultor.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rultor.guard.github;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import java.io.IOException;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import org.eclipse.egit.github.core.PullRequest;
/**
 * Approval for a pull request. Implementations decide whether a pull request
 * may be processed; the nested {@link Or}, {@link And} and {@link Not}
 * combinators compose approvals into arbitrary boolean expressions, while
 * {@link Always} and {@link Never} serve as constant terminals.
 *
 * @author Yegor Bugayenko ([email protected])
 * @version $Id$
 * @since 1.0
 */
@Immutable
public interface Approval {

    /**
     * This pull request has an approval?
     * @param request Pull request
     * @param client Client
     * @param repo Repository
     * @return TRUE if approved
     * @throws IOException If fails
     */
    boolean has(PullRequest request, Github client, Github.Repo repo)
        throws IOException;

    /**
     * Always yes. Constant approval that accepts every pull request.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.INFO)
    final class Always implements Approval {
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return true;
        }
    }

    /**
     * Always no. Constant approval that rejects every pull request.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.INFO)
    final class Never implements Approval {
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return false;
        }
    }

    /**
     * Logical OR. Approves when either operand approves; the second operand
     * is only consulted when the first one declines (short-circuit).
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.INFO)
    final class Or implements Approval {
        /**
         * First approval to ask.
         */
        private final transient Approval first;
        /**
         * Second approval to ask.
         */
        private final transient Approval second;
        /**
         * Public ctor.
         * @param left Left
         * @param right Right
         */
        public Or(final Approval left, final Approval right) {
            this.first = left;
            this.second = right;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return this.first.has(request, client, repo)
                || this.second.has(request, client, repo);
        }
    }

    /**
     * Logical AND. Approves only when both operands approve; the second
     * operand is only consulted when the first one approves (short-circuit).
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.INFO)
    final class And implements Approval {
        /**
         * First approval to ask.
         */
        private final transient Approval first;
        /**
         * Second approval to ask.
         */
        private final transient Approval second;
        /**
         * Public ctor.
         * @param left Left
         * @param right Right
         */
        public And(final Approval left, final Approval right) {
            this.first = left;
            this.second = right;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return this.first.has(request, client, repo)
                && this.second.has(request, client, repo);
        }
    }

    /**
     * Logical NOT. Inverts the decision of the wrapped approval.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.INFO)
    final class Not implements Approval {
        /**
         * The approval to reverse.
         */
        private final transient Approval approval;
        /**
         * Public ctor.
         * @param app Approval to negate
         */
        public Not(final Approval app) {
            this.approval = app;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return !this.approval.has(request, client, repo);
        }
    }
}
| rultor-base/src/main/java/com/rultor/guard/github/Approval.java | /**
* Copyright (c) 2009-2013, rultor.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the rultor.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rultor.guard.github;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import java.io.IOException;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import org.eclipse.egit.github.core.PullRequest;
/**
 * Approval for a pull request. Implementations decide whether a pull request
 * may be processed; the nested {@link Or}, {@link And} and {@link Not}
 * combinators compose approvals into arbitrary boolean expressions, while
 * {@link Always} and {@link Never} serve as constant terminals.
 *
 * @author Yegor Bugayenko ([email protected])
 * @version $Id$
 * @since 1.0
 */
@Immutable
public interface Approval {

    /**
     * This pull request has an approval?
     * @param request Pull request
     * @param client Client
     * @param repo Repository
     * @return TRUE if approved
     * @throws IOException If fails
     */
    boolean has(PullRequest request, Github client, Github.Repo repo)
        throws IOException;

    /**
     * Always yes. Constant approval that accepts every pull request.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.DEBUG)
    final class Always implements Approval {
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return true;
        }
    }

    /**
     * Always no. Constant approval that rejects every pull request.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.DEBUG)
    final class Never implements Approval {
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return false;
        }
    }

    /**
     * Logical OR. Approves when either operand approves; the second operand
     * is only consulted when the first one declines (short-circuit).
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.DEBUG)
    final class Or implements Approval {
        /**
         * First approval to ask.
         */
        private final transient Approval first;
        /**
         * Second approval to ask.
         */
        private final transient Approval second;
        /**
         * Public ctor.
         * @param left Left
         * @param right Right
         */
        public Or(final Approval left, final Approval right) {
            this.first = left;
            this.second = right;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return this.first.has(request, client, repo)
                || this.second.has(request, client, repo);
        }
    }

    /**
     * Logical AND. Approves only when both operands approve; the second
     * operand is only consulted when the first one approves (short-circuit).
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.DEBUG)
    final class And implements Approval {
        /**
         * First approval to ask.
         */
        private final transient Approval first;
        /**
         * Second approval to ask.
         */
        private final transient Approval second;
        /**
         * Public ctor.
         * @param left Left
         * @param right Right
         */
        public And(final Approval left, final Approval right) {
            this.first = left;
            this.second = right;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return this.first.has(request, client, repo)
                && this.second.has(request, client, repo);
        }
    }

    /**
     * Logical NOT. Inverts the decision of the wrapped approval.
     */
    @Immutable
    @ToString
    @EqualsAndHashCode
    @Loggable(Loggable.DEBUG)
    final class Not implements Approval {
        /**
         * The approval to reverse.
         */
        private final transient Approval approval;
        /**
         * Public ctor.
         * @param app Approval to negate
         */
        public Not(final Approval app) {
            this.approval = app;
        }
        @Override
        public boolean has(final PullRequest request, final Github client,
            final Github.Repo repo) throws IOException {
            return !this.approval.has(request, client, repo);
        }
    }
}
| INFO level for Approval
| rultor-base/src/main/java/com/rultor/guard/github/Approval.java | INFO level for Approval |
|
Java | mit | 29936be6e34a30ea17169b1cb5b12166eda3f082 | 0 | JavaNetworking/JavaNetworking | package com.javanetworking;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import java.util.Map;
/**
Wrapper class for a {@link URLConnection} request. Holds the string URL, URLConnection
request and HTTP body content for current request.
*/
public class URLRequest {
    // Resource URL in string form; the connection is opened lazily from it.
    private String urlString;
    // Lazily opened connection; null until getURLConnection() is first called.
    private URLConnection urlConnection;
    // Raw HTTP body bytes attached to this request (may be null).
    private byte[] HTTPBody;
    // Last exception raised while opening the connection or setting the
    // request method; null when no failure has occurred yet.
    private Exception error;
    /**
     Static factory mirroring the constructor.
     @param url A {@link String} representation of the resource URL.
     @return A new {@link URLRequest} for the given URL.
     */
    public static URLRequest requestWithURLString(String url) {
        return new URLRequest(url);
    }
    /**
     Contructor for a new request.
     @param urlString A {@link String} representation of the resource URL.
     */
    public URLRequest(String urlString) {
        this.urlString = urlString;
        this.urlConnection = null;
        this.HTTPBody = null;
    }
    public byte[] getHTTPBody() {
        return HTTPBody;
    }
    public void setHTTPBody(byte[] HTTPBody) {
        this.HTTPBody = HTTPBody;
    }
    /**
     @return The last exception captured by this request, or null.
     */
    public Exception getException() {
        return this.error;
    }
    /**
     Opens (once) and returns the underlying {@link URLConnection}.
     On failure the exception is stored in {@link #getException()} and
     null is returned; callers should check for that case.
     */
    public URLConnection getURLConnection() {
        if (urlConnection == null) {
            try {
                urlConnection = new URL(this.urlString).openConnection();
            } catch (Exception e) {
                this.error = e;
            }
        }
        return urlConnection;
    }
    public HttpURLConnection getHttpURLConnection() {
        return ((HttpURLConnection)getURLConnection());
    }
    public void setRequestProperty(String key, String value) {
        getURLConnection().setRequestProperty(key, value);
    }
    public void setDoOutput(boolean b) {
        getURLConnection().setDoOutput(b);
    }
    public OutputStream getOutputStream() throws IOException {
        return getURLConnection().getOutputStream();
    }
    public InputStream getInputStream() throws IOException {
        return getURLConnection().getInputStream();
    }
    public int getResponseCode() throws IOException {
        return getHttpURLConnection().getResponseCode();
    }
    public String getContentType() {
        return getHttpURLConnection().getContentType();
    }
    public void setRequestMethod(String method) {
        try {
            getHttpURLConnection().setRequestMethod(method);
        } catch (ProtocolException e) {
            this.error = e;
        }
    }
    public void setConnectTimeout(int timeout) {
        // Bug fix: this previously delegated to setReadTimeout(), so the
        // connect timeout was silently never applied.
        getHttpURLConnection().setConnectTimeout(timeout);
    }
    public Map<String, List<String>> getHeaderFields() {
        return getHttpURLConnection().getHeaderFields();
    }
}
| library/src/main/java/com/javanetworking/URLRequest.java | package com.javanetworking;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import java.util.Map;
/**
Wrapper class for a {@link URLConnection} request. Holds the string URL, URLConnection
request and HTTP body content for current request.
*/
public class URLRequest {
    // Resource URL in string form; the connection is opened lazily from it.
    private String url;
    // Lazily opened connection; null until getURLConnection() is first called.
    private URLConnection urlConnection;
    // Raw HTTP body bytes attached to this request (may be null).
    private byte[] HTTPBody;
    // Last exception raised while opening the connection or setting the
    // request method; null when no failure has occurred yet.
    private Exception error;
    /**
     Static factory mirroring the constructor.
     @param url A {@link String} representation of the resource URL.
     @return A new {@link URLRequest} for the given URL.
     */
    public static URLRequest requestWithURLString(String url) {
        return new URLRequest(url);
    }
    /**
     Constructor for a new request.
     @param url A {@link String} representation of the resource URL.
     */
    public URLRequest(String url) {
        this.url = url;
        this.urlConnection = null;
        this.HTTPBody = null;
    }
    public byte[] getHTTPBody() {
        return HTTPBody;
    }
    public void setHTTPBody(byte[] HTTPBody) {
        this.HTTPBody = HTTPBody;
    }
    /**
     @return The last exception captured by this request, or null.
     */
    public Exception getException() {
        return this.error;
    }
    /**
     Opens (once) and returns the underlying {@link URLConnection}.
     On failure the exception is stored in {@link #getException()} and
     null is returned; callers should check for that case.
     */
    public URLConnection getURLConnection() {
        if (urlConnection == null) {
            try {
                urlConnection = new URL(this.url).openConnection();
            } catch (Exception e) {
                this.error = e;
            }
        }
        return urlConnection;
    }
    public HttpURLConnection getHttpURLConnection() {
        return ((HttpURLConnection)getURLConnection());
    }
    public void setRequestProperty(String key, String value) {
        getURLConnection().setRequestProperty(key, value);
    }
    public void setDoOutput(boolean b) {
        getURLConnection().setDoOutput(b);
    }
    public OutputStream getOutputStream() throws IOException {
        return getURLConnection().getOutputStream();
    }
    public InputStream getInputStream() throws IOException {
        return getURLConnection().getInputStream();
    }
    public int getResponseCode() throws IOException {
        return getHttpURLConnection().getResponseCode();
    }
    public String getContentType() {
        return getHttpURLConnection().getContentType();
    }
    public void setRequestMethod(String method) {
        try {
            getHttpURLConnection().setRequestMethod(method);
        } catch (ProtocolException e) {
            this.error = e;
        }
    }
    public void setConnectTimeout(int timeout) {
        // Bug fix: this previously delegated to setReadTimeout(), so the
        // connect timeout was silently never applied.
        getHttpURLConnection().setConnectTimeout(timeout);
    }
    public Map<String, List<String>> getHeaderFields() {
        return getHttpURLConnection().getHeaderFields();
    }
}
| Added comment to URLRequest
| library/src/main/java/com/javanetworking/URLRequest.java | Added comment to URLRequest |
|
Java | mit | 791f853cd788a59d8dd78f1464742e88133e0072 | 0 | axel-halin/Thesis-JHipster,axel-halin/Thesis-JHipster,axel-halin/Thesis-JHipster,axel-halin/Thesis-JHipster,axel-halin/Thesis-JHipster | package oracle;
import csv.CSVUtils;
import selenium.SeleniumTest;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.eclipse.xtext.util.Files;
import org.junit.Test;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* Extension of previous work from Mathieu ACHER, Inria Rennes-Bretagne Atlantique.
*
* Oracle for all variants of configurator JHipster
* - generate
* - build
* - tests
*
* @author Nuttinck Alexandre
* @author Axel Halin
*
*/
public class Oracle {
private static final Logger _log = Logger.getLogger("Oracle");
private static final String JHIPSTERS_DIRECTORY = "jhipsters";
private static final Integer weightFolder = new File(JHIPSTERS_DIRECTORY+"/").list().length;
private static final String projectDirectory = System.getProperty("user.dir");
private static final String JS_COVERAGE_PATH = "target/test-results/coverage/report-lcov/lcov-report/index.html";
private static ResultChecker resultChecker = null;
private static CSVUtils csvUtils = null;
private Thread threadRegistry;
private Thread threadUAA;
private void startProcess(String fileName, String desiredDirectory){
Process process = null;
try{
ProcessBuilder processBuilder = new ProcessBuilder(fileName);
processBuilder.directory(new File(projectDirectory + "/" + desiredDirectory));
process = processBuilder.start();
process.waitFor();
} catch(IOException e){
_log.error("IOException: "+e.getMessage());
} catch(InterruptedException e){
_log.error("InterruptedException: "+e.getMessage());
} finally{
try{process.destroy();}
catch(Exception e){_log.error("Destroy error: "+e.getMessage());}
}
}
/**
* Generate the App from the yo-rc.json.
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
* @throws InterruptedException
* @throws IOException
*/
private void generateApp(String jDirectory) throws InterruptedException, IOException{
startProcess("./generate.sh",JHIPSTERS_DIRECTORY+"/"+jDirectory+"/");
}
/**
* Check the App is generated successfully
*
* @param jDirectory Name of the folder
*/
private boolean checkGenerateApp(String jDirectory) throws FileNotFoundException{
String text = "";
//extract log
text = Files.readFileIntoString(getjDirectory(jDirectory) + "generate.log");
//CHECK IF Server app generated successfully.
//OR Client app generated successfully.
Matcher m = Pattern.compile("((.*?)Server app generated successfully.)").matcher(text);
Matcher m2 = Pattern.compile("((.*?)Client app generated successfully.)").matcher(text);
while(m.find() | m2.find()) return true;
return false;
}
/**
* Compile the App from the yo-rc.json.
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
*/
private void compileApp(String jDirectory){
startProcess("./compile.sh", JHIPSTERS_DIRECTORY+"/"+jDirectory);
}
/**
* Check the App is compile successfully
*
* @param jDirectory Name of the folder
*/
private boolean checkCompileApp(String jDirectory) throws FileNotFoundException{
String text = "";
//extract log
text = Files.readFileIntoString(getjDirectory(jDirectory) + "compile.log");
//CHECK IF BUILD FAILED THEN false
Matcher m1 = Pattern.compile("((.*?)BUILD FAILED)").matcher(text);
Matcher m2 = Pattern.compile("((.*?)BUILD FAILURE)").matcher(text);
while(m1.find() | m2.find()) return false;
return true;
}
/**
* Build the App which is generated successfully
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
* @throws InterruptedException
*/
private void buildApp(String jDirectory) throws InterruptedException{
startProcess("./build.sh", getjDirectory(jDirectory));
}
/**
* Launch UnitTests on the App is compiled successfully
*
* @param jDirectory Name of the folder
* @throws InterruptedException
*/
private void unitTestsApp(String jDirectory) throws InterruptedException{
startProcess("./unitTest.sh", JHIPSTERS_DIRECTORY+"/"+jDirectory);
}
/**
* Return the path to folder jDirectory (which is in the relative path JHIPSTERS_DIRECTORY/)
*
* @param jDirectory Name of the folder
* @return The relative path to folder with name jDirectory.
*/
private String getjDirectory(String jDirectory) {
return JHIPSTERS_DIRECTORY + "/" + jDirectory + "/";
}
/**
* Launch initialization scripts:\n
* - Start Uaa Server (in case of Uaa authentication)
* - Start Jhipster-Registry (in case of Microservices)
*
* @param system Boolean to check OS (True = Linux, False = Windows)
*/
private void initialization(boolean docker, String applicationType, String authentication){
_log.info("Starting intialization scripts...");
if(!docker){
// Start database services
startProcess("./startDB.sh","");
if (applicationType.equals("gateway") || applicationType.equals("microservice") || applicationType.equals("uaa")){
// Start Jhipster Registry
threadRegistry = new Thread(new ThreadRegistry(projectDirectory+"/JHipster-Registry/"));
threadRegistry.start();
// Let Jhipster Registry initiate before attempting to launch UAA Server...
try{Thread.sleep(30000);}
catch(Exception e){_log.error(e.getMessage());}
if(authentication.equals("uaa")){
// Start UAA Server
threadUAA = new Thread(new ThreadUAA(projectDirectory+"/"+JHIPSTERS_DIRECTORY+"/uaa/"));
threadUAA.start();
try{Thread.sleep(5000);}
catch(Exception e){_log.error(e.getMessage());}
}
}
} else{
// STOP DB FOR DOCKER
startProcess("./stopDB.sh","");
}
_log.info("Oracle intialized !");
}
/**
* Terminate the Oracle by ending JHipster Registry and UAA servers.
*/
private void termination(){
try{
threadRegistry.interrupt();
threadUAA.interrupt();
} catch (Exception e){
_log.error(e.getMessage());
}
}
private void cleanUp(String jDirectory){
startProcess("./dockerStop.sh", getjDirectory(jDirectory));
}
private void dockerCompose(String jDirectory){
// Run the App
startProcess("./dockerStart.sh",getjDirectory(jDirectory));
}
/**
* Generate & Build & Tests all variants of JHipster 3.6.1.
*/
@Test
public void genJHipsterVariants() throws Exception{
//Create CSV file JHipster if not exist.
File f = new File("jhipster.csv");
if(!f.exists()) {
_log.info("Create New CSV File JHipster");
CSVUtils.createCSVFileJHipster("jhipster.csv");
}
//Create CSV file Coverage if not exist.
File f2 = new File("coverageJACOCO.csv");
if(!f2.exists()) {
_log.info("Create New CSV File Coverage");
CSVUtils.createCSVFileCoverage("coverageJACOCO.csv");
}
CSVUtils.createCSVCucumber("cucumber.csv");
// 1 -> weightFolder -1 (UAA directory...)
for (Integer i =1;i<=weightFolder-1;i++){
_log.info("Starting treatment of JHipster n° "+i);
String jDirectory = "jhipster"+i;
resultChecker = new ResultChecker(getjDirectory(jDirectory));
//ID CSV ID used for jhipster,coverageJACOCO,cucumber csv
String Id = "ND";
// generate a new ID -> depend of the csv lenght
Id = String.valueOf(f.length());
//Strings used for the csv
String generation = "X";
String generationTime = "X";
String stacktracesGen = "X";
String compile = "KO";
String compileTime = "ND";
String stacktracesCompile = "ND";
StringBuilder build = new StringBuilder("KO");
String stacktracesBuild = "ND";
String buildTime = "ND";
StringBuilder buildWithDocker = new StringBuilder("KO");
String stacktracesBuildWithDocker = "ND";
String buildTimeWithDocker = "ND";
String buildTimeWithDockerPackage = "ND";
//jsonStrings
String applicationType = "X";
String authenticationType = "X";
String hibernateCache = "X";
String clusteredHttpSession = "X";
String websocket = "X";
String databaseType= "X";
String devDatabaseType= "X";
String prodDatabaseType= "X";
String searchEngine= "X";
String enableSocialSignIn= "X";
String useSass= "X";
String enableTranslation = "X";
String testFrameworks ="X";
//Tests part
String resultsTest= "X";
String cucumber= "X";
String karmaJS= "X";
String gatling = "X";
String protractor = "X";
String gatlingDocker = "X";
String protractorDocker = "X";
StringBuilder imageSize = new StringBuilder("ND");
String coverageInstuctions= "X";
String coverageBranches= "X";
String coverageJSStatements = "X";
String coverageJSBranches = "X";
//Get Json strings used for the csv
JsonParser jsonParser = new JsonParser();
JsonObject objectGen = jsonParser.parse(Files.readFileIntoString(getjDirectory(jDirectory)+".yo-rc.json")).getAsJsonObject();
JsonObject object = (JsonObject) objectGen.get("generator-jhipster");
if (object.get("applicationType") != null) applicationType = object.get("applicationType").toString();
if (object.get("authenticationType") != null) authenticationType = object.get("authenticationType").toString();
if (object.get("hibernateCache") != null) hibernateCache = object.get("hibernateCache").toString();
if (object.get("clusteredHttpSession") != null) clusteredHttpSession = object.get("clusteredHttpSession").toString();
if (object.get("websocket") != null) websocket = object.get("websocket").toString();
if (object.get("databaseType") != null) databaseType = object.get("databaseType").toString();
if (object.get("devDatabaseType") != null) devDatabaseType = object.get("devDatabaseType").toString();
if (object.get("prodDatabaseType") != null) prodDatabaseType = object.get("prodDatabaseType").toString();
if (object.get("searchEngine") != null) searchEngine = object.get("buildTool").toString();
if (object.get("enableSocialSignIn") != null) enableSocialSignIn = object.get("enableSocialSignIn").toString();
if (object.get("useSass") != null) useSass = object.get("useSass").toString();
if (object.get("enableTranslation") != null) enableTranslation = object.get("enableTranslation").toString();
if (object.get("testFrameworks") != null) testFrameworks = object.get("testFrameworks").toString();
_log.info("Check if this config isn't done yet...");
String[] yorc = {applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks};
boolean check = CSVUtils.CheckNotExistLineCSV("jhipster.csv", yorc);
// IF check TRUE the Generate else next
if(check)
{
_log.info("Generating the App...");
long millis = System.currentTimeMillis();
generateApp(jDirectory);
long millisAfterGenerate = System.currentTimeMillis();
_log.info("Generation done!");
_log.info("Checking the generation of the App...");
if(checkGenerateApp(jDirectory)){
generation ="OK";
// Time to Generate
Long generationTimeLong = millisAfterGenerate - millis;
Double generationTimeDouble = generationTimeLong/1000.0;
generationTime = generationTimeDouble.toString();
stacktracesGen = resultChecker.extractStacktraces("generate.log");
_log.info("Generation complete ! Trying to compile the App...");
compileApp(jDirectory);
if(checkCompileApp(jDirectory)){
compile ="OK";
compileTime = resultChecker.extractTime("compile.log");
String[] partsCompile = compileTime.split(";");
compileTime = partsCompile[0]; // delete the ";" used for Docker
stacktracesCompile = resultChecker.extractStacktraces("compile.log");
_log.info("Compilation success ! Launch Unit Tests...");
unitTestsApp(jDirectory);
resultsTest = resultChecker.extractResultsTest("test.log");
karmaJS = resultChecker.extractKarmaJS("testKarmaJS.log");
cucumber= resultChecker.extractCucumber("test.log");
csvUtils = new CSVUtils(getjDirectory(jDirectory));
coverageInstuctions= resultChecker.extractCoverageIntstructions("index.html");
coverageBranches = resultChecker.extractCoverageBranches("index.html");
coverageJSBranches = resultChecker.extractJSCoverageBranches(JS_COVERAGE_PATH);
coverageJSStatements = resultChecker.extractJSCoverageStatements(JS_COVERAGE_PATH);
//Extract CSV Coverage Data and write in coverage.csv
csvUtils.writeLinesCoverageCSV("jacoco.csv","coverageJACOCO.csv",jDirectory,Id);
_log.info("Compilation success ! Trying to build the App...");
_log.info("Trying to build the App with Docker...");
initialization(true, applicationType, authenticationType);
imageSize = new StringBuilder();
ThreadCheckBuild t1 = new ThreadCheckBuild(getjDirectory(jDirectory), true, "buildDocker.log",imageSize, buildWithDocker);
t1.start();
//build WITH docker
dockerCompose(jDirectory);
t1.done();
if(imageSize.toString().equals("")){
imageSize.delete(0, 5);
imageSize.append("ND");
}
if(buildWithDocker.toString().equals("KO")) stacktracesBuildWithDocker = resultChecker.extractStacktraces("buildDocker.log");
String buildTimeWithDockerVar = resultChecker.extractTime("buildDocker.log");
String[] partsBuildWithDocker = buildTimeWithDockerVar.split(";");
buildTimeWithDockerPackage = partsBuildWithDocker[0];
if(partsBuildWithDocker.length>1) buildTimeWithDocker = partsBuildWithDocker[1];
gatlingDocker = resultChecker.extractGatling("testDockerGatling.log");
protractorDocker = resultChecker.extractProtractor("testDockerProtractor.log");
_log.info("Cleaning up... Docker");
cleanUp(jDirectory);
// Building without Docker
initialization(false, applicationType, authenticationType);
ThreadCheckBuild t2 = new ThreadCheckBuild(getjDirectory(jDirectory), false, "build.log",imageSize,build);
t2.start();
_log.info("Trying to build the App without Docker...");
//build WITHOUT docker
buildApp(jDirectory);
t2.done();
if(build.toString().equals("KO")) stacktracesBuild = resultChecker.extractStacktraces("build.log");
gatling = resultChecker.extractGatling("testGatling.log");
protractor = resultChecker.extractProtractor("testProtractor.log");
buildTime = resultChecker.extractTime("build.log");
String[] partsBuildWithoutDocker = buildTime.split(";");
buildTime = partsBuildWithoutDocker[0]; // only two parts with Docker
} else{
_log.error("App Compilation Failed ...");
compile ="KO";
compileTime = "KO";
stacktracesCompile = resultChecker.extractStacktraces("compile.log");
}
} else{
_log.error("App Generation Failed...");
generation ="KO";
stacktracesGen = resultChecker.extractStacktraces("generate.log");
}
_log.info("Writing into jhipster.csv");
//WITH DOCKER
String docker = "true";
//New line for file csv With Docker
String[] line = {Id,jDirectory,docker,applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks,
generation,stacktracesGen,generationTime,compile,stacktracesCompile,compileTime,buildWithDocker.toString(),
stacktracesBuildWithDocker,buildTimeWithDockerPackage,buildTimeWithDocker,imageSize.toString(),
resultsTest,cucumber,karmaJS,gatlingDocker,protractorDocker,coverageInstuctions,coverageBranches,
coverageJSStatements, coverageJSBranches};
//write into CSV file
CSVUtils.writeNewLineCSV("jhipster.csv",line);
//WITHOUT DOCKER
docker = "false";
//New line for file csv without Docker
String[] line2 = {Id,jDirectory,docker,applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks,
generation,stacktracesGen,generationTime,compile,stacktracesCompile,compileTime,build.toString(),stacktracesBuild,"NOTDOCKER",
buildTime,"NOTDOCKER",resultsTest,cucumber,karmaJS,gatling,protractor,
coverageInstuctions,coverageBranches, coverageJSStatements, coverageJSBranches};
//write into CSV file
CSVUtils.writeNewLineCSV("jhipster.csv",line2);
}
else {
_log.info("This configuration has been already tested");
}
}
_log.info("Termination...");
termination();
}
} | FML-brute/src/oracle/Oracle.java | package oracle;
import csv.CSVUtils;
import selenium.SeleniumTest;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.eclipse.xtext.util.Files;
import org.junit.Test;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* Extension of previous work from Mathieu ACHER, Inria Rennes-Bretagne Atlantique.
*
* Oracle for all variants of configurator JHipster
* - generate
* - build
* - tests
*
* @author Nuttinck Alexandre
* @author Axel Halin
*
*/
public class Oracle {
private static final Logger _log = Logger.getLogger("Oracle");
private static final String JHIPSTERS_DIRECTORY = "jhipsters";
private static final Integer weightFolder = new File(JHIPSTERS_DIRECTORY+"/").list().length;
private static final String projectDirectory = System.getProperty("user.dir");
private static final String JS_COVERAGE_PATH = "target/test-results/coverage/report-lcov/lcov-report/index.html";
private static ResultChecker resultChecker = null;
private static CSVUtils csvUtils = null;
private Thread threadRegistry;
private Thread threadUAA;
private void startProcess(String fileName, String desiredDirectory){
    // Runs the given script in desiredDirectory (relative to the project
    // root) and blocks until it terminates. Failures are logged, not thrown.
    Process process = null;
    try{
        ProcessBuilder processBuilder = new ProcessBuilder(fileName);
        processBuilder.directory(new File(projectDirectory + "/" + desiredDirectory));
        process = processBuilder.start();
        process.waitFor();
    } catch(IOException e){
        _log.error("IOException: "+e.getMessage());
    } catch(InterruptedException e){
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        _log.error("InterruptedException: "+e.getMessage());
    } finally{
        // process is still null when ProcessBuilder.start() itself failed.
        if(process != null){
            try{process.destroy();}
            catch(Exception e){_log.error("Destroy error: "+e.getMessage());}
        }
    }
}
/**
* Generate the App from the yo-rc.json.
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
* @throws InterruptedException
* @throws IOException
*/
private void generateApp(String jDirectory) throws InterruptedException, IOException{
    // Same path string as before; built via the shared helper for
    // consistency with buildApp()/cleanUp().
    startProcess("./generate.sh", getjDirectory(jDirectory));
}
/**
* Check the App is generated successfully
*
* @param jDirectory Name of the folder
*/
private boolean checkGenerateApp(String jDirectory) throws FileNotFoundException{
    // Extract the generation log of this variant.
    String text = Files.readFileIntoString(getjDirectory(jDirectory) + "generate.log");
    // Either "Server app generated successfully." or the client variant
    // counts as a successful generation.
    Matcher server = Pattern.compile("((.*?)Server app generated successfully.)").matcher(text);
    Matcher client = Pattern.compile("((.*?)Client app generated successfully.)").matcher(text);
    // Short-circuit || replaces the original non-short-circuit |; the
    // while-that-returns-immediately collapses to a plain return.
    return server.find() || client.find();
}
/**
* Compile the App from the yo-rc.json.
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
*/
private void compileApp(String jDirectory){
    // Path built via the shared helper (trailing slash is harmless for
    // ProcessBuilder.directory()).
    startProcess("./compile.sh", getjDirectory(jDirectory));
}
/**
* Check the App is compile successfully
*
* @param jDirectory Name of the folder
*/
private boolean checkCompileApp(String jDirectory) throws FileNotFoundException{
    // Extract the compilation log of this variant.
    String text = Files.readFileIntoString(getjDirectory(jDirectory) + "compile.log");
    // Gradle reports "BUILD FAILED", Maven reports "BUILD FAILURE"; either
    // one means the compile step failed.
    Matcher failed = Pattern.compile("((.*?)BUILD FAILED)").matcher(text);
    Matcher failure = Pattern.compile("((.*?)BUILD FAILURE)").matcher(text);
    // Short-circuit || replaces the original non-short-circuit |.
    return !(failed.find() || failure.find());
}
/**
* Build the App which is generated successfully
*
* @param jDirectory Name of the folder
* @param system boolean type of the system (linux then true, else false)
* @throws InterruptedException
*/
private void buildApp(String jDirectory) throws InterruptedException{
    // Delegate to the generic script runner inside the variant's folder.
    final String workingDir = getjDirectory(jDirectory);
    startProcess("./build.sh", workingDir);
}
/**
* Launch UnitTests on the App is compiled successfully
*
* @param jDirectory Name of the folder
* @throws InterruptedException
*/
private void unitTestsApp(String jDirectory) throws InterruptedException{
    // Same path string as before; built via the shared helper for
    // consistency with buildApp()/cleanUp().
    startProcess("./unitTest.sh", getjDirectory(jDirectory));
}
/**
* Return the path to folder jDirectory (which is in the relative path JHIPSTERS_DIRECTORY/)
*
* @param jDirectory Name of the folder
* @return The relative path to folder with name jDirectory.
*/
private String getjDirectory(String jDirectory) {
    // Compose "<jhipsters>/<name>/" — always with a trailing slash.
    final StringBuilder path = new StringBuilder(JHIPSTERS_DIRECTORY);
    path.append('/').append(jDirectory).append('/');
    return path.toString();
}
/**
* Launch initialization scripts:\n
* - Start Uaa Server (in case of Uaa authentication)
* - Start Jhipster-Registry (in case of Microservices)
*
* @param system Boolean to check OS (True = Linux, False = Windows)
*/
private void initialization(boolean docker, String applicationType, String authentication){
    _log.info("Starting intialization scripts...");
    if(!docker){
        // Start database services
        startProcess("./startDB.sh","");
        // Microservice-style variants need the JHipster Registry running.
        if (applicationType.equals("gateway") || applicationType.equals("microservice") || applicationType.equals("uaa")){
            // Start Jhipster Registry
            threadRegistry = new Thread(new ThreadRegistry(projectDirectory+"/JHipster-Registry/"));
            threadRegistry.start();
            // Let Jhipster Registry initiate before attempting to launch UAA Server...
            // NOTE(review): assumes the registry boots within 30s — confirm on slow hosts.
            try{Thread.sleep(30000);}
            catch(Exception e){_log.error(e.getMessage());}
            if(authentication.equals("uaa")){
                // Start UAA Server
                threadUAA = new Thread(new ThreadUAA(projectDirectory+"/"+JHIPSTERS_DIRECTORY+"/uaa/"));
                threadUAA.start();
                try{Thread.sleep(5000);}
                catch(Exception e){_log.error(e.getMessage());}
            }
        }
    } else{
        // STOP DB FOR DOCKER
        // Docker builds bring their own databases, so local ones are stopped.
        startProcess("./stopDB.sh","");
    }
    _log.info("Oracle intialized !");
}
/**
* Terminate the Oracle by ending JHipster Registry and UAA servers.
*/
private void termination(){
    try{
        // Threads may be null when never started (monolith variants); the
        // resulting NullPointerException is deliberately swallowed below.
        threadRegistry.interrupt();
        threadUAA.interrupt();
    } catch (Exception e){
        _log.error(e.getMessage());
    }
}
// Stops and removes the docker containers of this variant.
private void cleanUp(String jDirectory){
    startProcess("./dockerStop.sh", getjDirectory(jDirectory));
}
// Builds the docker image and starts the application via docker-compose.
private void dockerCompose(String jDirectory){
    // Run the App
    startProcess("./dockerStart.sh",getjDirectory(jDirectory));
}
/**
* Generate & Build & Tests all variants of JHipster 3.6.1.
*/
@Test
public void genJHipsterVariants() throws Exception{
//Create CSV file JHipster if not exist.
File f = new File("jhipster.csv");
if(!f.exists()) {
_log.info("Create New CSV File JHipster");
CSVUtils.createCSVFileJHipster("jhipster.csv");
}
//Create CSV file Coverage if not exist.
File f2 = new File("coverageJACOCO.csv");
if(!f2.exists()) {
_log.info("Create New CSV File Coverage");
CSVUtils.createCSVFileCoverage("coverageJACOCO.csv");
}
CSVUtils.createCSVCucumber("cucumber.csv");
// 1 -> weightFolder -1 (UAA directory...)
for (Integer i =1;i<=weightFolder-1;i++){
_log.info("Starting treatment of JHipster n° "+i);
String jDirectory = "jhipster"+i;
resultChecker = new ResultChecker(getjDirectory(jDirectory));
//ID CSV ID used for jhipster,coverageJACOCO,cucumber csv
String Id = "ND";
// generate a new ID -> depend of the csv lenght
Id = String.valueOf(f.length());
//Strings used for the csv
String generation = "X";
String generationTime = "X";
String stacktracesGen = "X";
String compile = "KO";
String compileTime = "ND";
String stacktracesCompile = "ND";
StringBuilder build = new StringBuilder("KO");
String stacktracesBuild = "ND";
String buildTime = "ND";
StringBuilder buildWithDocker = new StringBuilder("KO");
String stacktracesBuildWithDocker = "ND";
String buildTimeWithDocker = "ND";
String buildTimeWithDockerPackage = "ND";
//jsonStrings
String applicationType = "X";
String authenticationType = "X";
String hibernateCache = "X";
String clusteredHttpSession = "X";
String websocket = "X";
String databaseType= "X";
String devDatabaseType= "X";
String prodDatabaseType= "X";
String searchEngine= "X";
String enableSocialSignIn= "X";
String useSass= "X";
String enableTranslation = "X";
String testFrameworks ="X";
//Tests part
String resultsTest= "X";
String cucumber= "X";
String karmaJS= "X";
String gatling = "X";
String protractor = "X";
String gatlingDocker = "X";
String protractorDocker = "X";
StringBuilder imageSize = new StringBuilder("ND");
String coverageInstuctions= "X";
String coverageBranches= "X";
String coverageJSStatements = "X";
String coverageJSBranches = "X";
//Get Json strings used for the csv
JsonParser jsonParser = new JsonParser();
JsonObject objectGen = jsonParser.parse(Files.readFileIntoString(getjDirectory(jDirectory)+".yo-rc.json")).getAsJsonObject();
JsonObject object = (JsonObject) objectGen.get("generator-jhipster");
if (object.get("applicationType") != null) applicationType = object.get("applicationType").toString();
if (object.get("authenticationType") != null) authenticationType = object.get("authenticationType").toString();
if (object.get("hibernateCache") != null) hibernateCache = object.get("hibernateCache").toString();
if (object.get("clusteredHttpSession") != null) clusteredHttpSession = object.get("clusteredHttpSession").toString();
if (object.get("websocket") != null) websocket = object.get("websocket").toString();
if (object.get("databaseType") != null) databaseType = object.get("databaseType").toString();
if (object.get("devDatabaseType") != null) devDatabaseType = object.get("devDatabaseType").toString();
if (object.get("prodDatabaseType") != null) prodDatabaseType = object.get("prodDatabaseType").toString();
if (object.get("searchEngine") != null) searchEngine = object.get("buildTool").toString();
if (object.get("enableSocialSignIn") != null) enableSocialSignIn = object.get("enableSocialSignIn").toString();
if (object.get("useSass") != null) useSass = object.get("useSass").toString();
if (object.get("enableTranslation") != null) enableTranslation = object.get("enableTranslation").toString();
if (object.get("testFrameworks") != null) testFrameworks = object.get("testFrameworks").toString();
_log.info("Check if this config isn't done yet...");
String[] yorc = {applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks};
boolean check = CSVUtils.CheckNotExistLineCSV("jhipster.csv", yorc);
// IF check TRUE the Generate else next
if(check)
{
_log.info("Generating the App...");
long millis = System.currentTimeMillis();
generateApp(jDirectory);
long millisAfterGenerate = System.currentTimeMillis();
_log.info("Generation done!");
_log.info("Checking the generation of the App...");
if(checkGenerateApp(jDirectory)){
generation ="OK";
// Time to Generate
Long generationTimeLong = millisAfterGenerate - millis;
Double generationTimeDouble = generationTimeLong/1000.0;
generationTime = generationTimeDouble.toString();
stacktracesGen = resultChecker.extractStacktraces("generate.log");
_log.info("Generation complete ! Trying to compile the App...");
compileApp(jDirectory);
if(checkCompileApp(jDirectory)){
compile ="OK";
compileTime = resultChecker.extractTime("compile.log");
String[] partsCompile = compileTime.split(";");
compileTime = partsCompile[0]; // delete the ";" used for Docker
stacktracesCompile = resultChecker.extractStacktraces("compile.log");
_log.info("Compilation success ! Launch Unit Tests...");
unitTestsApp(jDirectory);
resultsTest = resultChecker.extractResultsTest("test.log");
karmaJS = resultChecker.extractKarmaJS("testKarmaJS.log");
cucumber= resultChecker.extractCucumber("test.log");
csvUtils = new CSVUtils(getjDirectory(jDirectory));
coverageInstuctions= resultChecker.extractCoverageIntstructions("index.html");
coverageBranches = resultChecker.extractCoverageBranches("index.html");
coverageJSBranches = resultChecker.extractJSCoverageBranches(JS_COVERAGE_PATH);
coverageJSStatements = resultChecker.extractJSCoverageStatements(JS_COVERAGE_PATH);
//Extract CSV Coverage Data and write in coverage.csv
csvUtils.writeLinesCoverageCSV("jacoco.csv","coverageJACOCO.csv",jDirectory,Id);
_log.info("Compilation success ! Trying to build the App...");
_log.info("Trying to build the App with Docker...");
initialization(true, applicationType, authenticationType);
imageSize = new StringBuilder();
ThreadCheckBuild t1 = new ThreadCheckBuild(getjDirectory(jDirectory), true, "buildDocker.log",imageSize, buildWithDocker);
t1.start();
//build WITH docker
dockerCompose(jDirectory);
t1.done();
if(imageSize.toString().equals("")){
imageSize.delete(0, 5);
imageSize.append("ND");
}
if(buildWithDocker.toString().equals("KO")) stacktracesBuildWithDocker = resultChecker.extractStacktraces("buildDocker.log");
buildTimeWithDocker = resultChecker.extractTime("buildDocker.log");
String[] partsBuildWithDocker = buildTimeWithDocker.split(";");
buildTimeWithDockerPackage = partsBuildWithDocker[0];
if(partsBuildWithDocker.length>1) buildTimeWithDocker = partsBuildWithDocker[1];
gatlingDocker = resultChecker.extractGatling("testDockerGatling.log");
protractorDocker = resultChecker.extractProtractor("testDockerProtractor.log");
_log.info("Cleaning up... Docker");
cleanUp(jDirectory);
// Building without Docker
initialization(false, applicationType, authenticationType);
ThreadCheckBuild t2 = new ThreadCheckBuild(getjDirectory(jDirectory), false, "build.log",imageSize,build);
t2.start();
_log.info("Trying to build the App without Docker...");
//build WITHOUT docker
buildApp(jDirectory);
t2.done();
if(build.toString().equals("KO")) stacktracesBuild = resultChecker.extractStacktraces("build.log");
gatling = resultChecker.extractGatling("testGatling.log");
protractor = resultChecker.extractProtractor("testProtractor.log");
buildTime = resultChecker.extractTime("build.log");
String[] partsBuildWithoutDocker = buildTime.split(";");
buildTime = partsBuildWithoutDocker[0]; // only two parts with Docker
} else{
_log.error("App Compilation Failed ...");
compile ="KO";
compileTime = "KO";
stacktracesCompile = resultChecker.extractStacktraces("compile.log");
}
} else{
_log.error("App Generation Failed...");
generation ="KO";
stacktracesGen = resultChecker.extractStacktraces("generate.log");
}
_log.info("Writing into jhipster.csv");
//WITH DOCKER
String docker = "true";
//New line for file csv With Docker
String[] line = {Id,jDirectory,docker,applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks,
generation,stacktracesGen,generationTime,compile,stacktracesCompile,compileTime,buildWithDocker.toString(),
stacktracesBuildWithDocker,buildTimeWithDockerPackage,buildTimeWithDocker,imageSize.toString(),
resultsTest,cucumber,karmaJS,gatlingDocker,protractorDocker,coverageInstuctions,coverageBranches,
coverageJSStatements, coverageJSBranches};
//write into CSV file
CSVUtils.writeNewLineCSV("jhipster.csv",line);
//WITHOUT DOCKER
docker = "false";
//New line for file csv without Docker
String[] line2 = {Id,jDirectory,docker,applicationType,authenticationType,hibernateCache,clusteredHttpSession,
websocket,databaseType,devDatabaseType,prodDatabaseType,searchEngine,enableSocialSignIn,useSass,enableTranslation,testFrameworks,
generation,stacktracesGen,generationTime,compile,stacktracesCompile,compileTime,build.toString(),stacktracesBuild,"NOTDOCKER",
buildTime,"NOTDOCKER",resultsTest,cucumber,karmaJS,gatling,protractor,
coverageInstuctions,coverageBranches, coverageJSStatements, coverageJSBranches};
//write into CSV file
CSVUtils.writeNewLineCSV("jhipster.csv",line2);
}
else {
_log.info("This configuration has been already tested");
}
}
_log.info("Termination...");
termination();
}
} | Add var buildTimeWithDocker
| FML-brute/src/oracle/Oracle.java | Add var buildTimeWithDocker |
|
Java | mit | 6cb2d25cabec82380091381bdb6aee6238c72fb8 | 0 | takenspc/validator,takenspc/validator,takenspc/validator,validator/validator,validator/validator,takenspc/validator,validator/validator,takenspc/validator,validator/validator,validator/validator | /*
* Copyright (c) 2005, 2006 Henri Sivonen
* Copyright (c) 2007-2018 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.servlet;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.net.SocketTimeoutException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import nu.validator.checker.XmlPiChecker;
import nu.validator.checker.jing.CheckerSchema;
import nu.validator.gnu.xml.aelfred2.FatalSAXException;
import nu.validator.gnu.xml.aelfred2.SAXDriver;
import nu.validator.htmlparser.common.DocumentMode;
import nu.validator.htmlparser.common.DocumentModeHandler;
import nu.validator.htmlparser.common.Heuristics;
import nu.validator.htmlparser.common.XmlViolationPolicy;
import nu.validator.htmlparser.sax.HtmlParser;
import nu.validator.htmlparser.sax.HtmlSerializer;
import nu.validator.htmlparser.sax.XmlSerializer;
import nu.validator.io.BoundedInputStream;
import nu.validator.io.DataUri;
import nu.validator.io.StreamBoundException;
import nu.validator.localentities.LocalCacheEntityResolver;
import nu.validator.messages.GnuMessageEmitter;
import nu.validator.messages.JsonMessageEmitter;
import nu.validator.messages.MessageEmitterAdapter;
import nu.validator.messages.TextMessageEmitter;
import nu.validator.messages.TooManyErrorsException;
import nu.validator.messages.XhtmlMessageEmitter;
import nu.validator.messages.XmlMessageEmitter;
import nu.validator.servlet.imagereview.ImageCollector;
import nu.validator.servlet.OutlineBuildingXMLReaderWrapper.Section;
import nu.validator.source.SourceCode;
import nu.validator.spec.Spec;
import nu.validator.spec.html5.Html5SpecBuilder;
import nu.validator.xml.AttributesImpl;
import nu.validator.xml.AttributesPermutingXMLReaderWrapper;
import nu.validator.xml.BaseUriTracker;
import nu.validator.xml.CharacterUtil;
import nu.validator.xml.CombineContentHandler;
import nu.validator.xml.ContentTypeParser;
import nu.validator.xml.ContentTypeParser.NonXmlContentTypeException;
import nu.validator.xml.DataUriEntityResolver;
import nu.validator.xml.IdFilter;
import nu.validator.xml.LanguageDetectingXMLReaderWrapper;
import nu.validator.xml.UseCountingXMLReaderWrapper;
import nu.validator.xml.NamespaceDroppingXMLReaderWrapper;
import nu.validator.xml.NullEntityResolver;
import nu.validator.xml.PrudentHttpEntityResolver;
import nu.validator.xml.PrudentHttpEntityResolver.ResourceNotRetrievableException;
import nu.validator.xml.SystemErrErrorHandler;
import nu.validator.xml.TypedInputSource;
import nu.validator.xml.WiretapXMLReaderWrapper;
import nu.validator.xml.XhtmlSaxEmitter;
import nu.validator.xml.customelements.NamespaceChangingSchemaWrapper;
import nu.validator.xml.templateelement.TemplateElementDroppingSchemaWrapper;
import nu.validator.xml.dataattributes.DataAttributeDroppingSchemaWrapper;
import nu.validator.xml.langattributes.XmlLangAttributeDroppingSchemaWrapper;
import nu.validator.xml.roleattributes.RoleAttributeFilteringSchemaWrapper;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import com.thaiopensource.relaxng.impl.CombineValidator;
import com.thaiopensource.util.PropertyMap;
import com.thaiopensource.util.PropertyMapBuilder;
import com.thaiopensource.validate.IncorrectSchemaException;
import com.thaiopensource.validate.Schema;
import com.thaiopensource.validate.SchemaReader;
import com.thaiopensource.validate.SchemaResolver;
import com.thaiopensource.validate.ValidateProperty;
import com.thaiopensource.validate.Validator;
import com.thaiopensource.validate.auto.AutoSchemaReader;
import com.thaiopensource.validate.prop.rng.RngProperty;
import com.thaiopensource.validate.prop.wrap.WrapProperty;
import com.thaiopensource.validate.rng.CompactSchemaReader;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.log4j.Logger;
import com.ibm.icu.text.Normalizer;
/**
* @version $Id: VerifierServletTransaction.java,v 1.10 2005/07/24 07:32:48
* hsivonen Exp $
* @author hsivonen
*/
class VerifierServletTransaction implements DocumentModeHandler, SchemaResolver {
/**
 * Result serialization formats selectable via the "out" request
 * parameter. Only HTML, XHTML, TEXT, XML, JSON and GNU are produced by
 * the request parsing visible in service(); the remaining constants
 * appear to be legacy values — TODO confirm against the rest of the file.
 */
private enum OutputFormat {
HTML, XHTML, TEXT, XML, JSON, RELAXED, SOAP, UNICORN, GNU
}
// Class-wide logger.
private static final Logger log4j = Logger.getLogger(VerifierServletTransaction.class);

// Splits whitespace-separated lists (e.g. space-separated schema URL lists).
private static final Pattern SPACE = Pattern.compile("\\s+");

// Matches a syntactically valid ECMAScript identifier. Together with
// JS_RESERVED_WORDS this is presumably used to vet the JSONP "callback"
// request parameter — the use site is outside this excerpt; TODO confirm.
private static final Pattern JS_IDENTIFIER = Pattern.compile("[\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$][\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}]*");

// ECMAScript reserved words (an identifier matching JS_IDENTIFIER is
// still invalid if it is one of these).
private static final String[] JS_RESERVED_WORDS = { "abstract", "boolean",
"break", "byte", "case", "catch", "char", "class", "const",
"continue", "debugger", "default", "delete", "do", "double",
"else", "enum", "export", "extends", "final", "finally", "float",
"for", "function", "goto", "if", "implements", "import", "in",
"instanceof", "int", "interface", "long", "native", "new",
"package", "private", "protected", "public", "return", "short",
"static", "super", "switch", "synchronized", "this", "throw",
"throws", "transient", "try", "typeof", "var", "void", "volatile",
"while", "with" };

// Character encodings offered for override; CHARSET_DESCRIPTIONS below
// is a parallel array of human-readable labels (same order).
private static final String[] CHARSETS = { "UTF-8", "UTF-16",
"Windows-1250", "Windows-1251", "Windows-1252", "Windows-1253",
"Windows-1254", "Windows-1255", "Windows-1256", "Windows-1257",
"Windows-1258", "ISO-8859-1", "ISO-8859-2", "ISO-8859-3",
"ISO-8859-4", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7",
"ISO-8859-8", "ISO-8859-9", "ISO-8859-13", "ISO-8859-15", "KOI8-R",
"TIS-620", "GBK", "GB18030", "Big5", "Big5-HKSCS", "Shift_JIS",
"ISO-2022-JP", "EUC-JP", "ISO-2022-KR", "EUC-KR" };

// CHARSET_DESCRIPTIONS[i] describes CHARSETS[i]; kept as char[]
// presumably for direct SAX character output — TODO confirm.
private static final char[][] CHARSET_DESCRIPTIONS = {
"UTF-8 (Global)".toCharArray(), "UTF-16 (Global)".toCharArray(),
"Windows-1250 (Central European)".toCharArray(),
"Windows-1251 (Cyrillic)".toCharArray(),
"Windows-1252 (Western)".toCharArray(),
"Windows-1253 (Greek)".toCharArray(),
"Windows-1254 (Turkish)".toCharArray(),
"Windows-1255 (Hebrew)".toCharArray(),
"Windows-1256 (Arabic)".toCharArray(),
"Windows-1257 (Baltic)".toCharArray(),
"Windows-1258 (Vietnamese)".toCharArray(),
"ISO-8859-1 (Western)".toCharArray(),
"ISO-8859-2 (Central European)".toCharArray(),
"ISO-8859-3 (South European)".toCharArray(),
"ISO-8859-4 (Baltic)".toCharArray(),
"ISO-8859-5 (Cyrillic)".toCharArray(),
"ISO-8859-6 (Arabic)".toCharArray(),
"ISO-8859-7 (Greek)".toCharArray(),
"ISO-8859-8 (Hebrew)".toCharArray(),
"ISO-8859-9 (Turkish)".toCharArray(),
"ISO-8859-13 (Baltic)".toCharArray(),
"ISO-8859-15 (Western)".toCharArray(),
"KOI8-R (Russian)".toCharArray(), "TIS-620 (Thai)".toCharArray(),
"GBK (Chinese, simplified)".toCharArray(),
"GB18030 (Chinese, simplified)".toCharArray(),
"Big5 (Chinese, traditional)".toCharArray(),
"Big5-HKSCS (Chinese, traditional)".toCharArray(),
"Shift_JIS (Japanese)".toCharArray(),
"ISO-2022-JP (Japanese)".toCharArray(),
"EUC-JP (Japanese)".toCharArray(),
"ISO-2022-KR (Korean)".toCharArray(),
"EUC-KR (Korean)".toCharArray() };

// Numeric schema identifiers; they appear to correspond to the doctype
// numbers read from the presets file in the static initializer — TODO
// confirm the exact mapping.
protected static final int HTML5_SCHEMA = 3;

protected static final int XHTML1STRICT_SCHEMA = 2;

protected static final int XHTML1TRANSITIONAL_SCHEMA = 1;

protected static final int XHTML5_SCHEMA = 7;

// UI text fragments. SERVICE_TITLE, VERSION, RESULTS_TITLE and
// USER_AGENT are assigned in the static initializer from system
// properties / misc.properties.
private static final char[] SERVICE_TITLE;

private static final char[] LIVING_VERSION = "Living Validator".toCharArray();

private static final char[] VERSION;

private static final char[] RESULTS_TITLE;

private static final char[] FOR = " for ".toCharArray();

private static final char[] ABOUT_THIS_SERVICE = "About this Service".toCharArray();

private static final char[] SIMPLE_UI = "Simplified Interface".toCharArray();

// Wrapper emitted around style-sheet text so that CSS can be checked by
// embedding it in a minimal HTML document.
private static final byte[] CSS_CHECKING_PROLOG = //
"<!DOCTYPE html><title>s</title><style>\n".getBytes();

private static final byte[] CSS_CHECKING_EPILOG = "\n</style>".getBytes();

private static final String USER_AGENT;

// Parsed HTML5 spec, loaded once in the static initializer.
private static Spec html5spec;

// Preset table (one entry per line of the presets file): doctype number,
// label, schema URL list and root namespace ("-" becomes null).
private static int[] presetDoctypes;

private static String[] presetLabels;

private static String[] presetUrls;

private static String[] presetNamespaces;

// XXX SVG!!!

// XML content types with a known root namespace;
// NAMESPACES_FOR_KNOWN_CONTENT_TYPES[i] pairs with
// KNOWN_CONTENT_TYPES[i].
private static final String[] KNOWN_CONTENT_TYPES = {
"application/atom+xml", "application/docbook+xml",
"application/xhtml+xml", "application/xv+xml", "image/svg+xml" };

private static final String[] NAMESPACES_FOR_KNOWN_CONTENT_TYPES = {
"http://www.w3.org/2005/Atom", "http://docbook.org/ns/docbook",
"http://www.w3.org/1999/xhtml", "http://www.w3.org/1999/xhtml",
"http://www.w3.org/2000/svg" };

// Individual checker pseudo-schemas; see isCheckerUrl for the aggregate
// URLs that expand to these.
private static final String[] ALL_CHECKERS = {
"http://c.validator.nu/table/", "http://c.validator.nu/nfc/",
"http://c.validator.nu/text-content/",
"http://c.validator.nu/unchecked/",
"http://c.validator.nu/usemap/", "http://c.validator.nu/obsolete/",
"http://c.validator.nu/xml-pi/", "http://c.validator.nu/unsupported/",
"http://c.validator.nu/microdata/" };

private static final String[] ALL_CHECKERS_HTML4 = {
"http://c.validator.nu/table/", "http://c.validator.nu/nfc/",
"http://c.validator.nu/unchecked/", "http://c.validator.nu/usemap/" };
// Wall-clock start of this transaction.
private long start = System.currentTimeMillis();

// The HTTP exchange this transaction serves.
protected final HttpServletRequest request;

private final HttpServletResponse response;

// Address of the document to check, resolved in service() from the
// Content-Location header or the "doc"/"file" parameters; null if absent.
protected String document = null;

private ParserMode parser = ParserMode.AUTO;

private String profile = "";

// Content-type strictness / doctype-related flags.
private boolean laxType = false;

private boolean aboutLegacyCompat = false;

private boolean xhtml1Doctype = false;

private boolean html4Doctype = false;

// SAX output plumbing for the chosen result format.
protected ContentHandler contentHandler;

protected XhtmlSaxEmitter emitter;

protected MessageEmitterAdapter errorHandler;

protected final AttributesImpl attrs = new AttributesImpl();

private OutputStream out;

private PropertyMap jingPropertyMap;

protected LocalCacheEntityResolver entityResolver;

private static long lastModified;

// Preloaded schemas keyed by URL; both arrays are filled in the same
// order by the static initializer.
private static String[] preloadedSchemaUrls;

private static Schema[] preloadedSchemas;

// Canned messages reported when error recovery is impossible.
private final static String cannotRecover = "Cannot recover after last"
+ " error. Any further errors will be ignored.";

private final static String changingEncoding = "Changing encoding at this"
+ " point would need non-streamable behavior.";

// Whitespace-separated list of document-address substrings that are
// rejected with HTTP 429 (see service()).
private final static String[] DENY_LIST = System.getProperty(
"nu.validator.servlet.deny-list", "").split("\\s+");

private final static String ABOUT_PAGE = System.getProperty(
"nu.validator.servlet.about-page", "https://about.validator.nu/");

private final static String HTML5_FACET = (VerifierServlet.HTML5_HOST.isEmpty() ? "" : ("//" + VerifierServlet.HTML5_HOST)) + VerifierServlet.HTML5_PATH;

// Locations of the static UI assets, overridable via system properties.
private final static String STYLE_SHEET = System.getProperty(
"nu.validator.servlet.style-sheet",
"style.css");

private final static String ICON = System.getProperty(
"nu.validator.servlet.icon",
"icon.png");

private final static String SCRIPT = System.getProperty(
"nu.validator.servlet.script",
"script.js");
// Maximum number of bytes of document input accepted (default 2 MiB),
// overridable via the nu.validator.servlet.max-file-size property.
// Parsed with Long.parseLong — the field is a long, and the previous
// Integer.parseInt made any configured value above Integer.MAX_VALUE
// throw NumberFormatException during class initialization.
private static final long SIZE_LIMIT = Long.parseLong(System.getProperty(
        "nu.validator.servlet.max-file-size", "2097152"));
// Regex alternation of message filters, built from FILTER_FILE in the
// static initializer; empty string means no filtering.
private static String systemFilterString = "";

private final static String FILTER_FILE = System.getProperty(
"nu.validator.servlet.filterfile", "resources/message-filters.txt");

// Space-separated schema URLs selected for this transaction.
protected String schemaUrls = null;

protected Validator validator = null;

private BufferingRootNamespaceSniffer bufferingRootNamespaceSniffer = null;

private String contentType = null;

// Exactly one of these parsers is used depending on the parser mode.
protected HtmlParser htmlParser = null;

protected SAXDriver xmlParser = null;

protected XMLReader reader;

protected TypedInputSource documentInput;

// Entity resolvers for http(s): and data: document addresses.
protected PrudentHttpEntityResolver httpRes;

protected DataUriEntityResolver dataRes;

protected ContentTypeParser contentTypeParser;

// Guards against loading the same validator URL twice.
private Set<String> loadedValidatorUrls = new HashSet<>();

private boolean checkNormalization = false;

private boolean rootNamespaceSeen = false;

private OutputFormat outputFormat;

// Content-Type of the POST body, if the request was a POST.
private String postContentType;

// True for GET/HEAD requests; set at the top of service().
private boolean methodIsGet;

private SourceCode sourceCode = new SourceCode();

// Document outline data gathered during parsing.
private Deque<Section> outline;

private Deque<Section> headingOutline;

// UI option flags.
private boolean showSource;

private boolean showOutline;

private boolean checkErrorPages;

private boolean schemaIsDefault;

private String userAgent;

private BaseUriTracker baseUriTracker = null;

private String charsetOverride = null;

// LinkedHashSet: insertion order is kept for UI stability.
private Set<String> filteredNamespaces = new LinkedHashSet<>();

private LexicalHandler lexicalHandler;

protected ImageCollector imageCollector;

// True when the user supplied an external schema / Schematron schema.
private boolean externalSchema = false;

private boolean externalSchematron = false;

private String schemaListForStats = null;
// One-time setup: reads the presets file and misc.properties, preloads
// and wraps all schemas, parses the HTML5 spec, loads the optional
// message-filter file and initializes the language detector. Any
// failure is fatal (rethrown as RuntimeException).
static {
try {
log4j.debug("Starting static initializer.");
lastModified = 0;
// The presets file is tab-separated: doctype number, namespace,
// label, schema URL list — one preset per line; a blank line ends it.
BufferedReader r = new BufferedReader(new InputStreamReader(LocalCacheEntityResolver.getPresetsAsStream(), "UTF-8"));
String line;
List<String> doctypes = new LinkedList<>();
List<String> namespaces = new LinkedList<>();
List<String> labels = new LinkedList<>();
List<String> urls = new LinkedList<>();
Properties props = new Properties();
log4j.debug("Reading miscellaneous properties.");
props.load(VerifierServlet.class.getClassLoader().getResourceAsStream(
"nu/validator/localentities/files/misc.properties"));
// Branding strings: system property wins, then misc.properties,
// then the hard-coded default.
SERVICE_TITLE = (System.getProperty(
"nu.validator.servlet.service-name",
props.getProperty("nu.validator.servlet.service-name",
"Validator.nu")) + " ").toCharArray();
RESULTS_TITLE = (System.getProperty(
"nu.validator.servlet.results-title", props.getProperty(
"nu.validator.servlet.results-title",
"Validation results"))).toCharArray();
VERSION = (System.getProperty("nu.validator.servlet.version",
props.getProperty("nu.validator.servlet.version",
"Living Validator"))).toCharArray();
USER_AGENT = (System.getProperty("nu.validator.servlet.user-agent",
props.getProperty("nu.validator.servlet.user-agent",
"Validator.nu/LV")));
log4j.debug("Starting to loop over config file lines.")
@SuppressWarnings("deprecation")
protected static String scrub(CharSequence s) {
return Normalizer.normalize(
CharacterUtil.prudentlyScrubCharacterData(s), Normalizer.NFC);
}
private static boolean isDataAttributeDroppingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isXmlLangAllowingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isRoleAttributeFilteringSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isTemplateElementDroppingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isCustomElementNamespaceChangingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isCheckerUrl(String url) {
if ("http://c.validator.nu/all/".equals(url)
|| "http://hsivonen.iki.fi/checkers/all/".equals(url)) {
return true;
} else if ("http://c.validator.nu/all-html4/".equals(url)
|| "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) {
return true;
} else if ("http://c.validator.nu/base/".equals(url)) {
return true;
} else if ("http://c.validator.nu/rdfalite/".equals(url)) {
return true;
}
for (String checker : ALL_CHECKERS) {
if (checker.equals(url)) {
return true;
}
}
return false;
}
/**
* @param request
* @param response
*/
VerifierServletTransaction(HttpServletRequest request,
HttpServletResponse response) {
this.request = request;
this.response = response;
}
protected boolean willValidate() {
if (methodIsGet) {
return document != null;
} else { // POST
return true;
}
}
void service() throws ServletException, IOException {
this.methodIsGet = "GET".equals(request.getMethod())
|| "HEAD".equals(request.getMethod());
this.out = response.getOutputStream();
try {
request.setCharacterEncoding("utf-8");
} catch (NoSuchMethodError e) {
log4j.debug("Vintage Servlet API doesn't support setCharacterEncoding().", e);
}
if (!methodIsGet) {
postContentType = request.getContentType();
if (postContentType == null) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Content-Type missing");
return;
} else if (postContentType.trim().toLowerCase().startsWith(
"application/x-www-form-urlencoded")) {
response.sendError(
HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE,
"application/x-www-form-urlencoded not supported. Please use multipart/form-data.");
return;
}
}
String outFormat = request.getParameter("out");
if (outFormat == null) {
outputFormat = OutputFormat.HTML;
} else {
if ("html".equals(outFormat)) {
outputFormat = OutputFormat.HTML;
} else if ("xhtml".equals(outFormat)) {
outputFormat = OutputFormat.XHTML;
} else if ("text".equals(outFormat)) {
outputFormat = OutputFormat.TEXT;
} else if ("gnu".equals(outFormat)) {
outputFormat = OutputFormat.GNU;
} else if ("xml".equals(outFormat)) {
outputFormat = OutputFormat.XML;
} else if ("json".equals(outFormat)) {
outputFormat = OutputFormat.JSON;
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Unsupported output format");
return;
}
}
if (!methodIsGet) {
document = request.getHeader("Content-Location");
}
if (document == null) {
document = request.getParameter("doc");
}
if (document == null) {
document = request.getParameter("file");
}
document = ("".equals(document)) ? null : document;
if (document != null) {
for (String domain : DENY_LIST) {
if (!"".equals(domain) && document.contains(domain)) {
response.sendError(429, "Too many requests");
return;
}
}
}
String callback = null;
if (outputFormat == OutputFormat.JSON) {
callback = request.getParameter("callback");
if (callback != null) {
Matcher m = JS_IDENTIFIER.matcher(callback);
if (m.matches()) {
if (Arrays.binarySearch(JS_RESERVED_WORDS, callback) >= 0) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Callback is a reserved word.");
return;
}
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Callback is not a valid ECMA 262 IdentifierName.");
return;
}
}
}
if (willValidate()) {
response.setDateHeader("Expires", 0);
response.setHeader("Cache-Control", "no-cache");
} else if (outputFormat == OutputFormat.HTML
|| outputFormat == OutputFormat.XHTML) {
response.setDateHeader("Last-Modified", lastModified);
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"No input document");
return;
}
setup();
String filterString = systemFilterString;
String filterPatternParam = request.getParameter("filterpattern");
if (filterPatternParam != null && !"".equals(filterPatternParam)) {
if ("".equals(filterString)) {
filterString = scrub(filterPatternParam);
} else {
filterString += "|" + scrub(filterPatternParam);
}
}
String filterUrl = request.getParameter("filterurl");
if (filterUrl != null && !"".equals(filterUrl)) {
try {
InputSource filterFile = //
(new PrudentHttpEntityResolver(-1, true, null)) //
.resolveEntity(null, filterUrl);
StringBuilder sb = new StringBuilder();
BufferedReader reader = //
new BufferedReader(new InputStreamReader(
filterFile.getByteStream()));
String line;
String pipe = "";
while ((line = reader.readLine()) != null) {
if (line.startsWith("#")) {
continue;
}
sb.append(pipe);
sb.append(line);
pipe = "|";
}
if (sb.length() != 0) {
if (!"".equals(filterString)) {
filterString = scrub(sb.toString());
} else {
filterString += "|" + scrub(sb.toString());
}
}
} catch (Exception e) {
response.sendError(500, e.getMessage());
}
}
Pattern filterPattern = null;
if (!"".equals(filterString)) {
filterPattern = Pattern.compile(filterString);
}
if (request.getParameter("useragent") != null) {
userAgent = scrub(request.getParameter("useragent"));
} else {
userAgent = USER_AGENT;
}
if (request.getParameter("acceptlanguage") != null) {
request.setAttribute(
"http://validator.nu/properties/accept-language",
scrub(request.getParameter("acceptlanguage")));
}
Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
showSource = (request.getParameter("showsource") != null);
showSource = (showSource || "textarea".equals(inputType));
showOutline = (request.getParameter("showoutline") != null);
if (request.getParameter("checkerrorpages") != null) {
request.setAttribute(
"http://validator.nu/properties/ignore-response-status",
true);
}
if (request.getParameter("showimagereport") != null) {
imageCollector = new ImageCollector(sourceCode);
}
String charset = request.getParameter("charset");
if (charset != null) {
charset = scrub(charset.trim());
if (!"".equals(charset)) {
charsetOverride = charset;
}
}
String nsfilter = request.getParameter("nsfilter");
if (nsfilter != null) {
for (String ns : SPACE.split(nsfilter)) {
if (ns.length() > 0) {
filteredNamespaces.add(ns);
}
}
}
boolean errorsOnly = ("error".equals(request.getParameter("level")));
boolean asciiQuotes = (request.getParameter("asciiquotes") != null);
int lineOffset = 0;
String lineOffsetStr = request.getParameter("lineoffset");
if (lineOffsetStr != null) {
try {
lineOffset = Integer.parseInt(lineOffsetStr);
} catch (NumberFormatException e) {
}
}
try {
if (outputFormat == OutputFormat.HTML
|| outputFormat == OutputFormat.XHTML) {
if (outputFormat == OutputFormat.HTML) {
response.setContentType("text/html; charset=utf-8");
contentHandler = new HtmlSerializer(out);
} else {
response.setContentType("application/xhtml+xml");
contentHandler =
new XmlSerializer(out);
}
emitter = new XhtmlSaxEmitter(contentHandler);
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, imageCollector, lineOffset,
false, new XhtmlMessageEmitter(contentHandler));
PageEmitter.emit(contentHandler, this);
} else {
if (outputFormat == OutputFormat.TEXT) {
response.setContentType("text/plain; charset=utf-8");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new TextMessageEmitter(out, asciiQuotes));
} else if (outputFormat == OutputFormat.GNU) {
response.setContentType("text/plain; charset=utf-8");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new GnuMessageEmitter(out, asciiQuotes));
} else if (outputFormat == OutputFormat.XML) {
response.setContentType("application/xml");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new XmlMessageEmitter(new XmlSerializer(out)));
} else if (outputFormat == OutputFormat.JSON) {
if (callback == null) {
response.setContentType("application/json; charset=utf-8");
} else {
response.setContentType("application/javascript; charset=utf-8");
}
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new JsonMessageEmitter(
new nu.validator.json.Serializer(out),
callback));
} else {
throw new RuntimeException("Unreachable.");
}
errorHandler.setErrorsOnly(errorsOnly);
validate();
}
} catch (SAXException e) {
log4j.debug("SAXException: " + e.getMessage());
}
}
/**
* @throws ServletException
*/
protected void setup() throws ServletException {
String preset = request.getParameter("preset");
if (preset != null && !"".equals(preset)) {
schemaUrls = preset;
} else {
schemaUrls = request.getParameter("schema");
}
if (schemaUrls == null) {
schemaUrls = "";
}
String parserStr = request.getParameter("parser");
if ("html".equals(parserStr)) {
parser = ParserMode.HTML;
} else if ("xmldtd".equals(parserStr)) {
parser = ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION;
} else if ("xml".equals(parserStr)) {
parser = ParserMode.XML_NO_EXTERNAL_ENTITIES;
} else if ("html5".equals(parserStr)) {
parser = ParserMode.HTML;
} // else auto
laxType = (request.getParameter("laxtype") != null);
}
private boolean useXhtml5Schema() {
if ("".equals(schemaUrls)) {
return false;
}
return (schemaUrls.contains("http://s.validator.nu/xhtml5.rnc")
|| schemaUrls.contains("http://s.validator.nu/xhtml5-all.rnc")
|| schemaUrls.contains("http://s.validator.nu/xhtml5-its.rnc")
|| schemaUrls.contains(
"http://s.validator.nu/xhtml5-rdfalite.rnc"));
}
private boolean isHtmlUnsafePreset() {
if ("".equals(schemaUrls)) {
return false;
}
boolean preset = false;
for (String presetUrl : presetUrls) {
if (presetUrl.equals(schemaUrls)) {
preset = true;
break;
}
}
if (!preset) {
return false;
}
return !(schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-basic.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-strict.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-transitional.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-frameset.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-all.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-its.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-rdfalite.rnc"));
}
    /**
     * Runs the actual validation pass: sets up the entity resolvers and
     * Jing property map, loads the document and parser, wires the wiretap
     * and wrapper readers, parses the input, and reports every failure
     * mode through {@code errorHandler} so that a report is still emitted.
     * For HTML/XHTML output, also emits the outline, details, and stats
     * sections afterwards.
     *
     * @throws SAXException if emitting the report itself fails
     */
    @SuppressWarnings({ "deprecation", "unchecked" }) void validate() throws SAXException {
        if (!willValidate()) {
            return;
        }
        boolean isHtmlOrXhtml = (outputFormat == OutputFormat.HTML || outputFormat == OutputFormat.XHTML);
        if (isHtmlOrXhtml) {
            // Push out the page shell emitted so far before doing any work.
            try {
                out.flush();
            } catch (IOException e1) {
                throw new SAXException(e1);
            }
        }
        // Resolver chain: HTTP -> data: URIs -> local cache.
        httpRes = new PrudentHttpEntityResolver(SIZE_LIMIT, laxType,
                errorHandler, request);
        httpRes.setUserAgent(userAgent);
        dataRes = new DataUriEntityResolver(httpRes, laxType, errorHandler);
        contentTypeParser = new ContentTypeParser(errorHandler, laxType);
        entityResolver = new LocalCacheEntityResolver(dataRes);
        setAllowRnc(true);
        setAllowCss(true);
        try {
            this.errorHandler.start(document);
            // Jing validation properties shared by all schemas.
            PropertyMapBuilder pmb = new PropertyMapBuilder();
            pmb.put(ValidateProperty.ERROR_HANDLER, errorHandler);
            pmb.put(ValidateProperty.ENTITY_RESOLVER, entityResolver);
            pmb.put(ValidateProperty.XML_READER_CREATOR,
                    new VerifierServletXMLReaderCreator(errorHandler,
                            entityResolver));
            pmb.put(ValidateProperty.SCHEMA_RESOLVER, this);
            RngProperty.CHECK_ID_IDREF.add(pmb);
            jingPropertyMap = pmb.toPropertyMap();
            tryToSetupValidator();
            // RNC is only allowed while resolving schemas, not documents.
            setAllowRnc(false);
            loadDocAndSetupParser();
            setErrorProfile();
            contentType = documentInput.getType();
            if ("text/css".equals(contentType)) {
                // Wrap the stylesheet in an HTML prolog/epilog so it can be
                // checked through the HTML parser; keep its encoding.
                String charset = "UTF-8";
                if (documentInput.getEncoding() != null) {
                    charset = documentInput.getEncoding();
                }
                List<InputStream> streams = new ArrayList<>();
                streams.add(new ByteArrayInputStream(CSS_CHECKING_PROLOG));
                streams.add(documentInput.getByteStream());
                streams.add(new ByteArrayInputStream(CSS_CHECKING_EPILOG));
                Enumeration<InputStream> e = Collections.enumeration(streams);
                documentInput.setByteStream(new SequenceInputStream(e));
                documentInput.setEncoding(charset);
                // Compensate for the injected prolog line.
                errorHandler.setLineOffset(-1);
                sourceCode.setIsCss();
                parser = ParserMode.HTML;
                loadDocAndSetupParser();
            }
            reader.setErrorHandler(errorHandler);
            sourceCode.initialize(documentInput);
            if (validator == null) {
                checkNormalization = true;
            }
            if (checkNormalization) {
                reader.setFeature(
                        "http://xml.org/sax/features/unicode-normalization-checking",
                        true);
            }
            // Wiretap feeds the location recorder (and base-URI tracker,
            // when present) without disturbing the main parse.
            WiretapXMLReaderWrapper wiretap = new WiretapXMLReaderWrapper(
                    reader);
            ContentHandler recorder = sourceCode.getLocationRecorder();
            if (baseUriTracker == null) {
                wiretap.setWiretapContentHander(recorder);
            } else {
                wiretap.setWiretapContentHander(new CombineContentHandler(
                        recorder, baseUriTracker));
            }
            wiretap.setWiretapLexicalHandler((LexicalHandler) recorder);
            reader = wiretap;
            if (htmlParser != null) {
                htmlParser.addCharacterHandler(sourceCode);
                htmlParser.setMappingLangToXmlLang(true);
                htmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
                htmlParser.setTreeBuilderErrorHandlerOverride(errorHandler);
                errorHandler.setHtml(true);
            } else if (xmlParser != null) {
                // this must be after wiretap!
                if (!filteredNamespaces.isEmpty()) {
                    reader = new NamespaceDroppingXMLReaderWrapper(reader,
                            filteredNamespaces);
                }
                xmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
                xmlParser.lockErrorHandler();
            } else {
                throw new RuntimeException("Bug. Unreachable.");
            }
            reader = new AttributesPermutingXMLReaderWrapper(reader); // make
                                                                     // RNG
                                                                     // validation
                                                                     // better
            if (charsetOverride != null) {
                // Report the override as a warning, then apply it.
                String charset = documentInput.getEncoding();
                if (charset == null) {
                    errorHandler.warning(new SAXParseException(
                            "Overriding document character encoding from none to \u201C"
                            + charsetOverride + "\u201D.", null));
                } else {
                    errorHandler.warning(new SAXParseException(
                            "Overriding document character encoding from \u201C"
                            + charset + "\u201D to \u201C"
                            + charsetOverride + "\u201D.", null));
                }
                documentInput.setEncoding(charsetOverride);
            }
            if (showOutline) {
                reader = new OutlineBuildingXMLReaderWrapper(reader, request, false);
                reader = new OutlineBuildingXMLReaderWrapper(reader, request, true);
            }
            reader.parse(documentInput);
            if (showOutline) {
                outline = (Deque<Section>) request.getAttribute(
                        "http://validator.nu/properties/document-outline");
                headingOutline = (Deque<Section>) request.getAttribute(
                        "http://validator.nu/properties/heading-outline");
            }
        } catch (CannotFindPresetSchemaException e) {
            // Intentionally swallowed: already reported when raised.
        } catch (ResourceNotRetrievableException e) {
            log4j.debug(e.getMessage());
        } catch (NonXmlContentTypeException e) {
            log4j.debug(e.getMessage());
        } catch (FatalSAXException e) {
            log4j.debug(e.getMessage());
        } catch (SocketTimeoutException e) {
            errorHandler.ioError(new IOException(e.getMessage(), null));
        } catch (ConnectTimeoutException e) {
            errorHandler.ioError(new IOException(e.getMessage(), null));
        } catch (TooManyErrorsException e) {
            errorHandler.fatalError(e);
        } catch (SAXException e) {
            // Expected aborts (encoding change, unrecoverable parse) are
            // not logged; anything else is logged at debug level.
            String msg = e.getMessage();
            if (!cannotRecover.equals(msg) && !changingEncoding.equals(msg)) {
                log4j.debug("SAXException: " + e.getMessage());
            }
        } catch (IOException e) {
            isHtmlOrXhtml = false;
            errorHandler.ioError(e);
        } catch (IncorrectSchemaException e) {
            log4j.debug("IncorrectSchemaException", e);
            errorHandler.schemaError(e);
        } catch (RuntimeException e) {
            isHtmlOrXhtml = false;
            log4j.error("RuntimeException, doc: " + document + " schema: "
                    + schemaUrls + " lax: " + laxType, e);
            errorHandler.internalError(
                    e,
                    "Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
        } catch (Error e) {
            isHtmlOrXhtml = false;
            log4j.error("Error, doc: " + document + " schema: " + schemaUrls
                    + " lax: " + laxType, e);
            errorHandler.internalError(
                    e,
                    "Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
        } finally {
            // Always close the report and record usage statistics.
            errorHandler.end(successMessage(), failureMessage(),
                    (String) request.getAttribute(
                            "http://validator.nu/properties/document-language"));
            gatherStatistics();
        }
        if (isHtmlOrXhtml) {
            XhtmlOutlineEmitter outlineEmitter = new XhtmlOutlineEmitter(
                    contentHandler, outline, headingOutline);
            outlineEmitter.emitHeadings();
            outlineEmitter.emit();
            emitDetails();
            StatsEmitter.emit(contentHandler, this);
        }
    }
private void gatherStatistics() {
Statistics stats = Statistics.STATISTICS;
if (stats == null) {
return;
}
synchronized (stats) {
stats.incrementTotal();
if (charsetOverride != null) {
stats.incrementField(Statistics.Field.CUSTOM_ENC);
}
switch (parser) {
case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
stats.incrementField(Statistics.Field.PARSER_XML_EXTERNAL);
break;
case AUTO:
case HTML:
case XML_NO_EXTERNAL_ENTITIES:
default:
break;
}
if (!filteredNamespaces.isEmpty()) {
stats.incrementField(Statistics.Field.XMLNS_FILTER);
}
if (laxType) {
stats.incrementField(Statistics.Field.LAX_TYPE);
}
if (aboutLegacyCompat) {
stats.incrementField(Statistics.Field.ABOUT_LEGACY_COMPAT);
}
if (xhtml1Doctype) {
stats.incrementField(Statistics.Field.XHTML1_DOCTYPE);
}
if (html4Doctype) {
stats.incrementField(Statistics.Field.HTML4_DOCTYPE);
}
if (imageCollector != null) {
stats.incrementField(Statistics.Field.IMAGE_REPORT);
}
if (showSource) {
stats.incrementField(Statistics.Field.SHOW_SOURCE);
}
if (showOutline) {
stats.incrementField(Statistics.Field.SHOW_OUTLINE);
}
if (methodIsGet) {
stats.incrementField(Statistics.Field.INPUT_GET);
} else { // POST
stats.incrementField(Statistics.Field.INPUT_POST);
Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
if ("textarea".equals(inputType)) {
stats.incrementField(Statistics.Field.INPUT_TEXT_FIELD);
} else if ("file".equals(inputType)) {
stats.incrementField(Statistics.Field.INPUT_FILE_UPLOAD);
} else {
stats.incrementField(Statistics.Field.INPUT_ENTITY_BODY);
}
}
if (documentInput != null
&& "text/css".equals(documentInput.getType())) {
stats.incrementField(Statistics.Field.INPUT_CSS);
} else if (htmlParser != null) {
stats.incrementField(Statistics.Field.INPUT_HTML);
} else if (xmlParser != null) {
stats.incrementField(Statistics.Field.INPUT_XML);
} else {
stats.incrementField(Statistics.Field.INPUT_UNSUPPORTED);
}
switch (outputFormat) {
case GNU:
stats.incrementField(Statistics.Field.OUTPUT_GNU);
break;
case HTML:
stats.incrementField(Statistics.Field.OUTPUT_HTML);
break;
case JSON:
stats.incrementField(Statistics.Field.OUTPUT_JSON);
break;
case TEXT:
stats.incrementField(Statistics.Field.OUTPUT_TEXT);
break;
case XHTML:
stats.incrementField(Statistics.Field.OUTPUT_XHTML);
break;
case XML:
stats.incrementField(Statistics.Field.OUTPUT_XML);
break;
case RELAXED:
case SOAP:
case UNICORN:
default:
break;
}
if (schemaListForStats == null) {
stats.incrementField(Statistics.Field.LOGIC_ERROR);
} else {
boolean preset = false;
for (int i = 0; i < presetUrls.length; i++) {
if (presetUrls[i].equals(schemaListForStats)) {
preset = true;
if (externalSchema || externalSchematron) {
stats.incrementField(Statistics.Field.LOGIC_ERROR);
} else {
stats.incrementField(Statistics.Field.PRESET_SCHEMA);
/*
* XXX WARNING WARNING: These mappings correspond to
* values in the presets.txt file in the validator
* source repo. They might be bogus if a custom
* presets file is used instead.
*/
switch (i) {
case 0:
case 5:
stats.incrementField(Statistics.Field.HTML5_SCHEMA);
break;
case 1:
case 6:
stats.incrementField(Statistics.Field.HTML5_RDFA_LITE_SCHEMA);
break;
case 2:
stats.incrementField(Statistics.Field.HTML4_STRICT_SCHEMA);
break;
case 3:
stats.incrementField(Statistics.Field.HTML4_TRANSITIONAL_SCHEMA);
break;
case 4:
stats.incrementField(Statistics.Field.HTML4_FRAMESET_SCHEMA);
break;
case 7:
stats.incrementField(Statistics.Field.XHTML1_COMPOUND_SCHEMA);
break;
case 8:
stats.incrementField(Statistics.Field.SVG_SCHEMA);
break;
default:
stats.incrementField(Statistics.Field.LOGIC_ERROR);
break;
}
}
break;
}
}
if (!preset && !externalSchema) {
stats.incrementField(Statistics.Field.BUILT_IN_NON_PRESET);
}
}
if ("".equals(schemaUrls)) {
stats.incrementField(Statistics.Field.AUTO_SCHEMA);
if (externalSchema) {
stats.incrementField(Statistics.Field.LOGIC_ERROR);
}
} else if (externalSchema) {
if (externalSchematron) {
stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_SCHEMATRON);
} else {
stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_NON_SCHEMATRON);
}
} else if (externalSchematron) {
stats.incrementField(Statistics.Field.LOGIC_ERROR);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-alternate-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-alternate-found")) {
stats.incrementField(Statistics.Field.REL_ALTERNATE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-author-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-author-found")) {
stats.incrementField(Statistics.Field.REL_AUTHOR_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-bookmark-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-bookmark-found")) {
stats.incrementField(Statistics.Field.REL_BOOKMARK_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-canonical-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-canonical-found")) {
stats.incrementField(Statistics.Field.REL_CANONICAL_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-dns-prefetch-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-dns-prefetch-found")) {
stats.incrementField(Statistics.Field.REL_DNS_PREFETCH_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-external-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-external-found")) {
stats.incrementField(Statistics.Field.REL_EXTERNAL_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-help-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-help-found")) {
stats.incrementField(Statistics.Field.REL_HELP_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-icon-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-icon-found")) {
stats.incrementField(Statistics.Field.REL_ICON_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-license-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-license-found")) {
stats.incrementField(Statistics.Field.REL_LICENSE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-next-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-next-found")) {
stats.incrementField(Statistics.Field.REL_NEXT_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-nofollow-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-nofollow-found")) {
stats.incrementField(Statistics.Field.REL_NOFOLLOW_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-noopener-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-noopener-found")) {
stats.incrementField(Statistics.Field.REL_NOOPENER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-noreferrer-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-noreferrer-found")) {
stats.incrementField(Statistics.Field.REL_NOREFERRER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-pingback-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-pingback-found")) {
stats.incrementField(Statistics.Field.REL_PINGBACK_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-preconnect-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-preconnect-found")) {
stats.incrementField(Statistics.Field.REL_PRECONNECT_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-prefetch-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-prefetch-found")) {
stats.incrementField(Statistics.Field.REL_PREFETCH_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-preload-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-preload-found")) {
stats.incrementField(Statistics.Field.REL_PRELOAD_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-prerender-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-prerender-found")) {
stats.incrementField(Statistics.Field.REL_PRERENDER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-prev-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-prev-found")) {
stats.incrementField(Statistics.Field.REL_PREV_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-search-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-search-found")) {
stats.incrementField(Statistics.Field.REL_SEARCH_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-serviceworker-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-serviceworker-found")) {
stats.incrementField(Statistics.Field.REL_SERVICEWORKER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-stylesheet-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-stylesheet-found")) {
stats.incrementField(Statistics.Field.REL_STYLESHEET_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/rel-tag-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/rel-tag-found")) {
stats.incrementField(Statistics.Field.REL_TAG_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/link-with-charset-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/link-with-charset-found")) {
stats.incrementField(Statistics.Field.LINK_WITH_CHARSET_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/script-with-charset-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/script-with-charset-found")) {
stats.incrementField(Statistics.Field.SCRIPT_WITH_CHARSET_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/style-in-body-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/style-in-body-found")) {
stats.incrementField(Statistics.Field.STYLE_IN_BODY_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-found")) {
stats.incrementField(Statistics.Field.MAIN_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-multiple-visible-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-multiple-visible-found")) {
stats.incrementField(Statistics.Field.MAIN_MULTIPLE_VISIBLE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-address-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-address-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_ADDRESS_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-article-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-article-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_ARTICLE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-aside-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-aside-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_ASIDE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-blockquote-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-blockquote-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_BLOCKQUOTE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-caption-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-caption-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_CAPTION_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-dd-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-dd-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_DD_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-details-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-details-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_DETAILS_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-dialog-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-dialog-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_DIALOG_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-div-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-div-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_DIV_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-dt-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-dt-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_DT_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-fieldset-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-fieldset-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_FIELDSET_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-figcaption-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-figcaption-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_FIGCAPTION_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-figure-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-figure-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_FIGURE_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-footer-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-footer-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_FOOTER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-form-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-form-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_FORM_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-header-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-header-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_HEADER_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-li-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-li-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_LI_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-main-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-main-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_MAIN_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-nav-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-nav-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_NAV_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-section-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-section-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_SECTION_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-td-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-td-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_TD_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/main-in-th-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/main-in-th-found")) {
stats.incrementField(Statistics.Field.MAIN_IN_TH_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/lang-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/lang-found")) {
stats.incrementField(Statistics.Field.LANG_FOUND);
}
if (request.getAttribute(
"http://validator.nu/properties/lang-wrong") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/lang-wrong")) {
stats.incrementField(Statistics.Field.LANG_WRONG);
}
if (request.getAttribute(
"http://validator.nu/properties/lang-empty") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/lang-empty")) {
stats.incrementField(Statistics.Field.LANG_EMPTY);
}
if (request.getAttribute(
"http://validator.nu/properties/apple-touch-icon-with-sizes-found") != null
&& (boolean) request.getAttribute(
"http://validator.nu/properties/apple-touch-icon-with-sizes-found")) {
stats.incrementField(Statistics.Field.APPLE_TOUCH_ICON_WITH_SIZES_FOUND);
}
String fieldName;
String language = (String) request.getAttribute(
"http://validator.nu/properties/document-language");
if (!"".equals(language) && language != null) {
fieldName = "DETECTEDLANG_" + language.toUpperCase();
if ("zh-hans".equals(language)) {
fieldName = "DETECTEDLANG_ZH_HANS";
} else if ("zh-hant".equals(language)) {
fieldName = "DETECTEDLANG_ZH_HANT";
} else if ("sr-latn".equals(language)) {
fieldName = "DETECTEDLANG_SR_LATN";
} else if ("sr-cyrl".equals(language)) {
fieldName = "DETECTEDLANG_SR_CYRL";
} else if ("uz-latn".equals(language)) {
fieldName = "DETECTEDLANG_UZ_LATN";
} else if ("uz-cyrl".equals(language)) {
fieldName = "DETECTEDLANG_UZ_CYRL";
}
try {
stats.incrementField(stats.getFieldFromName(fieldName));
} catch (IllegalArgumentException e) {
log4j.error(e.getMessage(), e);
}
}
String langVal = (String) request.getAttribute(
"http://validator.nu/properties/lang-value");
if (langVal != null) {
if ("".equals(langVal)) {
stats.incrementField(Statistics.Field.LANG_EMPTY);
} else {
if (langVal.contains("_")) {
fieldName = "LANG_"
+ langVal.replace("_", "__").toUpperCase();
} else {
fieldName = "LANG_"
+ langVal.replace("-", "_").toUpperCase();
}
try {
stats.incrementField(stats.getFieldFromName(fieldName));
} catch (IllegalArgumentException e) {
stats.incrementField(Statistics.Field.LANG_OTHER);
}
}
}
}
}
/**
 * Message shown when the document validated without errors. Overridable
 * hook; the exact wording is user-visible output and part of the UI.
 *
 * @return the success message
 * @throws SAXException declared so overriders may emit markup
 */
protected String successMessage() throws SAXException {
    return "The document validates according to the specified schema(s).";
}
/**
 * Message shown when validation produced errors.
 *
 * @return the failure message
 * @throws SAXException declared so overriders may emit markup
 */
protected String failureMessage() throws SAXException {
    return "There were errors.";
}
/**
 * Emits the "details" block of the results page for HTML documents:
 * which schema was used, that the HTML parser was used, and any
 * externally specified character encoding.
 *
 * @throws SAXException if the downstream content handler throws
 */
void emitDetails() throws SAXException {
    Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
    String type = documentInput != null ? documentInput.getType() : "";
    if ("text/html".equals(type) || "text/html-sandboxed".equals(type)) {
        attrs.clear();
        emitter.startElementWithClass("div", "details");
        if (schemaIsDefault) {
            emitter.startElementWithClass("p", "msgschema");
            emitter.characters(String.format("Used the schema for %s.",
                    getPresetLabel(HTML5_SCHEMA)));
            emitter.endElement("p");
        }
        emitter.startElementWithClass("p", "msgmediatype");
        if (!isHtmlUnsafePreset()) {
            emitter.characters("Used the HTML parser.");
        }
        if (methodIsGet && !"textarea".equals(inputType)
                && !"file".equals(inputType)) {
            // documentInput is non-null here: type is only non-empty when
            // documentInput was loaded.
            String charset = documentInput.getEncoding();
            if (charset != null) {
                emitter.characters(String.format(
                        " Externally specified character encoding was %s.", charset));
            }
        }
        // Bug fix: the "msgmediatype" paragraph was opened but never
        // closed, so "div" ended while "p" was still open — unbalanced
        // SAX start/end events and malformed output markup.
        emitter.endElement("p");
        emitter.endElement("div");
    }
}
/**
 * Builds the validator from the user-supplied, space-separated schema
 * URL list. May fetch non-preloaded schemas via the entity resolver.
 *
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
protected void tryToSetupValidator() throws SAXException, IOException,
        IncorrectSchemaException {
    validator = validatorByUrls(schemaUrls);
}
/**
 * Applies the error profile chosen via the "profile" request parameter.
 * The "pedagogical" profile warns about XHTML 1.x constructs; "polyglot"
 * additionally warns about XHTML 2 constructs. Any other value (or none)
 * is treated as the permissive default and leaves the parser untouched.
 */
protected void setErrorProfile() {
    profile = request.getParameter("profile");
    boolean pedagogical = "pedagogical".equals(profile);
    boolean polyglot = "polyglot".equals(profile);
    if (!pedagogical && !polyglot) {
        return; // presumed to be permissive
    }
    HashMap<String, String> severitiesByFacet = new HashMap<>();
    // Both non-permissive profiles flag XHTML 1.x constructs.
    severitiesByFacet.put("xhtml1", "warn");
    if (polyglot) {
        severitiesByFacet.put("xhtml2", "warn");
    }
    htmlParser.setErrorProfile(severitiesByFacet);
}
/**
 * Loads the document input and configures the parsing pipeline for the
 * requested parser mode. In AUTO mode the HTML-vs-XML decision is made
 * from the document's sniffed Content-Type after loading.
 *
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 * @throws SAXNotRecognizedException
 * @throws SAXNotSupportedException
 */
protected void loadDocAndSetupParser() throws SAXException, IOException,
        IncorrectSchemaException, SAXNotRecognizedException,
        SAXNotSupportedException {
    switch (parser) {
        case HTML:
            if (isHtmlUnsafePreset()) {
                String message = "The chosen preset schema is not appropriate for HTML.";
                SAXException se = new SAXException(message);
                errorHandler.schemaError(se);
                throw se;
            }
            // Forced-HTML mode: only text/html input is acceptable.
            setAllowGenericXml(false);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(false);
            setAllowXhtml(false);
            loadDocumentInput();
            newHtmlParser();
            int schemaId;
            schemaId = HTML5_SCHEMA;
            htmlParser.setDocumentModeHandler(this);
            reader = htmlParser;
            // Fall back to the HTML5 preset when no schema was chosen.
            if (validator == null) {
                validator = validatorByDoctype(schemaId);
            }
            if (validator != null) {
                reader.setContentHandler(validator.getContentHandler());
            }
            // Wrap for language detection and, if enabled, usage counting.
            reader = new LanguageDetectingXMLReaderWrapper(reader, request,
                    errorHandler, documentInput.getLanguage(),
                    documentInput.getSystemId());
            if (Statistics.STATISTICS != null) {
                reader = new UseCountingXMLReaderWrapper(reader, request,
                        documentInput.getSystemId());
            }
            break;
        case XML_NO_EXTERNAL_ENTITIES:
        case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
            // Forced-XML modes accept any known XML type, but not text/html.
            setAllowGenericXml(true);
            setAllowHtml(false);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            setupXmlParser();
            break;
        default:
            // AUTO: accept everything and decide by the sniffed type.
            setAllowGenericXml(true);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            String type = documentInput.getType();
            if ("text/css".equals(type)) {
                // CSS is handled elsewhere; no SAX pipeline needed.
                break;
            } else if ("text/html".equals(type) || "text/html-sandboxed".equals(type)) {
                if (isHtmlUnsafePreset()) {
                    String message = "The Content-Type was \u201C" + type + "\u201D, but the chosen preset schema is not appropriate for HTML.";
                    SAXException se = new SAXException(message);
                    errorHandler.schemaError(se);
                    throw se;
                }
                newHtmlParser();
                htmlParser.setDocumentModeHandler(this);
                reader = htmlParser;
                if (validator != null) {
                    reader.setContentHandler(validator.getContentHandler());
                }
                reader = new LanguageDetectingXMLReaderWrapper(reader,
                        request, errorHandler, documentInput.getLanguage(),
                        documentInput.getSystemId());
                if (Statistics.STATISTICS != null) {
                    reader = new UseCountingXMLReaderWrapper(reader,
                            request, documentInput.getSystemId());
                }
            } else {
                // Known XML type: inform the user which parser was picked.
                if (contentType != null) {
                    if ("application/xml".equals(contentType) ||
                            "text/xml".equals(contentType) ||
                            (Arrays.binarySearch(KNOWN_CONTENT_TYPES,
                                    contentType)) > -1) {
                        errorHandler.info("The Content-Type was \u201C"
                                + type
                                + "\u201D. Using the XML parser (not resolving external entities).");
                    }
                }
                setupXmlParser();
            }
            break;
    }
}
/**
 * Creates and configures a fresh HTML parser instance with the policies
 * this service relies on: streamability violations are fatal, non-XML
 * constructs are allowed or coerced into the infoset, lang is mapped to
 * xml:lang, and all encoding heuristics are enabled.
 */
protected void newHtmlParser() {
    HtmlParser p = new HtmlParser();
    // Setter order is immaterial; grouped by concern for readability.
    p.setStreamabilityViolationPolicy(XmlViolationPolicy.FATAL);
    p.setCommentPolicy(XmlViolationPolicy.ALLOW);
    p.setNamePolicy(XmlViolationPolicy.ALLOW);
    p.setContentNonXmlCharPolicy(XmlViolationPolicy.ALLOW);
    p.setContentSpacePolicy(XmlViolationPolicy.ALTER_INFOSET);
    p.setXmlnsPolicy(XmlViolationPolicy.ALTER_INFOSET);
    p.setMappingLangToXmlLang(true);
    p.setHeuristics(Heuristics.ALL);
    htmlParser = p;
}
/**
 * Resolves a preset schema id to a Validator by scanning the parallel
 * preset tables.
 *
 * @param schemaId preset id; 0 means "no schema"
 * @return the validator for the preset, or null when schemaId is 0
 * @throws RuntimeException if the id is not in the preset tables
 */
protected Validator validatorByDoctype(int schemaId) throws SAXException,
        IOException, IncorrectSchemaException {
    if (schemaId == 0) {
        return null;
    }
    int index = 0;
    for (int doctype : presetDoctypes) {
        if (doctype == schemaId) {
            return validatorByUrls(presetUrls[index]);
        }
        index++;
    }
    throw new RuntimeException("Doctype mappings not initialized properly.");
}
/**
 * Sets up the XML (SAX) parsing pipeline: the AElfred2 driver feeding an
 * id filter, with external-entity resolution enabled only in the
 * XML_EXTERNAL_ENTITIES_NO_VALIDATION mode. When the validator is not
 * yet known, events are buffered until the root namespace is sniffed
 * (see rootNamespace()).
 *
 * @throws SAXNotRecognizedException
 * @throws SAXNotSupportedException
 */
protected void setupXmlParser() throws SAXNotRecognizedException,
        SAXNotSupportedException {
    xmlParser = new SAXDriver();
    // The source-code view needs the characters exactly as parsed.
    xmlParser.setCharacterHandler(sourceCode);
    if (lexicalHandler != null) {
        xmlParser.setProperty("http://xml.org/sax/properties/lexical-handler",
                lexicalHandler);
    }
    reader = new IdFilter(xmlParser);
    reader.setFeature("http://xml.org/sax/features/string-interning", true);
    // External entities are fetched only when the user explicitly chose
    // the "load external entities" parser mode.
    reader.setFeature(
            "http://xml.org/sax/features/external-general-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    reader.setFeature(
            "http://xml.org/sax/features/external-parameter-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    if (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION) {
        reader.setEntityResolver(entityResolver);
    } else {
        // Resolve nothing at all when external entities are disabled.
        reader.setEntityResolver(new NullEntityResolver());
    }
    if (validator == null) {
        // Validator unknown: buffer until the root namespace is seen.
        bufferingRootNamespaceSniffer = new BufferingRootNamespaceSniffer(
                this);
        reader.setContentHandler(bufferingRootNamespaceSniffer);
    } else {
        reader.setContentHandler(new RootNamespaceSniffer(this,
                validator.getContentHandler()));
        reader.setDTDHandler(validator.getDTDHandler());
    }
    if (useXhtml5Schema()) {
        // Same wrapping as the HTML path: language detection plus
        // optional usage counting.
        reader = new LanguageDetectingXMLReaderWrapper(reader, request,
                errorHandler, documentInput.getLanguage(),
                documentInput.getSystemId());
        if (Statistics.STATISTICS != null) {
            reader = new UseCountingXMLReaderWrapper(reader, request,
                    documentInput.getSystemId());
        }
    }
}
/**
 * Builds a (possibly combined) Validator from a space-separated list of
 * schema URLs. Iterates in reverse so the first URL in the list ends up
 * outermost in the CombineValidator chain. The legacy "all checkers"
 * aggregate URLs expand to their individual checker URLs.
 *
 * @param schemaList space-separated schema URLs
 * @return the combined validator, or null for an empty list
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
private Validator validatorByUrls(String schemaList) throws SAXException,
        IOException, IncorrectSchemaException {
    // Reset; flipped to "1" below if the RDFa-full schema is requested.
    System.setProperty("nu.validator.schema.rdfa-full", "0");
    schemaListForStats = schemaList;
    Validator v = null;
    String[] schemas = SPACE.split(schemaList);
    for (int i = schemas.length - 1; i > -1; i--) {
        String url = schemas[i];
        if ("http://s.validator.nu/html5-all.rnc".equals(url)) {
            System.setProperty("nu.validator.schema.rdfa-full", "1");
        }
        if ("http://c.validator.nu/all/".equals(url)
                || "http://hsivonen.iki.fi/checkers/all/".equals(url)) {
            // Aggregate URL: expand to every known checker.
            for (String checker : ALL_CHECKERS) {
                v = combineValidatorByUrl(v, checker);
            }
        } else if ("http://c.validator.nu/all-html4/".equals(url)
                || "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) {
            for (String checker : ALL_CHECKERS_HTML4) {
                v = combineValidatorByUrl(v, checker);
            }
        } else {
            v = combineValidatorByUrl(v, url);
        }
    }
    // The image collector, when active, participates as an extra
    // "validator" so it sees the same event stream.
    if (imageCollector != null && v != null) {
        v = new CombineValidator(imageCollector, v);
    }
    return v;
}
/**
 * Combines the validator for a single schema URL into an accumulated
 * validator chain. Empty URLs are skipped.
 *
 * @param val the chain built so far (may be null)
 * @param url the schema URL to add
 * @return the updated chain
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
private Validator combineValidatorByUrl(Validator val, String url)
        throws SAXException, IOException, IncorrectSchemaException {
    if ("".equals(url)) {
        return val;
    }
    Validator loaded = validatorByUrl(url);
    // First validator starts the chain; later ones wrap around it.
    return (val == null) ? loaded : new CombineValidator(loaded, val);
}
/**
 * Creates a Validator for a single schema URL. Returns null if the URL
 * was already loaded in this transaction (deduplication). Side effects:
 * records the URL in loadedValidatorUrls; for any of the (X)HTML5
 * schemas, attaches the HTML5 spec to the error handler for spec-backed
 * elaborations; captures XML-PI checkers as the lexical handler.
 *
 * @param url schema URL
 * @return the validator, or null if this URL was already loaded
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
private Validator validatorByUrl(String url) throws SAXException,
        IOException, IncorrectSchemaException {
    if (loadedValidatorUrls.contains(url)) {
        return null;
    }
    loadedValidatorUrls.add(url);
    if ("http://s.validator.nu/xhtml5.rnc".equals(url)
            || "http://s.validator.nu/html5.rnc".equals(url)
            || "http://s.validator.nu/html5-all.rnc".equals(url)
            || "http://s.validator.nu/xhtml5-all.rnc".equals(url)
            || "http://s.validator.nu/html5-its.rnc".equals(url)
            || "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(url)
            || "http://s.validator.nu/html5-rdfalite.rnc".equals(url)) {
        errorHandler.setSpec(html5spec);
    }
    Schema sch = resolveSchema(url, jingPropertyMap);
    Validator validator = sch.createValidator(jingPropertyMap);
    if (validator.getContentHandler() instanceof XmlPiChecker) {
        // XmlPiChecker needs lexical events (comments, PIs) too.
        lexicalHandler = (LexicalHandler) validator.getContentHandler();
    }
    return validator;
}
// Resolves a schema URL, preferring the preloaded in-memory schemas and
// falling back to fetching via the entity resolver. Side effects: sets
// externalSchema for any non-preloaded schema, and externalSchematron
// when the fetched schema turns out to be Schematron.
@Override
public Schema resolveSchema(String url, PropertyMap options)
        throws SAXException, IOException, IncorrectSchemaException {
    int i = Arrays.binarySearch(preloadedSchemaUrls, url);
    if (i > -1) {
        Schema rv = preloadedSchemas[i];
        if (options.contains(WrapProperty.ATTRIBUTE_OWNER)) {
            // Attribute-schema context: checkers are not usable here,
            // but plain schemas still need re-reading with the wrap
            // option, so fall through to the fetch path below.
            if (rv instanceof CheckerSchema) {
                errorHandler.error(new SAXParseException(
                        "A non-schema checker cannot be used as an attribute schema.",
                        null, url, -1, -1));
                throw new IncorrectSchemaException();
            } else {
                // ugly fall through
            }
        } else {
            return rv;
        }
    }
    externalSchema = true;
    TypedInputSource schemaInput = (TypedInputSource) entityResolver.resolveEntity(
            null, url);
    SchemaReader sr = null;
    if ("application/relax-ng-compact-syntax".equals(schemaInput.getType())) {
        sr = CompactSchemaReader.getInstance();
    } else {
        sr = new AutoSchemaReader();
    }
    Schema sch = sr.createSchema(schemaInput, options);
    // Identified by implementation class name because the Schematron
    // Schema type is not on the compile-time classpath here.
    if (Statistics.STATISTICS != null && "com.thaiopensource.validate.schematron.SchemaImpl".equals(sch.getClass().getName())) {
        externalSchematron = true;
    }
    return sch;
}
/**
 * Loads a schema from a URL via the given resolver, choosing the compact
 * RELAX NG reader for compact-syntax input and the auto-detecting reader
 * otherwise.
 *
 * @param url the schema URL
 * @param resolver resolver used to fetch the schema source
 * @param pMap Jing property map for schema creation
 * @return the loaded schema
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
private static Schema schemaByUrl(String url, EntityResolver resolver,
        PropertyMap pMap) throws SAXException, IOException,
        IncorrectSchemaException {
    log4j.debug("Will load schema: " + url);
    TypedInputSource schemaInput;
    try {
        schemaInput = (TypedInputSource) resolver.resolveEntity(null, url);
    } catch (ClassCastException e) {
        // A non-typed InputSource here is a configuration error worth
        // logging loudly before propagating.
        log4j.fatal(url, e);
        throw e;
    }
    SchemaReader schemaReader =
            "application/relax-ng-compact-syntax".equals(schemaInput.getType())
                    ? CompactSchemaReader.getInstance()
                    : new AutoSchemaReader();
    return schemaReader.createSchema(schemaInput, pMap);
}
/**
 * Emits the page title text: the results title plus the document
 * identity when validating, or the service title (optionally with the
 * "living version" marker) otherwise.
 *
 * @param markupAllowed whether inline markup (a span) may be emitted
 * @throws SAXException
 */
void emitTitle(boolean markupAllowed) throws SAXException {
    if (willValidate()) {
        emitter.characters(RESULTS_TITLE);
        emitter.characters(FOR);
        if (document != null && document.length() > 0) {
            // Checked URL: show it, with data: URIs collapsed.
            emitter.characters(scrub(shortenDataUri(document)));
        } else if (request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.filename") != null) {
            emitter.characters("uploaded file "
                    + scrub(request.getAttribute(
                            "nu.validator.servlet.MultipartFormDataFilter.filename").toString()));
        } else {
            emitter.characters("contents of text-input area");
        }
    } else {
        emitter.characters(SERVICE_TITLE);
        if (markupAllowed
                && System.getProperty("nu.validator.servlet.service-name",
                        "").equals("Validator.nu")) {
            emitter.startElement("span");
            emitter.characters(LIVING_VERSION);
            emitter.endElement("span");
        }
    }
}
/**
 * Collapses data: URIs to a short ellipsis placeholder for display;
 * other URIs pass through unchanged.
 *
 * @param uri the URI to shorten
 * @return "data:…" for data URIs, otherwise the input unchanged
 */
protected String shortenDataUri(String uri) {
    return DataUri.startsWithData(uri) ? "data:\u2026" : uri;
}
/**
 * Emits the input form element and delegates its content to
 * emitFormContent(). The form always submits via GET.
 *
 * @throws SAXException
 */
void emitForm() throws SAXException {
    attrs.clear();
    attrs.addAttribute("method", "get");
    // attrs.addAttribute("action", request.getRequestURL().toString());
    if (isSimple()) {
        attrs.addAttribute("class", "simple");
    }
    // attrs.addAttribute("onsubmit", "formSubmission()");
    emitter.startElement("form", attrs);
    emitFormContent();
    emitter.endElement("form");
}
/**
 * Whether the simple (reduced) UI facet is in use; subclasses override.
 *
 * @return false in the base transaction
 */
protected boolean isSimple() {
    return false;
}
/**
 * Delegates the form body markup to the shared FormEmitter.
 *
 * @throws SAXException
 */
protected void emitFormContent() throws SAXException {
    FormEmitter.emit(contentHandler, this);
}
/**
 * Emits the "schema" text input: a space-separated list of http(s)/data
 * schema URLs, pre-filled with the current selection. Attribute order is
 * preserved as-added and is part of the emitted markup.
 *
 * @throws SAXException
 */
void emitSchemaField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "schema");
    attrs.addAttribute("id", "schema");
    // attrs.addAttribute("onchange", "schemaChanged();");
    attrs.addAttribute(
            "pattern",
            "(?:(?:(?:https?://\\S+)|(?:data:\\S+))(?:\\s+(?:(?:https?://\\S+)|(?:data:\\S+)))*)?");
    attrs.addAttribute("title",
            "Space-separated list of schema URLs. (Leave blank to let the service guess.)");
    if (schemaUrls != null) {
        attrs.addAttribute("value", scrub(schemaUrls));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
/**
 * Emits the "doc" URL input pre-filled with the document address, with a
 * client-side pattern restricting it to http(s)/data URLs. The multipart
 * filter's type attribute, when present, is echoed as the CSS class.
 *
 * @throws SAXException
 */
void emitDocField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("type", "url");
    attrs.addAttribute("name", "doc");
    attrs.addAttribute("id", "doc");
    attrs.addAttribute("pattern", "(?:(?:https?://.+)|(?:data:.+))?");
    attrs.addAttribute("title",
            "Absolute URL (http, https or data only) of the document to be checked.");
    attrs.addAttribute("tabindex", "0");
    attrs.addAttribute("autofocus", "autofocus");
    if (document != null) {
        attrs.addAttribute("value", scrub(document));
    }
    Object att = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
    if (att != null) {
        attrs.addAttribute("class", att.toString());
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
/**
 * Schema-load timing hook; intentionally a no-op here, overridable.
 *
 * @throws SAXException
 */
void emitSchemaDuration() throws SAXException {
}
/**
 * Document-load timing hook; intentionally a no-op here, overridable.
 *
 * @throws SAXException
 */
void emitDocDuration() throws SAXException {
}
/**
 * Emits the total elapsed milliseconds since this transaction started.
 *
 * @throws SAXException
 */
void emitTotalDuration() throws SAXException {
    emitter.characters("" + (System.currentTimeMillis() - start));
}
/**
 * Emits one dropdown option per preset schema, none preselected. The
 * label and URL arrays are parallel.
 *
 * @throws SAXException
 */
void emitPresetOptions() throws SAXException {
    for (int i = 0; i < presetUrls.length; i++) {
        emitter.option(presetLabels[i], presetUrls[i], false);
    }
}
/**
 * Emits the parser-mode dropdown options, preselecting the mode in
 * effect for this transaction.
 *
 * @throws SAXException
 */
void emitParserOptions() throws SAXException {
    emitter.option("Automatically from Content-Type", "",
            (parser == ParserMode.AUTO));
    emitter.option("HTML", "html", (parser == ParserMode.HTML));
    emitter.option("XML; don\u2019t load external entities", "xml",
            (parser == ParserMode.XML_NO_EXTERNAL_ENTITIES));
    emitter.option("XML; load external entities", "xmldtd",
            (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION));
}
/**
 * Emits the error-profile dropdown options, preselecting the profile
 * from the "profile" request parameter.
 *
 * @throws SAXException
 */
void emitProfileOptions() throws SAXException {
    profile = request.getParameter("profile");
    emitter.option("Permissive: only what the spec requires",
            "", ("".equals(profile)));
    emitter.option("Pedagogical: suitable for teaching purposes",
            "pedagogical", ("pedagogical".equals(profile)));
    emitter.option("Polyglot: works both as HTML and as XML",
            "polyglot", ("polyglot".equals(profile)));
}
/**
 * Emits the "be lax about content-type" checkbox.
 *
 * @throws SAXException
 */
void emitLaxTypeField() throws SAXException {
    emitter.checkbox("laxtype", "yes", laxType);
}
/**
 * Emits the "show source" checkbox.
 *
 * @throws SAXException
 */
void emitShowSourceField() throws SAXException {
    emitter.checkbox("showsource", "yes", showSource);
}
/**
 * Emits the "show outline" checkbox.
 *
 * @throws SAXException
 */
void emitShowOutlineField() throws SAXException {
    emitter.checkbox("showoutline", "yes", showOutline);
}
/**
 * Emits the "show image report" checkbox; checked iff an image
 * collector is active for this transaction.
 *
 * @throws SAXException
 */
void emitShowImageReportField() throws SAXException {
    emitter.checkbox("showimagereport", "yes", imageCollector != null);
}
// Emits the "check error pages" checkbox.
void emitCheckErrorPagesField() throws SAXException {
    emitter.checkbox("checkerrorpages", "yes", checkErrorPages);
}
/**
 * Callback invoked once the document's root namespace is known. When no
 * validator was chosen yet, picks the preset matching the namespace (or
 * aborts with CannotFindPresetSchemaException if none matches) and
 * redirects the buffered event stream into it. Also warns, once, when
 * the HTTP Content-Type disagrees with the root namespace.
 *
 * @param namespace the root element's namespace URI (may be "")
 * @param locator source position for any warning emitted
 * @throws SAXException
 */
void rootNamespace(String namespace, Locator locator) throws SAXException {
    if (validator == null) {
        int index = -1;
        for (int i = 0; i < presetNamespaces.length; i++) {
            if (namespace.equals(presetNamespaces[i])) {
                index = i;
                break;
            }
        }
        if (index == -1) {
            String message = "Cannot find preset schema for namespace: \u201C"
                    + namespace + "\u201D.";
            SAXException se = new SAXException(message);
            errorHandler.schemaError(se);
            // Distinct exception type so the caller can tell this apart
            // from other schema errors.
            throw new CannotFindPresetSchemaException();
        }
        String label = presetLabels[index];
        String urls = presetUrls[index];
        errorHandler.info("Using the preset for " + label
                + " based on the root namespace.");
        try {
            validator = validatorByUrls(urls);
        } catch (IncorrectSchemaException | IOException e) {
            // At this point the schema comes from memory.
            throw new RuntimeException(e);
        }
        if (bufferingRootNamespaceSniffer == null) {
            throw new RuntimeException(
                    "Bug! bufferingRootNamespaceSniffer was null.");
        }
        bufferingRootNamespaceSniffer.setContentHandler(validator.getContentHandler());
    }
    if (!rootNamespaceSeen) {
        rootNamespaceSeen = true;
        if (contentType != null) {
            int i;
            if ((i = Arrays.binarySearch(KNOWN_CONTENT_TYPES, contentType)) > -1) {
                // KNOWN_CONTENT_TYPES and NAMESPACES_FOR_KNOWN_CONTENT_TYPES
                // are parallel arrays.
                if (!NAMESPACES_FOR_KNOWN_CONTENT_TYPES[i].equals(namespace)) {
                    String message = "".equals(namespace) ? "\u201C"
                            + contentType
                            + "\u201D is not an appropriate Content-Type for a document whose root element is not in a namespace."
                            : "\u201C"
                                    + contentType
                                    + "\u201D is not an appropriate Content-Type for a document whose root namespace is \u201C"
                                    + namespace + "\u201D.";
                    SAXParseException spe = new SAXParseException(message,
                            locator);
                    errorHandler.warning(spe);
                }
            }
        }
    }
}
// Doctype callback from the HTML parser. Records legacy-doctype flags,
// and, if no validator was chosen yet, selects a schema: when the user
// opted into doctype sniffing, by the public identifier; otherwise the
// default HTML5 schema.
@Override
public void documentMode(DocumentMode mode, String publicIdentifier,
        String systemIdentifier)
        throws SAXException {
    if (systemIdentifier != null) {
        if ("about:legacy-compat".equals(systemIdentifier)) {
            aboutLegacyCompat = true;
            errorHandler.warning(new SAXParseException(
                    "Documents should not use"
                            + " \u201cabout:legacy-compat\u201d,"
                            + " except if generated by legacy systems"
                            + " that can't output the standard"
                            + " \u201c<!DOCTYPE html>\u201d doctype.",
                    null));
        }
        if (systemIdentifier.contains("http://www.w3.org/TR/xhtml1")) {
            xhtml1Doctype = true;
        }
        if (systemIdentifier.contains("http://www.w3.org/TR/html4")) {
            html4Doctype = true;
        }
    }
    if (publicIdentifier != null) {
        if (publicIdentifier.contains("-//W3C//DTD HTML 4")) {
            html4Doctype = true;
        }
    }
    if (validator == null) {
        try {
            if ("yes".equals(request.getParameter("sniffdoctype"))) {
                if ("-//W3C//DTD XHTML 1.0 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("XHTML 1.0 Transitional doctype seen. Appendix C is not supported. Proceeding anyway for your convenience. The parser is still an HTML parser, so namespace processing is not performed and \u201Cxml:*\u201D attributes are not supported. Using the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD XHTML 1.0 Strict//EN".equals(publicIdentifier)) {
                    errorHandler.info("XHTML 1.0 Strict doctype seen. Appendix C is not supported. Proceeding anyway for your convenience. The parser is still an HTML parser, so namespace processing is not performed and \u201Cxml:*\u201D attributes are not supported. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Transitional doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Strict doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Transitional doctype seen. Please consider using HTML 4.01 Transitional instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Strict doctype seen. Please consider using HTML 4.01 instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                }
            } else {
                schemaIsDefault = true;
                validator = validatorByDoctype(HTML5_SCHEMA);
            }
        } catch (IncorrectSchemaException | IOException e) {
            // At this point the schema comes from memory.
            throw new RuntimeException(e);
        }
        // NOTE(review): if sniffdoctype=yes and the public identifier
        // matched none of the branches above, validator is still null
        // here and the next line would NPE — confirm callers guarantee a
        // recognized doctype, or add a default fallback.
        ContentHandler ch = validator.getContentHandler();
        ch.setDocumentLocator(htmlParser.getDocumentLocator());
        ch.startDocument();
        reader.setContentHandler(ch);
    }
}
/**
 * Looks up the human-readable label for a preset schema id in the
 * parallel preset tables.
 *
 * @param schemaId the preset id to look up
 * @return the matching label, or "unknown" when the id is not a preset
 */
private String getPresetLabel(int schemaId) {
    int index = 0;
    for (int doctype : presetDoctypes) {
        if (doctype == schemaId) {
            return presetLabels[index];
        }
        index++;
    }
    return "unknown";
}
/**
 * Propagates the accept-all-known-XML-types flag to the content-type
 * parser and both entity resolvers so all three stay in agreement.
 *
 * @param acceptAllKnownXmlTypes
 * @see nu.validator.xml.ContentTypeParser#setAcceptAllKnownXmlTypes(boolean)
 */
protected void setAcceptAllKnownXmlTypes(boolean acceptAllKnownXmlTypes) {
    contentTypeParser.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    dataRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    httpRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
}
/**
 * Propagates the allow-generic-XML flag to the content-type parser and
 * both entity resolvers.
 *
 * @param allowGenericXml
 * @see nu.validator.xml.ContentTypeParser#setAllowGenericXml(boolean)
 */
protected void setAllowGenericXml(boolean allowGenericXml) {
    contentTypeParser.setAllowGenericXml(allowGenericXml);
    httpRes.setAllowGenericXml(allowGenericXml);
    dataRes.setAllowGenericXml(allowGenericXml);
}
/**
 * Propagates the allow-HTML flag to the content-type parser and both
 * entity resolvers.
 *
 * @param allowHtml
 * @see nu.validator.xml.ContentTypeParser#setAllowHtml(boolean)
 */
protected void setAllowHtml(boolean allowHtml) {
    contentTypeParser.setAllowHtml(allowHtml);
    httpRes.setAllowHtml(allowHtml);
    dataRes.setAllowHtml(allowHtml);
}
/**
 * Propagates the allow-CSS flag to the content-type parser and both
 * entity resolvers.
 *
 * @param allowCss
 * @see nu.validator.xml.ContentTypeParser#setAllowCss(boolean)
 */
protected void setAllowCss(boolean allowCss) {
    contentTypeParser.setAllowCss(allowCss);
    httpRes.setAllowCss(allowCss);
    dataRes.setAllowCss(allowCss);
}
/**
 * Propagates the allow-RNC flag to the content-type parser, both entity
 * resolvers, and (unlike the other setters) the main entity resolver.
 *
 * @param allowRnc
 * @see nu.validator.xml.ContentTypeParser#setAllowRnc(boolean)
 */
protected void setAllowRnc(boolean allowRnc) {
    contentTypeParser.setAllowRnc(allowRnc);
    httpRes.setAllowRnc(allowRnc);
    dataRes.setAllowRnc(allowRnc);
    entityResolver.setAllowRnc(allowRnc);
}
/**
 * Propagates the allow-XHTML flag to the content-type parser and both
 * entity resolvers.
 *
 * @param allowXhtml
 * @see nu.validator.xml.ContentTypeParser#setAllowXhtml(boolean)
 */
protected void setAllowXhtml(boolean allowXhtml) {
    contentTypeParser.setAllowXhtml(allowXhtml);
    httpRes.setAllowXhtml(allowXhtml);
    dataRes.setAllowXhtml(allowXhtml);
}
/**
 * Loads the document input source (idempotent: returns immediately if
 * already loaded). GET fetches the document via the entity resolver;
 * POST wraps the request body, enforcing SIZE_LIMIT when no usable
 * Content-Length was supplied. Also initializes the image collector's
 * base-URI context when image review is active.
 *
 * @throws SAXException
 * @throws IOException
 */
protected void loadDocumentInput() throws SAXException, IOException {
    if (documentInput != null) {
        return;
    }
    if (methodIsGet) {
        documentInput = (TypedInputSource) entityResolver.resolveEntity(
                null, document);
        errorHandler.setLoggingOk(true);
    } else { // POST
        long len = request.getContentLength();
        if (len > SIZE_LIMIT) {
            throw new StreamBoundException("Resource size exceeds limit.");
        }
        documentInput = contentTypeParser.buildTypedInputSource(document,
                null, postContentType);
        // Unknown length (-1): bound the stream defensively so an
        // unbounded body can't exceed SIZE_LIMIT.
        documentInput.setByteStream(len < 0 ? new BoundedInputStream(
                request.getInputStream(), SIZE_LIMIT, document)
                : request.getInputStream());
        documentInput.setSystemId(request.getHeader("Content-Location"));
    }
    if (imageCollector != null) {
        baseUriTracker = new BaseUriTracker(documentInput.getSystemId(),
                documentInput.getLanguage());
        imageCollector.initializeContext(baseUriTracker);
    }
}
// Emits the stylesheet link element.
void emitStyle() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", STYLE_SHEET);
    attrs.addAttribute("rel", "stylesheet");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}
// Emits the favicon link element.
void emitIcon() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ICON);
    attrs.addAttribute("rel", "icon");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}
// Emits the client-side script element.
void emitScript() throws SAXException {
    attrs.clear();
    attrs.addAttribute("src", SCRIPT);
    emitter.startElement("script", attrs);
    emitter.endElement("script");
}
// Emits the "about this service" link.
void emitAbout() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ABOUT_PAGE);
    emitter.startElement("a", attrs);
    emitter.characters(ABOUT_THIS_SERVICE);
    emitter.endElement("a");
}
// Emits the service version string.
void emitVersion() throws SAXException {
    emitter.characters(VERSION);
}
// Emits the user-agent override input, backed by the "useragents" datalist.
void emitUserAgentInput() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "useragent");
    attrs.addAttribute("list", "useragents");
    attrs.addAttribute("value", userAgent);
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
// Emits the Accept-Language override input.
void emitAcceptLanguageInput() throws SAXException {
    attrs.clear();
    attrs.addAttribute("id", "acceptlanguage");
    attrs.addAttribute("name", "acceptlanguage");
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
// Emits the link to the simple-UI facet of the service.
void emitOtherFacetLink() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", HTML5_FACET);
    emitter.startElement("a", attrs);
    emitter.characters(SIMPLE_UI);
    emitter.endElement("a");
}
/**
 * Emits the namespace-filter input, pre-filled with the currently
 * filtered namespace URIs joined by single spaces.
 *
 * @throws SAXException
 */
void emitNsfilterField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "nsfilter");
    attrs.addAttribute("id", "nsfilter");
    attrs.addAttribute("pattern", "(?:.+:.+(?:\\s+.+:.+)*)?");
    attrs.addAttribute("title",
            "Space-separated namespace URIs for vocabularies to be filtered out.");
    if (!filteredNamespaces.isEmpty()) {
        // Space-join the namespaces without a trailing separator.
        StringBuilder joined = new StringBuilder();
        for (String ns : filteredNamespaces) {
            if (joined.length() > 0) {
                joined.append(' ');
            }
            joined.append(ns);
        }
        attrs.addAttribute("value", scrub(joined));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
// Delegates the (conditional) namespace-filter markup to NsFilterEmitter.
void maybeEmitNsfilterField() throws SAXException {
    NsFilterEmitter.emit(contentHandler, this);
}
/**
 * Emits the charset-override dropdown options, preselecting the current
 * override; an override not in the known list is appended as an extra
 * selected option so it is never silently dropped.
 *
 * @throws SAXException
 */
void emitCharsetOptions() throws SAXException {
    boolean overrideListed = false;
    for (int i = 0; i < CHARSETS.length; i++) {
        // XXX use ASCII-caseinsensitivity
        boolean selected = CHARSETS[i].equalsIgnoreCase(charsetOverride);
        emitter.option(CHARSET_DESCRIPTIONS[i], CHARSETS[i], selected);
        overrideListed |= selected;
    }
    if (!overrideListed && charsetOverride != null) {
        emitter.option(charsetOverride, charsetOverride, true);
    }
}
// Delegates the (conditional) charset-override markup to CharsetEmitter.
void maybeEmitCharsetField() throws SAXException {
    CharsetEmitter.emit(contentHandler, this);
}
// Thrown by rootNamespace() when no preset schema matches the sniffed
// root namespace; a distinct type so callers can distinguish it from
// other SAXExceptions.
class CannotFindPresetSchemaException extends SAXException {
    CannotFindPresetSchemaException() {
        super();
    }
}
}
| src/nu/validator/servlet/VerifierServletTransaction.java | /*
* Copyright (c) 2005, 2006 Henri Sivonen
* Copyright (c) 2007-2018 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.servlet;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.net.SocketTimeoutException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import nu.validator.checker.XmlPiChecker;
import nu.validator.checker.jing.CheckerSchema;
import nu.validator.gnu.xml.aelfred2.FatalSAXException;
import nu.validator.gnu.xml.aelfred2.SAXDriver;
import nu.validator.htmlparser.common.DocumentMode;
import nu.validator.htmlparser.common.DocumentModeHandler;
import nu.validator.htmlparser.common.Heuristics;
import nu.validator.htmlparser.common.XmlViolationPolicy;
import nu.validator.htmlparser.sax.HtmlParser;
import nu.validator.htmlparser.sax.HtmlSerializer;
import nu.validator.htmlparser.sax.XmlSerializer;
import nu.validator.io.BoundedInputStream;
import nu.validator.io.DataUri;
import nu.validator.io.StreamBoundException;
import nu.validator.localentities.LocalCacheEntityResolver;
import nu.validator.messages.GnuMessageEmitter;
import nu.validator.messages.JsonMessageEmitter;
import nu.validator.messages.MessageEmitterAdapter;
import nu.validator.messages.TextMessageEmitter;
import nu.validator.messages.TooManyErrorsException;
import nu.validator.messages.XhtmlMessageEmitter;
import nu.validator.messages.XmlMessageEmitter;
import nu.validator.servlet.imagereview.ImageCollector;
import nu.validator.servlet.OutlineBuildingXMLReaderWrapper.Section;
import nu.validator.source.SourceCode;
import nu.validator.spec.Spec;
import nu.validator.spec.html5.Html5SpecBuilder;
import nu.validator.xml.AttributesImpl;
import nu.validator.xml.AttributesPermutingXMLReaderWrapper;
import nu.validator.xml.BaseUriTracker;
import nu.validator.xml.CharacterUtil;
import nu.validator.xml.CombineContentHandler;
import nu.validator.xml.ContentTypeParser;
import nu.validator.xml.ContentTypeParser.NonXmlContentTypeException;
import nu.validator.xml.DataUriEntityResolver;
import nu.validator.xml.IdFilter;
import nu.validator.xml.LanguageDetectingXMLReaderWrapper;
import nu.validator.xml.UseCountingXMLReaderWrapper;
import nu.validator.xml.NamespaceDroppingXMLReaderWrapper;
import nu.validator.xml.NullEntityResolver;
import nu.validator.xml.PrudentHttpEntityResolver;
import nu.validator.xml.PrudentHttpEntityResolver.ResourceNotRetrievableException;
import nu.validator.xml.SystemErrErrorHandler;
import nu.validator.xml.TypedInputSource;
import nu.validator.xml.WiretapXMLReaderWrapper;
import nu.validator.xml.XhtmlSaxEmitter;
import nu.validator.xml.customelements.NamespaceChangingSchemaWrapper;
import nu.validator.xml.templateelement.TemplateElementDroppingSchemaWrapper;
import nu.validator.xml.dataattributes.DataAttributeDroppingSchemaWrapper;
import nu.validator.xml.langattributes.XmlLangAttributeDroppingSchemaWrapper;
import nu.validator.xml.roleattributes.RoleAttributeFilteringSchemaWrapper;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import com.thaiopensource.relaxng.impl.CombineValidator;
import com.thaiopensource.util.PropertyMap;
import com.thaiopensource.util.PropertyMapBuilder;
import com.thaiopensource.validate.IncorrectSchemaException;
import com.thaiopensource.validate.Schema;
import com.thaiopensource.validate.SchemaReader;
import com.thaiopensource.validate.SchemaResolver;
import com.thaiopensource.validate.ValidateProperty;
import com.thaiopensource.validate.Validator;
import com.thaiopensource.validate.auto.AutoSchemaReader;
import com.thaiopensource.validate.prop.rng.RngProperty;
import com.thaiopensource.validate.prop.wrap.WrapProperty;
import com.thaiopensource.validate.rng.CompactSchemaReader;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.log4j.Logger;
import com.ibm.icu.text.Normalizer;
/**
* @version $Id: VerifierServletTransaction.java,v 1.10 2005/07/24 07:32:48
* hsivonen Exp $
* @author hsivonen
*/
class VerifierServletTransaction implements DocumentModeHandler, SchemaResolver {
    // Output serialization formats for validation results. Only HTML, XHTML,
    // TEXT, GNU, XML, and JSON are reachable via the "out" request parameter
    // as dispatched in service(); the remaining constants are not selected
    // anywhere in this part of the file.
    private enum OutputFormat {
        HTML, XHTML, TEXT, XML, JSON, RELAXED, SOAP, UNICORN, GNU
    }
    private static final Logger log4j = Logger.getLogger(VerifierServletTransaction.class);

    // Whitespace splitter for schema URL lists and the nsfilter parameter.
    private static final Pattern SPACE = Pattern.compile("\\s+");

    // ECMA 262 IdentifierName syntax; used to vet JSONP callback names in
    // service().
    private static final Pattern JS_IDENTIFIER = Pattern.compile("[\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$][\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}]*");

    // Must remain in ascending order: looked up with Arrays.binarySearch()
    // when rejecting reserved words as JSONP callback names.
    private static final String[] JS_RESERVED_WORDS = { "abstract", "boolean",
            "break", "byte", "case", "catch", "char", "class", "const",
            "continue", "debugger", "default", "delete", "do", "double",
            "else", "enum", "export", "extends", "final", "finally", "float",
            "for", "function", "goto", "if", "implements", "import", "in",
            "instanceof", "int", "interface", "long", "native", "new",
            "package", "private", "protected", "public", "return", "short",
            "static", "super", "switch", "synchronized", "this", "throw",
            "throws", "transient", "try", "typeof", "var", "void", "volatile",
            "while", "with" };

    // Parallel to CHARSET_DESCRIPTIONS: CHARSETS[i] is the encoding name for
    // the description CHARSET_DESCRIPTIONS[i]. Keep the two arrays in sync.
    private static final String[] CHARSETS = { "UTF-8", "UTF-16",
            "Windows-1250", "Windows-1251", "Windows-1252", "Windows-1253",
            "Windows-1254", "Windows-1255", "Windows-1256", "Windows-1257",
            "Windows-1258", "ISO-8859-1", "ISO-8859-2", "ISO-8859-3",
            "ISO-8859-4", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7",
            "ISO-8859-8", "ISO-8859-9", "ISO-8859-13", "ISO-8859-15", "KOI8-R",
            "TIS-620", "GBK", "GB18030", "Big5", "Big5-HKSCS", "Shift_JIS",
            "ISO-2022-JP", "EUC-JP", "ISO-2022-KR", "EUC-KR" };

    // Human-readable labels for CHARSETS, same order as above.
    private static final char[][] CHARSET_DESCRIPTIONS = {
            "UTF-8 (Global)".toCharArray(), "UTF-16 (Global)".toCharArray(),
            "Windows-1250 (Central European)".toCharArray(),
            "Windows-1251 (Cyrillic)".toCharArray(),
            "Windows-1252 (Western)".toCharArray(),
            "Windows-1253 (Greek)".toCharArray(),
            "Windows-1254 (Turkish)".toCharArray(),
            "Windows-1255 (Hebrew)".toCharArray(),
            "Windows-1256 (Arabic)".toCharArray(),
            "Windows-1257 (Baltic)".toCharArray(),
            "Windows-1258 (Vietnamese)".toCharArray(),
            "ISO-8859-1 (Western)".toCharArray(),
            "ISO-8859-2 (Central European)".toCharArray(),
            "ISO-8859-3 (South European)".toCharArray(),
            "ISO-8859-4 (Baltic)".toCharArray(),
            "ISO-8859-5 (Cyrillic)".toCharArray(),
            "ISO-8859-6 (Arabic)".toCharArray(),
            "ISO-8859-7 (Greek)".toCharArray(),
            "ISO-8859-8 (Hebrew)".toCharArray(),
            "ISO-8859-9 (Turkish)".toCharArray(),
            "ISO-8859-13 (Baltic)".toCharArray(),
            "ISO-8859-15 (Western)".toCharArray(),
            "KOI8-R (Russian)".toCharArray(), "TIS-620 (Thai)".toCharArray(),
            "GBK (Chinese, simplified)".toCharArray(),
            "GB18030 (Chinese, simplified)".toCharArray(),
            "Big5 (Chinese, traditional)".toCharArray(),
            "Big5-HKSCS (Chinese, traditional)".toCharArray(),
            "Shift_JIS (Japanese)".toCharArray(),
            "ISO-2022-JP (Japanese)".toCharArray(),
            "EUC-JP (Japanese)".toCharArray(),
            "ISO-2022-KR (Korean)".toCharArray(),
            "EUC-KR (Korean)".toCharArray() };

    // Numeric doctype identifiers matching the first column of the presets
    // file parsed in the static initializer.
    protected static final int HTML5_SCHEMA = 3;

    protected static final int XHTML1STRICT_SCHEMA = 2;

    protected static final int XHTML1TRANSITIONAL_SCHEMA = 1;

    protected static final int XHTML5_SCHEMA = 7;

    // Assigned in the static initializer from system properties /
    // misc.properties.
    private static final char[] SERVICE_TITLE;

    private static final char[] LIVING_VERSION = "Living Validator".toCharArray();

    private static final char[] VERSION;

    private static final char[] RESULTS_TITLE;

    private static final char[] FOR = " for ".toCharArray();

    private static final char[] ABOUT_THIS_SERVICE = "About this Service".toCharArray();

    private static final char[] SIMPLE_UI = "Simplified Interface".toCharArray();

    // Wrapped around a bare stylesheet so it can be checked as an HTML
    // document; see the "text/css" handling in validate().
    private static final byte[] CSS_CHECKING_PROLOG = //
            "<!DOCTYPE html><title>s</title><style>\n".getBytes();

    private static final byte[] CSS_CHECKING_EPILOG = "\n</style>".getBytes();

    private static final String USER_AGENT;

    private static Spec html5spec;

    // Parallel preset arrays, filled from the tab-separated presets file in
    // the static initializer; index i describes one preset.
    private static int[] presetDoctypes;

    private static String[] presetLabels;

    private static String[] presetUrls;

    private static String[] presetNamespaces;

    // XXX SVG!!!
    // KNOWN_CONTENT_TYPES[i] maps to NAMESPACES_FOR_KNOWN_CONTENT_TYPES[i];
    // keep the two arrays in sync.
    private static final String[] KNOWN_CONTENT_TYPES = {
            "application/atom+xml", "application/docbook+xml",
            "application/xhtml+xml", "application/xv+xml", "image/svg+xml" };

    private static final String[] NAMESPACES_FOR_KNOWN_CONTENT_TYPES = {
            "http://www.w3.org/2005/Atom", "http://docbook.org/ns/docbook",
            "http://www.w3.org/1999/xhtml", "http://www.w3.org/1999/xhtml",
            "http://www.w3.org/2000/svg" };

    // Checker pseudo-schema URLs recognized by isCheckerUrl().
    private static final String[] ALL_CHECKERS = {
            "http://c.validator.nu/table/", "http://c.validator.nu/nfc/",
            "http://c.validator.nu/text-content/",
            "http://c.validator.nu/unchecked/",
            "http://c.validator.nu/usemap/", "http://c.validator.nu/obsolete/",
            "http://c.validator.nu/xml-pi/", "http://c.validator.nu/unsupported/",
            "http://c.validator.nu/microdata/" };

    private static final String[] ALL_CHECKERS_HTML4 = {
            "http://c.validator.nu/table/", "http://c.validator.nu/nfc/",
            "http://c.validator.nu/unchecked/", "http://c.validator.nu/usemap/" };

    // ---- Per-transaction state ----

    private long start = System.currentTimeMillis();

    protected final HttpServletRequest request;

    private final HttpServletResponse response;

    // Address of the document being checked; null until resolved in service().
    protected String document = null;

    private ParserMode parser = ParserMode.AUTO;

    private String profile = "";

    private boolean laxType = false;

    private boolean aboutLegacyCompat = false;

    private boolean xhtml1Doctype = false;

    private boolean html4Doctype = false;

    protected ContentHandler contentHandler;

    protected XhtmlSaxEmitter emitter;

    protected MessageEmitterAdapter errorHandler;

    protected final AttributesImpl attrs = new AttributesImpl();

    private OutputStream out;

    private PropertyMap jingPropertyMap;

    protected LocalCacheEntityResolver entityResolver;

    private static long lastModified;

    private static String[] preloadedSchemaUrls;

    private static Schema[] preloadedSchemas;

    private final static String cannotRecover = "Cannot recover after last"
            + " error. Any further errors will be ignored.";

    private final static String changingEncoding = "Changing encoding at this"
            + " point would need non-streamable behavior.";

    // Substrings of document addresses that should be refused with HTTP 429.
    private final static String[] DENY_LIST = System.getProperty(
            "nu.validator.servlet.deny-list", "").split("\\s+");

    private final static String ABOUT_PAGE = System.getProperty(
            "nu.validator.servlet.about-page", "https://about.validator.nu/");

    private final static String HTML5_FACET = (VerifierServlet.HTML5_HOST.isEmpty() ? "" : ("//" + VerifierServlet.HTML5_HOST)) + VerifierServlet.HTML5_PATH;

    private final static String STYLE_SHEET = System.getProperty(
            "nu.validator.servlet.style-sheet",
            "style.css");

    private final static String ICON = System.getProperty(
            "nu.validator.servlet.icon",
            "icon.png");

    private final static String SCRIPT = System.getProperty(
            "nu.validator.servlet.script",
            "script.js");

    // NOTE(review): parsed with Integer.parseInt, so even though the field is
    // a long the configured limit is capped at Integer.MAX_VALUE.
    private static final long SIZE_LIMIT = Integer.parseInt(System.getProperty(
            "nu.validator.servlet.max-file-size", "2097152"));

    // "|"-joined alternation of message patterns read from FILTER_FILE in the
    // static initializer; merged with per-request filters in service().
    private static String systemFilterString = "";

    private final static String FILTER_FILE = System.getProperty(
            "nu.validator.servlet.filterfile", "resources/message-filters.txt");

    protected String schemaUrls = null;

    protected Validator validator = null;

    private BufferingRootNamespaceSniffer bufferingRootNamespaceSniffer = null;

    private String contentType = null;

    // Exactly one of htmlParser/xmlParser is non-null once parsing is set up.
    protected HtmlParser htmlParser = null;

    protected SAXDriver xmlParser = null;

    protected XMLReader reader;

    protected TypedInputSource documentInput;

    protected PrudentHttpEntityResolver httpRes;

    protected DataUriEntityResolver dataRes;

    protected ContentTypeParser contentTypeParser;

    private Set<String> loadedValidatorUrls = new HashSet<>();

    private boolean checkNormalization = false;

    private boolean rootNamespaceSeen = false;

    private OutputFormat outputFormat;

    private String postContentType;

    private boolean methodIsGet;

    private SourceCode sourceCode = new SourceCode();

    private Deque<Section> outline;

    private Deque<Section> headingOutline;

    private boolean showSource;

    private boolean showOutline;

    private boolean checkErrorPages;

    private boolean schemaIsDefault;

    private String userAgent;

    private BaseUriTracker baseUriTracker = null;

    private String charsetOverride = null;

    // LinkedHashSet so the UI lists filtered namespaces in a stable order.
    private Set<String> filteredNamespaces = new LinkedHashSet<>();

    private LexicalHandler lexicalHandler;

    protected ImageCollector imageCollector;

    private boolean externalSchema = false;

    private boolean externalSchematron = false;

    private String schemaListForStats = null;
    // One-time bootstrap: reads the presets file, service branding
    // properties, preloads and wraps all schemas, parses the HTML5 spec,
    // reads the system message-filter file, and initializes the language
    // detector. Any failure is fatal (rethrown as RuntimeException).
    static {
        try {
            log4j.debug("Starting static initializer.");
            lastModified = 0;
            // The presets file is tab-separated:
            // doctype-number \t namespace \t label \t schema-url-list
            BufferedReader r = new BufferedReader(new InputStreamReader(LocalCacheEntityResolver.getPresetsAsStream(), "UTF-8"));
            String line;
            List<String> doctypes = new LinkedList<>();
            List<String> namespaces = new LinkedList<>();
            List<String> labels = new LinkedList<>();
            List<String> urls = new LinkedList<>();

            Properties props = new Properties();

            log4j.debug("Reading miscellaneous properties.");

            props.load(VerifierServlet.class.getClassLoader().getResourceAsStream(
                    "nu/validator/localentities/files/misc.properties"));
            // System properties override the bundled misc.properties values.
            SERVICE_TITLE = (System.getProperty(
                    "nu.validator.servlet.service-name",
                    props.getProperty("nu.validator.servlet.service-name",
                            "Validator.nu")) + " ").toCharArray();
            RESULTS_TITLE = (System.getProperty(
                    "nu.validator.servlet.results-title", props.getProperty(
                            "nu.validator.servlet.results-title",
                            "Validation results"))).toCharArray();
            VERSION = (System.getProperty("nu.validator.servlet.version",
                    props.getProperty("nu.validator.servlet.version",
                            "Living Validator"))).toCharArray();
            USER_AGENT = (System.getProperty("nu.validator.servlet.user-agent",
                    props.getProperty("nu.validator.servlet.user-agent",
                            "Validator.nu/LV")));

            log4j.debug("Starting to loop over config file lines.");

            // Reading stops at the first blank line.
            while ((line = r.readLine()) != null) {
                if ("".equals(line.trim())) {
                    break;
                }
                String s[] = line.split("\t");
                doctypes.add(s[0]);
                namespaces.add(s[1]);
                labels.add(s[2]);
                urls.add(s[3]);
            }

            log4j.debug("Finished reading config.");

            String[] presetDoctypesAsStrings = doctypes.toArray(new String[0]);
            presetNamespaces = namespaces.toArray(new String[0]);
            presetLabels = labels.toArray(new String[0]);
            presetUrls = urls.toArray(new String[0]);

            log4j.debug("Converted config to arrays.");

            // "-" in the file means "no namespace"; intern the rest so they
            // can be compared by identity elsewhere.
            for (int i = 0; i < presetNamespaces.length; i++) {
                String str = presetNamespaces[i];
                if ("-".equals(str)) {
                    presetNamespaces[i] = null;
                } else {
                    presetNamespaces[i] = presetNamespaces[i].intern();
                }
            }

            log4j.debug("Prepared namespace array.");

            presetDoctypes = new int[presetDoctypesAsStrings.length];
            for (int i = 0; i < presetDoctypesAsStrings.length; i++) {
                presetDoctypes[i] = Integer.parseInt(presetDoctypesAsStrings[i]);
            }

            log4j.debug("Parsed doctype numbers into ints.");

            String prefix = System.getProperty("nu.validator.servlet.cachepathprefix");

            log4j.debug("The cache path prefix is: " + prefix);

            // Jing property map used for reading schemas at startup.
            ErrorHandler eh = new SystemErrErrorHandler();
            LocalCacheEntityResolver er = new LocalCacheEntityResolver(new NullEntityResolver());
            er.setAllowRnc(true);
            PropertyMapBuilder pmb = new PropertyMapBuilder();
            pmb.put(ValidateProperty.ERROR_HANDLER, eh);
            pmb.put(ValidateProperty.ENTITY_RESOLVER, er);
            pmb.put(ValidateProperty.XML_READER_CREATOR,
                    new VerifierServletXMLReaderCreator(eh, er));
            RngProperty.CHECK_ID_IDREF.add(pmb);
            PropertyMap pMap = pmb.toPropertyMap();

            log4j.debug("Parsing set up. Starting to read schemas.");

            // Map checker URLs (and their legacy hsivonen.iki.fi aliases) to
            // their built-in pseudo-schemas.
            SortedMap<String, Schema> schemaMap = new TreeMap<>();

            schemaMap.put("http://c.validator.nu/table/",
                    CheckerSchema.TABLE_CHECKER);
            schemaMap.put("http://hsivonen.iki.fi/checkers/table/",
                    CheckerSchema.TABLE_CHECKER);
            schemaMap.put("http://c.validator.nu/nfc/",
                    CheckerSchema.NORMALIZATION_CHECKER);
            schemaMap.put("http://hsivonen.iki.fi/checkers/nfc/",
                    CheckerSchema.NORMALIZATION_CHECKER);
            schemaMap.put("http://c.validator.nu/debug/",
                    CheckerSchema.DEBUG_CHECKER);
            schemaMap.put("http://hsivonen.iki.fi/checkers/debug/",
                    CheckerSchema.DEBUG_CHECKER);
            schemaMap.put("http://c.validator.nu/text-content/",
                    CheckerSchema.TEXT_CONTENT_CHECKER);
            schemaMap.put("http://hsivonen.iki.fi/checkers/text-content/",
                    CheckerSchema.TEXT_CONTENT_CHECKER);
            schemaMap.put("http://c.validator.nu/usemap/",
                    CheckerSchema.USEMAP_CHECKER);
            schemaMap.put("http://n.validator.nu/checkers/usemap/",
                    CheckerSchema.USEMAP_CHECKER);
            schemaMap.put("http://c.validator.nu/unchecked/",
                    CheckerSchema.UNCHECKED_SUBTREE_WARNER);
            schemaMap.put("http://s.validator.nu/html5/assertions.sch",
                    CheckerSchema.ASSERTION_SCH);
            schemaMap.put("http://s.validator.nu/html4/assertions.sch",
                    CheckerSchema.HTML4ASSERTION_SCH);
            schemaMap.put("http://c.validator.nu/obsolete/",
                    CheckerSchema.CONFORMING_BUT_OBSOLETE_WARNER);
            schemaMap.put("http://c.validator.nu/xml-pi/",
                    CheckerSchema.XML_PI_CHECKER);
            schemaMap.put("http://c.validator.nu/unsupported/",
                    CheckerSchema.UNSUPPORTED_CHECKER);
            schemaMap.put("http://c.validator.nu/microdata/",
                    CheckerSchema.MICRODATA_CHECKER);
            schemaMap.put("http://c.validator.nu/rdfalite/",
                    CheckerSchema.RDFALITE_CHECKER);

            // Load every non-checker schema referenced by any preset.
            for (String presetUrl : presetUrls) {
                for (String url : SPACE.split(presetUrl)) {
                    if (schemaMap.get(url) == null && !isCheckerUrl(url)) {
                        Schema sch = schemaByUrl(url, er, pMap);
                        schemaMap.put(url, sch);
                    }
                }
            }

            log4j.debug("Schemas read.");

            // Flatten the sorted map into parallel arrays, applying the
            // attribute/element-wrapper schemas to the (X)HTML5 entries.
            preloadedSchemaUrls = new String[schemaMap.size()];
            preloadedSchemas = new Schema[schemaMap.size()];
            int i = 0;
            for (Map.Entry<String, Schema> entry : schemaMap.entrySet()) {
                preloadedSchemaUrls[i] = entry.getKey().intern();
                Schema s = entry.getValue();
                String u = entry.getKey();
                if (isDataAttributeDroppingSchema(u)) {
                    s = new DataAttributeDroppingSchemaWrapper(
                            s);
                }
                if (isXmlLangAllowingSchema(u)) {
                    s = new XmlLangAttributeDroppingSchemaWrapper(s);
                }
                if (isRoleAttributeFilteringSchema(u)) {
                    s = new RoleAttributeFilteringSchemaWrapper(s);
                }
                if (isTemplateElementDroppingSchema(u)) {
                    s = new TemplateElementDroppingSchemaWrapper(s);
                }
                if (isCustomElementNamespaceChangingSchema(u)) {
                    s = new NamespaceChangingSchemaWrapper(s);
                }
                preloadedSchemas[i] = s;
                i++;
            }

            log4j.debug("Reading spec.");

            html5spec = Html5SpecBuilder.parseSpec(LocalCacheEntityResolver.getHtml5SpecAsStream());

            log4j.debug("Spec read.");

            // Optional message-filter file: one pattern per line, "#" lines
            // are comments; joined with "|" into systemFilterString.
            if (new File(FILTER_FILE).isFile()) {
                log4j.debug("Reading filter file " + FILTER_FILE);
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(new FileInputStream(FILTER_FILE),
                                "UTF-8"))) {
                    StringBuilder sb = new StringBuilder();
                    String filterline;
                    String pipe = "";
                    while ((filterline = reader.readLine()) != null) {
                        if (filterline.startsWith("#")) {
                            continue;
                        }
                        sb.append(pipe);
                        sb.append(filterline);
                        pipe = "|";
                    }
                    if (sb.length() != 0) {
                        if ("".equals(systemFilterString)) {
                            systemFilterString = sb.toString();
                        } else {
                            systemFilterString += "|" + sb.toString();
                        }
                    }
                }
                log4j.debug("Filter file read.");
            }

            log4j.debug("Initializing language detector.");

            LanguageDetectingXMLReaderWrapper.initialize();

            log4j.debug("Initialization complete.");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
@SuppressWarnings("deprecation")
protected static String scrub(CharSequence s) {
return Normalizer.normalize(
CharacterUtil.prudentlyScrubCharacterData(s), Normalizer.NFC);
}
private static boolean isDataAttributeDroppingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isXmlLangAllowingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isRoleAttributeFilteringSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isTemplateElementDroppingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isCustomElementNamespaceChangingSchema(String key) {
return ("http://s.validator.nu/xhtml5.rnc".equals(key)
|| "http://s.validator.nu/html5.rnc".equals(key)
|| "http://s.validator.nu/html5-all.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-all.rnc".equals(key)
|| "http://s.validator.nu/html5-its.rnc".equals(key)
|| "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key)
|| "http://s.validator.nu/html5-rdfalite.rnc".equals(key));
}
private static boolean isCheckerUrl(String url) {
if ("http://c.validator.nu/all/".equals(url)
|| "http://hsivonen.iki.fi/checkers/all/".equals(url)) {
return true;
} else if ("http://c.validator.nu/all-html4/".equals(url)
|| "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) {
return true;
} else if ("http://c.validator.nu/base/".equals(url)) {
return true;
} else if ("http://c.validator.nu/rdfalite/".equals(url)) {
return true;
}
for (String checker : ALL_CHECKERS) {
if (checker.equals(url)) {
return true;
}
}
return false;
}
    /**
     * Creates a transaction bound to one HTTP request/response pair.
     *
     * @param request the servlet request being serviced
     * @param response the servlet response the results are written to
     */
    VerifierServletTransaction(HttpServletRequest request,
            HttpServletResponse response) {
        this.request = request;
        this.response = response;
    }
protected boolean willValidate() {
if (methodIsGet) {
return document != null;
} else { // POST
return true;
}
}
void service() throws ServletException, IOException {
this.methodIsGet = "GET".equals(request.getMethod())
|| "HEAD".equals(request.getMethod());
this.out = response.getOutputStream();
try {
request.setCharacterEncoding("utf-8");
} catch (NoSuchMethodError e) {
log4j.debug("Vintage Servlet API doesn't support setCharacterEncoding().", e);
}
if (!methodIsGet) {
postContentType = request.getContentType();
if (postContentType == null) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Content-Type missing");
return;
} else if (postContentType.trim().toLowerCase().startsWith(
"application/x-www-form-urlencoded")) {
response.sendError(
HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE,
"application/x-www-form-urlencoded not supported. Please use multipart/form-data.");
return;
}
}
String outFormat = request.getParameter("out");
if (outFormat == null) {
outputFormat = OutputFormat.HTML;
} else {
if ("html".equals(outFormat)) {
outputFormat = OutputFormat.HTML;
} else if ("xhtml".equals(outFormat)) {
outputFormat = OutputFormat.XHTML;
} else if ("text".equals(outFormat)) {
outputFormat = OutputFormat.TEXT;
} else if ("gnu".equals(outFormat)) {
outputFormat = OutputFormat.GNU;
} else if ("xml".equals(outFormat)) {
outputFormat = OutputFormat.XML;
} else if ("json".equals(outFormat)) {
outputFormat = OutputFormat.JSON;
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Unsupported output format");
return;
}
}
if (!methodIsGet) {
document = request.getHeader("Content-Location");
}
if (document == null) {
document = request.getParameter("doc");
}
if (document == null) {
document = request.getParameter("file");
}
document = ("".equals(document)) ? null : document;
if (document != null) {
for (String domain : DENY_LIST) {
if (!"".equals(domain) && document.contains(domain)) {
response.sendError(429, "Too many requests");
return;
}
}
}
String callback = null;
if (outputFormat == OutputFormat.JSON) {
callback = request.getParameter("callback");
if (callback != null) {
Matcher m = JS_IDENTIFIER.matcher(callback);
if (m.matches()) {
if (Arrays.binarySearch(JS_RESERVED_WORDS, callback) >= 0) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Callback is a reserved word.");
return;
}
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Callback is not a valid ECMA 262 IdentifierName.");
return;
}
}
}
if (willValidate()) {
response.setDateHeader("Expires", 0);
response.setHeader("Cache-Control", "no-cache");
} else if (outputFormat == OutputFormat.HTML
|| outputFormat == OutputFormat.XHTML) {
response.setDateHeader("Last-Modified", lastModified);
} else {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"No input document");
return;
}
setup();
String filterString = systemFilterString;
String filterPatternParam = request.getParameter("filterpattern");
if (filterPatternParam != null && !"".equals(filterPatternParam)) {
if ("".equals(filterString)) {
filterString = scrub(filterPatternParam);
} else {
filterString += "|" + scrub(filterPatternParam);
}
}
String filterUrl = request.getParameter("filterurl");
if (filterUrl != null && !"".equals(filterUrl)) {
try {
InputSource filterFile = //
(new PrudentHttpEntityResolver(-1, true, null)) //
.resolveEntity(null, filterUrl);
StringBuilder sb = new StringBuilder();
BufferedReader reader = //
new BufferedReader(new InputStreamReader(
filterFile.getByteStream()));
String line;
String pipe = "";
while ((line = reader.readLine()) != null) {
if (line.startsWith("#")) {
continue;
}
sb.append(pipe);
sb.append(line);
pipe = "|";
}
if (sb.length() != 0) {
if (!"".equals(filterString)) {
filterString = scrub(sb.toString());
} else {
filterString += "|" + scrub(sb.toString());
}
}
} catch (Exception e) {
response.sendError(500, e.getMessage());
}
}
Pattern filterPattern = null;
if (!"".equals(filterString)) {
filterPattern = Pattern.compile(filterString);
}
if (request.getParameter("useragent") != null) {
userAgent = scrub(request.getParameter("useragent"));
} else {
userAgent = USER_AGENT;
}
if (request.getParameter("acceptlanguage") != null) {
request.setAttribute(
"http://validator.nu/properties/accept-language",
scrub(request.getParameter("acceptlanguage")));
}
Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
showSource = (request.getParameter("showsource") != null);
showSource = (showSource || "textarea".equals(inputType));
showOutline = (request.getParameter("showoutline") != null);
if (request.getParameter("checkerrorpages") != null) {
request.setAttribute(
"http://validator.nu/properties/ignore-response-status",
true);
}
if (request.getParameter("showimagereport") != null) {
imageCollector = new ImageCollector(sourceCode);
}
String charset = request.getParameter("charset");
if (charset != null) {
charset = scrub(charset.trim());
if (!"".equals(charset)) {
charsetOverride = charset;
}
}
String nsfilter = request.getParameter("nsfilter");
if (nsfilter != null) {
for (String ns : SPACE.split(nsfilter)) {
if (ns.length() > 0) {
filteredNamespaces.add(ns);
}
}
}
boolean errorsOnly = ("error".equals(request.getParameter("level")));
boolean asciiQuotes = (request.getParameter("asciiquotes") != null);
int lineOffset = 0;
String lineOffsetStr = request.getParameter("lineoffset");
if (lineOffsetStr != null) {
try {
lineOffset = Integer.parseInt(lineOffsetStr);
} catch (NumberFormatException e) {
}
}
try {
if (outputFormat == OutputFormat.HTML
|| outputFormat == OutputFormat.XHTML) {
if (outputFormat == OutputFormat.HTML) {
response.setContentType("text/html; charset=utf-8");
contentHandler = new HtmlSerializer(out);
} else {
response.setContentType("application/xhtml+xml");
contentHandler =
new XmlSerializer(out);
}
emitter = new XhtmlSaxEmitter(contentHandler);
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, imageCollector, lineOffset,
false, new XhtmlMessageEmitter(contentHandler));
PageEmitter.emit(contentHandler, this);
} else {
if (outputFormat == OutputFormat.TEXT) {
response.setContentType("text/plain; charset=utf-8");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new TextMessageEmitter(out, asciiQuotes));
} else if (outputFormat == OutputFormat.GNU) {
response.setContentType("text/plain; charset=utf-8");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new GnuMessageEmitter(out, asciiQuotes));
} else if (outputFormat == OutputFormat.XML) {
response.setContentType("application/xml");
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new XmlMessageEmitter(new XmlSerializer(out)));
} else if (outputFormat == OutputFormat.JSON) {
if (callback == null) {
response.setContentType("application/json; charset=utf-8");
} else {
response.setContentType("application/javascript; charset=utf-8");
}
errorHandler = new MessageEmitterAdapter(filterPattern,
sourceCode, showSource, null, lineOffset, false,
new JsonMessageEmitter(
new nu.validator.json.Serializer(out),
callback));
} else {
throw new RuntimeException("Unreachable.");
}
errorHandler.setErrorsOnly(errorsOnly);
validate();
}
} catch (SAXException e) {
log4j.debug("SAXException: " + e.getMessage());
}
}
/**
* @throws ServletException
*/
protected void setup() throws ServletException {
String preset = request.getParameter("preset");
if (preset != null && !"".equals(preset)) {
schemaUrls = preset;
} else {
schemaUrls = request.getParameter("schema");
}
if (schemaUrls == null) {
schemaUrls = "";
}
String parserStr = request.getParameter("parser");
if ("html".equals(parserStr)) {
parser = ParserMode.HTML;
} else if ("xmldtd".equals(parserStr)) {
parser = ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION;
} else if ("xml".equals(parserStr)) {
parser = ParserMode.XML_NO_EXTERNAL_ENTITIES;
} else if ("html5".equals(parserStr)) {
parser = ParserMode.HTML;
} // else auto
laxType = (request.getParameter("laxtype") != null);
}
private boolean useXhtml5Schema() {
if ("".equals(schemaUrls)) {
return false;
}
return (schemaUrls.contains("http://s.validator.nu/xhtml5.rnc")
|| schemaUrls.contains("http://s.validator.nu/xhtml5-all.rnc")
|| schemaUrls.contains("http://s.validator.nu/xhtml5-its.rnc")
|| schemaUrls.contains(
"http://s.validator.nu/xhtml5-rdfalite.rnc"));
}
private boolean isHtmlUnsafePreset() {
if ("".equals(schemaUrls)) {
return false;
}
boolean preset = false;
for (String presetUrl : presetUrls) {
if (presetUrl.equals(schemaUrls)) {
preset = true;
break;
}
}
if (!preset) {
return false;
}
return !(schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-basic.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-strict.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-transitional.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-frameset.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-all.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-its.rnc")
|| schemaUrls.startsWith("http://s.validator.nu/html5-rdfalite.rnc"));
}
/**
* @throws SAXException
*/
@SuppressWarnings({ "deprecation", "unchecked" }) void validate() throws SAXException {
if (!willValidate()) {
return;
}
boolean isHtmlOrXhtml = (outputFormat == OutputFormat.HTML || outputFormat == OutputFormat.XHTML);
if (isHtmlOrXhtml) {
try {
out.flush();
} catch (IOException e1) {
throw new SAXException(e1);
}
}
httpRes = new PrudentHttpEntityResolver(SIZE_LIMIT, laxType,
errorHandler, request);
httpRes.setUserAgent(userAgent);
dataRes = new DataUriEntityResolver(httpRes, laxType, errorHandler);
contentTypeParser = new ContentTypeParser(errorHandler, laxType);
entityResolver = new LocalCacheEntityResolver(dataRes);
setAllowRnc(true);
setAllowCss(true);
try {
this.errorHandler.start(document);
PropertyMapBuilder pmb = new PropertyMapBuilder();
pmb.put(ValidateProperty.ERROR_HANDLER, errorHandler);
pmb.put(ValidateProperty.ENTITY_RESOLVER, entityResolver);
pmb.put(ValidateProperty.XML_READER_CREATOR,
new VerifierServletXMLReaderCreator(errorHandler,
entityResolver));
pmb.put(ValidateProperty.SCHEMA_RESOLVER, this);
RngProperty.CHECK_ID_IDREF.add(pmb);
jingPropertyMap = pmb.toPropertyMap();
tryToSetupValidator();
setAllowRnc(false);
loadDocAndSetupParser();
setErrorProfile();
contentType = documentInput.getType();
if ("text/css".equals(contentType)) {
String charset = "UTF-8";
if (documentInput.getEncoding() != null) {
charset = documentInput.getEncoding();
}
List<InputStream> streams = new ArrayList<>();
streams.add(new ByteArrayInputStream(CSS_CHECKING_PROLOG));
streams.add(documentInput.getByteStream());
streams.add(new ByteArrayInputStream(CSS_CHECKING_EPILOG));
Enumeration<InputStream> e = Collections.enumeration(streams);
documentInput.setByteStream(new SequenceInputStream(e));
documentInput.setEncoding(charset);
errorHandler.setLineOffset(-1);
sourceCode.setIsCss();
parser = ParserMode.HTML;
loadDocAndSetupParser();
}
reader.setErrorHandler(errorHandler);
sourceCode.initialize(documentInput);
if (validator == null) {
checkNormalization = true;
}
if (checkNormalization) {
reader.setFeature(
"http://xml.org/sax/features/unicode-normalization-checking",
true);
}
WiretapXMLReaderWrapper wiretap = new WiretapXMLReaderWrapper(
reader);
ContentHandler recorder = sourceCode.getLocationRecorder();
if (baseUriTracker == null) {
wiretap.setWiretapContentHander(recorder);
} else {
wiretap.setWiretapContentHander(new CombineContentHandler(
recorder, baseUriTracker));
}
wiretap.setWiretapLexicalHandler((LexicalHandler) recorder);
reader = wiretap;
if (htmlParser != null) {
htmlParser.addCharacterHandler(sourceCode);
htmlParser.setMappingLangToXmlLang(true);
htmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
htmlParser.setTreeBuilderErrorHandlerOverride(errorHandler);
errorHandler.setHtml(true);
} else if (xmlParser != null) {
// this must be after wiretap!
if (!filteredNamespaces.isEmpty()) {
reader = new NamespaceDroppingXMLReaderWrapper(reader,
filteredNamespaces);
}
xmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
xmlParser.lockErrorHandler();
} else {
throw new RuntimeException("Bug. Unreachable.");
}
reader = new AttributesPermutingXMLReaderWrapper(reader); // make
// RNG
// validation
// better
if (charsetOverride != null) {
String charset = documentInput.getEncoding();
if (charset == null) {
errorHandler.warning(new SAXParseException(
"Overriding document character encoding from none to \u201C"
+ charsetOverride + "\u201D.", null));
} else {
errorHandler.warning(new SAXParseException(
"Overriding document character encoding from \u201C"
+ charset + "\u201D to \u201C"
+ charsetOverride + "\u201D.", null));
}
documentInput.setEncoding(charsetOverride);
}
if (showOutline) {
reader = new OutlineBuildingXMLReaderWrapper(reader, request, false);
reader = new OutlineBuildingXMLReaderWrapper(reader, request, true);
}
reader.parse(documentInput);
if (showOutline) {
outline = (Deque<Section>) request.getAttribute(
"http://validator.nu/properties/document-outline");
headingOutline = (Deque<Section>) request.getAttribute(
"http://validator.nu/properties/heading-outline");
}
} catch (CannotFindPresetSchemaException e) {
} catch (ResourceNotRetrievableException e) {
log4j.debug(e.getMessage());
} catch (NonXmlContentTypeException e) {
log4j.debug(e.getMessage());
} catch (FatalSAXException e) {
log4j.debug(e.getMessage());
} catch (SocketTimeoutException e) {
errorHandler.ioError(new IOException(e.getMessage(), null));
} catch (ConnectTimeoutException e) {
errorHandler.ioError(new IOException(e.getMessage(), null));
} catch (TooManyErrorsException e) {
errorHandler.fatalError(e);
} catch (SAXException e) {
String msg = e.getMessage();
if (!cannotRecover.equals(msg) && !changingEncoding.equals(msg)) {
log4j.debug("SAXException: " + e.getMessage());
}
} catch (IOException e) {
isHtmlOrXhtml = false;
errorHandler.ioError(e);
} catch (IncorrectSchemaException e) {
log4j.debug("IncorrectSchemaException", e);
errorHandler.schemaError(e);
} catch (RuntimeException e) {
isHtmlOrXhtml = false;
log4j.error("RuntimeException, doc: " + document + " schema: "
+ schemaUrls + " lax: " + laxType, e);
errorHandler.internalError(
e,
"Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
} catch (Error e) {
isHtmlOrXhtml = false;
log4j.error("Error, doc: " + document + " schema: " + schemaUrls
+ " lax: " + laxType, e);
errorHandler.internalError(
e,
"Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
} finally {
errorHandler.end(successMessage(), failureMessage(),
(String) request.getAttribute(
"http://validator.nu/properties/document-language"));
gatherStatistics();
}
if (isHtmlOrXhtml) {
XhtmlOutlineEmitter outlineEmitter = new XhtmlOutlineEmitter(
contentHandler, outline, headingOutline);
outlineEmitter.emitHeadings();
outlineEmitter.emit();
emitDetails();
StatsEmitter.emit(contentHandler, this);
}
}
/**
 * Records per-transaction statistics into the shared
 * {@link Statistics#STATISTICS} singleton: parser and input/output mode,
 * output format, schema selection, and the document-feature flags that the
 * checkers left behind as request attributes. Does nothing when statistics
 * gathering is disabled ({@code Statistics.STATISTICS == null}).
 */
private void gatherStatistics() {
    Statistics stats = Statistics.STATISTICS;
    if (stats == null) {
        return; // statistics gathering disabled for this deployment
    }
    // The Statistics instance is shared across worker threads.
    synchronized (stats) {
        stats.incrementTotal();
        if (charsetOverride != null) {
            stats.incrementField(Statistics.Field.CUSTOM_ENC);
        }
        switch (parser) {
            case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
                stats.incrementField(Statistics.Field.PARSER_XML_EXTERNAL);
                break;
            case AUTO:
            case HTML:
            case XML_NO_EXTERNAL_ENTITIES:
            default:
                // Only the external-entity XML mode is counted separately.
                break;
        }
        if (!filteredNamespaces.isEmpty()) {
            stats.incrementField(Statistics.Field.XMLNS_FILTER);
        }
        if (laxType) {
            stats.incrementField(Statistics.Field.LAX_TYPE);
        }
        if (aboutLegacyCompat) {
            stats.incrementField(Statistics.Field.ABOUT_LEGACY_COMPAT);
        }
        if (xhtml1Doctype) {
            stats.incrementField(Statistics.Field.XHTML1_DOCTYPE);
        }
        if (html4Doctype) {
            stats.incrementField(Statistics.Field.HTML4_DOCTYPE);
        }
        if (imageCollector != null) {
            stats.incrementField(Statistics.Field.IMAGE_REPORT);
        }
        if (showSource) {
            stats.incrementField(Statistics.Field.SHOW_SOURCE);
        }
        if (showOutline) {
            stats.incrementField(Statistics.Field.SHOW_OUTLINE);
        }
        if (methodIsGet) {
            stats.incrementField(Statistics.Field.INPUT_GET);
        } else { // POST
            stats.incrementField(Statistics.Field.INPUT_POST);
            Object inputType = request.getAttribute(
                    "nu.validator.servlet.MultipartFormDataFilter.type");
            if ("textarea".equals(inputType)) {
                stats.incrementField(Statistics.Field.INPUT_TEXT_FIELD);
            } else if ("file".equals(inputType)) {
                stats.incrementField(Statistics.Field.INPUT_FILE_UPLOAD);
            } else {
                stats.incrementField(Statistics.Field.INPUT_ENTITY_BODY);
            }
        }
        if ("text/css".equals(documentInput.getType())) {
            stats.incrementField(Statistics.Field.INPUT_CSS);
        } else if (htmlParser != null) {
            stats.incrementField(Statistics.Field.INPUT_HTML);
        } else if (xmlParser != null) {
            stats.incrementField(Statistics.Field.INPUT_XML);
        } else {
            stats.incrementField(Statistics.Field.INPUT_UNSUPPORTED);
        }
        switch (outputFormat) {
            case GNU:
                stats.incrementField(Statistics.Field.OUTPUT_GNU);
                break;
            case HTML:
                stats.incrementField(Statistics.Field.OUTPUT_HTML);
                break;
            case JSON:
                stats.incrementField(Statistics.Field.OUTPUT_JSON);
                break;
            case TEXT:
                stats.incrementField(Statistics.Field.OUTPUT_TEXT);
                break;
            case XHTML:
                stats.incrementField(Statistics.Field.OUTPUT_XHTML);
                break;
            case XML:
                stats.incrementField(Statistics.Field.OUTPUT_XML);
                break;
            case RELAXED:
            case SOAP:
            case UNICORN:
            default:
                break;
        }
        gatherSchemaStatistics(stats);
        gatherDocumentFeatureStatistics(stats);
        gatherLanguageStatistics(stats);
    }
}

/** Records which schema(s) were used: preset, built-in non-preset, or external. */
private void gatherSchemaStatistics(Statistics stats) {
    if (schemaListForStats == null) {
        // Validation ran without recording the schema list; should not happen.
        stats.incrementField(Statistics.Field.LOGIC_ERROR);
    } else {
        boolean preset = false;
        for (int i = 0; i < presetUrls.length; i++) {
            if (presetUrls[i].equals(schemaListForStats)) {
                preset = true;
                if (externalSchema || externalSchematron) {
                    // A preset schema list must not involve external schemas.
                    stats.incrementField(Statistics.Field.LOGIC_ERROR);
                } else {
                    stats.incrementField(Statistics.Field.PRESET_SCHEMA);
                    /*
                     * XXX WARNING WARNING: These mappings correspond to
                     * values in the presets.txt file in the validator
                     * source repo. They might be bogus if a custom
                     * presets file is used instead.
                     */
                    switch (i) {
                        case 0:
                        case 5:
                            stats.incrementField(Statistics.Field.HTML5_SCHEMA);
                            break;
                        case 1:
                        case 6:
                            stats.incrementField(Statistics.Field.HTML5_RDFA_LITE_SCHEMA);
                            break;
                        case 2:
                            stats.incrementField(Statistics.Field.HTML4_STRICT_SCHEMA);
                            break;
                        case 3:
                            stats.incrementField(Statistics.Field.HTML4_TRANSITIONAL_SCHEMA);
                            break;
                        case 4:
                            stats.incrementField(Statistics.Field.HTML4_FRAMESET_SCHEMA);
                            break;
                        case 7:
                            stats.incrementField(Statistics.Field.XHTML1_COMPOUND_SCHEMA);
                            break;
                        case 8:
                            stats.incrementField(Statistics.Field.SVG_SCHEMA);
                            break;
                        default:
                            stats.incrementField(Statistics.Field.LOGIC_ERROR);
                            break;
                    }
                }
                break;
            }
        }
        if (!preset && !externalSchema) {
            stats.incrementField(Statistics.Field.BUILT_IN_NON_PRESET);
        }
    }
    if ("".equals(schemaUrls)) {
        stats.incrementField(Statistics.Field.AUTO_SCHEMA);
        if (externalSchema) {
            // Auto-detection and an external schema are mutually exclusive.
            stats.incrementField(Statistics.Field.LOGIC_ERROR);
        }
    } else if (externalSchema) {
        if (externalSchematron) {
            stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_SCHEMATRON);
        } else {
            stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_NON_SCHEMATRON);
        }
    } else if (externalSchematron) {
        // Schematron flagged without an external schema; should not happen.
        stats.incrementField(Statistics.Field.LOGIC_ERROR);
    }
}

/**
 * Increments {@code field} when the boolean request attribute named
 * {@code "http://validator.nu/properties/" + name} is present and true.
 */
private void incrementIfPropertyTrue(Statistics stats, String name,
        Statistics.Field field) {
    Object value = request.getAttribute(
            "http://validator.nu/properties/" + name);
    if (value != null && (boolean) value) {
        stats.incrementField(field);
    }
}

/** Records the document-feature flags that checkers stored as request attributes. */
private void gatherDocumentFeatureStatistics(Statistics stats) {
    incrementIfPropertyTrue(stats, "rel-alternate-found",
            Statistics.Field.REL_ALTERNATE_FOUND);
    incrementIfPropertyTrue(stats, "rel-author-found",
            Statistics.Field.REL_AUTHOR_FOUND);
    incrementIfPropertyTrue(stats, "rel-bookmark-found",
            Statistics.Field.REL_BOOKMARK_FOUND);
    incrementIfPropertyTrue(stats, "rel-canonical-found",
            Statistics.Field.REL_CANONICAL_FOUND);
    incrementIfPropertyTrue(stats, "rel-dns-prefetch-found",
            Statistics.Field.REL_DNS_PREFETCH_FOUND);
    incrementIfPropertyTrue(stats, "rel-external-found",
            Statistics.Field.REL_EXTERNAL_FOUND);
    incrementIfPropertyTrue(stats, "rel-help-found",
            Statistics.Field.REL_HELP_FOUND);
    incrementIfPropertyTrue(stats, "rel-icon-found",
            Statistics.Field.REL_ICON_FOUND);
    incrementIfPropertyTrue(stats, "rel-license-found",
            Statistics.Field.REL_LICENSE_FOUND);
    incrementIfPropertyTrue(stats, "rel-next-found",
            Statistics.Field.REL_NEXT_FOUND);
    incrementIfPropertyTrue(stats, "rel-nofollow-found",
            Statistics.Field.REL_NOFOLLOW_FOUND);
    incrementIfPropertyTrue(stats, "rel-noopener-found",
            Statistics.Field.REL_NOOPENER_FOUND);
    incrementIfPropertyTrue(stats, "rel-noreferrer-found",
            Statistics.Field.REL_NOREFERRER_FOUND);
    incrementIfPropertyTrue(stats, "rel-pingback-found",
            Statistics.Field.REL_PINGBACK_FOUND);
    incrementIfPropertyTrue(stats, "rel-preconnect-found",
            Statistics.Field.REL_PRECONNECT_FOUND);
    incrementIfPropertyTrue(stats, "rel-prefetch-found",
            Statistics.Field.REL_PREFETCH_FOUND);
    incrementIfPropertyTrue(stats, "rel-preload-found",
            Statistics.Field.REL_PRELOAD_FOUND);
    incrementIfPropertyTrue(stats, "rel-prerender-found",
            Statistics.Field.REL_PRERENDER_FOUND);
    incrementIfPropertyTrue(stats, "rel-prev-found",
            Statistics.Field.REL_PREV_FOUND);
    incrementIfPropertyTrue(stats, "rel-search-found",
            Statistics.Field.REL_SEARCH_FOUND);
    incrementIfPropertyTrue(stats, "rel-serviceworker-found",
            Statistics.Field.REL_SERVICEWORKER_FOUND);
    incrementIfPropertyTrue(stats, "rel-stylesheet-found",
            Statistics.Field.REL_STYLESHEET_FOUND);
    incrementIfPropertyTrue(stats, "rel-tag-found",
            Statistics.Field.REL_TAG_FOUND);
    incrementIfPropertyTrue(stats, "link-with-charset-found",
            Statistics.Field.LINK_WITH_CHARSET_FOUND);
    incrementIfPropertyTrue(stats, "script-with-charset-found",
            Statistics.Field.SCRIPT_WITH_CHARSET_FOUND);
    incrementIfPropertyTrue(stats, "style-in-body-found",
            Statistics.Field.STYLE_IN_BODY_FOUND);
    incrementIfPropertyTrue(stats, "main-found",
            Statistics.Field.MAIN_FOUND);
    incrementIfPropertyTrue(stats, "main-multiple-visible-found",
            Statistics.Field.MAIN_MULTIPLE_VISIBLE_FOUND);
    incrementIfPropertyTrue(stats, "main-in-address-found",
            Statistics.Field.MAIN_IN_ADDRESS_FOUND);
    incrementIfPropertyTrue(stats, "main-in-article-found",
            Statistics.Field.MAIN_IN_ARTICLE_FOUND);
    incrementIfPropertyTrue(stats, "main-in-aside-found",
            Statistics.Field.MAIN_IN_ASIDE_FOUND);
    incrementIfPropertyTrue(stats, "main-in-blockquote-found",
            Statistics.Field.MAIN_IN_BLOCKQUOTE_FOUND);
    incrementIfPropertyTrue(stats, "main-in-caption-found",
            Statistics.Field.MAIN_IN_CAPTION_FOUND);
    incrementIfPropertyTrue(stats, "main-in-dd-found",
            Statistics.Field.MAIN_IN_DD_FOUND);
    incrementIfPropertyTrue(stats, "main-in-details-found",
            Statistics.Field.MAIN_IN_DETAILS_FOUND);
    incrementIfPropertyTrue(stats, "main-in-dialog-found",
            Statistics.Field.MAIN_IN_DIALOG_FOUND);
    incrementIfPropertyTrue(stats, "main-in-div-found",
            Statistics.Field.MAIN_IN_DIV_FOUND);
    incrementIfPropertyTrue(stats, "main-in-dt-found",
            Statistics.Field.MAIN_IN_DT_FOUND);
    incrementIfPropertyTrue(stats, "main-in-fieldset-found",
            Statistics.Field.MAIN_IN_FIELDSET_FOUND);
    incrementIfPropertyTrue(stats, "main-in-figcaption-found",
            Statistics.Field.MAIN_IN_FIGCAPTION_FOUND);
    incrementIfPropertyTrue(stats, "main-in-figure-found",
            Statistics.Field.MAIN_IN_FIGURE_FOUND);
    incrementIfPropertyTrue(stats, "main-in-footer-found",
            Statistics.Field.MAIN_IN_FOOTER_FOUND);
    incrementIfPropertyTrue(stats, "main-in-form-found",
            Statistics.Field.MAIN_IN_FORM_FOUND);
    incrementIfPropertyTrue(stats, "main-in-header-found",
            Statistics.Field.MAIN_IN_HEADER_FOUND);
    incrementIfPropertyTrue(stats, "main-in-li-found",
            Statistics.Field.MAIN_IN_LI_FOUND);
    incrementIfPropertyTrue(stats, "main-in-main-found",
            Statistics.Field.MAIN_IN_MAIN_FOUND);
    incrementIfPropertyTrue(stats, "main-in-nav-found",
            Statistics.Field.MAIN_IN_NAV_FOUND);
    incrementIfPropertyTrue(stats, "main-in-section-found",
            Statistics.Field.MAIN_IN_SECTION_FOUND);
    incrementIfPropertyTrue(stats, "main-in-td-found",
            Statistics.Field.MAIN_IN_TD_FOUND);
    incrementIfPropertyTrue(stats, "main-in-th-found",
            Statistics.Field.MAIN_IN_TH_FOUND);
    incrementIfPropertyTrue(stats, "lang-found",
            Statistics.Field.LANG_FOUND);
    incrementIfPropertyTrue(stats, "lang-wrong",
            Statistics.Field.LANG_WRONG);
    incrementIfPropertyTrue(stats, "lang-empty",
            Statistics.Field.LANG_EMPTY);
    incrementIfPropertyTrue(stats, "apple-touch-icon-with-sizes-found",
            Statistics.Field.APPLE_TOUCH_ICON_WITH_SIZES_FOUND);
}

/** Records the detected document language and the lang attribute value. */
private void gatherLanguageStatistics(Statistics stats) {
    String fieldName;
    String language = (String) request.getAttribute(
            "http://validator.nu/properties/document-language");
    if (language != null && !"".equals(language)) {
        // NOTE(review): toUpperCase() is locale-sensitive (e.g. Turkish
        // dotless i); confirm whether Locale.ROOT is intended here.
        fieldName = "DETECTEDLANG_" + language.toUpperCase();
        // Subtags with hyphens map onto explicitly named fields.
        if ("zh-hans".equals(language)) {
            fieldName = "DETECTEDLANG_ZH_HANS";
        } else if ("zh-hant".equals(language)) {
            fieldName = "DETECTEDLANG_ZH_HANT";
        } else if ("sr-latn".equals(language)) {
            fieldName = "DETECTEDLANG_SR_LATN";
        } else if ("sr-cyrl".equals(language)) {
            fieldName = "DETECTEDLANG_SR_CYRL";
        } else if ("uz-latn".equals(language)) {
            fieldName = "DETECTEDLANG_UZ_LATN";
        } else if ("uz-cyrl".equals(language)) {
            fieldName = "DETECTEDLANG_UZ_CYRL";
        }
        try {
            stats.incrementField(stats.getFieldFromName(fieldName));
        } catch (IllegalArgumentException e) {
            // Detected language with no matching statistics field.
            log4j.error(e.getMessage(), e);
        }
    }
    String langVal = (String) request.getAttribute(
            "http://validator.nu/properties/lang-value");
    if (langVal != null) {
        if ("".equals(langVal)) {
            stats.incrementField(Statistics.Field.LANG_EMPTY);
        } else {
            // Underscores are escaped by doubling; hyphens map to
            // underscores, matching the Statistics field naming scheme.
            if (langVal.contains("_")) {
                fieldName = "LANG_"
                        + langVal.replace("_", "__").toUpperCase();
            } else {
                fieldName = "LANG_"
                        + langVal.replace("-", "_").toUpperCase();
            }
            try {
                stats.incrementField(stats.getFieldFromName(fieldName));
            } catch (IllegalArgumentException e) {
                // Unknown language tag; bucket it as "other".
                stats.incrementField(Statistics.Field.LANG_OTHER);
            }
        }
    }
}
/**
 * Returns the message reported when the document passes validation.
 *
 * @return the success message text
 * @throws SAXException not thrown by this implementation; declared so that
 *         subclasses may produce the message via SAX-emitting machinery
 */
protected String successMessage() throws SAXException {
    final String message =
            "The document validates according to the specified schema(s).";
    return message;
}
/**
 * Returns the message reported when validation found errors.
 *
 * @return the failure message text
 * @throws SAXException not thrown by this implementation; declared for
 *         overriding subclasses
 */
protected String failureMessage() throws SAXException {
    final String message = "There were errors.";
    return message;
}
/**
 * Emits the "details" block of the results page (schema used, parser used,
 * externally specified encoding). Only emitted for HTML documents;
 * everything else returns without output.
 *
 * @throws SAXException if the emitter reports a SAX error
 */
void emitDetails() throws SAXException {
    Object uploadType = request.getAttribute(
            "nu.validator.servlet.MultipartFormDataFilter.type");
    String mediaType = documentInput != null ? documentInput.getType() : "";
    boolean isHtml = "text/html".equals(mediaType)
            || "text/html-sandboxed".equals(mediaType);
    if (!isHtml) {
        return;
    }
    attrs.clear();
    emitter.startElementWithClass("div", "details");
    if (schemaIsDefault) {
        emitter.startElementWithClass("p", "msgschema");
        emitter.characters(String.format("Used the schema for %s.",
                getPresetLabel(HTML5_SCHEMA)));
        emitter.endElement("p");
    }
    // NOTE(review): this "p" is never explicitly closed before the "div"
    // ends — matches the original behavior; confirm the emitter tolerates it.
    emitter.startElementWithClass("p", "msgmediatype");
    if (!isHtmlUnsafePreset()) {
        emitter.characters("Used the HTML parser.");
    }
    if (methodIsGet && !"textarea".equals(uploadType)
            && !"file".equals(uploadType)) {
        String encoding = documentInput.getEncoding();
        if (encoding != null) {
            emitter.characters(String.format(
                    " Externally specified character encoding was %s.", encoding));
        }
    }
    emitter.endElement("div");
}
/**
 * Builds the validator chain from the space-separated schema URL list in
 * {@code schemaUrls} and stores it in {@code validator}.
 *
 * @throws SAXException if schema loading reports a SAX error
 * @throws IOException if a schema cannot be retrieved
 * @throws IncorrectSchemaException if a retrieved schema is unusable
 */
protected void tryToSetupValidator() throws SAXException, IOException,
        IncorrectSchemaException {
    // validatorByUrls() also records schemaListForStats as a side effect.
    validator = validatorByUrls(schemaUrls);
}
/**
 * Applies the requested error profile ({@code profile} request parameter)
 * to the HTML parser. The "pedagogical" profile turns xhtml1 messages into
 * warnings; "polyglot" additionally turns xhtml2 messages into warnings.
 * Any other value is presumed to be permissive and leaves the parser
 * untouched.
 */
protected void setErrorProfile() {
    profile = request.getParameter("profile");
    HashMap<String, String> profileMap = new HashMap<>();
    if ("pedagogical".equals(profile)) {
        profileMap.put("xhtml1", "warn");
    } else if ("polyglot".equals(profile)) {
        profileMap.put("xhtml1", "warn");
        profileMap.put("xhtml2", "warn");
    } else {
        return; // presumed to be permissive
    }
    // Bug fix: when the XML parser path was taken, htmlParser is null and
    // the unconditional call below threw a NullPointerException whenever a
    // profile was requested. Error profiles only apply to the HTML parser.
    if (htmlParser != null) {
        htmlParser.setErrorProfile(profileMap);
    }
}
/**
 * Loads the document input and selects/configures the markup parser,
 * leaving the configured parser chain in {@code reader}. In HTML mode the
 * HTML parser is always used; in the XML modes the XML parser is always
 * used; in AUTO mode the choice is driven by the document's Content-Type.
 *
 * @throws SAXException if parser setup fails or the preset schema is
 *         inappropriate for HTML
 * @throws IOException if the document cannot be loaded
 * @throws IncorrectSchemaException if a doctype-selected schema is unusable
 * @throws SAXNotRecognizedException if a required parser feature is unknown
 * @throws SAXNotSupportedException if a required parser feature is
 *         unsupported
 */
protected void loadDocAndSetupParser() throws SAXException, IOException,
        IncorrectSchemaException, SAXNotRecognizedException,
        SAXNotSupportedException {
    switch (parser) {
        case HTML:
            // An HTML-unsafe preset (an XML-only schema) cannot be used
            // with the forced HTML parser mode; report and bail out.
            if (isHtmlUnsafePreset()) {
                String message = "The chosen preset schema is not appropriate for HTML.";
                SAXException se = new SAXException(message);
                errorHandler.schemaError(se);
                throw se;
            }
            // Restrict acceptable input types to HTML only.
            setAllowGenericXml(false);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(false);
            setAllowXhtml(false);
            loadDocumentInput();
            newHtmlParser();
            int schemaId;
            schemaId = HTML5_SCHEMA;
            htmlParser.setDocumentModeHandler(this);
            reader = htmlParser;
            // Fall back to the doctype-derived validator when no schema
            // list produced one.
            if (validator == null) {
                validator = validatorByDoctype(schemaId);
            }
            if (validator != null) {
                reader.setContentHandler(validator.getContentHandler());
            }
            reader = new LanguageDetectingXMLReaderWrapper(reader, request,
                    errorHandler, documentInput.getLanguage(),
                    documentInput.getSystemId());
            // Use counting is only wired up when statistics are enabled.
            if (Statistics.STATISTICS != null) {
                reader = new UseCountingXMLReaderWrapper(reader, request,
                        documentInput.getSystemId());
            }
            break;
        case XML_NO_EXTERNAL_ENTITIES:
        case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
            // Forced XML modes: accept any known XML type.
            setAllowGenericXml(true);
            setAllowHtml(false);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            setupXmlParser();
            break;
        default:
            // AUTO mode: accept everything and pick the parser from the
            // Content-Type of the loaded document.
            setAllowGenericXml(true);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            String type = documentInput.getType();
            if ("text/css".equals(type)) {
                // CSS is handled by the caller (wrapped in a checking
                // prolog/epilog and re-parsed); no parser needed here.
                break;
            } else if ("text/html".equals(type) || "text/html-sandboxed".equals(type)) {
                if (isHtmlUnsafePreset()) {
                    String message = "The Content-Type was \u201C" + type + "\u201D, but the chosen preset schema is not appropriate for HTML.";
                    SAXException se = new SAXException(message);
                    errorHandler.schemaError(se);
                    throw se;
                }
                newHtmlParser();
                htmlParser.setDocumentModeHandler(this);
                reader = htmlParser;
                if (validator != null) {
                    reader.setContentHandler(validator.getContentHandler());
                }
                reader = new LanguageDetectingXMLReaderWrapper(reader,
                        request, errorHandler, documentInput.getLanguage(),
                        documentInput.getSystemId());
                if (Statistics.STATISTICS != null) {
                    reader = new UseCountingXMLReaderWrapper(reader,
                            request, documentInput.getSystemId());
                }
            } else {
                // Inform the user which parser was auto-selected for known
                // XML content types.
                if (contentType != null) {
                    if ("application/xml".equals(contentType) ||
                            "text/xml".equals(contentType) ||
                            (Arrays.binarySearch(KNOWN_CONTENT_TYPES,
                                    contentType)) > -1) {
                        errorHandler.info("The Content-Type was \u201C"
                                + type
                                + "\u201D. Using the XML parser (not resolving external entities).");
                    }
                }
                setupXmlParser();
            }
            break;
    }
}
/**
 * Creates a fresh HTML parser, configures its violation policies and
 * heuristics, and publishes it to the {@code htmlParser} field.
 */
protected void newHtmlParser() {
    HtmlParser p = new HtmlParser();
    p.setCommentPolicy(XmlViolationPolicy.ALLOW);
    p.setContentNonXmlCharPolicy(XmlViolationPolicy.ALLOW);
    p.setContentSpacePolicy(XmlViolationPolicy.ALTER_INFOSET);
    p.setNamePolicy(XmlViolationPolicy.ALLOW);
    p.setStreamabilityViolationPolicy(XmlViolationPolicy.FATAL);
    p.setXmlnsPolicy(XmlViolationPolicy.ALTER_INFOSET);
    p.setMappingLangToXmlLang(true);
    p.setHeuristics(Heuristics.ALL);
    htmlParser = p;
}
/**
 * Builds a validator for the preset whose doctype id matches
 * {@code schemaId}, by looking it up in the parallel
 * {@code presetDoctypes}/{@code presetUrls} arrays.
 *
 * @param schemaId the doctype id to look up; 0 means "no schema"
 * @return the validator for the matching preset, or {@code null} when
 *         {@code schemaId} is 0
 * @throws SAXException if schema loading reports a SAX error
 * @throws IOException if a schema cannot be retrieved
 * @throws IncorrectSchemaException if a retrieved schema is unusable
 */
protected Validator validatorByDoctype(int schemaId) throws SAXException,
        IOException, IncorrectSchemaException {
    if (schemaId == 0) {
        return null;
    }
    int index = 0;
    for (int candidate : presetDoctypes) {
        if (candidate == schemaId) {
            return validatorByUrls(presetUrls[index]);
        }
        index++;
    }
    throw new RuntimeException("Doctype mappings not initialized properly.");
}
/**
 * Creates and configures the XML parser chain, leaving it in
 * {@code reader}. External entity resolution is only enabled in the
 * XML_EXTERNAL_ENTITIES_NO_VALIDATION parser mode.
 *
 * @throws SAXNotRecognizedException if a required feature is unknown
 * @throws SAXNotSupportedException if a required feature is unsupported
 */
protected void setupXmlParser() throws SAXNotRecognizedException,
        SAXNotSupportedException {
    xmlParser = new SAXDriver();
    xmlParser.setCharacterHandler(sourceCode);
    if (lexicalHandler != null) {
        xmlParser.setProperty("http://xml.org/sax/properties/lexical-handler",
                lexicalHandler);
    }
    // IdFilter wraps the driver; feature settings below go through it.
    reader = new IdFilter(xmlParser);
    reader.setFeature("http://xml.org/sax/features/string-interning", true);
    // Both external-entity features track the parser mode.
    reader.setFeature(
            "http://xml.org/sax/features/external-general-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    reader.setFeature(
            "http://xml.org/sax/features/external-parameter-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    if (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION) {
        reader.setEntityResolver(entityResolver);
    } else {
        // Refuse to resolve anything external in the safe mode.
        reader.setEntityResolver(new NullEntityResolver());
    }
    if (validator == null) {
        // No validator yet: buffer events until the root namespace is
        // known, so a validator can be chosen from it.
        bufferingRootNamespaceSniffer = new BufferingRootNamespaceSniffer(
                this);
        reader.setContentHandler(bufferingRootNamespaceSniffer);
    } else {
        reader.setContentHandler(new RootNamespaceSniffer(this,
                validator.getContentHandler()));
        reader.setDTDHandler(validator.getDTDHandler());
    }
    if (useXhtml5Schema()) {
        reader = new LanguageDetectingXMLReaderWrapper(reader, request,
                errorHandler, documentInput.getLanguage(),
                documentInput.getSystemId());
        // Use counting only when statistics gathering is enabled.
        if (Statistics.STATISTICS != null) {
            reader = new UseCountingXMLReaderWrapper(reader, request,
                    documentInput.getSystemId());
        }
    }
}
/**
 * Builds a combined validator from a space-separated list of schema URLs,
 * expanding the "all checkers" alias URLs, and records the list for
 * statistics.
 *
 * @param schemaList space-separated schema URLs
 * @return the combined validator, or {@code null} if the list produced none
 * @throws SAXException if schema loading reports a SAX error
 * @throws IOException if a schema cannot be retrieved
 * @throws IncorrectSchemaException if a retrieved schema is unusable
 */
private Validator validatorByUrls(String schemaList) throws SAXException,
        IOException, IncorrectSchemaException {
    System.setProperty("nu.validator.schema.rdfa-full", "0");
    schemaListForStats = schemaList;
    Validator combined = null;
    String[] urls = SPACE.split(schemaList);
    // Iterate in reverse so the first URL in the list ends up outermost
    // in the CombineValidator chain.
    for (int idx = urls.length - 1; idx >= 0; idx--) {
        String url = urls[idx];
        if ("http://s.validator.nu/html5-all.rnc".equals(url)) {
            System.setProperty("nu.validator.schema.rdfa-full", "1");
        }
        if ("http://c.validator.nu/all/".equals(url)
                || "http://hsivonen.iki.fi/checkers/all/".equals(url)) {
            // Alias: expand to every known checker.
            for (String checker : ALL_CHECKERS) {
                combined = combineValidatorByUrl(combined, checker);
            }
        } else if ("http://c.validator.nu/all-html4/".equals(url)
                || "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) {
            // Alias: expand to the HTML4-applicable checkers.
            for (String checker : ALL_CHECKERS_HTML4) {
                combined = combineValidatorByUrl(combined, checker);
            }
        } else {
            combined = combineValidatorByUrl(combined, url);
        }
    }
    if (imageCollector != null && combined != null) {
        combined = new CombineValidator(imageCollector, combined);
    }
    return combined;
}
/**
 * Loads the validator for {@code url} and chains it in front of the
 * accumulated validator.
 *
 * @param val the validator chain built so far, possibly {@code null}
 * @param url the schema URL to load; an empty string is ignored
 * @return the extended validator chain
 * @throws SAXException if schema loading reports a SAX error
 * @throws IOException if the schema cannot be retrieved
 * @throws IncorrectSchemaException if the schema is unusable
 */
private Validator combineValidatorByUrl(Validator val, String url)
        throws SAXException, IOException, IncorrectSchemaException {
    if ("".equals(url)) {
        return val; // an empty URL contributes nothing
    }
    Validator loaded = validatorByUrl(url);
    return (val == null) ? loaded : new CombineValidator(loaded, val);
}
/**
 * Loads a single schema URL and creates its validator. Each URL is loaded
 * at most once per transaction; repeats return {@code null}. Loading one
 * of the HTML5-family schemas also wires the spec references into the
 * error handler, and an XmlPiChecker content handler is captured as the
 * lexical handler.
 *
 * @param url the schema URL
 * @return the validator, or {@code null} if this URL was already loaded
 * @throws SAXException if schema loading reports a SAX error
 * @throws IOException if the schema cannot be retrieved
 * @throws IncorrectSchemaException if the schema is unusable
 */
private Validator validatorByUrl(String url) throws SAXException,
        IOException, IncorrectSchemaException {
    if (loadedValidatorUrls.contains(url)) {
        return null;
    }
    loadedValidatorUrls.add(url);
    boolean isHtml5Family = "http://s.validator.nu/xhtml5.rnc".equals(url)
            || "http://s.validator.nu/html5.rnc".equals(url)
            || "http://s.validator.nu/html5-all.rnc".equals(url)
            || "http://s.validator.nu/xhtml5-all.rnc".equals(url)
            || "http://s.validator.nu/html5-its.rnc".equals(url)
            || "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(url)
            || "http://s.validator.nu/html5-rdfalite.rnc".equals(url);
    if (isHtml5Family) {
        errorHandler.setSpec(html5spec);
    }
    Schema schema = resolveSchema(url, jingPropertyMap);
    Validator v = schema.createValidator(jingPropertyMap);
    if (v.getContentHandler() instanceof XmlPiChecker) {
        lexicalHandler = (LexicalHandler) v.getContentHandler();
    }
    return v;
}
/**
 * Resolves a schema URL, preferring the preloaded schema cache and falling
 * back to fetching and compiling it via the entity resolver. Fetching sets
 * the {@code externalSchema} flag (and {@code externalSchematron} when the
 * compiled schema is a Schematron implementation).
 *
 * @param url the schema URL to resolve
 * @param options schema construction properties
 * @return the resolved schema
 * @throws SAXException if schema compilation reports a SAX error
 * @throws IOException if the schema cannot be retrieved
 * @throws IncorrectSchemaException if a checker is used where an attribute
 *         schema is required, or the schema is otherwise unusable
 */
@Override
public Schema resolveSchema(String url, PropertyMap options)
        throws SAXException, IOException, IncorrectSchemaException {
    // preloadedSchemaUrls is sorted; binarySearch finds a cached schema.
    int i = Arrays.binarySearch(preloadedSchemaUrls, url);
    if (i > -1) {
        Schema rv = preloadedSchemas[i];
        if (options.contains(WrapProperty.ATTRIBUTE_OWNER)) {
            // The caller wants an attribute schema; a non-schema checker
            // cannot serve that role.
            if (rv instanceof CheckerSchema) {
                errorHandler.error(new SAXParseException(
                        "A non-schema checker cannot be used as an attribute schema.",
                        null, url, -1, -1));
                throw new IncorrectSchemaException();
            } else {
                // ugly fall through
            }
        } else {
            return rv;
        }
    }
    // Cache miss (or attribute-owner case): fetch the schema externally.
    externalSchema = true;
    TypedInputSource schemaInput = (TypedInputSource) entityResolver.resolveEntity(
            null, url);
    SchemaReader sr = null;
    if ("application/relax-ng-compact-syntax".equals(schemaInput.getType())) {
        sr = CompactSchemaReader.getInstance();
    } else {
        // AutoSchemaReader detects the schema language from the input.
        sr = new AutoSchemaReader();
    }
    Schema sch = sr.createSchema(schemaInput, options);
    // Detect Schematron by implementation class name for statistics.
    if (Statistics.STATISTICS != null && "com.thaiopensource.validate.schematron.SchemaImpl".equals(sch.getClass().getName())) {
        externalSchematron = true;
    }
    return sch;
}
/**
 * Fetches and compiles a schema from {@code url} using the given resolver,
 * without touching any per-transaction state.
 *
 * @param url the schema URL to load
 * @param resolver the entity resolver; must produce TypedInputSource
 * @param pMap schema construction properties
 * @return the compiled schema
 * @throws SAXException if schema compilation reports a SAX error
 * @throws IOException if the schema cannot be retrieved
 * @throws IncorrectSchemaException if the schema is unusable
 */
private static Schema schemaByUrl(String url, EntityResolver resolver,
        PropertyMap pMap) throws SAXException, IOException,
        IncorrectSchemaException {
    log4j.debug("Will load schema: " + url);
    TypedInputSource schemaInput;
    try {
        schemaInput = (TypedInputSource) resolver.resolveEntity(null, url);
    } catch (ClassCastException e) {
        // The resolver handed back a plain InputSource; log and rethrow.
        log4j.fatal(url, e);
        throw e;
    }
    // Compact RELAX NG gets its dedicated reader; everything else is
    // auto-detected.
    SchemaReader schemaReader = "application/relax-ng-compact-syntax".equals(
            schemaInput.getType())
                    ? CompactSchemaReader.getInstance()
                    : new AutoSchemaReader();
    return schemaReader.createSchema(schemaInput, pMap);
}
/**
 * Emits the page title: the results title plus the document identity when
 * validation will run, or the service title otherwise.
 *
 * @param markupAllowed whether inline markup (a span) may be emitted
 * @throws SAXException if the emitter fails
 */
void emitTitle(boolean markupAllowed) throws SAXException {
    if (willValidate()) {
        emitter.characters(RESULTS_TITLE);
        emitter.characters(FOR);
        if (document != null && document.length() > 0) {
            // Data URIs are shortened so the title stays readable.
            emitter.characters(scrub(shortenDataUri(document)));
        } else if (request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.filename") != null) {
            // File upload: show the uploaded file name.
            emitter.characters("uploaded file "
                    + scrub(request.getAttribute(
                            "nu.validator.servlet.MultipartFormDataFilter.filename").toString()));
        } else {
            emitter.characters("contents of text-input area");
        }
    } else {
        emitter.characters(SERVICE_TITLE);
        if (markupAllowed
                && System.getProperty("nu.validator.servlet.service-name",
                        "").equals("Validator.nu")) {
            emitter.startElement("span");
            emitter.characters(LIVING_VERSION);
            emitter.endElement("span");
        }
    }
}
/**
 * Shortens data URIs (which can be enormous) to the scheme plus an
 * ellipsis; other URIs are returned unchanged.
 *
 * @param uri the URI to possibly shorten
 * @return the shortened representation, or the original URI
 */
protected String shortenDataUri(String uri) {
    return DataUri.startsWithData(uri) ? "data:\u2026" : uri;
}
/**
 * Emits the top-level form element and its content.
 *
 * @throws SAXException if the emitter fails
 */
void emitForm() throws SAXException {
    attrs.clear();
    attrs.addAttribute("method", "get");
    // attrs.addAttribute("action", request.getRequestURL().toString());
    if (isSimple()) {
        attrs.addAttribute("class", "simple");
    }
    // attrs.addAttribute("onsubmit", "formSubmission()");
    emitter.startElement("form", attrs);
    emitFormContent();
    emitter.endElement("form");
}

/**
 * Whether the simple (reduced) form UI is in use; false here, so
 * presumably overridden by subclasses — TODO confirm.
 */
protected boolean isSimple() {
    return false;
}

/**
 * Emits the form's inner content by delegating to the form emitter.
 *
 * @throws SAXException if the emitter fails
 */
protected void emitFormContent() throws SAXException {
    FormEmitter.emit(contentHandler, this);
}

/**
 * Emits the schema-URL text input, prefilled with the current schema URLs.
 *
 * @throws SAXException if the emitter fails
 */
void emitSchemaField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "schema");
    attrs.addAttribute("id", "schema");
    // attrs.addAttribute("onchange", "schemaChanged();");
    attrs.addAttribute(
            "pattern",
            "(?:(?:(?:https?://\\S+)|(?:data:\\S+))(?:\\s+(?:(?:https?://\\S+)|(?:data:\\S+)))*)?");
    attrs.addAttribute("title",
            "Space-separated list of schema URLs. (Leave blank to let the service guess.)");
    if (schemaUrls != null) {
        attrs.addAttribute("value", scrub(schemaUrls));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

/**
 * Emits the document-URL input, prefilled with the current document URL.
 *
 * @throws SAXException if the emitter fails
 */
void emitDocField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("type", "url");
    attrs.addAttribute("name", "doc");
    attrs.addAttribute("id", "doc");
    attrs.addAttribute("pattern", "(?:(?:https?://.+)|(?:data:.+))?");
    attrs.addAttribute("title",
            "Absolute URL (http, https or data only) of the document to be checked.");
    attrs.addAttribute("tabindex", "0");
    attrs.addAttribute("autofocus", "autofocus");
    if (document != null) {
        attrs.addAttribute("value", scrub(document));
    }
    // If the request went through the multipart filter, mirror its type
    // attribute as a class on the input.
    Object att = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
    if (att != null) {
        attrs.addAttribute("class", att.toString());
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
/**
 * Emits the schema-loading duration. No-op here; presumably overridden
 * by subclasses — TODO confirm.
 *
 * @throws SAXException if the emitter fails
 */
void emitSchemaDuration() throws SAXException {
}

/**
 * Emits the document-loading duration. No-op here; presumably overridden
 * by subclasses — TODO confirm.
 *
 * @throws SAXException if the emitter fails
 */
void emitDocDuration() throws SAXException {
}

/**
 * Emits the number of milliseconds elapsed since this transaction
 * started.
 *
 * @throws SAXException if the emitter fails
 */
void emitTotalDuration() throws SAXException {
    emitter.characters("" + (System.currentTimeMillis() - start));
}
/**
 * Emits one form option per preset schema, none preselected.
 *
 * @throws SAXException if the emitter fails
 */
void emitPresetOptions() throws SAXException {
    // presetLabels and presetUrls are parallel arrays.
    int count = presetUrls.length;
    for (int index = 0; index < count; index++) {
        emitter.option(presetLabels[index], presetUrls[index], false);
    }
}
/**
 * Emits the parser-selection options, marking the current parser mode as
 * selected.
 *
 * @throws SAXException if the emitter fails
 */
void emitParserOptions() throws SAXException {
    emitter.option("Automatically from Content-Type", "",
            (parser == ParserMode.AUTO));
    emitter.option("HTML", "html", (parser == ParserMode.HTML));
    emitter.option("XML; don\u2019t load external entities", "xml",
            (parser == ParserMode.XML_NO_EXTERNAL_ENTITIES));
    emitter.option("XML; load external entities", "xmldtd",
            (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION));
}

/**
 * Emits the profile-selection options, marking the profile requested via
 * the "profile" request parameter as selected.
 *
 * @throws SAXException if the emitter fails
 */
void emitProfileOptions() throws SAXException {
    profile = request.getParameter("profile");
    emitter.option("Permissive: only what the spec requires",
            "", ("".equals(profile)));
    emitter.option("Pedagogical: suitable for teaching purposes",
            "pedagogical", ("pedagogical".equals(profile)));
    emitter.option("Polyglot: works both as HTML and as XML",
            "polyglot", ("polyglot".equals(profile)));
}
/**
 * Emits the "laxtype" (be lax about content type) checkbox.
 *
 * @throws SAXException if the emitter fails
 */
void emitLaxTypeField() throws SAXException {
    emitter.checkbox("laxtype", "yes", laxType);
}

/**
 * Emits the "showsource" checkbox.
 *
 * @throws SAXException if the emitter fails
 */
void emitShowSourceField() throws SAXException {
    emitter.checkbox("showsource", "yes", showSource);
}

/**
 * Emits the "showoutline" checkbox.
 *
 * @throws SAXException if the emitter fails
 */
void emitShowOutlineField() throws SAXException {
    emitter.checkbox("showoutline", "yes", showOutline);
}

/**
 * Emits the "showimagereport" checkbox, checked when an image collector
 * is active.
 *
 * @throws SAXException if the emitter fails
 */
void emitShowImageReportField() throws SAXException {
    emitter.checkbox("showimagereport", "yes", imageCollector != null);
}

/**
 * Emits the "checkerrorpages" checkbox.
 *
 * @throws SAXException if the emitter fails
 */
void emitCheckErrorPagesField() throws SAXException {
    emitter.checkbox("checkerrorpages", "yes", checkErrorPages);
}
/**
 * Handles the root element's namespace: when no validator has been
 * chosen yet, selects the preset schema matching the namespace; on first
 * sight of the root namespace also warns when a known Content-Type does
 * not match it.
 *
 * @param namespace the root element's namespace URI
 * @param locator source location used for warnings
 * @throws SAXException on reporting failure, or a
 *             CannotFindPresetSchemaException when no preset matches
 */
void rootNamespace(String namespace, Locator locator) throws SAXException {
    if (validator == null) {
        // Look up the preset whose namespace matches.
        int index = -1;
        for (int i = 0; i < presetNamespaces.length; i++) {
            if (namespace.equals(presetNamespaces[i])) {
                index = i;
                break;
            }
        }
        if (index == -1) {
            String message = "Cannot find preset schema for namespace: \u201C"
                    + namespace + "\u201D.";
            SAXException se = new SAXException(message);
            errorHandler.schemaError(se);
            throw new CannotFindPresetSchemaException();
        }
        String label = presetLabels[index];
        String urls = presetUrls[index];
        errorHandler.info("Using the preset for " + label
                + " based on the root namespace.");
        try {
            validator = validatorByUrls(urls);
        } catch (IncorrectSchemaException | IOException e) {
            // At this point the schema comes from memory.
            throw new RuntimeException(e);
        }
        if (bufferingRootNamespaceSniffer == null) {
            throw new RuntimeException(
                    "Bug! bufferingRootNamespaceSniffer was null.");
        }
        // Route buffered events into the freshly selected validator.
        bufferingRootNamespaceSniffer.setContentHandler(validator.getContentHandler());
    }
    if (!rootNamespaceSeen) {
        rootNamespaceSeen = true;
        if (contentType != null) {
            int i;
            // Warn when a known Content-Type disagrees with the root
            // namespace.
            if ((i = Arrays.binarySearch(KNOWN_CONTENT_TYPES, contentType)) > -1) {
                if (!NAMESPACES_FOR_KNOWN_CONTENT_TYPES[i].equals(namespace)) {
                    String message = "".equals(namespace) ? "\u201C"
                            + contentType
                            + "\u201D is not an appropriate Content-Type for a document whose root element is not in a namespace."
                            : "\u201C"
                                    + contentType
                                    + "\u201D is not an appropriate Content-Type for a document whose root namespace is \u201C"
                                    + namespace + "\u201D.";
                    SAXParseException spe = new SAXParseException(message,
                            locator);
                    errorHandler.warning(spe);
                }
            }
        }
    }
}
/**
 * Doctype callback: records doctype facts (legacy-compat, XHTML 1,
 * HTML 4) and, when no validator has been selected yet, chooses a
 * schema — sniffed from the doctype public identifier when the
 * "sniffdoctype" request parameter is "yes", or the default HTML5
 * schema otherwise — and wires it into the parse pipeline.
 *
 * @param mode the document mode reported by the parser
 * @param publicIdentifier the doctype public identifier, or null
 * @param systemIdentifier the doctype system identifier, or null
 * @throws SAXException if reporting fails
 */
@Override
public void documentMode(DocumentMode mode, String publicIdentifier,
        String systemIdentifier)
        throws SAXException {
    if (systemIdentifier != null) {
        if ("about:legacy-compat".equals(systemIdentifier)) {
            aboutLegacyCompat = true;
            errorHandler.warning(new SAXParseException(
                    "Documents should not use"
                            + " \u201cabout:legacy-compat\u201d,"
                            + " except if generated by legacy systems"
                            + " that can't output the standard"
                            + " \u201c<!DOCTYPE html>\u201d doctype.",
                    null));
        }
        if (systemIdentifier.contains("http://www.w3.org/TR/xhtml1")) {
            xhtml1Doctype = true;
        }
        if (systemIdentifier.contains("http://www.w3.org/TR/html4")) {
            html4Doctype = true;
        }
    }
    if (publicIdentifier != null) {
        if (publicIdentifier.contains("-//W3C//DTD HTML 4")) {
            html4Doctype = true;
        }
    }
    if (validator == null) {
        try {
            if ("yes".equals(request.getParameter("sniffdoctype"))) {
                // Map legacy public identifiers onto the closest preset
                // schema; each branch informs the user which schema is used.
                if ("-//W3C//DTD XHTML 1.0 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("XHTML 1.0 Transitional doctype seen. Appendix C is not supported. Proceeding anyway for your convenience. The parser is still an HTML parser, so namespace processing is not performed and \u201Cxml:*\u201D attributes are not supported. Using the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD XHTML 1.0 Strict//EN".equals(publicIdentifier)) {
                    errorHandler.info("XHTML 1.0 Strict doctype seen. Appendix C is not supported. Proceeding anyway for your convenience. The parser is still an HTML parser, so namespace processing is not performed and \u201Cxml:*\u201D attributes are not supported. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Transitional doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Strict doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Transitional doctype seen. Please consider using HTML 4.01 Transitional instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Strict doctype seen. Please consider using HTML 4.01 instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + ".");
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                }
            } else {
                schemaIsDefault = true;
                validator = validatorByDoctype(HTML5_SCHEMA);
            }
        } catch (IncorrectSchemaException | IOException e) {
            // At this point the schema comes from memory.
            throw new RuntimeException(e);
        }
        // Wire the freshly selected validator into the parse pipeline.
        ContentHandler ch = validator.getContentHandler();
        ch.setDocumentLocator(htmlParser.getDocumentLocator());
        ch.startDocument();
        reader.setContentHandler(ch);
    }
}
/**
 * Returns the human-readable label for the preset with the given schema
 * id, or "unknown" when no preset matches.
 *
 * @param schemaId the preset doctype/schema id
 * @return the matching preset label, or "unknown"
 */
private String getPresetLabel(int schemaId) {
    // presetDoctypes and presetLabels are parallel arrays.
    int index = 0;
    for (int doctype : presetDoctypes) {
        if (doctype == schemaId) {
            return presetLabels[index];
        }
        index++;
    }
    return "unknown";
}
/**
 * Propagates the accept-all-known-XML-types setting to the content-type
 * parser and both resource resolvers.
 *
 * @param acceptAllKnownXmlTypes whether all known XML types are accepted
 * @see nu.validator.xml.ContentTypeParser#setAcceptAllKnownXmlTypes(boolean)
 */
protected void setAcceptAllKnownXmlTypes(boolean acceptAllKnownXmlTypes) {
    contentTypeParser.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    dataRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    httpRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
}

/**
 * Propagates the allow-generic-XML setting to the content-type parser
 * and both resource resolvers.
 *
 * @param allowGenericXml whether generic XML is allowed
 * @see nu.validator.xml.ContentTypeParser#setAllowGenericXml(boolean)
 */
protected void setAllowGenericXml(boolean allowGenericXml) {
    contentTypeParser.setAllowGenericXml(allowGenericXml);
    httpRes.setAllowGenericXml(allowGenericXml);
    dataRes.setAllowGenericXml(allowGenericXml);
}

/**
 * Propagates the allow-HTML setting to the content-type parser and both
 * resource resolvers.
 *
 * @param allowHtml whether text/html input is allowed
 * @see nu.validator.xml.ContentTypeParser#setAllowHtml(boolean)
 */
protected void setAllowHtml(boolean allowHtml) {
    contentTypeParser.setAllowHtml(allowHtml);
    httpRes.setAllowHtml(allowHtml);
    dataRes.setAllowHtml(allowHtml);
}

/**
 * Propagates the allow-CSS setting to the content-type parser and both
 * resource resolvers.
 *
 * @param allowCss whether text/css input is allowed
 * @see nu.validator.xml.ContentTypeParser#setAllowCss(boolean)
 */
protected void setAllowCss(boolean allowCss) {
    contentTypeParser.setAllowCss(allowCss);
    httpRes.setAllowCss(allowCss);
    dataRes.setAllowCss(allowCss);
}

/**
 * Propagates the allow-RNC setting to the content-type parser, both
 * resource resolvers, and — unlike the other setters — also the entity
 * resolver.
 *
 * @param allowRnc whether RELAX NG compact syntax input is allowed
 * @see nu.validator.xml.ContentTypeParser#setAllowRnc(boolean)
 */
protected void setAllowRnc(boolean allowRnc) {
    contentTypeParser.setAllowRnc(allowRnc);
    httpRes.setAllowRnc(allowRnc);
    dataRes.setAllowRnc(allowRnc);
    entityResolver.setAllowRnc(allowRnc);
}

/**
 * Propagates the allow-XHTML setting to the content-type parser and both
 * resource resolvers.
 *
 * @param allowXhtml whether application/xhtml+xml input is allowed
 * @see nu.validator.xml.ContentTypeParser#setAllowXhtml(boolean)
 */
protected void setAllowXhtml(boolean allowXhtml) {
    contentTypeParser.setAllowXhtml(allowXhtml);
    httpRes.setAllowXhtml(allowXhtml);
    dataRes.setAllowXhtml(allowXhtml);
}
/**
 * Lazily loads the document to be checked: for GET requests the document
 * is fetched through the entity resolver; for POST the request body
 * itself is the document (bounded by SIZE_LIMIT). Also initializes the
 * image collector's base-URI context when image collection is enabled.
 *
 * @throws SAXException if the input cannot be set up
 * @throws IOException if reading fails or the size limit is exceeded
 */
protected void loadDocumentInput() throws SAXException, IOException {
    if (documentInput != null) {
        // Already loaded.
        return;
    }
    if (methodIsGet) {
        documentInput = (TypedInputSource) entityResolver.resolveEntity(
                null, document);
        errorHandler.setLoggingOk(true);
    } else { // POST
        long len = request.getContentLength();
        if (len > SIZE_LIMIT) {
            throw new StreamBoundException("Resource size exceeds limit.");
        }
        documentInput = contentTypeParser.buildTypedInputSource(document,
                null, postContentType);
        // When the declared length is unknown (negative), bound the
        // stream defensively; otherwise trust the declared length.
        documentInput.setByteStream(len < 0 ? new BoundedInputStream(
                request.getInputStream(), SIZE_LIMIT, document)
                : request.getInputStream());
        documentInput.setSystemId(request.getHeader("Content-Location"));
    }
    if (imageCollector != null) {
        baseUriTracker = new BaseUriTracker(documentInput.getSystemId(),
                documentInput.getLanguage());
        imageCollector.initializeContext(baseUriTracker);
    }
}
/**
 * Emits the stylesheet link element.
 *
 * @throws SAXException if the emitter fails
 */
void emitStyle() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", STYLE_SHEET);
    attrs.addAttribute("rel", "stylesheet");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}

/**
 * Emits the favicon link element.
 *
 * @throws SAXException if the emitter fails
 */
void emitIcon() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ICON);
    attrs.addAttribute("rel", "icon");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}

/**
 * Emits the script element referencing the UI script.
 *
 * @throws SAXException if the emitter fails
 */
void emitScript() throws SAXException {
    attrs.clear();
    attrs.addAttribute("src", SCRIPT);
    emitter.startElement("script", attrs);
    emitter.endElement("script");
}

/**
 * Emits the "about this service" link.
 *
 * @throws SAXException if the emitter fails
 */
void emitAbout() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ABOUT_PAGE);
    emitter.startElement("a", attrs);
    emitter.characters(ABOUT_THIS_SERVICE);
    emitter.endElement("a");
}

/**
 * Emits the service version string.
 *
 * @throws SAXException if the emitter fails
 */
void emitVersion() throws SAXException {
    emitter.characters(VERSION);
}

/**
 * Emits the user-agent input field, prefilled with the current value.
 *
 * @throws SAXException if the emitter fails
 */
void emitUserAgentInput() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "useragent");
    attrs.addAttribute("list", "useragents");
    attrs.addAttribute("value", userAgent);
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

/**
 * Emits the Accept-Language input field.
 *
 * @throws SAXException if the emitter fails
 */
void emitAcceptLanguageInput() throws SAXException {
    attrs.clear();
    attrs.addAttribute("id", "acceptlanguage");
    attrs.addAttribute("name", "acceptlanguage");
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

/**
 * Emits the link to the simple-UI facet of the service.
 *
 * @throws SAXException if the emitter fails
 */
void emitOtherFacetLink() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", HTML5_FACET);
    emitter.startElement("a", attrs);
    emitter.characters(SIMPLE_UI);
    emitter.endElement("a");
}
/**
 * Emits the namespace-filter input, prefilled with the currently
 * filtered namespaces joined by single spaces.
 *
 * @throws SAXException if the emitter fails
 */
void emitNsfilterField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "nsfilter");
    attrs.addAttribute("id", "nsfilter");
    attrs.addAttribute("pattern", "(?:.+:.+(?:\\s+.+:.+)*)?");
    attrs.addAttribute("title",
            "Space-separated namespace URIs for vocabularies to be filtered out.");
    if (!filteredNamespaces.isEmpty()) {
        // Join the namespaces with single spaces between them.
        StringBuilder joined = new StringBuilder();
        for (String ns : filteredNamespaces) {
            if (joined.length() > 0) {
                joined.append(' ');
            }
            joined.append(ns);
        }
        attrs.addAttribute("value", scrub(joined));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}
/**
 * Delegates emission of the namespace-filter field to its emitter.
 *
 * @throws SAXException if the emitter fails
 */
void maybeEmitNsfilterField() throws SAXException {
    NsFilterEmitter.emit(contentHandler, this);
}

/**
 * Emits the known charset options, selecting the current override; when
 * the override is not among the known charsets it is appended as an
 * extra, selected option.
 *
 * @throws SAXException if the emitter fails
 */
void emitCharsetOptions() throws SAXException {
    boolean found = false;
    for (int i = 0; i < CHARSETS.length; i++) {
        String charset = CHARSETS[i];
        // XXX should use ASCII case-insensitivity instead of
        // equalsIgnoreCase.
        boolean selected = charset.equalsIgnoreCase(charsetOverride);
        emitter.option(CHARSET_DESCRIPTIONS[i], charset, selected);
        if (selected) {
            found = true;
        }
    }
    if (!found && charsetOverride != null) {
        emitter.option(charsetOverride, charsetOverride, true);
    }
}

/**
 * Delegates emission of the charset field to its emitter.
 *
 * @throws SAXException if the emitter fails
 */
void maybeEmitCharsetField() throws SAXException {
    CharsetEmitter.emit(contentHandler, this);
}
/**
 * Thrown by rootNamespace() when no preset schema matches the document's
 * root namespace; the failure has already been reported through the
 * error handler before this is thrown.
 */
class CannotFindPresetSchemaException extends SAXException {
    CannotFindPresetSchemaException() {
        super();
    }
}
}
| Prevent NPE caused by previous commit
| src/nu/validator/servlet/VerifierServletTransaction.java | Prevent NPE caused by previous commit |
|
Java | mit | f825ed0769485fa8c446caf86f1dcf255ca40dc3 | 0 | astdb/Misc,astdb/TB_ShortProbs,astdb/Misc,astdb/Misc,astdb/TB_ShortProbs,astdb/Misc,astdb/Misc,astdb/Misc,astdb/TB_ShortProbs |
// find the contiguous subarray with the max total of elements, given an array of ints
import java.util.*;

public class ContigSubArrayTotal {
    public static void main(String[] args) {
        int[] input = {-2,1,-3,4,-1,2,1,-5,4}; // expect [4,-1,2,1]
        System.out.println(Arrays.toString(maxSubArray(input)));
    }

    /**
     * Returns the contiguous subarray of nums with the maximum sum
     * (Kadane's algorithm: O(n) time instead of the previous O(n^3)).
     *
     * Bug fixed: the previous outer loop ran subarray sizes 1..length-2
     * only, so answers of size length-1 or length were missed (e.g.
     * {1,2,3} returned {3} instead of {1,2,3}) and a single-element
     * array returned an empty result.
     *
     * @param nums the input array; null or empty yields an empty array
     * @return the maximum-sum contiguous subarray
     */
    public static int[] maxSubArray(int[] nums) {
        if (nums == null || nums.length == 0) {
            return new int[0];
        }
        int best = nums[0];       // best sum seen so far
        int bestStart = 0;        // inclusive start of the best subarray
        int bestEnd = 1;          // exclusive end of the best subarray
        int current = nums[0];    // best sum of a subarray ending at i-1
        int currentStart = 0;
        for (int i = 1; i < nums.length; i++) {
            if (current < 0) {
                // A negative running sum can only hurt; restart here.
                current = nums[i];
                currentStart = i;
            } else {
                current += nums[i];
            }
            if (current > best) {
                best = current;
                bestStart = currentStart;
                bestEnd = i + 1;
            }
        }
        return Arrays.copyOfRange(nums, bestStart, bestEnd);
    }
}
| ContigSubArrayTotal.java |
// find the contiguous subarray with the max total of elements, given an array of ints
import java.util.*;

public class ContigSubArrayTotal {
    public static void main(String[] args) {
        int[] input = {-2,1,-3,4,-1,2,1,-5,4}; // expect [4,-1,2,1]
        System.out.println(Arrays.toString(maxSubArray(input)));
    }

    /**
     * Returns the contiguous subarray of nums with the maximum sum
     * (Kadane's algorithm, O(n)).
     *
     * Bugs fixed: (1) `tot =+ nums[k]` assigned instead of accumulating,
     * so subarray totals were wrong; (2) the outer loop tried subarray
     * sizes 1..length-2 only, missing answers of size length-1 or length.
     *
     * @param nums the input array; null or empty yields an empty array
     * @return the maximum-sum contiguous subarray
     */
    public static int[] maxSubArray(int[] nums) {
        if (nums == null || nums.length == 0) {
            return new int[0];
        }
        int best = nums[0];       // best sum seen so far
        int bestStart = 0;        // inclusive start of the best subarray
        int bestEnd = 1;          // exclusive end of the best subarray
        int current = nums[0];    // best sum of a subarray ending at i-1
        int currentStart = 0;
        for (int i = 1; i < nums.length; i++) {
            if (current < 0) {
                // A negative running sum can only hurt; restart here.
                current = nums[i];
                currentStart = i;
            } else {
                current += nums[i];
            }
            if (current > best) {
                best = current;
                bestStart = currentStart;
                bestEnd = i + 1;
            }
        }
        return Arrays.copyOfRange(nums, bestStart, bestEnd);
    }
}
| Bugfix (=+ instead of += - typography, kids) | ContigSubArrayTotal.java | Bugfix (=+ instead of += - typography, kids) |
|
Java | mit | e0cabc0192ea9dc872a25c3423216d9b9b8d3a30 | 0 | ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform | /*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
package im.actor.runtime.js.websocket;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.typedarrays.shared.ArrayBuffer;
import com.google.gwt.typedarrays.shared.TypedArrays;
import com.google.gwt.typedarrays.shared.Uint8Array;
import im.actor.runtime.Log;
import im.actor.runtime.js.utils.Conversion;
import im.actor.runtime.mtproto.AsyncConnection;
import im.actor.runtime.mtproto.AsyncConnectionInterface;
import im.actor.runtime.mtproto.ConnectionEndpoint;
/**
 * Browser WebSocket transport for the MTProto connection layer, bridging
 * the GWT/JSNI WebSocket object to {@link AsyncConnection} callbacks.
 * Binary frames are exchanged as ArrayBuffers and converted to byte
 * arrays for the Java side.
 */
public class WebSocketConnection extends AsyncConnection {

    // Monotonic counter used only to build a distinct log tag per connection.
    private static int connectionId = 0;

    private final String TAG = "WebSocketConnection#" + (connectionId++);

    // Handle to the underlying browser WebSocket (opaque JSNI object).
    private JavaScriptObject jsWebSocket;
    // True until onopen fires and after close/error; guards send/receive.
    private boolean isClosed;

    public WebSocketConnection(ConnectionEndpoint endpoint, AsyncConnectionInterface connection) {
        super(endpoint, connection);
    }

    @Override
    public void doConnect() {
        Log.d(TAG, "doConnect");
        // Treated as closed until the socket's onopen callback fires.
        isClosed = true;
        String url;
        if (getEndpoint().getType() == ConnectionEndpoint.Type.WS) {
            url = "ws://" + getEndpoint().getHost() + ":" + getEndpoint().getPort() + "/";
        } else if (getEndpoint().getType() == ConnectionEndpoint.Type.WS_TLS) {
            url = "wss://" + getEndpoint().getHost() + ":" + getEndpoint().getPort() + "/";
        } else {
            throw new RuntimeException();
        }
        this.jsWebSocket = createJSWebSocket(url, this);
    }

    @Override
    public void doSend(byte[] data) {
        Log.d(TAG, "doSend");
        // Silently drop sends on a closed socket.
        if (isClosed) {
            return;
        }
        // Copy the bytes into a typed array for the browser API.
        Uint8Array push = TypedArrays.createUint8Array(data.length);
        for (int i = 0; i < data.length; i++) {
            push.set(i, data[i]);
        }
        send(push);
    }

    @Override
    public void doClose() {
        Log.d(TAG, "doClose");
        isClosed = true;
        close();
    }

    // Invoked from JSNI when a binary frame arrives; ignored once closed.
    private void onRawMessage(ArrayBuffer message) {
        Log.d(TAG, "onRawMessage");
        if (isClosed) {
            return;
        }
        onReceived(Conversion.convertBytes(message));
    }

    // Invoked from JSNI when the socket's onopen callback fires.
    private void onRawConnected() {
        Log.d(TAG, "onRawConnected");
        isClosed = false;
        onConnected();
    }

    // Invoked from JSNI on both onclose and onerror.
    private void onRawClosed() {
        Log.d(TAG, "onRawClosed");
        isClosed = true;
        onClosed();
    }

    // Native interfaces

    public native void send(Uint8Array message) /*-{
		if (message == null)
			return;
		[email protected]::jsWebSocket.send(message);
    }-*/;

    public native void close() /*-{
		[email protected]::jsWebSocket.close();
    }-*/;

    public native int getBufferedAmount() /*-{
		return [email protected]::jsWebSocket.bufferedAmount;
    }-*/;

    public native int getReadyState() /*-{
		return [email protected]::jsWebSocket.readyState;
    }-*/;

    public native String getURL() /*-{
		return [email protected]::jsWebSocket.url;
    }-*/;

    /**
     * Creates the JavaScript WebSocket component and set's all callback handlers.
     *
     * @param url
     */
    private native JavaScriptObject createJSWebSocket(final String url, final WebSocketConnection webSocket) /*-{
		var jsWebSocket = new WebSocket(url, ['binary']);
		jsWebSocket.binaryType = "arraybuffer"
		jsWebSocket.onopen = function () {
			[email protected]::onRawConnected()();
		}
		jsWebSocket.onclose = function () {
			[email protected]::onRawClosed()();
		}
		jsWebSocket.onerror = function () {
			[email protected]::onRawClosed()();
		}
		jsWebSocket.onmessage = function (socketResponse) {
			if (socketResponse.data) {
				[email protected]::onRawMessage(*)(socketResponse.data);
			}
		}
		return jsWebSocket;
    }-*/;
}
| actor-sdk/sdk-core/runtime/runtime-js/src/main/java/im/actor/runtime/js/websocket/WebSocketConnection.java | /*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
package im.actor.runtime.js.websocket;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.typedarrays.shared.ArrayBuffer;
import com.google.gwt.typedarrays.shared.TypedArrays;
import com.google.gwt.typedarrays.shared.Uint8Array;
import im.actor.runtime.Log;
import im.actor.runtime.js.utils.Conversion;
import im.actor.runtime.mtproto.AsyncConnection;
import im.actor.runtime.mtproto.AsyncConnectionInterface;
import im.actor.runtime.mtproto.ConnectionEndpoint;
/**
 * Browser WebSocket transport for the MTProto connection layer, bridging
 * the GWT/JSNI WebSocket object to {@link AsyncConnection} callbacks.
 * Binary frames are exchanged as ArrayBuffers and converted to byte
 * arrays for the Java side.
 */
public class WebSocketConnection extends AsyncConnection {

    // Monotonic counter used only to build a distinct log tag per connection.
    private static int connectionId = 0;

    private final String TAG = "WebSocketConnection#" + (connectionId++);

    // Handle to the underlying browser WebSocket (opaque JSNI object).
    private JavaScriptObject jsWebSocket;
    // True until onopen fires and after close/error; guards send/receive.
    private boolean isClosed;

    public WebSocketConnection(ConnectionEndpoint endpoint, AsyncConnectionInterface connection) {
        super(endpoint, connection);
    }

    @Override
    public void doConnect() {
        Log.d(TAG, "doConnect");
        // Treated as closed until the socket's onopen callback fires.
        isClosed = true;
        String url;
        if (getEndpoint().getType() == ConnectionEndpoint.Type.WS) {
            url = "ws://" + getEndpoint().getHost() + ":" + getEndpoint().getPort() + "/";
        } else if (getEndpoint().getType() == ConnectionEndpoint.Type.WS_TLS) {
            url = "wss://" + getEndpoint().getHost() + ":" + getEndpoint().getPort() + "/";
        } else {
            throw new RuntimeException();
        }
        this.jsWebSocket = createJSWebSocket(url, this);
    }

    @Override
    public void doSend(byte[] data) {
        Log.d(TAG, "doSend");
        // Silently drop sends on a closed socket.
        if (isClosed) {
            return;
        }
        // Copy the bytes into a typed array for the browser API.
        Uint8Array push = TypedArrays.createUint8Array(data.length);
        for (int i = 0; i < data.length; i++) {
            push.set(i, data[i]);
        }
        send(push);
    }

    @Override
    public void doClose() {
        Log.d(TAG, "doClose");
        isClosed = true;
        close();
    }

    // Invoked from JSNI when a binary frame arrives; ignored once closed.
    private void onRawMessage(ArrayBuffer message) {
        Log.d(TAG, "onRawMessage");
        if (isClosed) {
            return;
        }
        onReceived(Conversion.convertBytes(message));
    }

    // Invoked from JSNI when the socket's onopen callback fires.
    private void onRawConnected() {
        Log.d(TAG, "onRawConnected");
        isClosed = false;
        onConnected();
    }

    // Invoked from JSNI on both onclose and onerror.
    private void onRawClosed() {
        Log.d(TAG, "onRawClosed");
        isClosed = true;
        onClosed();
    }

    // Native interfaces

    public native void send(Uint8Array message) /*-{
		if (message == null)
			return;
		[email protected]::jsWebSocket.send(message);
    }-*/;

    public native void close() /*-{
		[email protected]::jsWebSocket.close();
    }-*/;

    public native int getBufferedAmount() /*-{
		return [email protected]::jsWebSocket.bufferedAmount;
    }-*/;

    public native int getReadyState() /*-{
		return [email protected]::jsWebSocket.readyState;
    }-*/;

    public native String getURL() /*-{
		return [email protected]::jsWebSocket.url;
    }-*/;

    /**
     * Creates the JavaScript WebSocket component and set's all callback handlers.
     *
     * Fix: pass the 'binary' subprotocol explicitly in the WebSocket
     * constructor so the handshake advertises the kind of WebSocket
     * that is supported; previously no protocol was requested.
     *
     * @param url
     */
    private native JavaScriptObject createJSWebSocket(final String url, final WebSocketConnection webSocket) /*-{
		var jsWebSocket = new WebSocket(url, ['binary']);
		jsWebSocket.binaryType = "arraybuffer"
		jsWebSocket.onopen = function () {
			[email protected]::onRawConnected()();
		}
		jsWebSocket.onclose = function () {
			[email protected]::onRawClosed()();
		}
		jsWebSocket.onerror = function () {
			[email protected]::onRawClosed()();
		}
		jsWebSocket.onmessage = function (socketResponse) {
			if (socketResponse.data) {
				[email protected]::onRawMessage(*)(socketResponse.data);
			}
		}
		return jsWebSocket;
    }-*/;
}
| fix(js): Passing explicitly what kind of ws is supported in WebSocket's constructor
| actor-sdk/sdk-core/runtime/runtime-js/src/main/java/im/actor/runtime/js/websocket/WebSocketConnection.java | fix(js): Passing explicitly what kind of ws is supported in WebSocket's constructor |
|
Java | epl-1.0 | 29809b070941a7ee27c6594ad175fe34b4656965 | 0 | edgarmueller/emfstore-rest | /*******************************************************************************
* Copyright (c) 2008-2011 Chair for Applied Software Engineering,
* Technische Universitaet Muenchen.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* wesendon
******************************************************************************/
package org.eclipse.emf.emfstore.server.test;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.addAndCommit;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.addElement;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.commit;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.commitToBranch;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.defaultName;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.share;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.tag;
import static org.eclipse.emf.emfstore.internal.common.APIUtil.toInternal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import org.eclipse.emf.emfstore.client.ESLocalProject;
import org.eclipse.emf.emfstore.client.ESServer;
import org.eclipse.emf.emfstore.client.ESUsersession;
import org.eclipse.emf.emfstore.client.exceptions.ESServerStartFailedException;
import org.eclipse.emf.emfstore.client.test.common.dsl.Create;
import org.eclipse.emf.emfstore.client.test.common.dsl.CreateAPI;
import org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil;
import org.eclipse.emf.emfstore.internal.client.model.ESWorkspaceProviderImpl;
import org.eclipse.emf.emfstore.internal.client.model.ModelFactory;
import org.eclipse.emf.emfstore.internal.client.model.ProjectSpace;
import org.eclipse.emf.emfstore.internal.client.model.ServerInfo;
import org.eclipse.emf.emfstore.internal.client.model.Usersession;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.KeyStoreManager;
import org.eclipse.emf.emfstore.internal.client.model.impl.api.ESLocalProjectImpl;
import org.eclipse.emf.emfstore.internal.client.model.impl.api.ESServerImpl;
import org.eclipse.emf.emfstore.internal.common.model.Project;
import org.eclipse.emf.emfstore.internal.server.exceptions.FatalESException;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidProjectIdException;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidVersionSpecException;
import org.eclipse.emf.emfstore.internal.server.model.ProjectId;
import org.eclipse.emf.emfstore.internal.server.model.ProjectInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.ChangePackage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.PrimaryVersionSpec;
import org.eclipse.emf.emfstore.server.exceptions.ESException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class ServerInterfaceTest {
private static ESServer server;
private static ESUsersession session;
/**
 * Connects to a locally running EMFStore server on port 8081 and logs in
 * as the super user. NOTE(review): the embedded-server start and initial
 * project cleanup are commented out, so the suite relies on an already
 * running, clean server — confirm this is intended.
 */
@BeforeClass
public static void beforeClass() throws IllegalArgumentException, ESServerStartFailedException,
    FatalESException, ESException, IOException {
    // server = ServerUtil.startServer();
    final ServerInfo serverInfo = ModelFactory.eINSTANCE.createServerInfo();
    serverInfo.setCertificateAlias(KeyStoreManager.getInstance().DEFAULT_CERTIFICATE);
    serverInfo.setUrl("localhost");
    serverInfo.setPort(8081);
    server = new ESServerImpl(serverInfo);
    session = server.login("super", "super");
    // deleteRemoteProjects(server, session);
    // deleteLocalProjects();
}
/**
 * Suite teardown. NOTE(review): the logout is commented out, so the
 * session is left open after the suite — confirm this is intended.
 */
@AfterClass
public static void afterClass() throws ESException {
    // session.logout();
}
/**
 * Deletes all remote and local projects after each test so test cases
 * do not see each other's projects.
 */
@After
public void after() throws Exception {
    ProjectUtil.deleteRemoteProjects(server, session);
    ProjectUtil.deleteLocalProjects();
}
/**
 * Creates an empty project on the server and verifies that it shows up
 * in the project list under the expected default name.
 * (Removed a dead local ProjectId that was created and mutated but
 * never used.)
 */
@Test
public void testCreateEmptyProject() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final Usersession usersession = toInternal(Usersession.class, session);

    final ProjectInfo projectInfo = connectionManager.createEmptyProject(
        usersession.getSessionId(),
        ProjectUtil.defaultName(),
        "Example Description",
        Create.logMessage());

    assertNotNull(projectInfo);

    final List<ProjectInfo> projectInfos = connectionManager.getProjectList(usersession.getSessionId());

    assertEquals(1, projectInfos.size());
    assertEquals(projectInfo.getName(), projectInfos.get(0).getName());
    assertEquals(projectInfo.getName(), ProjectUtil.defaultName());
}
/**
 * Creates a project with content on the server and verifies that it
 * shows up in the project list under the expected default name.
 * (Removed a dead local ProjectId that was created and mutated but
 * never used.)
 */
@Test
public void testCreateProject() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final Usersession usersession = toInternal(Usersession.class, session);

    final ProjectInfo projectInfo = connectionManager.createProject(
        usersession.getSessionId(),
        ProjectUtil.defaultName(),
        "Example Description",
        Create.logMessage(),
        ESLocalProjectImpl.class.cast(Create.project("testName")).toInternalAPI().getProject()
        );

    assertNotNull(projectInfo);

    final List<ProjectInfo> projectInfos = connectionManager.getProjectList(usersession.getSessionId());

    assertEquals(1, projectInfos.size());
    assertEquals(projectInfo.getName(), projectInfos.get(0).getName());
    assertEquals(projectInfo.getName(), ProjectUtil.defaultName());
}
/**
 * Creates a project on the server, fetches it back by the returned id
 * and version, and verifies the retrieved project equals the original.
 * (Removed a dead local ProjectId that was created and mutated but
 * never used.)
 */
@Test
public void testGetProject() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final Usersession usersession = toInternal(Usersession.class, session);
    final Project project = ESLocalProjectImpl.class.cast(Create.project("testName")).toInternalAPI().getProject();

    // Create the project on the server.
    final ProjectInfo projectInfo = connectionManager.createProject(
        usersession.getSessionId(),
        ProjectUtil.defaultName(),
        "Example Description",
        Create.logMessage(),
        project
        );

    assertNotNull(projectInfo);

    // Fetch it back and compare.
    final Project retrievedProject = connectionManager.getProject(usersession.getSessionId(),
        projectInfo.getProjectId(),
        projectInfo.getVersion());

    assertNotNull(retrievedProject);
    assertEquals(project, retrievedProject);
}
/**
 * Requesting a project by an id the server has never seen must fail with
 * {@code InvalidProjectIdException}.
 */
@Test(expected = InvalidProjectIdException.class)
public void testGetProjectWithInvalidProjectId() throws ESException {
    final Usersession usersession = toInternal(Usersession.class, session);
    final ConnectionManager manager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    // Create.projectId() yields a fresh id that was never registered on the server.
    manager.getProject(usersession.getSessionId(), Create.projectId(), Create.primaryVersionSpec(0));
}
// NOTE(review): despite its name, this test never passes a null project id; it is
// byte-for-byte the same as testGetProjectWithInvalidProjectId and sends a freshly
// created (hence unknown) id instead. Confirm whether a literal null should be
// exercised here, or rename/remove the duplicate.
@Test(expected = InvalidProjectIdException.class)
public void testGetProjectWithNullProjectId() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    connectionManager.getProject(
        toInternal(Usersession.class, session).getSessionId(),
        Create.projectId(),
        Create.primaryVersionSpec(0));
}
// @Test(expected = InvalidProjectIdException.class)
// public void testCreateVersion() throws ESException {
//
// ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
//
// ESLocalProject localProject = ProjectUtil.CreateAPI.project(ProjectUtil.defaultName());
// ProjectSpace projectSpace = APIUtil.toInternal(ProjectSpace.class, localProject);
// Usersession usersession = APIUtil.toInternal(Usersession.class, session);
// PrimaryVersionSpec baseVersionSpec = VersioningFactory.eINSTANCE.createPrimaryVersionSpec();
// BranchVersionSpec branchVersionSpec = VersioningFactory.eINSTANCE.createBranchVersionSpec();
// ProjectId projectId = ModelFactory.eINSTANCE.CreateAPI.projectId();
//
// projectId.setId(ProjectUtil.defaultName());
// baseVersionSpec.setBranch("trunk");
// baseVersionSpec.setIdentifier(0);
// branchVersionSpec.setBranch("trunk");
//
// PrimaryVersionSpec versionSpec = connectionManager.createVersion(
// usersession.getSessionId(),
// projectId,
// baseVersionSpec,
// VersioningFactory.eINSTANCE.createChangePackage(),
// branchVersionSpec,
// projectSpace.getMergedVersion(),
// VersioningFactory.eINSTANCE.createLogMessage());
// List<ProjectInfo> projectInfos = connectionManager.getProjectList(usersession.getSessionId());
// }
/**
 * Shares a project, commits one element, and checks that exactly one change
 * package is reported between version 0 and version 1.
 */
@Test
public void testGetChanges() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject =
        commit(addElement(share(session, CreateAPI.project(defaultName())), Create.testElement()));
    final Usersession usersession = toInternal(Usersession.class, session);
    final ProjectSpace projectSpace = toInternal(ProjectSpace.class, localProject);
    final List<ChangePackage> changes = connectionManager.getChanges(
        usersession.getSessionId(),
        projectSpace.getProjectId(),
        Create.primaryVersionSpec(0),
        Create.primaryVersionSpec(1));
    assertEquals(1, changes.size());
}
/**
 * After three commits, resolving a paged update spec allowing one change
 * starting from version 0 must advance exactly one step, to version 1.
 */
@Test
public void testResolvePagedUpdateVersionSpec() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    // Three commits so paging has more than one version to step through.
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
    final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.pagedUpdateVersionSpec(
            CreateAPI.primaryVersionSpec(0), 1));
    assertEquals(1, resolvedVersionSpec.getIdentifier());
}
/**
 * Tags version 1 on trunk as "footag" and checks that resolving the tag spec
 * yields version 1.
 */
@Test
public void testResolveTagVersionSpec() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
    tag(localProject, CreateAPI.primaryVersionSpec(1), "trunk", "footag");
    final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.tagVersionSpec("trunk", "footag"));
    assertEquals(1, resolvedVersionSpec.getIdentifier());
}
/**
 * After three commits on the default branch, resolving a branch version spec
 * must yield the branch HEAD, i.e. version 3.
 */
@Test
public void testResolveBranchVersionSpec() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
    final Usersession usersession = toInternal(Usersession.class, session);
    final ProjectSpace projectSpace = toInternal(ProjectSpace.class, localProject);
    final PrimaryVersionSpec head = connectionManager.resolveVersionSpec(
        usersession.getSessionId(),
        projectSpace.getProjectId(),
        Create.branchVersionSpec());
    assertEquals(3, head.getIdentifier());
}
/**
 * Commits version 1 on trunk, then version 2 on "foo-branch"; the common
 * ancestor of trunk@1 and foo-branch@2 must resolve to version 1.
 */
@Test
public void testResolveAncestorVersionSpec() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
    // Diverge: second commit goes onto a separate branch.
    commitToBranch(addElement(localProject, Create.testElement()), "foo-branch");
    final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.ancestorVersionSpec(
            Create.primaryVersionSpec(1),
            Create.primaryVersionSpec(2, "foo-branch")));
    assertEquals(1, resolvedVersionSpec.getIdentifier());
}
/**
 * Resolving by a timestamp taken between the first and second commit must
 * yield version 1, i.e. the last version that existed at that instant.
 */
@Test
public void testResolveDateVersionSpec() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
    // Capture the timestamp before a second commit is made.
    final Date now = new Date();
    commit(addElement(localProject, Create.testElement()));
    final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.dateVersionSpec(now));
    assertEquals(1, resolvedVersionSpec.getIdentifier());
}
/**
 * Deletes a shared project (including its files) and verifies the server's
 * project list is empty afterwards.
 */
@Test
public void testDeleteProject() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
    final Usersession usersession = toInternal(Usersession.class, session);
    connectionManager.deleteProject(
        usersession.getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        true);
    assertEquals(0, connectionManager.getProjectList(usersession.getSessionId()).size());
}
// NOTE(review): this is really an add/remove-tag round trip; the name undersells it.
// The tag is added, then removed, so the final resolve is expected to fail with
// InvalidVersionSpecException because the tag no longer exists.
@Test(expected = InvalidVersionSpecException.class)
public void testAddTag() throws ESException {
    final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
    // add more elements in order to create different VERSIONS
    final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
    // Attach the tag to version 1 on trunk.
    connectionManager.addTag(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.primaryVersionSpec(1),
        Create.tagVersionSpec("trunk", "footag"));
    // Immediately remove it again.
    connectionManager.removeTag(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.primaryVersionSpec(1),
        Create.tagVersionSpec("trunk", "footag"));
    // Resolving the removed tag must now throw.
    connectionManager.resolveVersionSpec(
        toInternal(Usersession.class, session).getSessionId(),
        toInternal(ProjectSpace.class, localProject).getProjectId(),
        Create.tagVersionSpec("trunk", "footag"));
}
}
| tests/org.eclipse.emf.emfstore.server.test/src/org/eclipse/emf/emfstore/server/test/ServerInterfaceTest.java | /*******************************************************************************
* Copyright (c) 2008-2011 Chair for Applied Software Engineering,
* Technische Universitaet Muenchen.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* wesendon
******************************************************************************/
package org.eclipse.emf.emfstore.server.test;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.addAndCommit;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.addElement;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.commit;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.commitToBranch;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.defaultName;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.share;
import static org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil.tag;
import static org.eclipse.emf.emfstore.internal.common.APIUtil.toInternal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import org.eclipse.emf.emfstore.client.ESLocalProject;
import org.eclipse.emf.emfstore.client.ESServer;
import org.eclipse.emf.emfstore.client.ESUsersession;
import org.eclipse.emf.emfstore.client.exceptions.ESServerStartFailedException;
import org.eclipse.emf.emfstore.client.test.common.dsl.Create;
import org.eclipse.emf.emfstore.client.test.common.dsl.CreateAPI;
import org.eclipse.emf.emfstore.client.test.common.util.ProjectUtil;
import org.eclipse.emf.emfstore.internal.client.model.ESWorkspaceProviderImpl;
import org.eclipse.emf.emfstore.internal.client.model.ModelFactory;
import org.eclipse.emf.emfstore.internal.client.model.ProjectSpace;
import org.eclipse.emf.emfstore.internal.client.model.ServerInfo;
import org.eclipse.emf.emfstore.internal.client.model.Usersession;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.KeyStoreManager;
import org.eclipse.emf.emfstore.internal.client.model.impl.api.ESLocalProjectImpl;
import org.eclipse.emf.emfstore.internal.client.model.impl.api.ESServerImpl;
import org.eclipse.emf.emfstore.internal.common.model.Project;
import org.eclipse.emf.emfstore.internal.server.exceptions.FatalESException;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidProjectIdException;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidVersionSpecException;
import org.eclipse.emf.emfstore.internal.server.model.ProjectId;
import org.eclipse.emf.emfstore.internal.server.model.ProjectInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.ChangePackage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.PrimaryVersionSpec;
import org.eclipse.emf.emfstore.server.exceptions.ESException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Integration tests for the low-level {@link ConnectionManager} server interface.
 * <p>
 * Expects an EMFStore server to be reachable on {@code localhost:8081}; every test
 * runs against a clean workspace because {@link #after()} deletes all remote and
 * local projects.
 * <p>
 * Fix: {@code testGetProject} must retrieve the project via the server-assigned
 * {@code projectInfo.getProjectId()} — the previously used, locally fabricated
 * {@code ProjectId} is unknown to the server.
 */
public class ServerInterfaceTest {

    private static ESServer server;
    private static ESUsersession session;

    /** Connects to the locally running server and logs in as the super user. */
    @BeforeClass
    public static void beforeClass() throws IllegalArgumentException, ESServerStartFailedException,
        FatalESException, ESException, IOException {
        // server = ServerUtil.startServer();
        final ServerInfo serverInfo = ModelFactory.eINSTANCE.createServerInfo();
        serverInfo.setCertificateAlias(KeyStoreManager.getInstance().DEFAULT_CERTIFICATE);
        serverInfo.setUrl("localhost");
        serverInfo.setPort(8081);
        server = new ESServerImpl(serverInfo);
        session = server.login("super", "super");
        // deleteRemoteProjects(server, session);
        // deleteLocalProjects();
    }

    @AfterClass
    public static void afterClass() throws ESException {
        // Intentionally a no-op: the shared session is kept open for reuse.
        // session.logout();
    }

    /** Deletes all remote and local projects so tests stay independent. */
    @After
    public void after() throws Exception {
        ProjectUtil.deleteRemoteProjects(server, session);
        ProjectUtil.deleteLocalProjects();
    }

    /** Creates an empty project and verifies it appears in the server's project list. */
    @Test
    public void testCreateEmptyProject() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final Usersession usersession = toInternal(Usersession.class, session);
        final ProjectInfo projectInfo = connectionManager.createEmptyProject(
            usersession.getSessionId(),
            ProjectUtil.defaultName(),
            "Example Description",
            Create.logMessage());
        assertNotNull(projectInfo);
        final List<ProjectInfo> projectInfos = connectionManager.getProjectList(usersession.getSessionId());
        assertEquals(1, projectInfos.size());
        assertEquals(projectInfo.getName(), projectInfos.get(0).getName());
        // JUnit convention: expected value first, actual value second.
        assertEquals(ProjectUtil.defaultName(), projectInfo.getName());
    }

    /** Creates a project with initial content and verifies it appears in the project list. */
    @Test
    public void testCreateProject() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final Usersession usersession = toInternal(Usersession.class, session);
        final ProjectInfo projectInfo = connectionManager.createProject(
            usersession.getSessionId(),
            ProjectUtil.defaultName(),
            "Example Description",
            Create.logMessage(),
            ESLocalProjectImpl.class.cast(Create.project("testName")).toInternalAPI().getProject()
        );
        assertNotNull(projectInfo);
        final List<ProjectInfo> projectInfos = connectionManager.getProjectList(usersession.getSessionId());
        assertEquals(1, projectInfos.size());
        assertEquals(projectInfo.getName(), projectInfos.get(0).getName());
        assertEquals(ProjectUtil.defaultName(), projectInfo.getName());
    }

    /**
     * Creates a project with content and fetches it back using the id the server
     * actually assigned.
     */
    @Test
    public void testGetProject() throws ESException {
        // create a Project
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final Usersession usersession = toInternal(Usersession.class, session);
        final Project project = ESLocalProjectImpl.class.cast(Create.project("testName")).toInternalAPI().getProject();
        final ProjectInfo projectInfo = connectionManager.createProject(
            usersession.getSessionId(),
            ProjectUtil.defaultName(),
            "Example Description",
            Create.logMessage(),
            project
        );
        assertNotNull(projectInfo);
        // get the Project: use the server-assigned id, not a locally fabricated one
        final Project retrievedProject = connectionManager.getProject(usersession.getSessionId(),
            projectInfo.getProjectId(),
            projectInfo.getVersion());
        assertNotNull(retrievedProject);
        assertEquals(project, retrievedProject);
    }

    /** Requesting a project by an id the server never saw must fail. */
    @Test(expected = InvalidProjectIdException.class)
    public void testGetProjectWithInvalidProjectId() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        connectionManager.getProject(
            toInternal(Usersession.class, session).getSessionId(),
            Create.projectId(),
            Create.primaryVersionSpec(0));
    }

    // NOTE(review): duplicates the test above and never passes a literal null id;
    // confirm the intended null-id behavior or remove/rename this test.
    @Test(expected = InvalidProjectIdException.class)
    public void testGetProjectWithNullProjectId() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        connectionManager.getProject(
            toInternal(Usersession.class, session).getSessionId(),
            Create.projectId(),
            Create.primaryVersionSpec(0));
    }

    // Removed a long commented-out draft of testCreateVersion (dead code); recover it
    // from version control if the createVersion contract is ever to be tested here.

    /** One committed element must produce exactly one change package between v0 and v1. */
    @Test
    public void testGetChanges() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = commit(
            addElement(
                share(session, CreateAPI.project(defaultName())),
                Create.testElement()));
        final List<ChangePackage> changes = connectionManager.getChanges(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.primaryVersionSpec(0),
            Create.primaryVersionSpec(1));
        assertEquals(1, changes.size());
    }

    /** Paging one change forward from v0 must resolve to v1. */
    @Test
    public void testResolvePagedUpdateVersionSpec() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
        final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.pagedUpdateVersionSpec(
                CreateAPI.primaryVersionSpec(0), 1));
        assertEquals(1, resolvedVersionSpec.getIdentifier());
    }

    /** A tag attached to v1 on trunk must resolve back to v1. */
    @Test
    public void testResolveTagVersionSpec() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
        tag(localProject, CreateAPI.primaryVersionSpec(1), "trunk", "footag");
        final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.tagVersionSpec("trunk", "footag"));
        assertEquals(1, resolvedVersionSpec.getIdentifier());
    }

    /** A branch spec must resolve to the branch HEAD (v3 after three commits). */
    @Test
    public void testResolveBranchVersionSpec() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
        final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.branchVersionSpec());
        assertEquals(3, resolvedVersionSpec.getIdentifier());
    }

    /** The common ancestor of trunk@1 and foo-branch@2 must be v1. */
    @Test
    public void testResolveAncestorVersionSpec() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
        commitToBranch(addElement(localProject, Create.testElement()), "foo-branch");
        final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.ancestorVersionSpec(
                Create.primaryVersionSpec(1),
                Create.primaryVersionSpec(2, "foo-branch")));
        assertEquals(1, resolvedVersionSpec.getIdentifier());
    }

    /** Resolving by a timestamp between the first and second commit must yield v1. */
    @Test
    public void testResolveDateVersionSpec() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
        final Date now = new Date();
        commit(addElement(localProject, Create.testElement()));
        final PrimaryVersionSpec resolvedVersionSpec = connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.dateVersionSpec(now));
        assertEquals(1, resolvedVersionSpec.getIdentifier());
    }

    /** Deleting a shared project must leave the server's project list empty. */
    @Test
    public void testDeleteProject() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(1);
        connectionManager.deleteProject(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            true);
        assertEquals(0, connectionManager.getProjectList(
            toInternal(Usersession.class, session).getSessionId()).size());
    }

    /** Add/remove-tag round trip: resolving the removed tag must throw. */
    @Test(expected = InvalidVersionSpecException.class)
    public void testAddTag() throws ESException {
        final ConnectionManager connectionManager = ESWorkspaceProviderImpl.getInstance().getConnectionManager();
        // add more elements in order to create different VERSIONS
        final ESLocalProject localProject = addAndCommit(share(session, CreateAPI.project(defaultName()))).times(3);
        connectionManager.addTag(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.primaryVersionSpec(1),
            Create.tagVersionSpec("trunk", "footag"));
        connectionManager.removeTag(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.primaryVersionSpec(1),
            Create.tagVersionSpec("trunk", "footag"));
        connectionManager.resolveVersionSpec(
            toInternal(Usersession.class, session).getSessionId(),
            toInternal(ProjectSpace.class, localProject).getProjectId(),
            Create.tagVersionSpec("trunk", "footag"));
    }
}
| debugged test case
| tests/org.eclipse.emf.emfstore.server.test/src/org/eclipse/emf/emfstore/server/test/ServerInterfaceTest.java | debugged test case |
|
Java | epl-1.0 | 01f1f32e6f3f12d4e1cd7a8cf9b54d33dd0ef69f | 0 | ollie314/che-plugins,ollie314/che-plugins,codenvy/che-plugins,ollie314/che-plugins,codenvy/che-plugins,ollie314/che-plugins,codenvy/che-plugins | /*******************************************************************************
* Copyright (c) 2012-2015 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.env.local.server;
import org.eclipse.che.api.core.NotFoundException;
import org.eclipse.che.api.core.ServerException;
import org.eclipse.che.api.core.UnauthorizedException;
import org.eclipse.che.api.user.server.dao.UserDao;
import org.eclipse.che.commons.env.EnvironmentContext;
import org.eclipse.che.commons.user.User;
import org.eclipse.che.commons.user.UserImpl;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.security.Principal;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* Set up environment variable. Only for local packaging with single workspace. Don't use it in production packaging.
*
* @author andrew00x
*/
/**
 * Binds every request to a fixed workspace (configured via the {@code ws-name} /
 * {@code ws-id} filter init parameters) and to a user resolved from the
 * {@code username} / {@code password} query parameters, defaulting to
 * {@code codenvy@codenvy.com}. Only for local packaging with a single workspace.
 * Don't use it in production packaging.
 *
 * @author andrew00x
 */
@Singleton
public class SingleEnvironmentFilter implements Filter {
    @Inject
    private UserDao userDao;

    private String wsName;
    private String wsId;

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        wsName = filterConfig.getInitParameter("ws-name");
        wsId = filterConfig.getInitParameter("ws-id");
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        final HttpServletRequest httpRequest = (HttpServletRequest)request;
        // Reuse the user cached in the HTTP session, if present.
        HttpSession session = httpRequest.getSession(false);
        User user = null;
        if (session != null) {
            user = (User)session.getAttribute("codenvy_user");
        }
        if (user == null) {
            // NOTE(review): credentials arrive in the query string — acceptable only for
            // this local development filter; never reuse this pattern in production.
            final String query = httpRequest.getQueryString();
            String username = null;
            String password = null;
            if (query != null) {
                username = getParameter("username", query);
                password = getParameter("password", query);
            }
            if (username == null) {
                username = "[email protected]";
            }
            org.eclipse.che.api.user.server.dao.User daoUser;
            try {
                daoUser = userDao.getByAlias(username);
                if (password != null) {
                    userDao.authenticate(username, password);
                }
            } catch (NotFoundException | ServerException | UnauthorizedException e) {
                throw new ServletException(e);
            }
            // Grant every role: this filter backs a single-user development workspace.
            final List<String> roles = new LinkedList<>();
            Collections.addAll(roles, "workspace/admin", "workspace/developer", "system/admin", "system/manager", "user");
            user = new UserImpl(daoUser.getName(), daoUser.getId(), "dummy_token", roles, false);
            session = httpRequest.getSession();
            session.setAttribute("codenvy_user", user);
        }
        final EnvironmentContext env = EnvironmentContext.getCurrent();
        try {
            env.setWorkspaceName(wsName);
            env.setWorkspaceId(wsId);
            env.setUser(user);
            chain.doFilter(addUserInRequest(httpRequest, user), response);
        } finally {
            // Always clear the context, even when the chain throws.
            EnvironmentContext.reset();
        }
    }

    /** Wraps the request so the servlet security API reflects the resolved user. */
    private HttpServletRequest addUserInRequest(final HttpServletRequest httpRequest, final User user) {
        return new HttpServletRequestWrapper(httpRequest) {
            @Override
            public String getRemoteUser() {
                return user.getName();
            }

            @Override
            public boolean isUserInRole(String role) {
                return user.isMemberOf(role);
            }

            @Override
            public Principal getUserPrincipal() {
                return new Principal() {
                    @Override
                    public String getName() {
                        return user.getName();
                    }
                };
            }
        };
    }

    @Override
    public void destroy() {
    }

    /**
     * Extracts the raw (undecoded) value of a query-string parameter.
     *
     * @param name  parameter name
     * @param query raw query string, e.g. {@code "username=foo&password=bar"}
     * @return the raw value, or {@code null} if the parameter is absent
     */
    private String getParameter(String name, String query) {
        int start;
        int length;
        if (query.startsWith(name + "=")) {
            start = name.length() + 1;
        } else {
            start = query.indexOf("&" + name + "=");
            // Fix: use >= 0 so a match at index 0 (query starting with '&') is offset
            // past "&name=" too; with '> 0' such a match returned an empty string.
            if (start >= 0) {
                start += (name.length() + 2);
            }
        }
        if (start >= 0) {
            length = query.indexOf('&', start);
            if (length == -1) {
                length = query.length();
            }
            return query.substring(start, length);
        }
        return null;
    }
}
| plugin-sdk/che-plugin-sdk-env-local/src/main/java/org/eclipse/che/env/local/server/SingleEnvironmentFilter.java | /*******************************************************************************
* Copyright (c) 2012-2015 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.env.local.server;
import org.eclipse.che.api.core.NotFoundException;
import org.eclipse.che.api.core.ServerException;
import org.eclipse.che.api.core.UnauthorizedException;
import org.eclipse.che.api.user.server.dao.UserDao;
import org.eclipse.che.commons.env.EnvironmentContext;
import org.eclipse.che.commons.user.User;
import org.eclipse.che.commons.user.UserImpl;

import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpSession;

import java.io.IOException;
import java.security.Principal;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* Set up environment variable. Only for local packaging with single workspace. Don't use it in production packaging.
*
* @author andrew00x
*/
/**
 * Binds every request to a fixed workspace (configured via the {@code ws-name} /
 * {@code ws-id} filter init parameters) and to a user resolved from the
 * {@code username} / {@code password} query parameters, defaulting to
 * {@code codenvy@codenvy.com}. Only for local packaging with a single workspace.
 * Don't use it in production packaging.
 * <p>
 * Fix: {@code UserDao#authenticate} throws {@code UnauthorizedException}, which the
 * multi-catch below must cover or the class does not compile.
 *
 * @author andrew00x
 */
@Singleton
public class SingleEnvironmentFilter implements Filter {
    @Inject
    private UserDao userDao;

    private String wsName;
    private String wsId;

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        wsName = filterConfig.getInitParameter("ws-name");
        wsId = filterConfig.getInitParameter("ws-id");
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        final HttpServletRequest httpRequest = (HttpServletRequest)request;
        // Reuse the user cached in the HTTP session, if present.
        HttpSession session = httpRequest.getSession(false);
        User user = null;
        if (session != null) {
            user = (User)session.getAttribute("codenvy_user");
        }
        if (user == null) {
            // NOTE(review): credentials arrive in the query string — acceptable only for
            // this local development filter; never reuse this pattern in production.
            final String query = httpRequest.getQueryString();
            String username = null;
            String password = null;
            if (query != null) {
                username = getParameter("username", query);
                password = getParameter("password", query);
            }
            if (username == null) {
                username = "[email protected]";
            }
            org.eclipse.che.api.user.server.dao.User daoUser;
            try {
                daoUser = userDao.getByAlias(username);
                if (password != null) {
                    userDao.authenticate(username, password);
                }
            } catch (NotFoundException | ServerException | UnauthorizedException e) {
                throw new ServletException(e);
            }
            // Grant every role: this filter backs a single-user development workspace.
            final List<String> roles = new LinkedList<>();
            Collections.addAll(roles, "workspace/admin", "workspace/developer", "system/admin", "system/manager", "user");
            user = new UserImpl(daoUser.getName(), daoUser.getId(), "dummy_token", roles, false);
            session = httpRequest.getSession();
            session.setAttribute("codenvy_user", user);
        }
        final EnvironmentContext env = EnvironmentContext.getCurrent();
        try {
            env.setWorkspaceName(wsName);
            env.setWorkspaceId(wsId);
            env.setUser(user);
            chain.doFilter(addUserInRequest(httpRequest, user), response);
        } finally {
            // Always clear the context, even when the chain throws.
            EnvironmentContext.reset();
        }
    }

    /** Wraps the request so the servlet security API reflects the resolved user. */
    private HttpServletRequest addUserInRequest(final HttpServletRequest httpRequest, final User user) {
        return new HttpServletRequestWrapper(httpRequest) {
            @Override
            public String getRemoteUser() {
                return user.getName();
            }

            @Override
            public boolean isUserInRole(String role) {
                return user.isMemberOf(role);
            }

            @Override
            public Principal getUserPrincipal() {
                return new Principal() {
                    @Override
                    public String getName() {
                        return user.getName();
                    }
                };
            }
        };
    }

    @Override
    public void destroy() {
    }

    /**
     * Extracts the raw (undecoded) value of a query-string parameter.
     *
     * @param name  parameter name
     * @param query raw query string, e.g. {@code "username=foo&password=bar"}
     * @return the raw value, or {@code null} if the parameter is absent
     */
    private String getParameter(String name, String query) {
        int start;
        int length;
        if (query.startsWith(name + "=")) {
            start = name.length() + 1;
        } else {
            start = query.indexOf("&" + name + "=");
            // Fix: use >= 0 so a match at index 0 (query starting with '&') is offset
            // past "&name=" too; with '> 0' such a match returned an empty string.
            if (start >= 0) {
                start += (name.length() + 2);
            }
        }
        if (start >= 0) {
            length = query.indexOf('&', start);
            if (length == -1) {
                length = query.length();
            }
            return query.substring(start, length);
        }
        return null;
    }
}
| Fix build: catching UnauthorizedException
| plugin-sdk/che-plugin-sdk-env-local/src/main/java/org/eclipse/che/env/local/server/SingleEnvironmentFilter.java | Fix build: catching UnauthorizedException |
|
Java | mpl-2.0 | 24f7dad069f60730a0f1d8a863b821ffabd56124 | 0 | maurya/openmrs-module-dhisreport,maurya/openmrs-module-dhisreport,maurya/openmrs-module-dhisreport,maurya/openmrs-module-dhisreport | /**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.module.dhisreport.api.impl;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.openmrs.api.impl.BaseOpenmrsService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.dxf2.datavalueset.DataValue;
import org.hisp.dhis.dxf2.datavalueset.DataValueSet;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.openmrs.Location;
import org.openmrs.module.dhisreport.api.DHIS2ReportingService;
import org.openmrs.module.dhisreport.api.db.DHIS2ReportingDAO;
import org.openmrs.module.dhisreport.api.dhis.DhisException;
import org.openmrs.module.dhisreport.api.dhis.HttpDhis2Server;
import org.openmrs.module.dhisreport.api.model.DataElement;
import org.openmrs.module.dhisreport.api.model.DataValueTemplate;
import org.openmrs.module.dhisreport.api.model.Disaggregation;
import org.openmrs.module.dhisreport.api.model.ReportDefinition;
import org.openmrs.module.dhisreport.api.utils.MonthlyPeriod;
/**
* It is a default implementation of {@link DHIS2ReportingService}.
*/
public class DHIS2ReportingServiceImpl extends BaseOpenmrsService implements DHIS2ReportingService {
protected final Log log = LogFactory.getLog(this.getClass());
private DHIS2ReportingDAO dao;
private HttpDhis2Server dhis2Server;
public HttpDhis2Server getDhis2Server()
{
return dhis2Server;
}
public void setDhis2Server( HttpDhis2Server dhis2Server )
{
this.dhis2Server = dhis2Server;
}
/**
* @param dao the dao to set
*/
public void setDao(DHIS2ReportingDAO dao) {
this.dao = dao;
}
/**
* @return the dao
*/
public DHIS2ReportingDAO getDao() {
return dao;
}
@Override
public void setDHISParams( URL url, String user, String password )
{
dhis2Server.setUrl( url);
dhis2Server.setUsername( user );
dhis2Server.setPassword( password );
}
@Override
public ReportDefinition getReportTemplates()
{
ReportDefinition reportTemplate = null;
try
{
reportTemplate = dhis2Server.fetchReportTemplates();
} catch ( DhisException ex )
{
Logger.getLogger( DHIS2ReportingServiceImpl.class.getName() ).log( Level.SEVERE, null, ex );
}
return reportTemplate;
}
@Override
public ImportSummary postDataValueSet(DataValueSet dvset)
{
ImportSummary summary = null;
try
{
summary = dhis2Server.postReport( dvset );
} catch ( DhisException ex )
{
Logger.getLogger( DHIS2ReportingServiceImpl.class.getName() ).log( Level.SEVERE, null, ex );
}
return summary;
}
@Override
public DataElement getDataElement( Integer id )
{
return dao.getDataElement( id );
}
@Override
public DataElement saveDataElement( DataElement de )
{
return dao.saveDataElement( de );
}
@Override
public void purgeDataElement( DataElement de )
{
dao.deleteDataElement( de );
}
@Override
public Disaggregation getDisaggregation( Integer id )
{
return dao.getDisaggregation( id );
}
@Override
public Disaggregation saveDisaggregation( Disaggregation disagg )
{
return dao.saveDisaggregation( disagg );
}
@Override
public ReportDefinition getReportDefinition( Integer id )
{
return dao.getReportDefinition( id );
}
@Override
public ReportDefinition saveReportDefinition( ReportDefinition reportDefinition )
{
return dao.saveReportDefinition( reportDefinition );
}
@Override
public Collection<DataElement> getAllDataElements()
{
return dao.getAllDataElements();
}
@Override
public void purgeDisaggregation( Disaggregation disagg )
{
dao.deleteDisaggregation( disagg );
}
@Override
public Collection<Disaggregation> getAllDisaggregations()
{
return dao.getAllDisaggregations();
}
@Override
public void purgeReportDefinition( ReportDefinition rd )
{
dao.deleteReportDefinition( rd );
}
@Override
public Collection<ReportDefinition> getAllReportDefinitions()
{
return dao.getAllReportDefinitions();
}
@Override
public String evaluateDataValueTemplate( DataValueTemplate dv, MonthlyPeriod period, Location location )
{
return dao.evaluateDataValueTemplate( dv, period, location );
}
/**
* Create a datavalueset report
* TODO: handle the sql query exceptions which are bound to happen
*
* @param reportDefinition
* @param period
* @param location
* @return
*/
@Override
public DataValueSet evaluateReportDefinition( ReportDefinition reportDefinition, MonthlyPeriod period, Location location )
{
Collection<DataValueTemplate> templates = reportDefinition.getDataValueTemplates();
DataValueSet dataValueSet = new DataValueSet();
dataValueSet.setDataElementIdScheme( "code");
dataValueSet.setOrgUnitIdScheme( "code");
dataValueSet.setPeriod( period.getAsIsoString());
dataValueSet.setOrgUnit( "OU_" + location.getId());
Collection<DataValue> dataValues = dataValueSet.getDataValues();
for (DataValueTemplate dvt : templates)
{
DataValue dataValue = new DataValue();
dataValue.setDataElement( dvt.getDataelement().getCode());
dataValue.setValue( dao.evaluateDataValueTemplate( dvt, period, location) );
dataValues.add( dataValue );
}
return dataValueSet;
}
} | api/src/main/java/org/openmrs/module/dhisreport/api/impl/DHIS2ReportingServiceImpl.java | /**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.module.dhisreport.api.impl;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.openmrs.api.impl.BaseOpenmrsService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.dxf2.datavalueset.DataValueSet;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.openmrs.Location;
import org.openmrs.module.dhisreport.api.DHIS2ReportingService;
import org.openmrs.module.dhisreport.api.db.DHIS2ReportingDAO;
import org.openmrs.module.dhisreport.api.dhis.DhisException;
import org.openmrs.module.dhisreport.api.dhis.HttpDhis2Server;
import org.openmrs.module.dhisreport.api.model.DataElement;
import org.openmrs.module.dhisreport.api.model.DataValueTemplate;
import org.openmrs.module.dhisreport.api.model.Disaggregation;
import org.openmrs.module.dhisreport.api.model.ReportDefinition;
import org.openmrs.module.dhisreport.api.utils.MonthlyPeriod;
/**
* It is a default implementation of {@link DHIS2ReportingService}.
*/
public class DHIS2ReportingServiceImpl extends BaseOpenmrsService implements DHIS2ReportingService {
// NOTE(review): pre-change snapshot of this class; a later revision implements
// evaluateReportDefinition instead of throwing UnsupportedOperationException.
protected final Log log = LogFactory.getLog(this.getClass());
// Data-access object for all persistence; injected via setter.
private DHIS2ReportingDAO dao;
// Client for the remote DHIS2 server; injected via setter.
private HttpDhis2Server dhis2Server;
public HttpDhis2Server getDhis2Server()
{
return dhis2Server;
}
public void setDhis2Server( HttpDhis2Server dhis2Server )
{
this.dhis2Server = dhis2Server;
}
/**
* @param dao the dao to set
*/
public void setDao(DHIS2ReportingDAO dao) {
this.dao = dao;
}
/**
* @return the dao
*/
public DHIS2ReportingDAO getDao() {
return dao;
}
// Configures the DHIS2 server connection parameters.
@Override
public void setDHISParams( URL url, String user, String password )
{
dhis2Server.setUrl( url);
dhis2Server.setUsername( user );
dhis2Server.setPassword( password );
}
// Fetches report templates from DHIS2; returns null on failure (logged).
// NOTE(review): uses java.util.logging here while the class also has a
// commons-logging 'log' field — inconsistent logging frameworks.
@Override
public ReportDefinition getReportTemplates()
{
ReportDefinition reportTemplate = null;
try
{
reportTemplate = dhis2Server.fetchReportTemplates();
} catch ( DhisException ex )
{
Logger.getLogger( DHIS2ReportingServiceImpl.class.getName() ).log( Level.SEVERE, null, ex );
}
return reportTemplate;
}
// Posts a data value set to DHIS2; returns null on failure (logged).
@Override
public ImportSummary postDataValueSet(DataValueSet dvset)
{
ImportSummary summary = null;
try
{
summary = dhis2Server.postReport( dvset );
} catch ( DhisException ex )
{
Logger.getLogger( DHIS2ReportingServiceImpl.class.getName() ).log( Level.SEVERE, null, ex );
}
return summary;
}
// The remaining methods are straight delegations to the DAO.
@Override
public DataElement getDataElement( Integer id )
{
return dao.getDataElement( id );
}
@Override
public DataElement saveDataElement( DataElement de )
{
return dao.saveDataElement( de );
}
@Override
public void purgeDataElement( DataElement de )
{
dao.deleteDataElement( de );
}
@Override
public Disaggregation getDisaggregation( Integer id )
{
return dao.getDisaggregation( id );
}
@Override
public Disaggregation saveDisaggregation( Disaggregation disagg )
{
return dao.saveDisaggregation( disagg );
}
@Override
public ReportDefinition getReportDefinition( Integer id )
{
return dao.getReportDefinition( id );
}
@Override
public ReportDefinition saveReportDefinition( ReportDefinition reportDefinition )
{
return dao.saveReportDefinition( reportDefinition );
}
@Override
public Collection<DataElement> getAllDataElements()
{
return dao.getAllDataElements();
}
@Override
public void purgeDisaggregation( Disaggregation disagg )
{
dao.deleteDisaggregation( disagg );
}
@Override
public Collection<Disaggregation> getAllDisaggregations()
{
return dao.getAllDisaggregations();
}
@Override
public void purgeReportDefinition( ReportDefinition rd )
{
dao.deleteReportDefinition( rd );
}
@Override
public Collection<ReportDefinition> getAllReportDefinitions()
{
return dao.getAllReportDefinitions();
}
@Override
public String evaluateDataValueTemplate( DataValueTemplate dv, MonthlyPeriod period, Location location )
{
return dao.evaluateDataValueTemplate( dv, period, location );
}
// Not yet implemented in this revision (implemented in a later commit).
@Override
public DataValueSet evaluateReportDefinition( ReportDefinition reportDefinition, MonthlyPeriod period, Location location )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
} | implemented evaluate report definition
| api/src/main/java/org/openmrs/module/dhisreport/api/impl/DHIS2ReportingServiceImpl.java | implemented evaluate report definition |
|
Java | agpl-3.0 | 5ef70ce99fd6e5503e6d8f0aeda64dada7ffdd6f | 0 | geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-client-gwt2,geomajas/geomajas-project-server,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-client-gwt2 | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2011 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.plugin.caching.step;
import org.geomajas.global.GeomajasConstant;
import org.geomajas.internal.layer.feature.InternalFeatureImpl;
import org.geomajas.layer.VectorLayer;
import org.geomajas.layer.VectorLayerService;
import org.geomajas.layer.feature.InternalFeature;
import org.geomajas.layer.feature.attribute.StringAttribute;
import org.geomajas.plugin.caching.service.CacheCategory;
import org.geomajas.plugin.caching.service.CacheManagerServiceImpl;
import org.geomajas.service.GeoService;
import org.geomajas.service.TestRecorder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.ArrayList;
import java.util.List;
/**
* Tests for the cached variant of the GetBounds pipeline.
*
* @author Joachim Van der Auwera
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"/org/geomajas/spring/geomajasContext.xml",
"/META-INF/geomajasContext.xml", "/org/geomajas/plugin/caching/DefaultCachedPipelines.xml",
"/pipelineContext.xml", "/dummySecurity.xml", "/org/geomajas/testdata/layerBeans.xml",
"/org/geomajas/spring/testRecorder.xml"})
public class GetFeaturesInvalidateUpdateTest {
// Integration test verifying that updating a feature invalidates the feature cache
// so the next getFeatures call repopulates it instead of serving stale data.
private static final String LAYER_BEANS = "beans";
@Autowired
@Qualifier(LAYER_BEANS)
private VectorLayer layerBeans;
@Autowired
private TestRecorder recorder;
@Autowired
private CacheManagerServiceImpl cacheManager;
@Autowired
private VectorLayerService vectorLayerService;
@Autowired
private GeoService geoService;
@Autowired
private org.geomajas.security.SecurityManager securityManager;
@Before
public void init() {
// Start each test from an empty cache for the layer.
cacheManager.drop(layerBeans);
securityManager.createSecurityContext(null); // assure a security context exists for this thread
}
@Test
@DirtiesContext
public void testFeaturesInvalidateUpdate() throws Exception {
List<InternalFeature> features;
// first run, this should put things in the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
// NOTE(review): recorder.matches apparently returns "" when the recorded messages
// match the expectation — confirm against the TestRecorder contract.
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Put item in cache"));
// get features again, it should now use the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
Assert.assertEquals("1", features.get(0).getId()); // assure testing same object later
Assert.assertNotSame("something has changed", features.get(0).getAttributes().get("stringAttr").getValue());
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Got item from cache"));
// update one item
recorder.clear();
InternalFeature org = features.get(0);
// Copy the feature via clone() so the original and updated versions can be diffed.
InternalFeature upd = org.clone();
upd.getAttributes().put("stringAttr", new StringAttribute("something has changed"));
List<InternalFeature> orgFeatures = new ArrayList<InternalFeature>();
orgFeatures.add(org);
List<InternalFeature> updFeatures = new ArrayList<InternalFeature>();
updFeatures.add(upd);
vectorLayerService.saveOrUpdate(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), orgFeatures, updFeatures);
Assert.assertEquals("", recorder.matches("layer",
"Invalidate geometry for old version of feature")); // not invalidating on new as not given/changed
// get features again, it should *not* use the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
Assert.assertEquals("1", features.get(0).getId()); // assure testing same object
Assert.assertEquals("something has changed", features.get(0).getAttributes().get("stringAttr").getValue());
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Put item in cache"));
}
}
| plugin/geomajas-plugin-caching/caching/src/test/java/org/geomajas/plugin/caching/step/GetFeaturesInvalidateUpdateTest.java | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2011 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.plugin.caching.step;
import org.geomajas.global.GeomajasConstant;
import org.geomajas.internal.layer.feature.InternalFeatureImpl;
import org.geomajas.layer.VectorLayer;
import org.geomajas.layer.VectorLayerService;
import org.geomajas.layer.feature.InternalFeature;
import org.geomajas.layer.feature.attribute.StringAttribute;
import org.geomajas.plugin.caching.service.CacheCategory;
import org.geomajas.plugin.caching.service.CacheManagerServiceImpl;
import org.geomajas.service.GeoService;
import org.geomajas.service.TestRecorder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.ArrayList;
import java.util.List;
/**
* Tests for the cached variant of the GetBounds pipeline.
*
* @author Joachim Van der Auwera
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"/org/geomajas/spring/geomajasContext.xml",
"/META-INF/geomajasContext.xml", "/org/geomajas/plugin/caching/DefaultCachedPipelines.xml",
"/pipelineContext.xml", "/dummySecurity.xml", "/org/geomajas/testdata/layerBeans.xml",
"/org/geomajas/spring/testRecorder.xml"})
public class GetFeaturesInvalidateUpdateTest {
// NOTE(review): pre-change snapshot; a later revision replaces the
// InternalFeatureImpl copy constructor below with org.clone().
private static final String LAYER_BEANS = "beans";
@Autowired
@Qualifier(LAYER_BEANS)
private VectorLayer layerBeans;
@Autowired
private TestRecorder recorder;
@Autowired
private CacheManagerServiceImpl cacheManager;
@Autowired
private VectorLayerService vectorLayerService;
@Autowired
private GeoService geoService;
@Autowired
private org.geomajas.security.SecurityManager securityManager;
@Before
public void init() {
// Start each test from an empty cache for the layer.
cacheManager.drop(layerBeans);
securityManager.createSecurityContext(null); // assure a security context exists for this thread
}
@Test
@DirtiesContext
public void testFeaturesInvalidateUpdate() throws Exception {
List<InternalFeature> features;
// first run, this should put things in the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Put item in cache"));
// get features again, it should now use the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
Assert.assertEquals("1", features.get(0).getId()); // assure testing same object later
Assert.assertNotSame("something has changed", features.get(0).getAttributes().get("stringAttr").getValue());
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Got item from cache"));
// update one item
recorder.clear();
InternalFeature org = features.get(0);
InternalFeature upd = new InternalFeatureImpl(org);
upd.getAttributes().put("stringAttr", new StringAttribute("something has changed"));
List<InternalFeature> orgFeatures = new ArrayList<InternalFeature>();
orgFeatures.add(org);
List<InternalFeature> updFeatures = new ArrayList<InternalFeature>();
updFeatures.add(upd);
vectorLayerService.saveOrUpdate(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), orgFeatures, updFeatures);
Assert.assertEquals("", recorder.matches("layer",
"Invalidate geometry for old version of feature")); // not invalidating on new as not given/changed
// get features again, it should *not* use the cache
recorder.clear();
features = vectorLayerService.getFeatures(LAYER_BEANS, geoService.getCrs2("EPSG:4326"), null, null,
GeomajasConstant.FEATURE_INCLUDE_ATTRIBUTES);
Assert.assertNotNull(features);
Assert.assertEquals(3, features.size());
Assert.assertEquals("1", features.get(0).getId()); // assure testing same object
Assert.assertEquals("something has changed", features.get(0).getAttributes().get("stringAttr").getValue());
Assert.assertEquals("", recorder.matches(CacheCategory.FEATURE,
"Put item in cache"));
}
}
| GBE-144: removed duplicate clone method
| plugin/geomajas-plugin-caching/caching/src/test/java/org/geomajas/plugin/caching/step/GetFeaturesInvalidateUpdateTest.java | GBE-144: removed duplicate clone method |
|
Java | agpl-3.0 | 20ae9279b3f66665b8e99104f761dd17148d8652 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | fcff0d02-2e61-11e5-9284-b827eb9e62be | hello.java | fcf98102-2e61-11e5-9284-b827eb9e62be | fcff0d02-2e61-11e5-9284-b827eb9e62be | hello.java | fcff0d02-2e61-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | 0bdb8325cf64f67337ae1125e7e91aefe4f8c06c | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | ee8524ba-2e60-11e5-9284-b827eb9e62be | hello.java | ee7fbed0-2e60-11e5-9284-b827eb9e62be | ee8524ba-2e60-11e5-9284-b827eb9e62be | hello.java | ee8524ba-2e60-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | c6fa7624d3207640397305bebada426be637354a | 0 | ua-eas/kfs-devops-automation-fork,kkronenb/kfs,UniversityOfHawaii/kfs,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,ua-eas/kfs-devops-automation-fork,smith750/kfs,ua-eas/kfs,kuali/kfs,bhutchinson/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,smith750/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs,bhutchinson/kfs,kuali/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,kuali/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,kuali/kfs,ua-eas/kfs,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,ua-eas/kfs,quikkian-ua-devops/kfs,smith750/kfs,bhutchinson/kfs,kkronenb/kfs,smith750/kfs,kuali/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs | /*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Aug 17, 2004
*
*/
package org.kuali.kfs.pdp.businessobject;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.LinkedHashMap;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.kuali.rice.kns.bo.TransientBusinessObjectBase;
/**
 * Transient, comparable summary row for a PDP format run: the number of payments and total
 * amount for one customer profile within a process, plus disbursement-numbering details.
 * Two results are equal when process id, sort group id and customer profile all match;
 * ordering is defined by the composite key built in {@link #getSortString()}.
 */
public class FormatResult extends TransientBusinessObjectBase implements Comparable {

    private Integer procId;

    private boolean pymtAttachment;

    private boolean pymtSpecialHandling;

    private boolean processImmediate;

    private CustomerProfile cust;

    private int payments;

    private BigDecimal amount;

    private DisbursementType disbursementType;

    private int beginDisbursementNbr;

    private int endDisbursementNbr;

    // Sort group override; stays null until setSortGroupOverride is called.
    private String sortGroup = null;

    public FormatResult() {
        super();
        payments = 0;
        amount = new BigDecimal(0);
    }

    public FormatResult(Integer p, CustomerProfile c) {
        procId = p;
        cust = c;
        payments = 0;
        amount = new BigDecimal(0);
    }

    public String getSortGroupId() {
        return sortGroup;
    }

    public String getSortGroupOverride() {
        return sortGroup;
    }

    public void setSortGroupOverride(String sortGroup) {
        this.sortGroup = sortGroup;
    }

    public boolean isProcessImmediate() {
        return processImmediate;
    }

    public void setProcessImmediate(boolean processImmediate) {
        this.processImmediate = processImmediate;
    }

    public boolean isPymtAttachment() {
        return pymtAttachment;
    }

    public void setPymtAttachment(boolean pymtAttachment) {
        this.pymtAttachment = pymtAttachment;
    }

    public boolean isPymtSpecialHandling() {
        return pymtSpecialHandling;
    }

    public void setPymtSpecialHandling(boolean pymtSpecialHandling) {
        this.pymtSpecialHandling = pymtSpecialHandling;
    }

    public int getBeginDisbursementNbr() {
        return beginDisbursementNbr;
    }

    public void setBeginDisbursementNbr(int beginDisbursementNbr) {
        this.beginDisbursementNbr = beginDisbursementNbr;
    }

    public DisbursementType getDisbursementType() {
        return disbursementType;
    }

    public void setDisbursementType(DisbursementType disbursementType) {
        this.disbursementType = disbursementType;
    }

    public int getEndDisbursementNbr() {
        return endDisbursementNbr;
    }

    public void setEndDisbursementNbr(int endDisbursementNbr) {
        this.endDisbursementNbr = endDisbursementNbr;
    }

    public BigDecimal getAmount() {
        return amount;
    }

    public void setAmount(BigDecimal amount) {
        this.amount = amount;
    }

    public CustomerProfile getCust() {
        return cust;
    }

    public void setCust(CustomerProfile cust) {
        this.cust = cust;
    }

    public int getPayments() {
        return payments;
    }

    public void setPayments(int payments) {
        this.payments = payments;
    }

    public Integer getProcId() {
        return procId;
    }

    public void setProcId(Integer procId) {
        this.procId = procId;
    }

    /**
     * Builds the composite ordering key: a disbursement-type marker ("B" for code "CHCK",
     * otherwise "A"), then the sort group id, then the customer's chart, org and
     * sub-unit codes.
     */
    public String getSortString() {
        String typeKey = "A";
        DisbursementType type = getDisbursementType();
        if (type != null && "CHCK".equals(type.getCode())) {
            typeKey = "B";
        }
        StringBuilder key = new StringBuilder(typeKey);
        key.append(getSortGroupId());
        key.append(cust.getChartCode());
        key.append(cust.getOrgCode());
        key.append(cust.getSubUnitCode());
        return key.toString();
    }

    public int compareTo(Object a) {
        FormatResult other = (FormatResult) a;
        return getSortString().compareTo(other.getSortString());
    }

    public boolean equals(Object obj) {
        if (!(obj instanceof FormatResult)) {
            return false;
        }
        FormatResult other = (FormatResult) obj;
        EqualsBuilder eq = new EqualsBuilder();
        eq.append(procId, other.getProcId());
        eq.append(getSortGroupId(), other.getSortGroupId());
        eq.append(cust, other.getCust());
        return eq.isEquals();
    }

    public int hashCode() {
        HashCodeBuilder hash = new HashCodeBuilder(7, 3);
        hash.append(procId);
        hash.append(getSortGroupId());
        hash.append(cust);
        return hash.toHashCode();
    }

    public String toString() {
        ToStringBuilder text = new ToStringBuilder(this);
        text.append("procId", procId);
        text.append("sortGroupId", getSortGroupId());
        text.append("cust", cust);
        return text.toString();
    }

    @Override
    protected LinkedHashMap toStringMapper() {
        LinkedHashMap map = new LinkedHashMap();
        map.put("procId", this.procId);
        return map;
    }
}
| work/src/org/kuali/kfs/pdp/businessobject/FormatResult.java | /*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Aug 17, 2004
*
*/
package org.kuali.kfs.pdp.businessobject;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.LinkedHashMap;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.kuali.rice.kns.bo.TransientBusinessObjectBase;
/**
*
*/
public class FormatResult extends TransientBusinessObjectBase implements Comparable {
// NOTE(review): pre-KFSMI-236 snapshot; this revision computes sort-group defaults
// from the payment flags, which a later revision removes.
private Integer procId;
private boolean pymtAttachment;
private boolean pymtSpecialHandling;
private boolean processImmediate;
private CustomerProfile cust;
private int payments;
private BigDecimal amount;
private DisbursementType disbursementType;
private int beginDisbursementNbr;
private int endDisbursementNbr;
// Explicit override; when null, getSortGroupId derives a default from the flags.
private String sortGroup = null;
public FormatResult() {
super();
amount = new BigDecimal(0);
payments = 0;
}
public FormatResult(Integer p, CustomerProfile c) {
procId = p;
cust = c;
amount = new BigDecimal(0);
payments = 0;
}
// Default mapping when no override is set: B=immediate, C=special handling,
// D=attachment, E=other (flag priority in that order).
public String getSortGroupId() {
if (sortGroup == null) {
if (isProcessImmediate()) {
return "B";
}
else if (isPymtSpecialHandling()) {
return "C";
}
else if (isPymtAttachment()) {
return "D";
}
else {
return "E";
}
}
else {
return sortGroup;
}
}
// Human-readable label for the current sort group id.
public String getSortGroupName() {
String sortGroup = getSortGroupId();
if ("B".equals(sortGroup)) {
return "Immediate";
}
else if ("C".equals(sortGroup)) {
return "Special Handling";
}
else if ("D".equals(sortGroup)) {
return "Attachment";
}
else {
return "Other";
}
}
public String getSortGroupOverride() {
return sortGroup;
}
public void setSortGroupOverride(String sortGroup) {
this.sortGroup = sortGroup;
}
public boolean isProcessImmediate() {
return processImmediate;
}
public void setProcessImmediate(boolean processImmediate) {
this.processImmediate = processImmediate;
}
public boolean isPymtAttachment() {
return pymtAttachment;
}
public void setPymtAttachment(boolean pymtAttachment) {
this.pymtAttachment = pymtAttachment;
}
public boolean isPymtSpecialHandling() {
return pymtSpecialHandling;
}
public void setPymtSpecialHandling(boolean pymtSpecialHandling) {
this.pymtSpecialHandling = pymtSpecialHandling;
}
public int getBeginDisbursementNbr() {
return beginDisbursementNbr;
}
public void setBeginDisbursementNbr(int beginDisbursementNbr) {
this.beginDisbursementNbr = beginDisbursementNbr;
}
public DisbursementType getDisbursementType() {
return disbursementType;
}
public void setDisbursementType(DisbursementType disbursementType) {
this.disbursementType = disbursementType;
}
public int getEndDisbursementNbr() {
return endDisbursementNbr;
}
public void setEndDisbursementNbr(int endDisbursementNbr) {
this.endDisbursementNbr = endDisbursementNbr;
}
public BigDecimal getAmount() {
return amount;
}
public void setAmount(BigDecimal amount) {
this.amount = amount;
}
public CustomerProfile getCust() {
return cust;
}
public void setCust(CustomerProfile cust) {
this.cust = cust;
}
public int getPayments() {
return payments;
}
public void setPayments(int payments) {
this.payments = payments;
}
public Integer getProcId() {
return procId;
}
public void setProcId(Integer procId) {
this.procId = procId;
}
// Composite ordering key: disbursement-type marker ("B" for CHCK, else "A"),
// sort group id, then the customer's chart/org/sub-unit codes.
public String getSortString() {
StringBuffer sb = new StringBuffer();
if (getDisbursementType() != null) {
if ("CHCK".equals(getDisbursementType().getCode())) {
sb.append("B");
}
else {
sb.append("A");
}
}
else {
sb.append("A");
}
sb.append(getSortGroupId());
sb.append(cust.getChartCode());
sb.append(cust.getOrgCode());
sb.append(cust.getSubUnitCode());
return sb.toString();
}
public int compareTo(Object a) {
FormatResult f = (FormatResult) a;
return this.getSortString().compareTo(f.getSortString());
}
public boolean equals(Object obj) {
if (!(obj instanceof FormatResult)) {
return false;
}
FormatResult o = (FormatResult) obj;
return new EqualsBuilder().append(procId, o.getProcId()).append(getSortGroupId(), o.getSortGroupId()).append(cust, o.getCust()).isEquals();
}
public int hashCode() {
return new HashCodeBuilder(7, 3).append(procId).append(getSortGroupId()).append(cust).toHashCode();
}
public String toString() {
return new ToStringBuilder(this).append("procId", procId).append("sortGroupId", getSortGroupId()).append("cust", cust).toString();
}
@Override
protected LinkedHashMap toStringMapper() {
LinkedHashMap m = new LinkedHashMap();
m.put("procId", this.procId);
return m;
}
}
| KFSMI-236
| work/src/org/kuali/kfs/pdp/businessobject/FormatResult.java | KFSMI-236 |
|
Java | lgpl-2.1 | 267933dc2afc6832dc31b93c6c496d246ba8c3d6 | 0 | tadamski/wildfly,golovnin/wildfly,iweiss/wildfly,rhusar/wildfly,99sono/wildfly,golovnin/wildfly,tadamski/wildfly,tomazzupan/wildfly,tomazzupan/wildfly,xasx/wildfly,rhusar/wildfly,jstourac/wildfly,pferraro/wildfly,wildfly/wildfly,xasx/wildfly,99sono/wildfly,tadamski/wildfly,rhusar/wildfly,jstourac/wildfly,jstourac/wildfly,iweiss/wildfly,xasx/wildfly,iweiss/wildfly,iweiss/wildfly,wildfly/wildfly,pferraro/wildfly,pferraro/wildfly,tomazzupan/wildfly,golovnin/wildfly,99sono/wildfly,wildfly/wildfly,jstourac/wildfly,rhusar/wildfly,wildfly/wildfly,pferraro/wildfly | package org.jboss.as.controller;
import java.util.ArrayList;
import org.jboss.dmr.ModelNode;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceName;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
/**
* Abstract remove step handler that simply removes a service. If the operation is rolled
* back it delegates the rollback to the corresponding add operations
* {@link AbstractAddStepHandler#performRuntime(OperationContext, org.jboss.dmr.ModelNode, org.jboss.dmr.ModelNode, ServiceVerificationHandler, java.util.List)}
* method
*
* @author Stuart Douglas
*/
public class ServiceRemoveStepHandler extends AbstractRemoveStepHandler {

    // Base name the removed service name is derived from; may be null when the
    // protected constructor is used — in that case subclasses must override
    // serviceName(String) or serviceName(String, PathAddress).
    private final ServiceName baseServiceName;
    // Add handler whose performRuntime is replayed to restore the service on rollback.
    private final AbstractAddStepHandler addOperation;

    /**
     * @param baseServiceName base name appended with the resource name to form the service name
     * @param addOperation the corresponding add handler, used to recover the service on rollback
     */
    public ServiceRemoveStepHandler(final ServiceName baseServiceName, final AbstractAddStepHandler addOperation) {
        this.baseServiceName = baseServiceName;
        this.addOperation = addOperation;
    }

    /**
     * Constructor for subclasses with non-standard service naming; leaves
     * {@code baseServiceName} null, so {@link #serviceName(String)} must be overridden.
     */
    protected ServiceRemoveStepHandler(final AbstractAddStepHandler addOperation) {
        this(null, addOperation);
    }

    /**
     * Removes the service for the resource being removed, or marks the process
     * as reload-required when runtime service restarts are not allowed.
     */
    protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model) {
        if (context.isResourceServiceRestartAllowed()) {
            final PathAddress address = PathAddress.pathAddress(operation.require(OP_ADDR));
            // The resource name is the value of the last path element.
            final String name = address.getLastElement().getValue();
            context.removeService(serviceName(name, address));
        } else {
            context.reloadRequired();
        }
    }

    /**
     * The service name to be removed. Can be overridden for unusual service naming patterns
     * @param name The name of the resource being removed
     * @param address The address of the resource being removed
     * @return The service name to remove
     */
    protected ServiceName serviceName(String name, PathAddress address) {
        return serviceName(name);
    }

    /**
     * The service name to be removed. Can be overridden for unusual service naming patterns
     * @param name The name of the resource being removed
     * @return The service name to remove
     */
    protected ServiceName serviceName(final String name) {
        return baseServiceName.append(name);
    }

    /**
     * On rollback, delegates to the add handler to re-install the service, or
     * reverts the reload-required marker set by {@link #performRuntime}.
     */
    protected void recoverServices(OperationContext context, ModelNode operation, ModelNode model) throws OperationFailedException {
        if (context.isResourceServiceRestartAllowed()) {
            addOperation.performRuntime(context, operation, model, new ServiceVerificationHandler(), new ArrayList<ServiceController<?>>());
        } else {
            context.revertReloadRequired();
        }
    }
}
| controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java | package org.jboss.as.controller;
import java.util.ArrayList;
import org.jboss.dmr.ModelNode;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceName;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
/**
* Abstract remove step handler that simply removes a service. If the operation is rolled
* back it delegates the rollback to the corresponding add operations
* {@link AbstractAddStepHandler#performRuntime(OperationContext, org.jboss.dmr.ModelNode, org.jboss.dmr.ModelNode, ServiceVerificationHandler, java.util.List)}
* method
*
* @author Stuart Douglas
*/
public class ServiceRemoveStepHandler extends AbstractRemoveStepHandler {

    // Base name the removed service name is derived from; may be null when the
    // protected constructor is used — subclasses must then override serviceName(String).
    private final ServiceName baseServiceName;
    // Add handler whose performRuntime is replayed to restore the service on rollback.
    private final AbstractAddStepHandler addOperation;

    /**
     * @param baseServiceName base name appended with the resource name to form the service name
     * @param addOperation the corresponding add handler, used to recover the service on rollback
     */
    public ServiceRemoveStepHandler(final ServiceName baseServiceName, final AbstractAddStepHandler addOperation) {
        this.baseServiceName = baseServiceName;
        this.addOperation = addOperation;
    }

    /**
     * Constructor for subclasses with non-standard service naming; leaves
     * {@code baseServiceName} null, so {@link #serviceName(String)} must be overridden.
     */
    protected ServiceRemoveStepHandler(final AbstractAddStepHandler addOperation) {
        this(null, addOperation);
    }

    /**
     * Removes the service for the resource being removed, or marks the process
     * as reload-required when runtime service restarts are not allowed.
     */
    protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model) {
        if (context.isResourceServiceRestartAllowed()) {
            final PathAddress address = PathAddress.pathAddress(operation.require(OP_ADDR));
            // The resource name is the value of the last path element.
            final String name = address.getLastElement().getValue();
            context.removeService(serviceName(name));
        } else {
            context.reloadRequired();
        }
    }

    /**
     * The service name to be removed. Can be overridden for unusual service naming patterns
     * @param name The name of the resource being removed
     * @return The service name to remove
     */
    protected ServiceName serviceName(final String name) {
        return baseServiceName.append(name);
    }

    /**
     * On rollback, delegates to the add handler to re-install the service, or
     * reverts the reload-required marker set by {@link #performRuntime}.
     */
    protected void recoverServices(OperationContext context, ModelNode operation, ModelNode model) throws OperationFailedException {
        if (context.isResourceServiceRestartAllowed()) {
            addOperation.performRuntime(context, operation, model, new ServiceVerificationHandler(), new ArrayList<ServiceController<?>>());
        } else {
            context.revertReloadRequired();
        }
    }
}
| Enhance ServiceRemoveStepHandler to handle service names that are built from the full address
| controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java | Enhance ServiceRemoveStepHandler to handle service names that are built from the full address |
|
Java | apache-2.0 | d32b967e6149939f870a2321a9c4e0c77203dfc3 | 0 | HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,andrhamm/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,andrhamm/Singularity,andrhamm/Singularity | package com.hubspot.singularity.data;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.ZKPaths;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.io.BaseEncoding;
import com.google.inject.Inject;
import com.hubspot.singularity.SingularityUserSettings;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.transcoders.Transcoder;
/**
 * Stores per-user Singularity settings under the ZooKeeper path
 * {@code /users/settings/<base64url(userId)>}.
 */
public class UserManager extends CuratorManager {

    private final Transcoder<SingularityUserSettings> settingsTranscoder;
    private final SingularityValidator validator;

    private static final String USER_ROOT = "/users";
    private static final String SETTINGS_ROOT = USER_ROOT + "/settings";

    @Inject
    public UserManager(CuratorFramework curator, SingularityConfiguration configuration, MetricRegistry metricRegistry,
        Transcoder<SingularityUserSettings> settingsTranscoder, SingularityValidator validator) {
        super(curator, configuration, metricRegistry);
        this.settingsTranscoder = settingsTranscoder;
        this.validator = validator;
    }

    /**
     * Validates the user id and base64url-encodes it so it is safe to use as a
     * single ZooKeeper node name (no path separators or unsafe characters).
     */
    private String encodeUserId(String userId) {
        validator.checkUserId(userId);
        return BaseEncoding.base64Url().encode(userId.getBytes(Charsets.UTF_8));
    }

    // ZK node path for the given user's settings.
    private String getUserSettingsPath(String userId) {
        return ZKPaths.makePath(SETTINGS_ROOT, encodeUserId(userId));
    }

    /** Creates or overwrites the stored settings for the given user. */
    public void updateUserSettings(String userId, SingularityUserSettings userSettings) {
        save(getUserSettingsPath(userId), userSettings, settingsTranscoder);
    }

    /** Returns the stored settings for the given user, absent when none exist. */
    public Optional<SingularityUserSettings> getUserSettings(String userId) {
        return getData(getUserSettingsPath(userId), settingsTranscoder);
    }

    /** Deletes the stored settings for the given user, if any. */
    public void deleteUserSettings(String userId) {
        delete(getUserSettingsPath(userId));
    }
}
| SingularityService/src/main/java/com/hubspot/singularity/data/UserManager.java | package com.hubspot.singularity.data;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.ZKPaths;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Optional;
import com.google.inject.Inject;
import com.hubspot.singularity.SingularityUserSettings;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.transcoders.Transcoder;
/**
 * Stores per-user Singularity settings under the ZooKeeper path
 * {@code /users/settings/<userId>}.
 */
public class UserManager extends CuratorManager {

    private final Transcoder<SingularityUserSettings> settingsTranscoder;

    private static final String USER_ROOT = "/users";
    private static final String SETTINGS_ROOT = USER_ROOT + "/settings";

    @Inject
    public UserManager(CuratorFramework curator, SingularityConfiguration configuration, MetricRegistry metricRegistry,
        Transcoder<SingularityUserSettings> settingsTranscoder) {
        super(curator, configuration, metricRegistry);
        this.settingsTranscoder = settingsTranscoder;
    }

    // ZK node path for the given user's settings.
    // NOTE(review): userId is used verbatim as a node name; an id containing
    // '/' or other ZK-unsafe characters would alter the path — confirm that
    // upstream validation/encoding guarantees a safe id.
    private String getUserSettingsPath(String userId) {
        return ZKPaths.makePath(SETTINGS_ROOT, userId);
    }

    /** Creates or overwrites the stored settings for the given user. */
    public void updateUserSettings(String userId, SingularityUserSettings userSettings) {
        save(getUserSettingsPath(userId), userSettings, settingsTranscoder);
    }

    /** Returns the stored settings for the given user, absent when none exist. */
    public Optional<SingularityUserSettings> getUserSettings(String userId) {
        return getData(getUserSettingsPath(userId), settingsTranscoder);
    }

    /** Deletes the stored settings for the given user, if any. */
    public void deleteUserSettings(String userId) {
        delete(getUserSettingsPath(userId));
    }
}
| Move encoding work for ZK userId into UserManager
| SingularityService/src/main/java/com/hubspot/singularity/data/UserManager.java | Move encoding work for ZK userId into UserManager |
|
Java | apache-2.0 | d5ac27d79b1c3fd817b5798b13b0e0e45965659b | 0 | EMBL-EBI-SUBS/subs-api,EMBL-EBI-SUBS/subs-api | package uk.ac.ebi.subs.api.resourceAssembly;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.*;
import org.springframework.stereotype.Component;
import uk.ac.ebi.subs.repository.model.StoredSubmittable;
/**
 * Wraps an entity in a HATEOAS {@link Resource}, adding a link to the entity
 * itself (under its default rel and as "self") and, for submittables that
 * belong to a submission, a link to that submission.
 *
 * @param <T> the entity type being assembled
 */
@Component
public class SimpleResourceAssembler<T extends Identifiable> implements ResourceAssembler<T, Resource<T>> {

    // Fix: removed the private, never-assigned and never-read resourceProcessor field.
    private final EntityLinks entityLinks;

    public SimpleResourceAssembler(@Autowired EntityLinks entityLinks) {
        this.entityLinks = entityLinks;
    }

    @Override
    public Resource<T> toResource(T entity) {
        Resource<T> resource = new Resource<T>(entity);

        if (resource.getContent() != null) {
            // Link to the entity itself, both under its default rel and as "self".
            Link link = entityLinks.linkToSingleResource(resource.getContent());
            resource.add(link);
            resource.add(link.withSelfRel());
        }

        // Only link to the parent submission when the entity actually has one.
        if (
                resource.getContent() != null &&
                resource.getContent() instanceof StoredSubmittable &&
                ((StoredSubmittable) resource.getContent()).getSubmission() != null
        ) {
            StoredSubmittable storedSubmittable = (StoredSubmittable) resource.getContent();
            Link link = entityLinks.linkToSingleResource(storedSubmittable.getSubmission());
            resource.add(link);
        }

        return resource;
    }
}
| src/main/java/uk/ac/ebi/subs/api/resourceAssembly/SimpleResourceAssembler.java | package uk.ac.ebi.subs.api.resourceAssembly;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.*;
import org.springframework.stereotype.Component;
import uk.ac.ebi.subs.repository.model.StoredSubmittable;
/**
 * Wraps an entity in a HATEOAS {@link Resource}, adding a link to the entity
 * itself (under its default rel and as "self") and, for submittables, a link
 * to their submission.
 *
 * @param <T> the entity type being assembled
 */
@Component
public class SimpleResourceAssembler<T extends Identifiable> implements ResourceAssembler<T, Resource<T>> {

    public SimpleResourceAssembler(@Autowired EntityLinks entityLinks) {
        this.entityLinks = entityLinks;
    }

    // Unused in this revision.
    private ResourceProcessor<Resource<T>> resourceProcessor;
    private EntityLinks entityLinks;

    @Override
    public Resource<T> toResource(T entity) {
        Resource<T> resource = new Resource<T>(entity);

        if (resource.getContent() != null ) {
            // Link to the entity itself, both under its default rel and as "self".
            Link link = entityLinks.linkToSingleResource(resource.getContent());
            resource.add(link);
            resource.add(link.withSelfRel());
        }

        // NOTE(review): linkToSingleResource is invoked even when
        // getSubmission() returns null — presumably that fails for
        // submittables without a submission; confirm against callers.
        if (resource.getContent() != null && resource.getContent() instanceof StoredSubmittable){
            StoredSubmittable storedSubmittable = (StoredSubmittable)resource.getContent();
            Link link = entityLinks.linkToSingleResource(storedSubmittable.getSubmission());
            resource.add(link);
        }

        return resource;
    }
}
| ensure you have a submission to link to
| src/main/java/uk/ac/ebi/subs/api/resourceAssembly/SimpleResourceAssembler.java | ensure you have a submission to link to |
|
Java | apache-2.0 | 59ed808e7c5dfad6c3e32a26064225a3614abb6b | 0 | ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf | /*
Copyright 2016 Integration Partners B.V.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.webcontrol.api;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.Principal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.annotation.security.RolesAllowed;
import javax.servlet.ServletConfig;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import org.apache.commons.lang.StringUtils;
import org.jboss.resteasy.plugins.providers.multipart.InputPart;
import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput;
import nl.nn.adapterframework.configuration.Configuration;
import nl.nn.adapterframework.configuration.ConfigurationUtils;
import nl.nn.adapterframework.configuration.classloaders.DatabaseClassLoader;
import nl.nn.adapterframework.jdbc.FixedQuerySender;
import nl.nn.adapterframework.jms.JmsRealmFactory;
import nl.nn.adapterframework.util.Misc;
/**
* Shows the configuration (with resolved variables).
*
* @author Niels Meijer
*/
@Path("/")
public final class ShowConfiguration extends Base {
    @Context ServletConfig servletConfig;
    @Context SecurityContext securityContext;

    /**
     * Returns the XML of every configuration known to the IbisManager,
     * concatenated into one response body.
     *
     * @param loadedConfiguration when true the original configuration XML is
     *        returned, otherwise the loaded one.
     *        NOTE(review): the flag name suggests the opposite mapping — confirm intent.
     */
    @GET
    @RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
    @Path("/configurations")
    @Produces(MediaType.APPLICATION_XML)
    public Response getXMLConfiguration(@QueryParam("loadedConfiguration") boolean loadedConfiguration) throws ApiException {
        initBase(servletConfig);

        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder result = new StringBuilder();
        for (Configuration configuration : ibisManager.getConfigurations()) {
            if (loadedConfiguration) {
                result.append(configuration.getOriginalConfiguration());
            } else {
                result.append(configuration.getLoadedConfiguration());
            }
        }
        // NOTE(review): 201 CREATED is unusual for a GET; kept for backwards compatibility.
        return Response.status(Response.Status.CREATED).entity(result.toString()).build();
    }

    /**
     * Returns the XML of a single configuration; see
     * {@link #getXMLConfiguration(boolean)} for the flag semantics.
     */
    @GET
    @RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
    @Path("/configurations/{configuration}")
    @Produces(MediaType.APPLICATION_XML)
    public Response getConfigurationByName(@PathParam("configuration") String configurationName, @QueryParam("loadedConfiguration") boolean loadedConfiguration) throws ApiException {
        initBase(servletConfig);

        String result = "";

        Configuration configuration = ibisManager.getConfiguration(configurationName);
        if (loadedConfiguration) {
            result = configuration.getOriginalConfiguration();
        } else {
            result = configuration.getLoadedConfiguration();
        }

        return Response.status(Response.Status.CREATED).entity(result).build();
    }

    /**
     * Lists the database-stored revisions of a configuration; responds 204
     * when the configuration is not database-backed.
     */
    @GET
    @RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
    @Path("/configurations/manage/{configuration}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getConfigurationDetailsByName(@PathParam("configuration") String configurationName, @QueryParam("realm") String jmsRealm) throws ApiException {
        initBase(servletConfig);

        Configuration configuration = ibisManager.getConfiguration(configurationName);
        if(configuration == null) {
            throw new ApiException("Configuration not found!");
        }

        // Only configurations loaded via the DatabaseClassLoader have revisions in the database.
        if(configuration.getClassLoader().getParent() instanceof DatabaseClassLoader) {
            return Response.status(Response.Status.CREATED).entity(getConfigFromDatabase(configurationName, jmsRealm)).build();
        }

        return Response.status(Response.Status.NO_CONTENT).build();
    }

    /**
     * Uploads a configuration into the database. The multipart body must
     * contain "realm", "name", "version" and "file"; optional parts are
     * "file_encoding", "multiple_configs" (treat the file as a zip of
     * configurations), "activate_config" and "automatic_reload".
     *
     * @throws ApiException with status 400 when a required part is missing
     */
    @POST
    @RolesAllowed({"IbisTester"})
    @Path("configurations")
    @Produces(MediaType.APPLICATION_JSON)
    public Response uploadConfiguration(MultipartFormDataInput input) throws ApiException {
        initBase(servletConfig);

        String jmsRealm = null, name = null, version = null, fileName = null, fileEncoding = Misc.DEFAULT_INPUT_STREAM_ENCODING;
        InputStream file = null;
        boolean multiple_configs = false, activate_config = true, automatic_reload = false;

        Map<String, List<InputPart>> inputDataMap = input.getFormDataMap();
        if(inputDataMap == null) {
            throw new ApiException("Missing post parameters");
        }
        try {
            if(inputDataMap.get("realm") != null)
                jmsRealm = inputDataMap.get("realm").get(0).getBodyAsString();
            else
                throw new ApiException("JMS realm not defined", 400);

            if(inputDataMap.get("name") != null)
                name = inputDataMap.get("name").get(0).getBodyAsString();
            else
                throw new ApiException("No name specified", 400);

            if(inputDataMap.get("file_encoding") != null)
                fileEncoding = inputDataMap.get("file_encoding").get(0).getBodyAsString();

            if(inputDataMap.get("version") != null)
                version = inputDataMap.get("version").get(0).getBodyAsString();
            else
                throw new ApiException("No version specified", 400);

            if(inputDataMap.get("multiple_configs") != null)
                multiple_configs = inputDataMap.get("multiple_configs").get(0).getBody(boolean.class, null);

            if(inputDataMap.get("activate_config") != null)
                activate_config = inputDataMap.get("activate_config").get(0).getBody(boolean.class, null);

            if(inputDataMap.get("automatic_reload") != null)
                automatic_reload = inputDataMap.get("automatic_reload").get(0).getBody(boolean.class, null);

            if(inputDataMap.get("file") != null)
                file = inputDataMap.get("file").get(0).getBody(InputStream.class, null);
            else
                throw new ApiException("No file specified", 400);

            // Extract the original file name from the multipart Content-Disposition header.
            MultivaluedMap<String, String> headers = inputDataMap.get("file").get(0).getHeaders();
            String[] contentDispositionHeader = headers.getFirst("Content-Disposition").split(";");
            for (String fName : contentDispositionHeader) {
                if ((fName.trim().startsWith("filename"))) {
                    String[] tmp = fName.split("=");
                    fileName = tmp[1].trim().replaceAll("\"","");
                }
            }
        }
        catch (IOException e) {
            // Fix: keep the cause in the log instead of swallowing it.
            log.warn("failed to parse upload parameters", e);
            throw new ApiException("Failed to parse one or more parameters!");
        }

        try {
            String result = "";

            if(multiple_configs) {
                // Fall back to deriving name/version from the file name when neither was posted.
                if (StringUtils.isEmpty(name) && StringUtils.isEmpty(version)) {
                    String[] fnArray = splitFilename(fileName);
                    if (fnArray[0] != null) {
                        name = fnArray[0];
                    }
                    if (fnArray[1] != null) {
                        version = fnArray[1];
                    }
                }
            }

            String user = null;
            Principal principal = securityContext.getUserPrincipal();
            if(principal != null)
                user = ""+principal;

            if(multiple_configs) {
                try {
                    // Fix: pass the JMS realm and the activation flag. Previously the
                    // file name was passed as realm and automatic_reload was passed twice.
                    result = processZipFile(file, fileEncoding, jmsRealm, automatic_reload, activate_config, user);
                } catch (IOException e) {
                    throw new ApiException(e);
                }
            } else {
                ConfigurationUtils.addConfigToDatabase(ibisContext, jmsRealm, activate_config, automatic_reload, name, version, fileName, file, user);
            }

            return Response.status(Response.Status.CREATED).entity(result).build();
        } catch (ApiException e) {
            // Fix: do not replace specific ApiExceptions with a generic message.
            throw e;
        } catch (Exception e) {
            // Fix: keep the cause in the log instead of swallowing it.
            log.warn("failed to upload configuration", e);
            throw new ApiException("Failed to upload Configuration!");
        }
    }

    /**
     * Streams the stored configuration file for the given configuration as an
     * attachment.
     */
    @GET
    @RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
    @Path("/configurations/download/{configuration}")
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    public Response downloadConfiguration(@PathParam("configuration") String configurationName) throws ApiException {
        initBase(servletConfig);

        try {
            Map<String, Object> configuration = ConfigurationUtils.getConfigFromDatabase(ibisContext, configurationName);
            return Response
                    .status(Response.Status.OK)
                    .entity(configuration.get("CONFIG"))
                    .header("Content-Disposition", "attachment; filename=\"" + configuration.get("FILENAME") + "\"")
                    .build();
        } catch (Exception e) {
            // Fix: keep the cause in the log instead of swallowing it.
            log.warn("could not retrieve configuration [" + configurationName + "] from database", e);
            throw new ApiException("Could not find configuration!");
        }
    }

    /**
     * Reads all revisions of a configuration from the IBISCONFIG table.
     *
     * @param jmsRealm realm to query; falls back to the first datasource realm when empty
     * @return one map per revision, or null when no datasource realm is available
     */
    private List<Map<String, Object>> getConfigFromDatabase(String configurationName, String jmsRealm) {
        List<Map<String, Object>> returnMap = new ArrayList<Map<String, Object>>();

        if (StringUtils.isEmpty(jmsRealm)) {
            jmsRealm = JmsRealmFactory.getInstance().getFirstDatasourceJmsRealm();
            if (StringUtils.isEmpty(jmsRealm)) {
                return null;
            }
        }

        Connection conn = null;
        PreparedStatement stmt = null;
        ResultSet rs = null;
        FixedQuerySender qs = (FixedQuerySender)ibisContext.createBeanAutowireByName(FixedQuerySender.class);
        qs.setJmsRealm(jmsRealm);
        qs.setQuery("SELECT COUNT(*) FROM IBISCONFIG");
        try {
            qs.configure();
            qs.open();
            conn = qs.getConnection();
            String query = "SELECT NAME, VERSION, FILENAME, RUSER, ACTIVECONFIG, CRE_TYDST FROM IBISCONFIG WHERE NAME=? ORDER BY CRE_TYDST";
            stmt = conn.prepareStatement(query);
            stmt.setString(1, configurationName);
            rs = stmt.executeQuery();
            while (rs.next()) {
                Map<String, Object> config = new HashMap<String, Object>();
                config.put("name", rs.getString(1));
                config.put("version", rs.getString(2));
                config.put("filename", rs.getString(3));
                config.put("user", rs.getString(4));
                config.put("active", rs.getBoolean(5));
                config.put("created", rs.getString(6));
                returnMap.add(config);
            }
        } catch (Exception e) {
            throw new ApiException(e);
        } finally {
            qs.close();
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    log.warn("Could not close resultset", e);
                }
            }
            // Fix: the PreparedStatement was never closed.
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    log.warn("Could not close statement", e);
                }
            }
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    log.warn("Could not close connection", e);
                }
            }
        }
        return returnMap;
    }

    /**
     * Derives a configuration name and version from a file name of the form
     * "name-x-y.ext": the name is everything before the second-to-last dash,
     * the version everything between that dash and the extension. Elements of
     * the returned array are null when the pattern does not match.
     */
    private String[] splitFilename(String fileName) {
        String name = null;
        String version = null;
        if (StringUtils.isNotEmpty(fileName)) {
            int i = fileName.lastIndexOf(".");
            if (i != -1) {
                name = fileName.substring(0, i);
                int j = name.lastIndexOf("-");
                if (j != -1) {
                    name = name.substring(0, j);
                    j = name.lastIndexOf("-");
                    if (j != -1) {
                        name = fileName.substring(0, j);
                        version = fileName.substring(j + 1, i);
                    }
                }
            }
        }
        return new String[] { name, version };
    }

    /**
     * Adds every entry of the zip to the database as a configuration, deriving
     * each entry's name/version from the entry name, and returns one result
     * line per entry.
     * NOTE(review): the fileEncoding parameter is currently unused — confirm
     * whether entry content should be decoded with it.
     */
    private String processZipFile(InputStream inputStream, String fileEncoding, String jmsRealm, boolean automatic_reload, boolean activate_config, String user) throws Exception {
        String result = "";
        if (inputStream.available() > 0) {
            ZipInputStream archive = new ZipInputStream(inputStream);
            int counter = 1;
            for (ZipEntry entry = archive.getNextEntry(); entry != null; entry = archive.getNextEntry()) {
                String entryName = entry.getName();
                int size = (int) entry.getSize();
                if (size > 0) {
                    // Read the whole entry into memory before storing it.
                    byte[] b = new byte[size];
                    int rb = 0;
                    int chunk = 0;
                    while (((int) size - rb) > 0) {
                        chunk = archive.read(b, rb, (int) size - rb);
                        if (chunk == -1) {
                            break;
                        }
                        rb += chunk;
                    }
                    ByteArrayInputStream bais = new ByteArrayInputStream(b, 0, rb);
                    String fileName = "file_zipentry" + counter;
                    if (StringUtils.isNotEmpty(result)) {
                        result += "\n";
                    }
                    String name = "";
                    String version = "";
                    String[] fnArray = splitFilename(entryName);
                    if (fnArray[0] != null) {
                        name = fnArray[0];
                    }
                    if (fnArray[1] != null) {
                        version = fnArray[1];
                    }
                    result += entryName + ":" +
                            ConfigurationUtils.addConfigToDatabase(ibisContext, jmsRealm, activate_config, automatic_reload, name, version, fileName, bais, user);
                }
                archive.closeEntry();
                counter++;
            }
            archive.close();
        }
        return result;
    }
}
| core/src/main/java/nl/nn/adapterframework/webcontrol/api/ShowConfiguration.java | /*
Copyright 2016 Integration Partners B.V.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.webcontrol.api;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.security.Principal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.annotation.security.RolesAllowed;
import javax.servlet.ServletConfig;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import org.apache.commons.lang.StringUtils;
import org.jboss.resteasy.plugins.providers.multipart.InputPart;
import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput;
import nl.nn.adapterframework.configuration.Configuration;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.configuration.ConfigurationUtils;
import nl.nn.adapterframework.configuration.classloaders.DatabaseClassLoader;
import nl.nn.adapterframework.core.IMessageBrowser;
import nl.nn.adapterframework.core.IMessageBrowsingIterator;
import nl.nn.adapterframework.core.IMessageBrowsingIteratorItem;
import nl.nn.adapterframework.core.IPipeLineSession;
import nl.nn.adapterframework.core.ListenerException;
import nl.nn.adapterframework.core.PipeLineSessionBase;
import nl.nn.adapterframework.core.PipeRunException;
import nl.nn.adapterframework.jdbc.FixedQuerySender;
import nl.nn.adapterframework.jms.JmsMessageBrowser;
import nl.nn.adapterframework.jms.JmsRealmFactory;
import nl.nn.adapterframework.jms.JmsSender;
import nl.nn.adapterframework.parameters.Parameter;
import nl.nn.adapterframework.parameters.ParameterResolutionContext;
import nl.nn.adapterframework.util.Misc;
import nl.nn.adapterframework.util.XmlBuilder;
import nl.nn.adapterframework.util.XmlUtils;
import nl.nn.adapterframework.webcontrol.pipes.UploadConfig;
/**
* Shows the configuration (with resolved variables).
*
* @author Niels Meijer
*/
@Path("/")
public final class ShowConfiguration extends Base {
@Context ServletConfig servletConfig;
@Context SecurityContext securityContext;
/**
 * Returns the XML of every configuration known to the IbisManager,
 * concatenated into one response body.
 *
 * @param loadedConfiguration when true the original configuration XML is
 *        returned, otherwise the loaded one.
 *        NOTE(review): the flag name suggests the opposite mapping — confirm intent.
 */
@GET
@RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/configurations")
@Produces(MediaType.APPLICATION_XML)
public Response getXMLConfiguration(@QueryParam("loadedConfiguration") boolean loadedConfiguration) throws ApiException {
    initBase(servletConfig);

    String result = "";
    for (Configuration configuration : ibisManager.getConfigurations()) {
        if (loadedConfiguration) {
            result = result + configuration.getOriginalConfiguration();
        } else {
            result = result + configuration.getLoadedConfiguration();
        }
    }
    // NOTE(review): 201 CREATED is unusual for a GET; kept for backwards compatibility.
    return Response.status(Response.Status.CREATED).entity(result).build();
}
/**
 * Returns the XML of a single configuration; see the flag note on
 * getXMLConfiguration for the loadedConfiguration semantics.
 */
@GET
@RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/configurations/{configuration}")
@Produces(MediaType.APPLICATION_XML)
public Response getConfigurationByName(@PathParam("configuration") String configurationName, @QueryParam("loadedConfiguration") boolean loadedConfiguration) throws ApiException {
    initBase(servletConfig);

    String result = "";

    Configuration configuration = ibisManager.getConfiguration(configurationName);
    if (loadedConfiguration) {
        result = configuration.getOriginalConfiguration();
    } else {
        result = configuration.getLoadedConfiguration();
    }

    return Response.status(Response.Status.CREATED).entity(result).build();
}
/**
 * Lists the database-stored revisions of a configuration; responds 204 when
 * the configuration is not database-backed.
 */
@GET
@RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/configurations/manage/{configuration}")
@Produces(MediaType.APPLICATION_JSON)
public Response getConfigurationDetailsByName(@PathParam("configuration") String configurationName, @QueryParam("realm") String jmsRealm) throws ApiException {
    initBase(servletConfig);

    Configuration configuration = ibisManager.getConfiguration(configurationName);
    if(configuration == null) {
        throw new ApiException("Configuration not found!");
    }

    // Only configurations loaded via the DatabaseClassLoader have revisions in the database.
    if(configuration.getClassLoader().getParent() instanceof DatabaseClassLoader) {
        return Response.status(Response.Status.CREATED).entity(getConfigFromDatabase(configurationName, jmsRealm)).build();
    }

    return Response.status(Response.Status.NO_CONTENT).build();
}
/**
 * Uploads a configuration into the database. The multipart body must contain
 * "realm", "name", "version" and "file"; optional parts are "file_encoding",
 * "multiple_configs" (treat the file as a zip of configurations),
 * "activate_config" and "automatic_reload".
 *
 * @throws ApiException with status 400 when a required part is missing
 */
@POST
@RolesAllowed({"IbisTester"})
@Path("configurations")
@Produces(MediaType.APPLICATION_JSON)
public Response uploadConfiguration(MultipartFormDataInput input) throws ApiException {
    initBase(servletConfig);

    String jmsRealm = null, name = null, version = null, fileName = null, fileEncoding = Misc.DEFAULT_INPUT_STREAM_ENCODING;
    InputStream file = null;
    boolean multiple_configs = false, activate_config = true, automatic_reload = false;

    Map<String, List<InputPart>> inputDataMap = input.getFormDataMap();
    if(inputDataMap == null) {
        throw new ApiException("Missing post parameters");
    }
    try {
        if(inputDataMap.get("realm") != null)
            jmsRealm = inputDataMap.get("realm").get(0).getBodyAsString();
        else
            throw new ApiException("JMS realm not defined", 400);

        if(inputDataMap.get("name") != null)
            name = inputDataMap.get("name").get(0).getBodyAsString();
        else
            throw new ApiException("No name specified", 400);

        if(inputDataMap.get("file_encoding") != null)
            fileEncoding = inputDataMap.get("file_encoding").get(0).getBodyAsString();

        if(inputDataMap.get("version") != null)
            version = inputDataMap.get("version").get(0).getBodyAsString();
        else
            throw new ApiException("No version specified", 400);

        if(inputDataMap.get("multiple_configs") != null)
            multiple_configs = inputDataMap.get("multiple_configs").get(0).getBody(boolean.class, null);

        if(inputDataMap.get("activate_config") != null)
            activate_config = inputDataMap.get("activate_config").get(0).getBody(boolean.class, null);

        if(inputDataMap.get("automatic_reload") != null)
            automatic_reload = inputDataMap.get("automatic_reload").get(0).getBody(boolean.class, null);

        if(inputDataMap.get("file") != null)
            file = inputDataMap.get("file").get(0).getBody(InputStream.class, null);
        else
            throw new ApiException("No file specified", 400);

        // Extract the original file name from the multipart Content-Disposition header.
        MultivaluedMap<String, String> headers = inputDataMap.get("file").get(0).getHeaders();
        String[] contentDispositionHeader = headers.getFirst("Content-Disposition").split(";");
        for (String fName : contentDispositionHeader) {
            if ((fName.trim().startsWith("filename"))) {
                String[] tmp = fName.split("=");
                fileName = tmp[1].trim().replaceAll("\"","");
            }
        }
    }
    catch (IOException e) {
        // Fix: keep the cause in the log instead of swallowing it.
        log.warn("failed to parse upload parameters", e);
        throw new ApiException("Failed to parse one or more parameters!");
    }

    try {
        String result = "";
        // Fix: removed an unused FixedQuerySender that was created but never used.

        if(multiple_configs) {
            // Fall back to deriving name/version from the file name when neither was posted.
            if (StringUtils.isEmpty(name) && StringUtils.isEmpty(version)) {
                String[] fnArray = splitFilename(fileName);
                if (fnArray[0] != null) {
                    name = fnArray[0];
                }
                if (fnArray[1] != null) {
                    version = fnArray[1];
                }
            }
        }

        String user = null;
        Principal principal = securityContext.getUserPrincipal();
        if(principal != null)
            user = ""+principal;

        if(multiple_configs) {
            try {
                // Fix: pass the JMS realm and the activation flag. Previously the
                // file name was passed as realm and automatic_reload was passed twice.
                result = processZipFile(file, fileEncoding, jmsRealm, automatic_reload, activate_config, user);
            } catch (IOException e) {
                throw new ApiException(e);
            }
        } else {
            ConfigurationUtils.addConfigToDatabase(ibisContext, jmsRealm, activate_config, automatic_reload, name, version, fileName, file, user);
        }

        return Response.status(Response.Status.CREATED).entity(result).build();
    } catch (ApiException e) {
        // Fix: do not replace specific ApiExceptions with a generic message.
        throw e;
    } catch (Exception e) {
        // Fix: keep the cause in the log instead of swallowing it.
        log.warn("failed to upload configuration", e);
        throw new ApiException("Failed to upload Configuration!");
    }
}
/**
 * Streams the stored configuration file for the given configuration as an
 * attachment.
 */
@GET
@RolesAllowed({"IbisObserver", "IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/configurations/download/{configuration}")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response downloadConfiguration(@PathParam("configuration") String configurationName) throws ApiException {
    initBase(servletConfig);

    try {
        Map<String, Object> configuration = ConfigurationUtils.getConfigFromDatabase(ibisContext, configurationName);
        return Response
                .status(Response.Status.OK)
                .entity(configuration.get("CONFIG"))
                .header("Content-Disposition", "attachment; filename=\"" + configuration.get("FILENAME") + "\"")
                .build();
    } catch (Exception e) {
        // NOTE(review): the original cause is dropped here — consider logging it.
        throw new ApiException("Could not find configuration!");
    }
}
/**
 * Retrieves all stored versions of the named configuration from the IBISCONFIG table.
 * <p>
 * Returns one map per row (keys: name, version, filename, user, active, created),
 * ordered by creation timestamp. Returns {@code null} when no JMS realm is given and
 * none can be resolved from the {@link JmsRealmFactory}.
 * <p>
 * Fix: the {@link PreparedStatement} was previously never closed (only the ResultSet
 * and Connection were), leaking a statement per call; it is now closed in the finally
 * block alongside the other JDBC resources.
 *
 * @param configurationName name of the configuration to look up
 * @param jmsRealm          JMS realm to query; falls back to the first datasource realm when empty
 * @return list of row maps, or {@code null} when no realm could be determined
 */
private List<Map<String, Object>> getConfigFromDatabase(String configurationName, String jmsRealm) {
    List<Map<String, Object>> returnMap = new ArrayList<Map<String, Object>>();
    if (StringUtils.isEmpty(jmsRealm)) {
        jmsRealm = JmsRealmFactory.getInstance().getFirstDatasourceJmsRealm();
        if (StringUtils.isEmpty(jmsRealm)) {
            return null;
        }
    }
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    // The FixedQuerySender is only used here to obtain a configured JDBC connection;
    // the COUNT(*) query is a placeholder required for configure() to succeed.
    FixedQuerySender qs = (FixedQuerySender) ibisContext.createBeanAutowireByName(FixedQuerySender.class);
    qs.setJmsRealm(jmsRealm);
    qs.setQuery("SELECT COUNT(*) FROM IBISCONFIG");
    try {
        qs.configure();
        qs.open();
        conn = qs.getConnection();
        String query = "SELECT NAME, VERSION, FILENAME, RUSER, ACTIVECONFIG, CRE_TYDST FROM IBISCONFIG WHERE NAME=? ORDER BY CRE_TYDST";
        stmt = conn.prepareStatement(query);
        stmt.setString(1, configurationName);
        rs = stmt.executeQuery();
        while (rs.next()) {
            Map<String, Object> config = new HashMap<String, Object>();
            config.put("name", rs.getString(1));
            config.put("version", rs.getString(2));
            config.put("filename", rs.getString(3));
            config.put("user", rs.getString(4));
            config.put("active", rs.getBoolean(5));
            config.put("created", rs.getString(6));
            returnMap.add(config);
        }
    } catch (Exception e) {
        throw new ApiException(e);
    } finally {
        qs.close();
        // Close JDBC resources in reverse order of acquisition; log (not throw) on failure.
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.warn("Could not close resultset", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                log.warn("Could not close statement", e);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                log.warn("Could not close connection", e);
            }
        }
    }
    return returnMap;
}
/**
 * Splits a configuration file name into a {name, version} pair.
 * <p>
 * Expects names shaped like {@code <name>-<part1>-<part2>.<ext>}; the version is taken
 * as everything between the second-to-last dash and the extension dot (e.g.
 * {@code "cfg-1.0.3-20170512.zip"} yields name {@code "cfg"}, version {@code "1.0.3-20170512"}).
 * Quirk kept from the original: with exactly one dash (e.g. {@code "cfg-1.0.zip"}) the
 * name is still truncated at that dash but the version stays {@code null}.
 * <p>
 * Changes: uses a plain null/empty check instead of commons-lang
 * {@code StringUtils.isNotEmpty} (identical semantics), and is {@code static} since it
 * reads no instance state.
 *
 * @param fileName file name to split; may be {@code null} or empty
 * @return a two-element array {name, version}; either element may be {@code null}
 */
private static String[] splitFilename(String fileName) {
    String name = null;
    String version = null;
    if (fileName != null && !fileName.isEmpty()) {
        int extensionPos = fileName.lastIndexOf('.');
        if (extensionPos != -1) {
            name = fileName.substring(0, extensionPos);
            int dashPos = name.lastIndexOf('-');
            if (dashPos != -1) {
                name = name.substring(0, dashPos);
                // A second dash marks the start of the version part.
                dashPos = name.lastIndexOf('-');
                if (dashPos != -1) {
                    name = fileName.substring(0, dashPos);
                    version = fileName.substring(dashPos + 1, extensionPos);
                }
            }
        }
    }
    return new String[] { name, version };
}
/**
 * Iterates over all entries of the uploaded zip stream and stores each non-empty entry
 * as a configuration in the database.
 * <p>
 * For every entry with a known positive size, its bytes are read, the entry name is
 * split into configuration name/version via {@code splitFilename}, and the entry is
 * handed to {@link ConfigurationUtils#addConfigToDatabase}. Per-entry results are
 * joined with newlines into the returned summary string.
 * <p>
 * Fix: the {@link ZipInputStream} is now closed in a finally block, so a failure while
 * storing an entry no longer leaks the stream.
 *
 * @param inputStream      the uploaded zip content; nothing is done when no bytes are available
 * @param fileEncoding     currently unused (kept for interface compatibility)
 * @param jmsRealm         JMS realm passed through to the database store
 * @param automatic_reload whether stored configurations should be reloaded automatically
 * @param activate_config  whether stored configurations should be activated
 * @param user             user name recorded with each stored configuration
 * @return newline-separated "entryName:storeResult" summary, empty when nothing was processed
 * @throws Exception on read or store failures
 */
private String processZipFile(InputStream inputStream, String fileEncoding, String jmsRealm, boolean automatic_reload, boolean activate_config, String user) throws Exception {
    String result = "";
    if (inputStream.available() > 0) {
        ZipInputStream archive = new ZipInputStream(inputStream);
        try {
            int counter = 1;
            for (ZipEntry entry = archive.getNextEntry(); entry != null; entry = archive.getNextEntry()) {
                String entryName = entry.getName();
                int size = (int) entry.getSize();
                // NOTE(review): entries whose size is unreported (getSize() == -1, common for
                // streamed zips) are skipped here, as are directories and empty files — TODO
                // confirm whether unknown-size entries should be read until EOF instead.
                if (size > 0) {
                    byte[] b = new byte[size];
                    int rb = 0;
                    int chunk = 0;
                    // Read until the declared size is reached or the entry ends early.
                    while ((size - rb) > 0) {
                        chunk = archive.read(b, rb, size - rb);
                        if (chunk == -1) {
                            break;
                        }
                        rb += chunk;
                    }
                    ByteArrayInputStream bais = new ByteArrayInputStream(b, 0, rb);
                    String fileName = "file_zipentry" + counter;
                    if (StringUtils.isNotEmpty(result)) {
                        result += "\n";
                    }
                    String name = "";
                    String version = "";
                    String[] fnArray = splitFilename(entryName);
                    if (fnArray[0] != null) {
                        name = fnArray[0];
                    }
                    if (fnArray[1] != null) {
                        version = fnArray[1];
                    }
                    result += entryName + ":" +
                            ConfigurationUtils.addConfigToDatabase(ibisContext, jmsRealm, activate_config, automatic_reload, name, version, fileName, bais, user);
                }
                archive.closeEntry();
                counter++;
            }
        } finally {
            // Previously not guarded: an exception while storing an entry leaked the stream.
            archive.close();
        }
    }
    return result;
}
}
| Remove unused imports
| core/src/main/java/nl/nn/adapterframework/webcontrol/api/ShowConfiguration.java | Remove unused imports |
|
Java | apache-2.0 | f95c90ba6880aef1630aca826771095ad5ee08d8 | 0 | apache/logging-log4j2,xnslong/logging-log4j2,apache/logging-log4j2,xnslong/logging-log4j2,apache/logging-log4j2,codescale/logging-log4j2,codescale/logging-log4j2,xnslong/logging-log4j2,GFriedrich/logging-log4j2,GFriedrich/logging-log4j2,GFriedrich/logging-log4j2,codescale/logging-log4j2 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.lookup;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.ConfigurationAware;
import org.apache.logging.log4j.core.config.plugins.util.PluginManager;
import org.apache.logging.log4j.core.config.plugins.util.PluginType;
import org.apache.logging.log4j.core.util.Loader;
import org.apache.logging.log4j.core.util.ReflectionUtil;
import org.apache.logging.log4j.status.StatusLogger;
/**
* Proxies all the other {@link StrLookup}s.
*/
public class Interpolator extends AbstractConfigurationAwareLookup {

    private static final Logger LOGGER = StatusLogger.getLogger();

    /** Constant for the prefix separator. */
    private static final char PREFIX_SEPARATOR = ':';

    // Registered lookups keyed by prefix (e.g. "sys", "env", "date"); consulted before the default.
    private final Map<String, StrLookup> lookups = new HashMap<>();

    // Fallback lookup used for un-prefixed variables or when a prefixed lookup returns null.
    private final StrLookup defaultLookup;

    public Interpolator(final StrLookup defaultLookup) {
        this(defaultLookup, null);
    }

    /**
     * Constructs an Interpolator using a given StrLookup and a list of packages to find Lookup plugins in.
     *
     * @param defaultLookup the default StrLookup to use as a fallback
     * @param pluginPackages a list of packages to scan for Lookup plugins
     * @since 2.1
     */
    public Interpolator(final StrLookup defaultLookup, final List<String> pluginPackages) {
        // Substitute an empty map-backed lookup when no default is supplied.
        this.defaultLookup = defaultLookup == null ? new MapLookup(new HashMap<String, String>()) : defaultLookup;
        final PluginManager manager = new PluginManager(CATEGORY);
        manager.collectPlugins(pluginPackages);
        final Map<String, PluginType<?>> plugins = manager.getPlugins();
        // Register every discovered Lookup plugin under its plugin key (= prefix).
        for (final Map.Entry<String, PluginType<?>> entry : plugins.entrySet()) {
            try {
                final Class<? extends StrLookup> clazz = entry.getValue().getPluginClass().asSubclass(StrLookup.class);
                lookups.put(entry.getKey(), ReflectionUtil.instantiate(clazz));
            } catch (final Exception ex) {
                // One broken plugin must not prevent the remaining lookups from registering.
                LOGGER.error("Unable to create Lookup for {}", entry.getKey(), ex);
            }
        }
    }

    /**
     * Create the default Interpolator using only Lookups that work without an event.
     */
    public Interpolator() {
        this((Map<String, String>) null);
    }

    /**
     * Creates the Interpolator using only Lookups that work without an event and initial properties.
     *
     * @param properties initial values backing the default map lookup; may be null
     */
    public Interpolator(final Map<String, String> properties) {
        this.defaultLookup = new MapLookup(properties == null ? new HashMap<String, String>() : properties);
        // TODO: this ought to use the PluginManager
        lookups.put("log4j", new Log4jLookup());
        lookups.put("sys", new SystemPropertiesLookup());
        lookups.put("env", new EnvironmentLookup());
        lookups.put("main", MainMapLookup.MAIN_SINGLETON);
        lookups.put("marker", new MarkerLookup());
        lookups.put("java", new JavaLookup());
        // JNDI
        // NOTE(review): registering the JNDI lookup makes "${jndi:...}" patterns resolvable;
        // presumably intended for trusted configurations only — confirm against the LOG4J2
        // security advisories (CVE-2021-44228) for the versions this build supports.
        try {
            // [LOG4J2-703] We might be on Android
            // Loaded reflectively so a missing/unverifiable class only disables this lookup.
            lookups.put("jndi",
                Loader.newCheckedInstanceOf("org.apache.logging.log4j.core.lookup.JndiLookup", StrLookup.class));
        } catch (final Throwable e) {
            // java.lang.VerifyError: org/apache/logging/log4j/core/lookup/JndiLookup
            LOGGER.warn( // LOG4J2-1582 don't print the whole stack trace (it is just a warning...)
                    "JNDI lookup class is not available because this JRE does not support JNDI." +
                    " JNDI string lookups will not be available, continuing configuration. Ignoring " + e);
        }
        // JMX input args
        try {
            // We might be on Android
            lookups.put("jvmrunargs",
                Loader.newCheckedInstanceOf("org.apache.logging.log4j.core.lookup.JmxRuntimeInputArgumentsLookup",
                    StrLookup.class));
        } catch (final Throwable e) {
            // java.lang.VerifyError: org/apache/logging/log4j/core/lookup/JmxRuntimeInputArgumentsLookup
            LOGGER.warn(
                    "JMX runtime input lookup class is not available because this JRE does not support JMX. " +
                    "JMX lookups will not be available, continuing configuration. Ignoring " + e);
        }
        lookups.put("date", new DateLookup());
        lookups.put("ctx", new ContextMapLookup());
        // The web lookup lives in the optional log4j-web module; only try when a Servlet API is present.
        if (Loader.isClassAvailable("javax.servlet.ServletContext")) {
            try {
                lookups.put("web",
                    Loader.newCheckedInstanceOf("org.apache.logging.log4j.web.WebLookup", StrLookup.class));
            } catch (final Exception ignored) {
                LOGGER.info("Log4j appears to be running in a Servlet environment, but there's no log4j-web module " +
                        "available. If you want better web container support, please add the log4j-web JAR to your " +
                        "web archive or server lib directory.");
            }
        } else {
            LOGGER.debug("Not in a ServletContext environment, thus not loading WebLookup plugin.");
        }
    }

    /**
     * Resolves the specified variable. This implementation will try to extract
     * a variable prefix from the given variable name (the first colon (':') is
     * used as prefix separator). It then passes the name of the variable with
     * the prefix stripped to the lookup object registered for this prefix. If
     * no prefix can be found or if the associated lookup object cannot resolve
     * this variable, the default lookup object will be used.
     *
     * @param event The current LogEvent or null.
     * @param var the name of the variable whose value is to be looked up
     * @return the value of this variable or <b>null</b> if it cannot be
     * resolved
     */
    @Override
    public String lookup(final LogEvent event, String var) {
        if (var == null) {
            return null;
        }
        final int prefixPos = var.indexOf(PREFIX_SEPARATOR);
        if (prefixPos >= 0) {
            final String prefix = var.substring(0, prefixPos);
            final String name = var.substring(prefixPos + 1);
            final StrLookup lookup = lookups.get(prefix);
            // Propagate the current configuration to lookups that need it, on every resolution.
            if (lookup instanceof ConfigurationAware) {
                ((ConfigurationAware) lookup).setConfiguration(configuration);
            }
            String value = null;
            if (lookup != null) {
                value = event == null ? lookup.lookup(name) : lookup.lookup(event, name);
            }
            if (value != null) {
                return value;
            }
            // Prefixed lookup missed: strip the prefix and fall through to the default lookup.
            var = var.substring(prefixPos + 1);
        }
        if (defaultLookup != null) {
            return event == null ? defaultLookup.lookup(var) : defaultLookup.lookup(event, var);
        }
        return null;
    }

    /**
     * Lists the registered prefixes, e.g. {@code {sys, env, ...}}.
     * Returns an empty string (not "{}") when no lookups are registered.
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        for (final String name : lookups.keySet()) {
            if (sb.length() == 0) {
                sb.append('{');
            } else {
                sb.append(", ");
            }
            sb.append(name);
        }
        if (sb.length() > 0) {
            sb.append('}');
        }
        return sb.toString();
    }
}
| log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/Interpolator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.lookup;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.ConfigurationAware;
import org.apache.logging.log4j.core.config.plugins.util.PluginManager;
import org.apache.logging.log4j.core.config.plugins.util.PluginType;
import org.apache.logging.log4j.core.util.Loader;
import org.apache.logging.log4j.core.util.ReflectionUtil;
import org.apache.logging.log4j.status.StatusLogger;
/**
* Proxies all the other {@link StrLookup}s.
*/
public class Interpolator extends AbstractConfigurationAwareLookup {

    private static final Logger LOGGER = StatusLogger.getLogger();

    /** Constant for the prefix separator. */
    private static final char PREFIX_SEPARATOR = ':';

    // Registered lookups keyed by prefix (e.g. "sys", "env", "date"); consulted before the default.
    private final Map<String, StrLookup> lookups = new HashMap<>();

    // Fallback lookup used for un-prefixed variables or when a prefixed lookup returns null.
    private final StrLookup defaultLookup;

    public Interpolator(final StrLookup defaultLookup) {
        this(defaultLookup, null);
    }

    /**
     * Constructs an Interpolator using a given StrLookup and a list of packages to find Lookup plugins in.
     *
     * @param defaultLookup the default StrLookup to use as a fallback
     * @param pluginPackages a list of packages to scan for Lookup plugins
     * @since 2.1
     */
    public Interpolator(final StrLookup defaultLookup, final List<String> pluginPackages) {
        // Substitute an empty map-backed lookup when no default is supplied.
        this.defaultLookup = defaultLookup == null ? new MapLookup(new HashMap<String, String>()) : defaultLookup;
        final PluginManager manager = new PluginManager(CATEGORY);
        manager.collectPlugins(pluginPackages);
        final Map<String, PluginType<?>> plugins = manager.getPlugins();
        // Register every discovered Lookup plugin under its plugin key (= prefix).
        for (final Map.Entry<String, PluginType<?>> entry : plugins.entrySet()) {
            try {
                final Class<? extends StrLookup> clazz = entry.getValue().getPluginClass().asSubclass(StrLookup.class);
                lookups.put(entry.getKey(), ReflectionUtil.instantiate(clazz));
            } catch (final Exception ex) {
                // One broken plugin must not prevent the remaining lookups from registering.
                LOGGER.error("Unable to create Lookup for {}", entry.getKey(), ex);
            }
        }
    }

    /**
     * Create the default Interpolator using only Lookups that work without an event.
     */
    public Interpolator() {
        this((Map<String, String>) null);
    }

    /**
     * Creates the Interpolator using only Lookups that work without an event and initial properties.
     *
     * @param properties initial values backing the default map lookup; may be null
     */
    public Interpolator(final Map<String, String> properties) {
        this.defaultLookup = new MapLookup(properties == null ? new HashMap<String, String>() : properties);
        // TODO: this ought to use the PluginManager
        lookups.put("log4j", new Log4jLookup());
        lookups.put("sys", new SystemPropertiesLookup());
        lookups.put("env", new EnvironmentLookup());
        lookups.put("main", MainMapLookup.MAIN_SINGLETON);
        lookups.put("marker", new MarkerLookup());
        lookups.put("java", new JavaLookup());
        // JNDI
        // NOTE(review): registering the JNDI lookup makes "${jndi:...}" patterns resolvable;
        // presumably intended for trusted configurations only — confirm against the LOG4J2
        // security advisories (CVE-2021-44228) for the versions this build supports.
        try {
            // [LOG4J2-703] We might be on Android
            // Loaded reflectively so a missing/unverifiable class only disables this lookup.
            lookups.put("jndi",
                Loader.newCheckedInstanceOf("org.apache.logging.log4j.core.lookup.JndiLookup", StrLookup.class));
        } catch (final Throwable e) {
            // java.lang.VerifyError: org/apache/logging/log4j/core/lookup/JndiLookup
            LOGGER.warn(
                    "JNDI lookup class is not available because this JRE does not support JNDI." +
                    " JNDI string lookups will not be available, continuing configuration. Ignoring: " + e);
        }
        // JMX input args
        try {
            // We might be on Android
            lookups.put("jvmrunargs",
                Loader.newCheckedInstanceOf("org.apache.logging.log4j.core.lookup.JmxRuntimeInputArgumentsLookup", StrLookup.class));
        } catch (final Throwable e) {
            // java.lang.VerifyError: org/apache/logging/log4j/core/lookup/JmxRuntimeInputArgumentsLookup
            // Passes the Throwable itself, so this warning includes the full stack trace.
            LOGGER.warn(
                "JMX runtime input lookup class is not available because this JRE does not support JMX. JMX lookups will not be available, continuing configuration.",
                e);
        }
        lookups.put("date", new DateLookup());
        lookups.put("ctx", new ContextMapLookup());
        // The web lookup lives in the optional log4j-web module; only try when a Servlet API is present.
        if (Loader.isClassAvailable("javax.servlet.ServletContext")) {
            try {
                lookups.put("web",
                    Loader.newCheckedInstanceOf("org.apache.logging.log4j.web.WebLookup", StrLookup.class));
            } catch (final Exception ignored) {
                LOGGER.info("Log4j appears to be running in a Servlet environment, but there's no log4j-web module " +
                        "available. If you want better web container support, please add the log4j-web JAR to your " +
                        "web archive or server lib directory.");
            }
        } else {
            LOGGER.debug("Not in a ServletContext environment, thus not loading WebLookup plugin.");
        }
    }

    /**
     * Resolves the specified variable. This implementation will try to extract
     * a variable prefix from the given variable name (the first colon (':') is
     * used as prefix separator). It then passes the name of the variable with
     * the prefix stripped to the lookup object registered for this prefix. If
     * no prefix can be found or if the associated lookup object cannot resolve
     * this variable, the default lookup object will be used.
     *
     * @param event The current LogEvent or null.
     * @param var the name of the variable whose value is to be looked up
     * @return the value of this variable or <b>null</b> if it cannot be
     * resolved
     */
    @Override
    public String lookup(final LogEvent event, String var) {
        if (var == null) {
            return null;
        }
        final int prefixPos = var.indexOf(PREFIX_SEPARATOR);
        if (prefixPos >= 0) {
            final String prefix = var.substring(0, prefixPos);
            final String name = var.substring(prefixPos + 1);
            final StrLookup lookup = lookups.get(prefix);
            // Propagate the current configuration to lookups that need it, on every resolution.
            if (lookup instanceof ConfigurationAware) {
                ((ConfigurationAware) lookup).setConfiguration(configuration);
            }
            String value = null;
            if (lookup != null) {
                value = event == null ? lookup.lookup(name) : lookup.lookup(event, name);
            }
            if (value != null) {
                return value;
            }
            // Prefixed lookup missed: strip the prefix and fall through to the default lookup.
            var = var.substring(prefixPos + 1);
        }
        if (defaultLookup != null) {
            return event == null ? defaultLookup.lookup(var) : defaultLookup.lookup(event, var);
        }
        return null;
    }

    /**
     * Lists the registered prefixes, e.g. {@code {sys, env, ...}}.
     * Returns an empty string (not "{}") when no lookups are registered.
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        for (final String name : lookups.keySet()) {
            if (sb.length() == 0) {
                sb.append('{');
            } else {
                sb.append(", ");
            }
            sb.append(name);
        }
        if (sb.length() > 0) {
            sb.append('}');
        }
        return sb.toString();
    }
}
}
| LOG4J2-1582 When initializing on platforms where JMX is not available, Interpolator component should not print stack trace for warning messages.
| log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/Interpolator.java | LOG4J2-1582 When initializing on platforms where JMX is not available, Interpolator component should not print stack trace for warning messages. |
|
Java | apache-2.0 | a00f09e8c774402536e47d614e4692682a21ae75 | 0 | thymeleaf/thymeleafsandbox-stsm-mvc,thymeleaf/thymeleaf,thymeleaf/thymeleafsandbox-stsm-mvc,thymeleaf/thymeleaf | /*
* =============================================================================
*
* Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package thymeleafexamples.stsm;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import thymeleafexamples.stsm.web.conversion.DateFormatter;
import thymeleafexamples.stsm.web.conversion.VarietyFormatter;
@Configuration
public class StsmBootWebConfig {

    /*
     * --------------------------------------
     * FORMATTERS
     * --------------------------------------
     */

    /**
     * Registers the {@link VarietyFormatter} as a Spring bean so it participates in
     * web data binding/conversion. (Exact conversion behavior is defined by the
     * formatter class itself.)
     */
    @Bean
    public VarietyFormatter varietyFormatter() {
        return new VarietyFormatter();
    }

    /**
     * Registers the {@link DateFormatter} as a Spring bean so it participates in
     * web data binding/conversion.
     */
    @Bean
    public DateFormatter dateFormatter() {
        return new DateFormatter();
    }

}
| src/main/java/thymeleafexamples/stsm/StsmBootWebConfig.java | /*
* =============================================================================
*
* Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package thymeleafexamples.stsm;
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.thymeleaf.spring4.SpringTemplateEngine;
import org.thymeleaf.spring4.templateresolver.SpringResourceTemplateResolver;
import org.thymeleaf.spring4.view.ThymeleafViewResolver;
import thymeleafexamples.stsm.web.conversion.DateFormatter;
import thymeleafexamples.stsm.web.conversion.VarietyFormatter;
@Configuration
@EnableConfigurationProperties(ThymeleafProperties.class)
public class StsmBootWebConfig {

    // TODO * Once there is a Spring Boot starter for thymeleaf-spring5, there would be no need to have
    // TODO that @EnableConfigurationProperties annotation or use it for declaring the beans down in the
    // TODO "thymeleaf" section below.

    // Constructor-injected collaborators; made final (they are assigned exactly once).
    private final ApplicationContext applicationContext;
    private final ThymeleafProperties thymeleafProperties;

    public StsmBootWebConfig(
            final ApplicationContext applicationContext,
            final ThymeleafProperties thymeleafProperties) {
        super();
        this.applicationContext = applicationContext;
        this.thymeleafProperties = thymeleafProperties;
    }

    /*
     * --------------------------------------
     * FORMATTERS
     * --------------------------------------
     */

    /** Registers the {@link VarietyFormatter} for web data binding/conversion. */
    @Bean
    public VarietyFormatter varietyFormatter() {
        return new VarietyFormatter();
    }

    /** Registers the {@link DateFormatter} for web data binding/conversion. */
    @Bean
    public DateFormatter dateFormatter() {
        return new DateFormatter();
    }

    /*
     * --------------------------------------
     * THYMELEAF CONFIGURATION
     * --------------------------------------
     */

    // TODO * If there was a Spring Boot starter for thymeleaf-spring5 most probably some or all of these
    // TODO resolver and engine beans would not need to be specifically declared here.

    /**
     * Template resolver configured from the Spring Boot {@code spring.thymeleaf.*}
     * properties (prefix, suffix, mode, encoding, caching, order, existence check).
     */
    @Bean
    public SpringResourceTemplateResolver thymeleafTemplateResolver() {
        final SpringResourceTemplateResolver resolver = new SpringResourceTemplateResolver();
        resolver.setApplicationContext(this.applicationContext);
        resolver.setPrefix(this.thymeleafProperties.getPrefix());
        resolver.setSuffix(this.thymeleafProperties.getSuffix());
        resolver.setTemplateMode(this.thymeleafProperties.getMode());
        if (this.thymeleafProperties.getEncoding() != null) {
            resolver.setCharacterEncoding(this.thymeleafProperties.getEncoding().name());
        }
        resolver.setCacheable(this.thymeleafProperties.isCache());
        final Integer order = this.thymeleafProperties.getTemplateResolverOrder();
        if (order != null) {
            resolver.setOrder(order);
        }
        resolver.setCheckExistence(this.thymeleafProperties.isCheckTemplate());
        return resolver;
    }

    /** Template engine wired to the resolver bean above (inter-bean call is proxied by Spring). */
    @Bean
    public SpringTemplateEngine thymeleafTemplateEngine(){
        final SpringTemplateEngine templateEngine = new SpringTemplateEngine();
        templateEngine.setTemplateResolver(thymeleafTemplateResolver());
        return templateEngine;
    }

    /** View resolver delegating to the Thymeleaf template engine bean. */
    @Bean
    public ThymeleafViewResolver thymeleafChunkedAndDataDrivenViewResolver(){
        final ThymeleafViewResolver viewResolver = new ThymeleafViewResolver();
        viewResolver.setTemplateEngine(thymeleafTemplateEngine());
        return viewResolver;
    }

}
| Properly configured app for Spring 4 + Spring Boot 1.4.3
| src/main/java/thymeleafexamples/stsm/StsmBootWebConfig.java | Properly configured app for Spring 4 + Spring Boot 1.4.3 |
|
Java | apache-2.0 | 1e9da9e199e2f9feb2bf27a6b049eec787664413 | 0 | mohanvive/siddhi,mohanvive/siddhi,dilini-muthumala/siddhi,dilini-muthumala/siddhi,ramindu90/siddhi,ramindu90/siddhi,suhothayan/siddhi,tishan89/siddhi,wso2/siddhi,grainier/siddhi,suhothayan/siddhi,gokul/siddhi,gokul/siddhi,wso2/siddhi,grainier/siddhi,tishan89/siddhi | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.util.collection.operator;
import io.siddhi.core.aggregation.IncrementalDataAggregator;
import io.siddhi.core.aggregation.IncrementalExecutor;
import io.siddhi.core.aggregation.IncrementalExternalTimestampDataAggregator;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.event.ComplexEvent;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventCloner;
import io.siddhi.core.event.stream.StreamEventPool;
import io.siddhi.core.event.stream.populater.ComplexEventPopulater;
import io.siddhi.core.exception.SiddhiAppRuntimeException;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.query.selector.GroupByKeyGenerator;
import io.siddhi.core.table.Table;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.definition.AggregationDefinition;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.siddhi.query.api.expression.Expression.Time.normalizeDuration;
/**
* Defines the logic to find a matching event from an incremental aggregator (retrieval from incremental aggregator),
* based on the logical conditions defined herewith.
*/
public class IncrementalAggregateCompileCondition implements CompiledCondition {
private final StreamEventPool streamEventPoolForTableMeta;
private final StreamEventCloner tableEventCloner;
private final StreamEventPool streamEventPoolForAggregateMeta;
private final StreamEventCloner aggregateEventCloner;
private final List<Attribute> additionalAttributes;
private Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions;
private CompiledCondition inMemoryStoreCompileCondition;
private CompiledCondition onCompiledCondition;
private MetaStreamEvent tableMetaStreamEvent;
private MetaStreamEvent aggregateMetaStreamEvent;
private ComplexEventPopulater complexEventPopulater;
private MatchingMetaInfoHolder alteredMatchingMetaInfoHolder;
private ExpressionExecutor perExpressionExecutor;
private ExpressionExecutor startTimeEndTimeExpressionExecutor;
private boolean isProcessingOnExternalTime;
/**
 * Creates a compiled condition for retrieving aggregate results, wiring the per-duration
 * table conditions, the in-memory condition, and the meta stream events. Also builds the
 * stream-event pools/cloners (pool size 10) used when copying matched events.
 * <p>
 * Note: the parameter name {@code aggregateMetaSteamEvent} carries a typo ("Steam");
 * kept as-is since it is part of the existing signature.
 */
public IncrementalAggregateCompileCondition(
        Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions,
        CompiledCondition inMemoryStoreCompileCondition, CompiledCondition onCompiledCondition,
        MetaStreamEvent tableMetaStreamEvent, MetaStreamEvent aggregateMetaSteamEvent,
        List<Attribute> additionalAttributes, MatchingMetaInfoHolder alteredMatchingMetaInfoHolder,
        ExpressionExecutor perExpressionExecutor, ExpressionExecutor startTimeEndTimeExpressionExecutor,
        boolean isProcessingOnExternalTime) {
    this.withinTableCompiledConditions = withinTableCompiledConditions;
    this.inMemoryStoreCompileCondition = inMemoryStoreCompileCondition;
    this.onCompiledCondition = onCompiledCondition;
    this.tableMetaStreamEvent = tableMetaStreamEvent;
    this.aggregateMetaStreamEvent = aggregateMetaSteamEvent;
    // Pools must be created before the cloners, which reference them.
    this.streamEventPoolForTableMeta = new StreamEventPool(tableMetaStreamEvent, 10);
    this.tableEventCloner = new StreamEventCloner(tableMetaStreamEvent, streamEventPoolForTableMeta);
    this.streamEventPoolForAggregateMeta = new StreamEventPool(aggregateMetaSteamEvent, 10);
    this.aggregateEventCloner = new StreamEventCloner(aggregateMetaSteamEvent, streamEventPoolForAggregateMeta);
    this.additionalAttributes = additionalAttributes;
    this.alteredMatchingMetaInfoHolder = alteredMatchingMetaInfoHolder;
    this.perExpressionExecutor = perExpressionExecutor;
    this.startTimeEndTimeExpressionExecutor = startTimeEndTimeExpressionExecutor;
    this.isProcessingOnExternalTime = isProcessingOnExternalTime;
}
/**
 * Returns a deep copy of this compiled condition for the given key: every per-duration
 * table condition and both store conditions are cloned; meta events, executors and
 * flags are shared with the original.
 */
@Override
public CompiledCondition cloneCompilation(String key) {
    Map<TimePeriod.Duration, CompiledCondition> clonedWithinTableConditions = new HashMap<>();
    withinTableCompiledConditions.forEach((duration, condition) ->
            clonedWithinTableConditions.put(duration, condition.cloneCompilation(key)));
    return new IncrementalAggregateCompileCondition(clonedWithinTableConditions,
            inMemoryStoreCompileCondition.cloneCompilation(key),
            onCompiledCondition.cloneCompilation(key), tableMetaStreamEvent, aggregateMetaStreamEvent,
            additionalAttributes, alteredMatchingMetaInfoHolder, perExpressionExecutor,
            startTimeEndTimeExpressionExecutor, isProcessingOnExternalTime);
}
public StreamEvent find(StateEvent matchingEvent, AggregationDefinition aggregationDefinition,
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
Map<TimePeriod.Duration, Table> aggregationTables,
List<TimePeriod.Duration> incrementalDurations,
List<ExpressionExecutor> baseExecutors,
List<ExpressionExecutor> outputExpressionExecutors,
SiddhiAppContext siddhiAppContext,
List<List<ExpressionExecutor>> aggregateProcessingExecutorsList,
List<GroupByKeyGenerator> groupbyKeyGeneratorList,
ExpressionExecutor shouldUpdateExpressionExecutor,
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMapForPartitions) {
ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>(true);
// Retrieve per value
String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
TimePeriod.Duration perValue;
try {
// Per time function verification
perValue = normalizeDuration(perValueAsString);
} catch (SiddhiAppValidationException e) {
throw new SiddhiAppRuntimeException(
"Aggregation Query's per value is expected to be of a valid time function of the " +
"following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES + ", "
+ TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", "
+ TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
}
if (!incrementalExecutorMap.keySet().contains(perValue)) {
throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString()
+ " granularity cannot be provided since aggregation definition " +
aggregationDefinition.getId() + " does not contain " + perValue.toString() + " duration");
}
Table tableForPerDuration = aggregationTables.get(perValue);
Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
if (startTimeEndTime == null) {
throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
}
complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), startTimeEndTime);
// Get all the aggregates within the given duration, from table corresponding to "per" duration
StreamEvent withinMatchFromPersistedEvents = tableForPerDuration.find(matchingEvent,
withinTableCompiledConditions.get(perValue));
complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
// Optimization step.
long oldestInMemoryEventTimestamp = getOldestInMemoryEventTimestamp(incrementalExecutorMap,
incrementalDurations, perValue);
ExpressionExecutor shouldUpdateExpressionExecutorClone =
(shouldUpdateExpressionExecutor == null) ? null : shouldUpdateExpressionExecutor.cloneExecutor(null);
//If processing on external time, the in-memory data also needs to be queried
if (isProcessingOnExternalTime || requiresAggregatingInMemoryData(oldestInMemoryEventTimestamp,
startTimeEndTime)) {
List<ExpressionExecutor> clonedBaseExecutors = baseExecutors.stream().map(expressionExecutor ->
expressionExecutor.cloneExecutor("")).collect(Collectors.toList());
IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(incrementalDurations,
perValue, oldestInMemoryEventTimestamp, clonedBaseExecutors, tableMetaStreamEvent,
siddhiAppContext, shouldUpdateExpressionExecutorClone);
ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk;
// Aggregate in-memory data and create an event chunk out of it
if (incrementalExecutorMapForPartitions != null) {
aggregatedInMemoryEventChunk = incrementalDataAggregator
.aggregateInMemoryData(incrementalExecutorMapForPartitions);
} else {
aggregatedInMemoryEventChunk = incrementalDataAggregator
.aggregateInMemoryData(incrementalExecutorMap);
}
// Get the in-memory aggregate data, which is within given duration
StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent,
aggregatedInMemoryEventChunk, tableEventCloner);
complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
}
ComplexEventChunk<StreamEvent> processedEvents;
if (isProcessingOnExternalTime) {
int durationIndex = incrementalDurations.indexOf(perValue);
List<ExpressionExecutor> expressionExecutors = aggregateProcessingExecutorsList.get(durationIndex);
List<ExpressionExecutor> clonedExecutors = expressionExecutors.stream().map(expressionExecutor ->
expressionExecutor.cloneExecutor("")).collect(Collectors.toList());
GroupByKeyGenerator groupByKeyGenerator = groupbyKeyGeneratorList.get(durationIndex);
ExpressionExecutor shouldUpdateExpressionExecutorCloneExt =
(shouldUpdateExpressionExecutor == null) ? null :
shouldUpdateExpressionExecutor.cloneExecutor(null);
IncrementalExternalTimestampDataAggregator incrementalExternalTimestampDataAggregator =
new IncrementalExternalTimestampDataAggregator(clonedExecutors, groupByKeyGenerator,
tableMetaStreamEvent, siddhiAppContext, shouldUpdateExpressionExecutorCloneExt);
processedEvents = incrementalExternalTimestampDataAggregator
.aggregateData(complexEventChunkToHoldWithinMatches);
} else {
processedEvents = complexEventChunkToHoldWithinMatches;
}
// Get the final event chunk from the data which is within given duration. This event chunk contains the values
// in the select clause of an aggregate definition.
ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(
processedEvents, outputExpressionExecutors);
// Execute the on compile condition
return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk,
aggregateEventCloner);
}
private ComplexEventChunk<StreamEvent> createAggregateSelectionEventChunk(
ComplexEventChunk<StreamEvent> complexEventChunkToHoldMatches,
List<ExpressionExecutor> outputExpressionExecutors) {
ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = new ComplexEventChunk<>(true);
StreamEvent resetEvent = streamEventPoolForTableMeta.borrowEvent();
resetEvent.setType(ComplexEvent.Type.RESET);
while (complexEventChunkToHoldMatches.hasNext()) {
StreamEvent streamEvent = complexEventChunkToHoldMatches.next();
StreamEvent newStreamEvent = streamEventPoolForAggregateMeta.borrowEvent();
Object outputData[] = new Object[newStreamEvent.getOutputData().length];
for (int i = 0; i < outputExpressionExecutors.size(); i++) {
outputData[i] = outputExpressionExecutors.get(i).execute(streamEvent);
}
newStreamEvent.setTimestamp(streamEvent.getTimestamp());
newStreamEvent.setOutputData(outputData);
aggregateSelectionComplexEventChunk.add(newStreamEvent);
}
for (ExpressionExecutor expressionExecutor : outputExpressionExecutors) {
expressionExecutor.execute(resetEvent);
}
return aggregateSelectionComplexEventChunk;
}
private boolean requiresAggregatingInMemoryData(long oldestInMemoryEventTimestamp, Long[] startTimeEndTime) {
if (oldestInMemoryEventTimestamp == -1) {
return false;
}
long endTimeForWithin = startTimeEndTime[1];
return endTimeForWithin > oldestInMemoryEventTimestamp;
}
private long getOldestInMemoryEventTimestamp(Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
List<TimePeriod.Duration> incrementalDurations,
TimePeriod.Duration perValue) {
long oldestEvent;
TimePeriod.Duration incrementalDuration;
for (int i = perValue.ordinal(); i >= incrementalDurations.get(0).ordinal(); i--) {
incrementalDuration = TimePeriod.Duration.values()[i];
//If the reduced granularity is not configured
if (incrementalExecutorMap.containsKey(incrementalDuration)) {
oldestEvent = incrementalExecutorMap.get(incrementalDuration).getAggregationStartTimestamp();
if (oldestEvent != -1) {
return oldestEvent;
}
}
}
return -1;
}
    /**
     * Injects the populater that {@code find} uses to attach the computed start/end
     * timestamps onto the incoming matching event.
     */
    public void setComplexEventPopulater(ComplexEventPopulater complexEventPopulater) {
        this.complexEventPopulater = complexEventPopulater;
    }
    /** Returns the additional attributes supplied at construction time. */
    public List<Attribute> getAdditionalAttributes() {
        return this.additionalAttributes;
    }
    /** Returns the altered matching-meta-info holder supplied at construction time. */
    public MatchingMetaInfoHolder getAlteredMatchingMetaInfoHolder() {
        return this.alteredMatchingMetaInfoHolder;
    }
}
| modules/siddhi-core/src/main/java/io/siddhi/core/util/collection/operator/IncrementalAggregateCompileCondition.java | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.util.collection.operator;
import io.siddhi.core.aggregation.IncrementalDataAggregator;
import io.siddhi.core.aggregation.IncrementalExecutor;
import io.siddhi.core.aggregation.IncrementalExternalTimestampDataAggregator;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.event.ComplexEvent;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventCloner;
import io.siddhi.core.event.stream.StreamEventPool;
import io.siddhi.core.event.stream.populater.ComplexEventPopulater;
import io.siddhi.core.exception.SiddhiAppRuntimeException;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.query.selector.GroupByKeyGenerator;
import io.siddhi.core.table.Table;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.definition.AggregationDefinition;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.siddhi.query.api.expression.Expression.Time.normalizeDuration;
/**
 * Defines the logic to find a matching event from an incremental aggregator (retrieval from incremental aggregator),
 * based on the logical conditions defined herewith.
 *
 * <p>Not thread-safe with respect to the mutable fields set after construction
 * (e.g. {@link #setComplexEventPopulater}).
 */
public class IncrementalAggregateCompileCondition implements CompiledCondition {
    private final StreamEventPool streamEventPoolForTableMeta;
    private final StreamEventCloner tableEventCloner;
    private final StreamEventPool streamEventPoolForAggregateMeta;
    private final StreamEventCloner aggregateEventCloner;
    private final List<Attribute> additionalAttributes;
    private Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions;
    private CompiledCondition inMemoryStoreCompileCondition;
    private CompiledCondition onCompiledCondition;
    private MetaStreamEvent tableMetaStreamEvent;
    private MetaStreamEvent aggregateMetaStreamEvent;
    private ComplexEventPopulater complexEventPopulater;
    private MatchingMetaInfoHolder alteredMatchingMetaInfoHolder;
    private ExpressionExecutor perExpressionExecutor;
    private ExpressionExecutor startTimeEndTimeExpressionExecutor;
    private boolean isProcessingOnExternalTime;

    public IncrementalAggregateCompileCondition(
            Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions,
            CompiledCondition inMemoryStoreCompileCondition, CompiledCondition onCompiledCondition,
            MetaStreamEvent tableMetaStreamEvent, MetaStreamEvent aggregateMetaSteamEvent,
            List<Attribute> additionalAttributes, MatchingMetaInfoHolder alteredMatchingMetaInfoHolder,
            ExpressionExecutor perExpressionExecutor, ExpressionExecutor startTimeEndTimeExpressionExecutor,
            boolean isProcessingOnExternalTime) {
        this.withinTableCompiledConditions = withinTableCompiledConditions;
        this.inMemoryStoreCompileCondition = inMemoryStoreCompileCondition;
        this.onCompiledCondition = onCompiledCondition;
        this.tableMetaStreamEvent = tableMetaStreamEvent;
        this.aggregateMetaStreamEvent = aggregateMetaSteamEvent;
        // Pools and cloners are derived from the meta events so borrowed events match their schema.
        this.streamEventPoolForTableMeta = new StreamEventPool(tableMetaStreamEvent, 10);
        this.tableEventCloner = new StreamEventCloner(tableMetaStreamEvent, streamEventPoolForTableMeta);
        this.streamEventPoolForAggregateMeta = new StreamEventPool(aggregateMetaSteamEvent, 10);
        this.aggregateEventCloner = new StreamEventCloner(aggregateMetaSteamEvent, streamEventPoolForAggregateMeta);
        this.additionalAttributes = additionalAttributes;
        this.alteredMatchingMetaInfoHolder = alteredMatchingMetaInfoHolder;
        this.perExpressionExecutor = perExpressionExecutor;
        this.startTimeEndTimeExpressionExecutor = startTimeEndTimeExpressionExecutor;
        this.isProcessingOnExternalTime = isProcessingOnExternalTime;
    }

    /**
     * Clones this compiled condition, deep-cloning the per-duration within-table conditions
     * as well as the in-memory and on-condition compilations.
     */
    @Override
    public CompiledCondition cloneCompilation(String key) {
        Map<TimePeriod.Duration, CompiledCondition> copyOfWithinTableCompiledConditions = new HashMap<>();
        for (Map.Entry<TimePeriod.Duration, CompiledCondition> entry : withinTableCompiledConditions.entrySet()) {
            copyOfWithinTableCompiledConditions.put(entry.getKey(), entry.getValue().cloneCompilation(key));
        }
        return new IncrementalAggregateCompileCondition(copyOfWithinTableCompiledConditions,
                inMemoryStoreCompileCondition.cloneCompilation(key),
                onCompiledCondition.cloneCompilation(key), tableMetaStreamEvent, aggregateMetaStreamEvent,
                additionalAttributes, alteredMatchingMetaInfoHolder, perExpressionExecutor,
                startTimeEndTimeExpressionExecutor, isProcessingOnExternalTime);
    }

    /**
     * Retrieves the aggregate values matching the query's "per" granularity and "within" window.
     *
     * <p>The result combines (a) persisted aggregates read from the table for the "per"
     * duration and, when required, (b) in-memory aggregates computed on the fly from the
     * incremental executors. The combined events are projected through the select-clause
     * executors and finally filtered by the on-condition.
     *
     * @throws SiddhiAppRuntimeException when the per value is not a valid duration, the
     *         requested granularity is not configured, or the within window cannot be resolved
     */
    public StreamEvent find(StateEvent matchingEvent, AggregationDefinition aggregationDefinition,
                            Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
                            Map<TimePeriod.Duration, Table> aggregationTables,
                            List<TimePeriod.Duration> incrementalDurations,
                            List<ExpressionExecutor> baseExecutors,
                            List<ExpressionExecutor> outputExpressionExecutors,
                            SiddhiAppContext siddhiAppContext,
                            List<List<ExpressionExecutor>> aggregateProcessingExecutorsList,
                            List<GroupByKeyGenerator> groupbyKeyGeneratorList,
                            ExpressionExecutor shouldUpdateExpressionExecutor,
                            Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMapForPartitions) {
        ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>(true);
        // Retrieve per value
        String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
        TimePeriod.Duration perValue;
        try {
            // Per time function verification
            perValue = normalizeDuration(perValueAsString);
        } catch (SiddhiAppValidationException e) {
            throw new SiddhiAppRuntimeException(
                    "Aggregation Query's per value is expected to be of a valid time function of the " +
                            "following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES + ", "
                            + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", "
                            + TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
        }
        // containsKey is the direct form of keySet().contains
        if (!incrementalExecutorMap.containsKey(perValue)) {
            throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString()
                    + " granularity cannot be provided since aggregation definition " +
                    aggregationDefinition.getId() + " does not contain " + perValue.toString() + " duration");
        }
        Table tableForPerDuration = aggregationTables.get(perValue);
        Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
        if (startTimeEndTime == null) {
            throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
        }
        complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), startTimeEndTime);
        // Get all the aggregates within the given duration, from table corresponding to "per" duration
        StreamEvent withinMatchFromPersistedEvents = tableForPerDuration.find(matchingEvent,
                withinTableCompiledConditions.get(perValue));
        complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
        // Optimization step.
        long oldestInMemoryEventTimestamp = getOldestInMemoryEventTimestamp(incrementalExecutorMap,
                incrementalDurations, perValue);
        ExpressionExecutor shouldUpdateExpressionExecutorClone =
                (shouldUpdateExpressionExecutor == null) ? null : shouldUpdateExpressionExecutor.cloneExecutor(null);
        //If processing on external time, the in-memory data also needs to be queried
        if (isProcessingOnExternalTime || requiresAggregatingInMemoryData(oldestInMemoryEventTimestamp,
                startTimeEndTime)) {
            // Clone the base executors so state accumulated during this retrieval cannot
            // leak into the live aggregation or subsequent retrievals.
            List<ExpressionExecutor> clonedBaseExecutors = baseExecutors.stream().map(expressionExecutor ->
                    expressionExecutor.cloneExecutor("")).collect(Collectors.toList());
            IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(incrementalDurations,
                    perValue, oldestInMemoryEventTimestamp, clonedBaseExecutors, tableMetaStreamEvent,
                    siddhiAppContext, shouldUpdateExpressionExecutorClone);
            ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk;
            // Aggregate in-memory data and create an event chunk out of it
            if (incrementalExecutorMapForPartitions != null) {
                aggregatedInMemoryEventChunk = incrementalDataAggregator
                        .aggregateInMemoryData(incrementalExecutorMapForPartitions);
            } else {
                aggregatedInMemoryEventChunk = incrementalDataAggregator
                        .aggregateInMemoryData(incrementalExecutorMap);
            }
            // Get the in-memory aggregate data, which is within given duration
            StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent,
                    aggregatedInMemoryEventChunk, tableEventCloner);
            complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
        }
        ComplexEventChunk<StreamEvent> processedEvents;
        if (isProcessingOnExternalTime) {
            int durationIndex = incrementalDurations.indexOf(perValue);
            List<ExpressionExecutor> expressionExecutors = aggregateProcessingExecutorsList.get(durationIndex);
            // FIX: clone the shared processing executors before use. Previously the live
            // executors from aggregateProcessingExecutorsList were handed to the aggregator
            // directly, so stateful executor state smoothing out-of-order events leaked
            // across retrievals.
            List<ExpressionExecutor> clonedExecutors = expressionExecutors.stream().map(expressionExecutor ->
                    expressionExecutor.cloneExecutor("")).collect(Collectors.toList());
            GroupByKeyGenerator groupByKeyGenerator = groupbyKeyGeneratorList.get(durationIndex);
            ExpressionExecutor shouldUpdateExpressionExecutorCloneExt =
                    (shouldUpdateExpressionExecutor == null) ? null :
                            shouldUpdateExpressionExecutor.cloneExecutor(null);
            IncrementalExternalTimestampDataAggregator incrementalExternalTimestampDataAggregator =
                    new IncrementalExternalTimestampDataAggregator(clonedExecutors, groupByKeyGenerator,
                            tableMetaStreamEvent, siddhiAppContext, shouldUpdateExpressionExecutorCloneExt);
            processedEvents = incrementalExternalTimestampDataAggregator
                    .aggregateData(complexEventChunkToHoldWithinMatches);
        } else {
            processedEvents = complexEventChunkToHoldWithinMatches;
        }
        // Get the final event chunk from the data which is within given duration. This event chunk contains the values
        // in the select clause of an aggregate definition.
        ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(
                processedEvents, outputExpressionExecutors);
        // Execute the on compile condition
        return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk,
                aggregateEventCloner);
    }

    /**
     * Projects matched events through the select-clause executors into a new chunk, then
     * resets the executors with a RESET event so stateful executors start clean.
     */
    private ComplexEventChunk<StreamEvent> createAggregateSelectionEventChunk(
            ComplexEventChunk<StreamEvent> complexEventChunkToHoldMatches,
            List<ExpressionExecutor> outputExpressionExecutors) {
        ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = new ComplexEventChunk<>(true);
        StreamEvent resetEvent = streamEventPoolForTableMeta.borrowEvent();
        resetEvent.setType(ComplexEvent.Type.RESET);
        while (complexEventChunkToHoldMatches.hasNext()) {
            StreamEvent streamEvent = complexEventChunkToHoldMatches.next();
            StreamEvent newStreamEvent = streamEventPoolForAggregateMeta.borrowEvent();
            Object[] outputData = new Object[newStreamEvent.getOutputData().length];
            for (int i = 0; i < outputExpressionExecutors.size(); i++) {
                outputData[i] = outputExpressionExecutors.get(i).execute(streamEvent);
            }
            newStreamEvent.setTimestamp(streamEvent.getTimestamp());
            newStreamEvent.setOutputData(outputData);
            aggregateSelectionComplexEventChunk.add(newStreamEvent);
        }
        for (ExpressionExecutor expressionExecutor : outputExpressionExecutors) {
            expressionExecutor.execute(resetEvent);
        }
        return aggregateSelectionComplexEventChunk;
    }

    /**
     * Returns true when the query's end time reaches past the oldest buffered in-memory
     * event; -1 means no in-memory data exists.
     */
    private boolean requiresAggregatingInMemoryData(long oldestInMemoryEventTimestamp, Long[] startTimeEndTime) {
        if (oldestInMemoryEventTimestamp == -1) {
            return false;
        }
        long endTimeForWithin = startTimeEndTime[1];
        return endTimeForWithin > oldestInMemoryEventTimestamp;
    }

    /**
     * Finds the oldest in-memory event timestamp by scanning from the "per" granularity
     * down to the smallest configured granularity; returns -1 when nothing is buffered.
     */
    private long getOldestInMemoryEventTimestamp(Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
                                                 List<TimePeriod.Duration> incrementalDurations,
                                                 TimePeriod.Duration perValue) {
        long oldestEvent;
        TimePeriod.Duration incrementalDuration;
        for (int i = perValue.ordinal(); i >= incrementalDurations.get(0).ordinal(); i--) {
            incrementalDuration = TimePeriod.Duration.values()[i];
            //If the reduced granularity is not configured
            if (incrementalExecutorMap.containsKey(incrementalDuration)) {
                oldestEvent = incrementalExecutorMap.get(incrementalDuration).getAggregationStartTimestamp();
                if (oldestEvent != -1) {
                    return oldestEvent;
                }
            }
        }
        return -1;
    }

    /** Injects the populater used by {@code find} to attach start/end times to match events. */
    public void setComplexEventPopulater(ComplexEventPopulater complexEventPopulater) {
        this.complexEventPopulater = complexEventPopulater;
    }

    /** Returns the additional attributes supplied at construction time. */
    public List<Attribute> getAdditionalAttributes() {
        return this.additionalAttributes;
    }

    /** Returns the altered matching-meta-info holder supplied at construction time. */
    public MatchingMetaInfoHolder getAlteredMatchingMetaInfoHolder() {
        return this.alteredMatchingMetaInfoHolder;
    }
}
| Clone executors for out of order event smoothing
| modules/siddhi-core/src/main/java/io/siddhi/core/util/collection/operator/IncrementalAggregateCompileCondition.java | Clone executors for out of order event smoothing |
|
Java | apache-2.0 | 7f0ec90844d47165c159e6bf0e9009320ec51a0d | 0 | Alik72/JAVA_RUSH | package com.javarush.test;
/* Реализовать метод printMainInfo
level 14.lesson08.home04;
1. Напиши реализацию метода printMainInfo, чтобы:
1.1. Если в метод передают объект типа Drawable, у этого объекта вызывался метод draw.
1.2. Если в метод передают объект типа Movable, у этого объекта вызывался метод move.
2. Метод main менять нельзя.
*/
public class Solution
{
    public static void main(String[] args)
    {
        Object obj = new Circle();
        Movable movable = (Movable) obj;
        Drawable drawable = new Rectangle();

        printMainInfo(drawable);
        printMainInfo(movable);
    }

    /**
     * Prints the main capability of the given object: invokes draw() when the object
     * is a Drawable and move() when it is a Movable. The checks are independent, so an
     * object implementing both interfaces has both methods invoked.
     *
     * @param object any object; objects implementing neither interface produce no output
     */
    public static void printMainInfo(Object object)
    {
        if (object instanceof Drawable)
        {
            ((Drawable) object).draw();
        }
        if (object instanceof Movable)
        {
            ((Movable) object).move();
        }
    }

    /** Capability of being moved. */
    static interface Movable
    {
        void move();
    }

    /** Movable shape; its draw() is not exposed through any interface. */
    static class Circle implements Movable
    {
        public void draw()
        {
            System.out.println("can be drawn");
        }

        public void move()
        {
            System.out.println("can be moved");
        }
    }

    /** Capability of being drawn. */
    static interface Drawable
    {
        void draw();
    }

    /** Drawable shape; its move() is not exposed through any interface. */
    static class Rectangle implements Drawable
    {
        public void draw()
        {
            System.out.println("can be drawn");
        }

        public void move()
        {
            System.out.println("can be moved");
        }
    }
}
| JavaSolutions/src/com/javarush/test/Solution.java | package com.javarush.test;
| Реализовать метод printMainInfo
level 14.lesson08.home04;
| JavaSolutions/src/com/javarush/test/Solution.java | Реализовать метод printMainInfo |
|
Java | apache-2.0 | d3b223d9ea4284ce3d3761531f8872a116698b27 | 0 | charlesccychen/beam,chamikaramj/beam,lukecwik/incubator-beam,iemejia/incubator-beam,apache/beam,lukecwik/incubator-beam,tgroh/beam,markflyhigh/incubator-beam,markflyhigh/incubator-beam,apache/beam,charlesccychen/incubator-beam,chamikaramj/beam,chamikaramj/beam,tgroh/beam,rangadi/beam,apache/beam,charlesccychen/incubator-beam,rangadi/beam,markflyhigh/incubator-beam,markflyhigh/incubator-beam,chamikaramj/beam,charlesccychen/beam,tgroh/incubator-beam,RyanSkraba/beam,apache/beam,rangadi/beam,chamikaramj/beam,charlesccychen/beam,apache/beam,robertwb/incubator-beam,charlesccychen/beam,apache/beam,robertwb/incubator-beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,rangadi/incubator-beam,apache/beam,RyanSkraba/beam,tgroh/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,rangadi/beam,charlesccychen/beam,apache/beam,mxm/incubator-beam,rangadi/beam,RyanSkraba/beam,charlesccychen/beam,lukecwik/incubator-beam,rangadi/incubator-beam,tgroh/beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,rangadi/incubator-beam,apache/beam,chamikaramj/beam,RyanSkraba/beam,chamikaramj/beam,charlesccychen/incubator-beam,lukecwik/incubator-beam,tgroh/incubator-beam,markflyhigh/incubator-beam,chamikaramj/beam,chamikaramj/beam,robertwb/incubator-beam,RyanSkraba/beam,RyanSkraba/beam,iemejia/incubator-beam,mxm/incubator-beam,chamikaramj/beam,charlesccychen/beam,rangadi/beam,robertwb/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,apache/beam,lukecwik/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,RyanSkraba/beam,rangadi/beam,apache/beam | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.fnexecution.environment;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.apache.beam.model.pipeline.v1.RunnerApi.Environment;
import org.apache.beam.runners.fnexecution.GrpcFnServer;
import org.apache.beam.runners.fnexecution.artifact.ArtifactRetrievalService;
import org.apache.beam.runners.fnexecution.control.ControlClientPool;
import org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService;
import org.apache.beam.runners.fnexecution.control.InstructionRequestHandler;
import org.apache.beam.runners.fnexecution.logging.GrpcLoggingService;
import org.apache.beam.runners.fnexecution.provisioning.StaticGrpcProvisionService;
import org.apache.beam.sdk.fn.IdGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * An {@link EnvironmentFactory} that creates docker containers by shelling out to docker. Returned
 * {@link RemoteEnvironment RemoteEnvironments} own their respective docker containers. Not
 * thread-safe.
 */
public class DockerEnvironmentFactory implements EnvironmentFactory {

  private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class);

  /**
   * Returns a {@link DockerEnvironmentFactory} for the provided {@link GrpcFnServer servers} using
   * the default {@link DockerCommand}.
   */
  public static DockerEnvironmentFactory forServices(
      GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
      GrpcFnServer<GrpcLoggingService> loggingServiceServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
      ControlClientPool.Source clientSource,
      IdGenerator idGenerator) {
    // Convenience overload: same wiring as forServicesWithDocker with the default docker command.
    return forServicesWithDocker(
        DockerCommand.getDefault(),
        controlServiceServer,
        loggingServiceServer,
        retrievalServiceServer,
        provisioningServiceServer,
        clientSource,
        idGenerator);
  }

  /** Package-private factory allowing a custom {@link DockerCommand}, primarily for tests. */
  static DockerEnvironmentFactory forServicesWithDocker(
      DockerCommand docker,
      GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
      GrpcFnServer<GrpcLoggingService> loggingServiceServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
      ControlClientPool.Source clientSource,
      IdGenerator idGenerator) {
    return new DockerEnvironmentFactory(
        docker,
        controlServiceServer,
        loggingServiceServer,
        retrievalServiceServer,
        provisioningServiceServer,
        idGenerator,
        clientSource);
  }

  // Collaborators: the docker shell-out wrapper, the four gRPC services advertised to the
  // SDK harness container, a worker-id generator, and the pool the harness connects back to.
  private final DockerCommand docker;
  private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer;
  private final GrpcFnServer<GrpcLoggingService> loggingServiceServer;
  private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer;
  private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer;
  private final IdGenerator idGenerator;
  private final ControlClientPool.Source clientSource;

  /** Stores collaborators; performs no validation or side effects. */
  private DockerEnvironmentFactory(
      DockerCommand docker,
      GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
      GrpcFnServer<GrpcLoggingService> loggingServiceServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
      IdGenerator idGenerator,
      ControlClientPool.Source clientSource) {
    this.docker = docker;
    this.controlServiceServer = controlServiceServer;
    this.loggingServiceServer = loggingServiceServer;
    this.retrievalServiceServer = retrievalServiceServer;
    this.provisioningServiceServer = provisioningServiceServer;
    this.idGenerator = idGenerator;
    this.clientSource = clientSource;
  }

  /** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */
  @Override
  public RemoteEnvironment createEnvironment(Environment environment) throws Exception {
    String workerId = idGenerator.getId();

    // Prepare docker invocation.
    Path workerPersistentDirectory = Files.createTempDirectory("worker_persistent_directory");
    Path semiPersistentDirectory = Files.createTempDirectory("semi_persistent_dir");
    // The environment URL doubles as the container image name.
    String containerImage = environment.getUrl();
    // TODO: https://issues.apache.org/jira/browse/BEAM-4148 The default service address will not
    // work for Docker for Mac.
    String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl();
    String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl();
    String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl();
    String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl();
    List<String> args =
        Arrays.asList(
            "-v",
            // TODO: Mac only allows temporary mounts under /tmp by default (as of 17.12).
            String.format("%s:%s", workerPersistentDirectory, semiPersistentDirectory),
            // NOTE: Host networking does not work on Mac, but the command line flag is accepted.
            "--network=host",
            containerImage,
            String.format("--id=%s", workerId),
            String.format("--logging_endpoint=%s", loggingEndpoint),
            String.format("--artifact_endpoint=%s", artifactEndpoint),
            String.format("--provision_endpoint=%s", provisionEndpoint),
            String.format("--control_endpoint=%s", controlEndpoint),
            String.format("--semi_persist_dir=%s", semiPersistentDirectory));

    // Wrap the blocking call to clientSource.get in case an exception is thrown.
    String containerId = null;
    InstructionRequestHandler instructionHandler = null;
    try {
      containerId = docker.runImage(containerImage, args);
      // Wait on a client from the gRPC server.
      // NOTE(review): this loop retries on timeout indefinitely (logging every 2 minutes);
      // it never checks whether the container is still running — verify intended.
      while (instructionHandler == null) {
        try {
          instructionHandler = clientSource.take(workerId, Duration.ofMinutes(2));
        } catch (TimeoutException timeoutEx) {
          LOG.info(
              "Still waiting for startup of environment {} for worker id {}",
              environment.getUrl(),
              workerId);
        } catch (InterruptedException interruptEx) {
          // Restore the interrupt flag before propagating, per InterruptedException contract.
          Thread.currentThread().interrupt();
          throw new RuntimeException(interruptEx);
        }
      }
    } catch (Exception e) {
      if (containerId != null) {
        // Kill the launched docker container if we can't retrieve a client for it.
        try {
          docker.killContainer(containerId);
        } catch (Exception dockerException) {
          // Keep the original failure primary; record the cleanup failure as suppressed.
          e.addSuppressed(dockerException);
        }
      }
      throw e;
    }

    return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler);
  }
}
| runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.fnexecution.environment;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.apache.beam.model.pipeline.v1.RunnerApi.Environment;
import org.apache.beam.runners.fnexecution.GrpcFnServer;
import org.apache.beam.runners.fnexecution.artifact.ArtifactRetrievalService;
import org.apache.beam.runners.fnexecution.control.ControlClientPool;
import org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService;
import org.apache.beam.runners.fnexecution.control.InstructionRequestHandler;
import org.apache.beam.runners.fnexecution.logging.GrpcLoggingService;
import org.apache.beam.runners.fnexecution.provisioning.StaticGrpcProvisionService;
import org.apache.beam.sdk.fn.IdGenerator;
import org.apache.beam.sdk.fn.IdGenerators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An {@link EnvironmentFactory} that creates docker containers by shelling out to docker. Returned
* {@link RemoteEnvironment RemoteEnvironments} own their respective docker containers. Not
* thread-safe.
*/
public class DockerEnvironmentFactory implements EnvironmentFactory {
private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class);
  /**
   * Returns a {@link DockerEnvironmentFactory} for the provided {@link GrpcFnServer servers} using
   * the default {@link DockerCommand} and {@link IdGenerators}.
   */
  public static DockerEnvironmentFactory forServices(
      GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
      GrpcFnServer<GrpcLoggingService> loggingServiceServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
      ControlClientPool.Source clientSource) {
    // Delegates with the platform-default docker command and a process-local
    // incrementing generator for worker ids.
    return forServicesWithDocker(
        DockerCommand.getDefault(),
        controlServiceServer,
        loggingServiceServer,
        retrievalServiceServer,
        provisioningServiceServer,
        clientSource,
        IdGenerators.incrementingLongs());
  }
static DockerEnvironmentFactory forServicesWithDocker(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
ControlClientPool.Source clientSource,
IdGenerator idGenerator) {
return new DockerEnvironmentFactory(
docker,
controlServiceServer,
loggingServiceServer,
retrievalServiceServer,
provisioningServiceServer,
idGenerator,
clientSource);
}
private final DockerCommand docker;
private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer;
private final GrpcFnServer<GrpcLoggingService> loggingServiceServer;
private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer;
private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer;
private final IdGenerator idGenerator;
private final ControlClientPool.Source clientSource;
private DockerEnvironmentFactory(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
IdGenerator idGenerator,
ControlClientPool.Source clientSource) {
this.docker = docker;
this.controlServiceServer = controlServiceServer;
this.loggingServiceServer = loggingServiceServer;
this.retrievalServiceServer = retrievalServiceServer;
this.provisioningServiceServer = provisioningServiceServer;
this.idGenerator = idGenerator;
this.clientSource = clientSource;
}
/** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */
@Override
public RemoteEnvironment createEnvironment(Environment environment) throws Exception {
String workerId = idGenerator.getId();
// Prepare docker invocation.
Path workerPersistentDirectory = Files.createTempDirectory("worker_persistent_directory");
Path semiPersistentDirectory = Files.createTempDirectory("semi_persistent_dir");
String containerImage = environment.getUrl();
// TODO: https://issues.apache.org/jira/browse/BEAM-4148 The default service address will not
// work for Docker for Mac.
String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl();
String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl();
String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl();
String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl();
List<String> args =
Arrays.asList(
"-v",
// TODO: Mac only allows temporary mounts under /tmp by default (as of 17.12).
String.format("%s:%s", workerPersistentDirectory, semiPersistentDirectory),
// NOTE: Host networking does not work on Mac, but the command line flag is accepted.
"--network=host",
containerImage,
String.format("--id=%s", workerId),
String.format("--logging_endpoint=%s", loggingEndpoint),
String.format("--artifact_endpoint=%s", artifactEndpoint),
String.format("--provision_endpoint=%s", provisionEndpoint),
String.format("--control_endpoint=%s", controlEndpoint),
String.format("--semi_persist_dir=%s", semiPersistentDirectory));
// Wrap the blocking call to clientSource.get in case an exception is thrown.
String containerId = null;
InstructionRequestHandler instructionHandler = null;
try {
containerId = docker.runImage(containerImage, args);
// Wait on a client from the gRPC server.
while (instructionHandler == null) {
try {
instructionHandler = clientSource.take(workerId, Duration.ofMinutes(2));
} catch (TimeoutException timeoutEx) {
LOG.info(
"Still waiting for startup of environment {} for worker id {}",
environment.getUrl(),
workerId);
} catch (InterruptedException interruptEx) {
Thread.currentThread().interrupt();
throw new RuntimeException(interruptEx);
}
}
} catch (Exception e) {
if (containerId != null) {
// Kill the launched docker container if we can't retrieve a client for it.
try {
docker.killContainer(containerId);
} catch (Exception dockerException) {
e.addSuppressed(dockerException);
}
}
throw e;
}
return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler);
}
}
| Require explicit IdGenerator for all DockerEnvironmentFactory constructors
The SDK worker id generator passed to DockerEnvironmentFactory must
generate unique ids scoped to a given control service instance. Since
the service is passed through the constructor, the id generator should
be passed along with it to ensure that callers have appropriately
scoped id generators.
| runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java | Require explicit IdGenerator for all DockerEnvironmentFactory constructors |
|
Java | apache-2.0 | a2da808683c599e858e52e116790b83f521f68ea | 0 | subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/base,subutai-io/base | package org.safehaus.subutai.core.monitor.impl;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.safehaus.subutai.common.exception.HTTPException;
import org.safehaus.subutai.common.util.FileUtil;
import org.safehaus.subutai.common.util.HttpUtil;
import org.safehaus.subutai.core.monitor.api.Metric;
import org.safehaus.subutai.core.monitor.api.Monitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MonitorImpl implements Monitor
{
    private static final Logger LOG = LoggerFactory.getLogger( MonitorImpl.class );
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Elasticsearch query template; $host, $metricName, $startDate and
    // $endDate placeholders are substituted before the query is sent.
    private static final String QUERY = FileUtil.getContent( "elasticsearch/query.json", MonitorImpl.class );

    // Timestamp pattern exchanged with elasticsearch. SimpleDateFormat is NOT
    // thread-safe, so a fresh instance is created per use (see newDateFormat())
    // instead of sharing a single instance across concurrent callers.
    private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";


    /**
     * The method uses the simplest way to get the data for all the metrics. In terms of performance this is not the
     * best way but the speed should not be an issue for near future. Further optimization should be done with rewriting
     * the query to elasticsearch. See: query.json.
     */
    @Override
    public Map<Metric, Map<Date, Double>> getDataForAllMetrics( String host, Date startDate, Date endDate )
    {
        Map<Metric, Map<Date, Double>> data = new HashMap<>();

        for ( Metric metric : Metric.values() )
        {
            Map<Date, Double> metricData = getData( host, metric, startDate, endDate );
            data.put( metric, metricData );
        }

        return data;
    }


    /**
     * Returns metric values for the given host and period, keyed (and sorted)
     * by timestamp. Returns an empty map if the elasticsearch query fails.
     */
    @Override
    public Map<Date, Double> getData( String host, Metric metric, Date startDate, Date endDate )
    {
        LOG.info( "host: {}, metric: {}, startDate: {}, endDate: {}", host, metric, startDate, endDate );

        Map<Date, Double> data = Collections.emptyMap();

        try
        {
            data = execute( host, metric, startDate, endDate );
        }
        catch ( IOException e )
        {
            LOG.error( "Error while executing query: ", e );
        }

        return data;
    }


    // Fills the query template and runs it against the local elasticsearch
    // search endpoint.
    private Map<Date, Double> execute( String host, Metric metric, Date startDate, Date endDate ) throws IOException
    {
        String query = QUERY.replace( "$host", host ).replace( "$metricName", metric.name().toLowerCase() )
                            .replace( "$startDate", dateToStr( startDate ) )
                            .replace( "$endDate", dateToStr( endDate ) );

        LOG.debug( "query: {}", query );

        String response = "";

        try
        {
            // The query JSON is passed through the "source" request parameter
            // of the _search endpoint; it must not be used as the URL itself.
            Map<String, String> params = new HashMap<>();
            params.put( "source", query );
            response = HttpUtil.request( HttpUtil.RequestType.GET, "http://127.0.0.1:9200/_all/logs/_search", params );
        }
        catch ( HTTPException e )
        {
            LOG.error( "Error in execute", e );
        }

        List<JsonNode> nodes = toNodes( response );

        LOG.info( "nodes count: {}", nodes.size() );

        // Reversing the list b/c the query returns the data in desc order (to get the latest values first).
        Collections.reverse( nodes );

        return toMap( nodes );
    }


    // Extracts the "_source" documents from the elasticsearch hits.
    private static List<JsonNode> toNodes( String response ) throws IOException
    {
        JsonNode json = OBJECT_MAPPER.readTree( response );
        JsonNode hits = json.get( "hits" ).get( "hits" );
        List<JsonNode> nodes = new ArrayList<>();

        for ( int i = 0; i < hits.size(); i++ )
        {
            JsonNode node = hits.get( i ).get( "_source" );
            nodes.add( node );

            LOG.debug( "node: {}", node );
        }

        return nodes;
    }


    // Converts hit documents to a timestamp -> value map; TreeMap keeps the
    // entries sorted by date.
    private Map<Date, Double> toMap( List<JsonNode> nodes )
    {
        Map<Date, Double> values = new TreeMap<>();

        for ( JsonNode node : nodes )
        {
            Date date = strToDate( node.get( "@timestamp" ).asText() );
            double value = node.get( "val" ).asDouble();
            values.put( date, value );
        }

        return values;
    }


    // Parses an ISO-like elasticsearch timestamp ("...T...Z") into a Date.
    // Returns null (and logs) if the value cannot be parsed.
    private Date strToDate( String dateStr )
    {
        String target = dateStr.replace( "T", " " ).replace( "Z", "" );
        Date date = null;

        try
        {
            date = newDateFormat().parse( target );
        }
        catch ( ParseException e )
        {
            LOG.error( "Error while parsing time: ", e );
        }

        return date;
    }


    // Formats a Date in the elasticsearch-expected "yyyy-MM-ddTHH:mm:ss" form.
    private String dateToStr( Date date )
    {
        return newDateFormat().format( date ).replace( " ", "T" );
    }


    // SimpleDateFormat instances are not thread-safe; create one per use.
    private static DateFormat newDateFormat()
    {
        return new SimpleDateFormat( DATE_PATTERN );
    }
}
| management/server/core/monitoring/monitoring-impl/src/main/java/org/safehaus/subutai/core/monitor/impl/MonitorImpl.java | package org.safehaus.subutai.core.monitor.impl;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.safehaus.subutai.common.exception.HTTPException;
import org.safehaus.subutai.common.util.FileUtil;
import org.safehaus.subutai.common.util.HttpUtil;
import org.safehaus.subutai.core.monitor.api.Metric;
import org.safehaus.subutai.core.monitor.api.Monitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MonitorImpl implements Monitor
{
    private static final Logger LOG = LoggerFactory.getLogger( MonitorImpl.class );
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Elasticsearch query template; $host, $metricName, $startDate and
    // $endDate placeholders are substituted before the query is sent.
    private static final String QUERY = FileUtil.getContent( "elasticsearch/query.json", MonitorImpl.class );

    // Timestamp pattern exchanged with elasticsearch. SimpleDateFormat is NOT
    // thread-safe, so a fresh instance is created per use (see newDateFormat())
    // instead of sharing a single instance across concurrent callers.
    private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";


    /**
     * The method uses the simplest way to get the data for all the metrics. In terms of performance this is not the
     * best way but the speed should not be an issue for near future. Further optimization should be done with rewriting
     * the query to elasticsearch. See: query.json.
     */
    @Override
    public Map<Metric, Map<Date, Double>> getDataForAllMetrics( String host, Date startDate, Date endDate )
    {
        Map<Metric, Map<Date, Double>> data = new HashMap<>();

        for ( Metric metric : Metric.values() )
        {
            Map<Date, Double> metricData = getData( host, metric, startDate, endDate );
            data.put( metric, metricData );
        }

        return data;
    }


    /**
     * Returns metric values for the given host and period, keyed (and sorted)
     * by timestamp. Returns an empty map if the elasticsearch query fails.
     */
    @Override
    public Map<Date, Double> getData( String host, Metric metric, Date startDate, Date endDate )
    {
        LOG.info( "host: {}, metric: {}, startDate: {}, endDate: {}", host, metric, startDate, endDate );

        Map<Date, Double> data = Collections.emptyMap();

        try
        {
            data = execute( host, metric, startDate, endDate );
        }
        catch ( IOException e )
        {
            LOG.error( "Error while executing query: ", e );
        }

        return data;
    }


    // Fills the query template and runs it against the local elasticsearch
    // search endpoint.
    private Map<Date, Double> execute( String host, Metric metric, Date startDate, Date endDate ) throws IOException
    {
        String query = QUERY.replace( "$host", host ).replace( "$metricName", metric.name().toLowerCase() )
                            .replace( "$startDate", dateToStr( startDate ) )
                            .replace( "$endDate", dateToStr( endDate ) );

        LOG.debug( "query: {}", query );

        String response = "";

        try
        {
            // BUGFIX: the query JSON was previously passed as the request URL
            // itself. It must be sent to the elasticsearch _search endpoint
            // through the "source" request parameter instead.
            Map<String, String> params = new HashMap<>();
            params.put( "source", query );
            response = HttpUtil.request( HttpUtil.RequestType.GET, "http://127.0.0.1:9200/_all/logs/_search", params );
        }
        catch ( HTTPException e )
        {
            LOG.error( "Error in execute", e );
        }

        List<JsonNode> nodes = toNodes( response );

        LOG.info( "nodes count: {}", nodes.size() );

        // Reversing the list b/c the query returns the data in desc order (to get the latest values first).
        Collections.reverse( nodes );

        return toMap( nodes );
    }


    // Extracts the "_source" documents from the elasticsearch hits.
    private static List<JsonNode> toNodes( String response ) throws IOException
    {
        JsonNode json = OBJECT_MAPPER.readTree( response );
        JsonNode hits = json.get( "hits" ).get( "hits" );
        List<JsonNode> nodes = new ArrayList<>();

        for ( int i = 0; i < hits.size(); i++ )
        {
            JsonNode node = hits.get( i ).get( "_source" );
            nodes.add( node );

            LOG.debug( "node: {}", node );
        }

        return nodes;
    }


    // Converts hit documents to a timestamp -> value map; TreeMap keeps the
    // entries sorted by date.
    private Map<Date, Double> toMap( List<JsonNode> nodes )
    {
        Map<Date, Double> values = new TreeMap<>();

        for ( JsonNode node : nodes )
        {
            Date date = strToDate( node.get( "@timestamp" ).asText() );
            double value = node.get( "val" ).asDouble();
            values.put( date, value );
        }

        return values;
    }


    // Parses an ISO-like elasticsearch timestamp ("...T...Z") into a Date.
    // Returns null (and logs) if the value cannot be parsed.
    private Date strToDate( String dateStr )
    {
        String target = dateStr.replace( "T", " " ).replace( "Z", "" );
        Date date = null;

        try
        {
            date = newDateFormat().parse( target );
        }
        catch ( ParseException e )
        {
            LOG.error( "Error while parsing time: ", e );
        }

        return date;
    }


    // Formats a Date in the elasticsearch-expected "yyyy-MM-ddTHH:mm:ss" form.
    private String dateToStr( Date date )
    {
        return newDateFormat().format( date ).replace( " ", "T" );
    }


    // SimpleDateFormat instances are not thread-safe; create one per use.
    private static DateFormat newDateFormat()
    {
        return new SimpleDateFormat( DATE_PATTERN );
    }
}
| monitor bugfix
| management/server/core/monitoring/monitoring-impl/src/main/java/org/safehaus/subutai/core/monitor/impl/MonitorImpl.java | monitor bugfix |
|
Java | apache-2.0 | f62567979d02f58b799fd7e00b6f78a2d01f14aa | 0 | mraible/generator-jhipster,pascalgrimaud/generator-jhipster,JulienMrgrd/generator-jhipster,ruddell/generator-jhipster,hdurix/generator-jhipster,cbornet/generator-jhipster,dimeros/generator-jhipster,jkutner/generator-jhipster,gzsombor/generator-jhipster,sendilkumarn/generator-jhipster,sohibegit/generator-jhipster,gmarziou/generator-jhipster,jhipster/generator-jhipster,gzsombor/generator-jhipster,ruddell/generator-jhipster,deepu105/generator-jhipster,cbornet/generator-jhipster,mraible/generator-jhipster,mraible/generator-jhipster,PierreBesson/generator-jhipster,mosoft521/generator-jhipster,jkutner/generator-jhipster,robertmilowski/generator-jhipster,ctamisier/generator-jhipster,gmarziou/generator-jhipster,ziogiugno/generator-jhipster,jhipster/generator-jhipster,gmarziou/generator-jhipster,sendilkumarn/generator-jhipster,duderoot/generator-jhipster,danielpetisme/generator-jhipster,ruddell/generator-jhipster,mraible/generator-jhipster,atomfrede/generator-jhipster,deepu105/generator-jhipster,mosoft521/generator-jhipster,erikkemperman/generator-jhipster,deepu105/generator-jhipster,siliconharborlabs/generator-jhipster,ramzimaalej/generator-jhipster,PierreBesson/generator-jhipster,eosimosu/generator-jhipster,hdurix/generator-jhipster,dynamicguy/generator-jhipster,dimeros/generator-jhipster,atomfrede/generator-jhipster,Tcharl/generator-jhipster,eosimosu/generator-jhipster,erikkemperman/generator-jhipster,rkohel/generator-jhipster,jhipster/generator-jhipster,pascalgrimaud/generator-jhipster,siliconharborlabs/generator-jhipster,robertmilowski/generator-jhipster,robertmilowski/generator-jhipster,ramzimaalej/generator-jhipster,dimeros/generator-jhipster,sohibegit/generator-jhipster,liseri/generator-jhipster,mosoft521/generator-jhipster,ctamisier/generator-jhipster,gzsombor/generator-jhipster,vivekmore/generator-jhipster,hdurix/generator-jhipster,cbornet/generator-jhipster,vivekmore/generator-jhipster,liseri/gener
ator-jhipster,sendilkumarn/generator-jhipster,JulienMrgrd/generator-jhipster,sendilkumarn/generator-jhipster,duderoot/generator-jhipster,vivekmore/generator-jhipster,jkutner/generator-jhipster,nkolosnjaji/generator-jhipster,eosimosu/generator-jhipster,dimeros/generator-jhipster,vivekmore/generator-jhipster,duderoot/generator-jhipster,ramzimaalej/generator-jhipster,ziogiugno/generator-jhipster,wmarques/generator-jhipster,hdurix/generator-jhipster,jhipster/generator-jhipster,siliconharborlabs/generator-jhipster,wmarques/generator-jhipster,erikkemperman/generator-jhipster,hdurix/generator-jhipster,PierreBesson/generator-jhipster,pascalgrimaud/generator-jhipster,nkolosnjaji/generator-jhipster,pascalgrimaud/generator-jhipster,erikkemperman/generator-jhipster,ctamisier/generator-jhipster,sohibegit/generator-jhipster,gmarziou/generator-jhipster,Tcharl/generator-jhipster,rifatdover/generator-jhipster,nkolosnjaji/generator-jhipster,mosoft521/generator-jhipster,ruddell/generator-jhipster,sendilkumarn/generator-jhipster,cbornet/generator-jhipster,nkolosnjaji/generator-jhipster,ziogiugno/generator-jhipster,gzsombor/generator-jhipster,pascalgrimaud/generator-jhipster,rkohel/generator-jhipster,robertmilowski/generator-jhipster,liseri/generator-jhipster,Tcharl/generator-jhipster,Tcharl/generator-jhipster,danielpetisme/generator-jhipster,erikkemperman/generator-jhipster,atomfrede/generator-jhipster,jhipster/generator-jhipster,cbornet/generator-jhipster,atomfrede/generator-jhipster,PierreBesson/generator-jhipster,ruddell/generator-jhipster,dynamicguy/generator-jhipster,ziogiugno/generator-jhipster,deepu105/generator-jhipster,gzsombor/generator-jhipster,robertmilowski/generator-jhipster,deepu105/generator-jhipster,JulienMrgrd/generator-jhipster,rkohel/generator-jhipster,eosimosu/generator-jhipster,mosoft521/generator-jhipster,ctamisier/generator-jhipster,dynamicguy/generator-jhipster,duderoot/generator-jhipster,wmarques/generator-jhipster,ctamisier/generator-jhipster,dimeros/generato
r-jhipster,siliconharborlabs/generator-jhipster,JulienMrgrd/generator-jhipster,wmarques/generator-jhipster,atomfrede/generator-jhipster,JulienMrgrd/generator-jhipster,liseri/generator-jhipster,vivekmore/generator-jhipster,mraible/generator-jhipster,nkolosnjaji/generator-jhipster,wmarques/generator-jhipster,Tcharl/generator-jhipster,jkutner/generator-jhipster,liseri/generator-jhipster,sohibegit/generator-jhipster,danielpetisme/generator-jhipster,rkohel/generator-jhipster,PierreBesson/generator-jhipster,eosimosu/generator-jhipster,duderoot/generator-jhipster,rkohel/generator-jhipster,danielpetisme/generator-jhipster,ziogiugno/generator-jhipster,rifatdover/generator-jhipster,sohibegit/generator-jhipster,gmarziou/generator-jhipster,dynamicguy/generator-jhipster,danielpetisme/generator-jhipster,jkutner/generator-jhipster,rifatdover/generator-jhipster,siliconharborlabs/generator-jhipster | <%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%=packageName%>.web.rest;
import <%=packageName%>.web.rest.vm.RouteVM;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.cloud.netflix.zuul.filters.Route;
import org.springframework.cloud.netflix.zuul.filters.RouteLocator;
import org.springframework.http.*;
import org.springframework.web.bind.annotation.*;
import com.codahale.metrics.annotation.Timed;
/**
 * REST controller exposing the gateway's Zuul routing configuration.
 */
@RestController
@RequestMapping("/api/gateway")
public class GatewayResource {

    private final Logger log = LoggerFactory.getLogger(GatewayResource.class);

    private final RouteLocator routeLocator;

    private final DiscoveryClient discoveryClient;

    public GatewayResource(RouteLocator routeLocator, DiscoveryClient discoveryClient) {
        this.routeLocator = routeLocator;
        this.discoveryClient = discoveryClient;
    }

    /**
     * GET /routes : get the active routes.
     *
     * @return the ResponseEntity with status 200 (OK) and with body the list of routes
     */
    @GetMapping("/routes")
    @Timed
    public ResponseEntity<List<RouteVM>> activeRoutes() {
        List<RouteVM> routeVMs = new ArrayList<>();
        // Describe each route known to Zuul, together with the service
        // instances currently registered for the route's target location.
        for (Route route : routeLocator.getRoutes()) {
            RouteVM vm = new RouteVM();
            vm.setPath(route.getFullPath());
            vm.setServiceId(route.getId());
            vm.setServiceInstances(discoveryClient.getInstances(route.getLocation()));
            routeVMs.add(vm);
        }
        return new ResponseEntity<>(routeVMs, HttpStatus.OK);
    }
}
| generators/server/templates/src/main/java/package/web/rest/_GatewayResource.java | <%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%=packageName%>.web.rest;
import <%=packageName%>.web.rest.vm.RouteVM;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.cloud.netflix.zuul.filters.Route;
import org.springframework.cloud.netflix.zuul.filters.RouteLocator;
import org.springframework.http.*;
import org.springframework.web.bind.annotation.*;
import com.codahale.metrics.annotation.Timed;
/**
 * REST controller for managing Gateway configuration.
 */
@RestController
@RequestMapping("/api/gateway")
public class GatewayResource {

    private final Logger log = LoggerFactory.getLogger(GatewayResource.class);

    private final RouteLocator routeLocator;

    private final DiscoveryClient discoveryClient;

    public GatewayResource(RouteLocator routeLocator, DiscoveryClient discoveryClient) {
        this.routeLocator = routeLocator;
        this.discoveryClient = discoveryClient;
    }

    /**
     * GET /routes : get the active routes.
     *
     * @return the ResponseEntity with status 200 (OK) and with body the list of routes
     */
    @GetMapping("/routes")
    @Timed
    public ResponseEntity<List<RouteVM>> activeRoutes() {
        List<Route> routes = routeLocator.getRoutes();
        List<RouteVM> routeVMs = new ArrayList<>();
        routes.forEach(route -> {
            RouteVM routeVM = new RouteVM();
            routeVM.setPath(route.getFullPath());
            routeVM.setServiceId(route.getId());
            // BUGFIX: service instances must be looked up by the route's
            // location (the target service id), not by the Zuul route id.
            routeVM.setServiceInstances(discoveryClient.getInstances(route.getLocation()));
            routeVMs.add(routeVM);
        });
        return new ResponseEntity<>(routeVMs, HttpStatus.OK);
    }
}
| use routeLocation instead of routeId to lookup service-instances
Fix #5733
| generators/server/templates/src/main/java/package/web/rest/_GatewayResource.java | use routeLocation instead of routeId to lookup service-instances |
|
Java | apache-2.0 | 562c63c4c766336aa4e9edc55b801d6a4123fd67 | 0 | drsquidop/camel,veithen/camel,allancth/camel,jmandawg/camel,veithen/camel,pax95/camel,chirino/camel,gnodet/camel,acartapanis/camel,tlehoux/camel,hqstevenson/camel,sirlatrom/camel,cunningt/camel,isavin/camel,onders86/camel,jlpedrosa/camel,tdiesler/camel,onders86/camel,dmvolod/camel,ssharma/camel,arnaud-deprez/camel,scranton/camel,borcsokj/camel,borcsokj/camel,JYBESSON/camel,ssharma/camel,tkopczynski/camel,jlpedrosa/camel,dmvolod/camel,punkhorn/camel-upstream,apache/camel,FingolfinTEK/camel,prashant2402/camel,mcollovati/camel,snurmine/camel,dmvolod/camel,nikvaessen/camel,gautric/camel,jkorab/camel,JYBESSON/camel,tkopczynski/camel,arnaud-deprez/camel,sverkera/camel,DariusX/camel,apache/camel,objectiser/camel,Thopap/camel,ssharma/camel,mgyongyosi/camel,borcsokj/camel,punkhorn/camel-upstream,curso007/camel,jarst/camel,onders86/camel,DariusX/camel,YoshikiHigo/camel,kevinearls/camel,prashant2402/camel,bgaudaen/camel,curso007/camel,zregvart/camel,jonmcewen/camel,DariusX/camel,anoordover/camel,tdiesler/camel,scranton/camel,w4tson/camel,mcollovati/camel,snurmine/camel,onders86/camel,tlehoux/camel,tkopczynski/camel,rmarting/camel,mgyongyosi/camel,pmoerenhout/camel,CodeSmell/camel,NickCis/camel,mgyongyosi/camel,nikvaessen/camel,gautric/camel,veithen/camel,nikhilvibhav/camel,sirlatrom/camel,akhettar/camel,ullgren/camel,gilfernandes/camel,nboukhed/camel,chirino/camel,driseley/camel,neoramon/camel,jkorab/camel,snurmine/camel,yuruki/camel,tlehoux/camel,tkopczynski/camel,nikhilvibhav/camel,RohanHart/camel,w4tson/camel,Fabryprog/camel,Thopap/camel,anoordover/camel,drsquidop/camel,bgaudaen/camel,cunningt/camel,oalles/camel,NickCis/camel,bgaudaen/camel,oalles/camel,oalles/camel,oalles/camel,Thopap/camel,punkhorn/camel-upstream,nicolaferraro/camel,ssharma/camel,akhettar/camel,CodeSmell/camel,nikvaessen/camel,sabre1041/camel,bhaveshdt/camel,nboukhed/camel,neoramon/camel,akhettar/camel,jkorab/camel,FingolfinTEK/camel,Yoshi
kiHigo/camel,tkopczynski/camel,snurmine/camel,tadayosi/camel,pkletsko/camel,neoramon/camel,pkletsko/camel,yuruki/camel,anton-k11/camel,pmoerenhout/camel,salikjan/camel,lburgazzoli/apache-camel,borcsokj/camel,Fabryprog/camel,pmoerenhout/camel,lburgazzoli/camel,tdiesler/camel,bhaveshdt/camel,FingolfinTEK/camel,acartapanis/camel,onders86/camel,adessaigne/camel,prashant2402/camel,gilfernandes/camel,jlpedrosa/camel,davidkarlsen/camel,jarst/camel,sabre1041/camel,JYBESSON/camel,jamesnetherton/camel,sabre1041/camel,salikjan/camel,jamesnetherton/camel,w4tson/camel,driseley/camel,rmarting/camel,anoordover/camel,borcsokj/camel,gautric/camel,jmandawg/camel,christophd/camel,cunningt/camel,kevinearls/camel,tlehoux/camel,kevinearls/camel,hqstevenson/camel,mgyongyosi/camel,apache/camel,nikvaessen/camel,drsquidop/camel,nikhilvibhav/camel,jamesnetherton/camel,anoordover/camel,jmandawg/camel,christophd/camel,rmarting/camel,chirino/camel,jonmcewen/camel,nicolaferraro/camel,scranton/camel,prashant2402/camel,nboukhed/camel,kevinearls/camel,drsquidop/camel,jamesnetherton/camel,isavin/camel,hqstevenson/camel,RohanHart/camel,pkletsko/camel,RohanHart/camel,tadayosi/camel,yuruki/camel,RohanHart/camel,alvinkwekel/camel,nicolaferraro/camel,driseley/camel,zregvart/camel,pkletsko/camel,christophd/camel,lburgazzoli/apache-camel,sirlatrom/camel,gnodet/camel,objectiser/camel,jlpedrosa/camel,FingolfinTEK/camel,akhettar/camel,snurmine/camel,sverkera/camel,anton-k11/camel,YoshikiHigo/camel,pax95/camel,jmandawg/camel,NickCis/camel,punkhorn/camel-upstream,tdiesler/camel,jonmcewen/camel,bhaveshdt/camel,lburgazzoli/camel,FingolfinTEK/camel,yuruki/camel,lburgazzoli/apache-camel,zregvart/camel,jonmcewen/camel,bgaudaen/camel,neoramon/camel,DariusX/camel,nboukhed/camel,gilfernandes/camel,isavin/camel,jarst/camel,mcollovati/camel,jonmcewen/camel,arnaud-deprez/camel,w4tson/camel,neoramon/camel,sabre1041/camel,sverkera/camel,acartapanis/camel,rmarting/camel,davidkarlsen/camel,ullgren/camel,jkorab/camel,CodeSmell/
camel,ullgren/camel,isavin/camel,JYBESSON/camel,ssharma/camel,acartapanis/camel,apache/camel,sverkera/camel,pax95/camel,nboukhed/camel,isavin/camel,mgyongyosi/camel,sirlatrom/camel,hqstevenson/camel,nikhilvibhav/camel,Fabryprog/camel,curso007/camel,cunningt/camel,adessaigne/camel,scranton/camel,scranton/camel,gilfernandes/camel,tadayosi/camel,zregvart/camel,gnodet/camel,isavin/camel,NickCis/camel,gilfernandes/camel,anoordover/camel,sabre1041/camel,Thopap/camel,jonmcewen/camel,acartapanis/camel,gautric/camel,tadayosi/camel,nicolaferraro/camel,davidkarlsen/camel,arnaud-deprez/camel,tdiesler/camel,jlpedrosa/camel,Fabryprog/camel,anton-k11/camel,allancth/camel,pmoerenhout/camel,drsquidop/camel,objectiser/camel,alvinkwekel/camel,anton-k11/camel,jamesnetherton/camel,tlehoux/camel,bhaveshdt/camel,borcsokj/camel,adessaigne/camel,sirlatrom/camel,driseley/camel,driseley/camel,NickCis/camel,jamesnetherton/camel,yuruki/camel,davidkarlsen/camel,chirino/camel,rmarting/camel,lburgazzoli/camel,alvinkwekel/camel,jkorab/camel,pmoerenhout/camel,hqstevenson/camel,onders86/camel,pkletsko/camel,jlpedrosa/camel,pkletsko/camel,sirlatrom/camel,rmarting/camel,sverkera/camel,snurmine/camel,allancth/camel,YoshikiHigo/camel,YoshikiHigo/camel,arnaud-deprez/camel,JYBESSON/camel,lburgazzoli/apache-camel,curso007/camel,sabre1041/camel,dmvolod/camel,drsquidop/camel,scranton/camel,jmandawg/camel,lburgazzoli/camel,objectiser/camel,tdiesler/camel,jarst/camel,FingolfinTEK/camel,curso007/camel,NickCis/camel,sverkera/camel,gilfernandes/camel,jarst/camel,christophd/camel,anton-k11/camel,gnodet/camel,lburgazzoli/apache-camel,pax95/camel,adessaigne/camel,tadayosi/camel,RohanHart/camel,nboukhed/camel,pax95/camel,mcollovati/camel,ssharma/camel,oalles/camel,veithen/camel,hqstevenson/camel,tkopczynski/camel,akhettar/camel,RohanHart/camel,driseley/camel,cunningt/camel,neoramon/camel,bgaudaen/camel,prashant2402/camel,jarst/camel,gautric/camel,bhaveshdt/camel,oalles/camel,mgyongyosi/camel,allancth/camel,christophd/
camel,akhettar/camel,w4tson/camel,nikvaessen/camel,kevinearls/camel,christophd/camel,Thopap/camel,lburgazzoli/apache-camel,acartapanis/camel,jmandawg/camel,tadayosi/camel,nikvaessen/camel,apache/camel,cunningt/camel,bhaveshdt/camel,tlehoux/camel,dmvolod/camel,chirino/camel,apache/camel,anoordover/camel,gautric/camel,pax95/camel,allancth/camel,CodeSmell/camel,chirino/camel,dmvolod/camel,veithen/camel,curso007/camel,adessaigne/camel,prashant2402/camel,bgaudaen/camel,alvinkwekel/camel,anton-k11/camel,yuruki/camel,w4tson/camel,jkorab/camel,gnodet/camel,lburgazzoli/camel,allancth/camel,YoshikiHigo/camel,kevinearls/camel,adessaigne/camel,pmoerenhout/camel,arnaud-deprez/camel,lburgazzoli/camel,veithen/camel,ullgren/camel,Thopap/camel,JYBESSON/camel | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kubernetes.producer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.fabric8.kubernetes.api.model.Node;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesTestSupport;
import org.apache.camel.util.ObjectHelper;
import org.junit.Test;
public class KubernetesNodesProducerTest extends KubernetesTestSupport {

    // Integration tests against a live Kubernetes master. Each test returns
    // early (is effectively skipped) when no auth token is configured.

    @Test
    public void listTest() throws Exception {
        if (ObjectHelper.isEmpty(authToken)) {
            return;
        }
        List<Node> result = template.requestBody("direct:list", "",
                List.class);
        // NOTE(review): assumes the target test cluster has exactly one node.
        assertTrue(result.size() == 1);
    }

    @Test
    public void listByLabelsTest() throws Exception {
        if (ObjectHelper.isEmpty(authToken)) {
            return;
        }
        // Select the node by its hostname label; the address is hard-coded to
        // a specific test environment (presumably a local minishift/vagrant
        // cluster -- verify before running elsewhere).
        Exchange ex = template.request("direct:listByLabels", new Processor() {

            @Override
            public void process(Exchange exchange) throws Exception {
                Map<String, String> labels = new HashMap<String, String>();
                labels.put("kubernetes.io/hostname", "172.28.128.4");
                exchange.getIn()
                        .setHeader(
                                KubernetesConstants.KUBERNETES_NODES_LABELS,
                                labels);
            }
        });
        List<Node> result = ex.getOut().getBody(List.class);
        Node node = result.get(0);
        // NOTE(review): assumes the node reports the default pod capacity of 110.
        assertTrue(node.getStatus().getCapacity().get("pods").getAmount().equals("110"));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {

            // Two direct routes exercising the nodes category of the
            // kubernetes producer: plain listing and label-filtered listing.
            @Override
            public void configure() throws Exception {
                from("direct:list")
                        .toF("kubernetes://%s?oauthToken=%s&category=nodes&operation=listNodes",
                                host, authToken);
                from("direct:listByLabels")
                        .toF("kubernetes://%s?oauthToken=%s&category=nodes&operation=listNodesByLabels",
                                host, authToken);
            }
        };
    }
}
| components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/producer/KubernetesNodesProducerTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kubernetes.producer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.fabric8.kubernetes.api.model.Node;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesTestSupport;
import org.apache.camel.util.ObjectHelper;
import org.junit.Test;
// Previous revision of this test, kept in this dump as the dataset's "old contents" payload.
// It differs from the current revision only in the expected pod capacity ("40" vs "110").
public class KubernetesNodesProducerTest extends KubernetesTestSupport {
    @Test
    public void listTest() throws Exception {
        if (ObjectHelper.isEmpty(authToken)) {
            // Integration test: skipped when no auth token is configured.
            return;
        }
        List<Node> result = template.requestBody("direct:list", "",
                List.class);
        assertTrue(result.size() == 1);
    }
    @Test
    public void listByLabelsTest() throws Exception {
        if (ObjectHelper.isEmpty(authToken)) {
            // Integration test: skipped when no auth token is configured.
            return;
        }
        Exchange ex = template.request("direct:listByLabels", new Processor() {
            @Override
            public void process(Exchange exchange) throws Exception {
                Map<String, String> labels = new HashMap<String, String>();
                labels.put("kubernetes.io/hostname", "172.28.128.4");
                exchange.getIn()
                        .setHeader(
                                KubernetesConstants.KUBERNETES_NODES_LABELS,
                                labels);
            }
        });
        List<Node> result = ex.getOut().getBody(List.class);
        Node node = result.get(0);
        // NOTE(review): expectation later updated to "110" in the current revision of this test.
        assertTrue(node.getStatus().getCapacity().get("pods").getAmount().equals("40"));
    }
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:list")
                        .toF("kubernetes://%s?oauthToken=%s&category=nodes&operation=listNodes",
                                host, authToken);
                from("direct:listByLabels")
                        .toF("kubernetes://%s?oauthToken=%s&category=nodes&operation=listNodesByLabels",
                                host, authToken);
            }
        };
    }
}
| Camel-Kubernetes: Fixed integration tests
| components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/producer/KubernetesNodesProducerTest.java | Camel-Kubernetes: Fixed integration tests |
|
Java | apache-2.0 | e7d0779ca50785868b662833c7265af3fc83c8fb | 0 | mtransitapps/ca-calgary-transit-bus-parser | package org.mtransit.parser.ca_calgary_transit_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://www.calgarytransit.com/developer-resources
// https://data.calgary.ca/OpenData/Pages/DatasetDetails.aspx?DatasetID=PDC0-99999-99999-00501-P(CITYonlineDefault)
// https://data.calgary.ca/_layouts/OpenData/DownloadDataset.ashx?Format=FILE&DatasetId=PDC0-99999-99999-00501-P(CITYonlineDefault)&VariantId=5(CITYonlineDefault)
public class CalgaryTransitBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-calgary-transit-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new CalgaryTransitBusAgencyTools().start(args);
}
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("\nGenerating Calgary Transit bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this);
super.start(args);
System.out.printf("\nGenerating Calgary Transit bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
	/** This parser only produces bus routes. */
	@Override
	public Integer getAgencyRouteType() {
		return MAgency.ROUTE_TYPE_BUS;
	}
private static final String RSN_FLOATER = "FLT";
private static final long RID_FLOATER = 10001l;
@Override
public long getRouteId(GRoute gRoute) {
if (!Utils.isDigitsOnly(gRoute.getRouteShortName())) {
if (RSN_FLOATER.equals(gRoute.getRouteShortName())) {
return RID_FLOATER;
}
}
return Long.parseLong(gRoute.getRouteShortName()); // using route short name as route ID
}
private static final Pattern CLEAN_STREET_POINT = Pattern.compile("((\\s)*(ave|st|mt)\\.(\\s)*)", Pattern.CASE_INSENSITIVE);
private static final String CLEAN_AVE_POINT_REPLACEMENT = "$2$3$4";
@Override
public String getRouteLongName(GRoute gRoute) {
String gRouteLongName = gRoute.getRouteLongName();
gRouteLongName = CleanUtils.cleanSlashes(gRouteLongName);
gRouteLongName = CLEAN_STREET_POINT.matcher(gRouteLongName).replaceAll(CLEAN_AVE_POINT_REPLACEMENT);
gRouteLongName = CleanUtils.cleanStreetTypes(gRouteLongName);
return CleanUtils.cleanLabel(gRouteLongName);
}
	private static final String AGENCY_COLOR_RED = "B83A3F"; // LIGHT RED (from web site CSS)
	private static final String AGENCY_COLOR = AGENCY_COLOR_RED;
	/** Default agency color, presumably applied to routes without a specific color. */
	@Override
	public String getAgencyColor() {
		return AGENCY_COLOR;
	}
	private static final String COLOR_BUS_ROUTES = "004B85"; // BLUE (from PDF map)
	private static final String COLOR_BUS_ROUTES_EXPRESS = "00BBE5"; // LIGHT BLUE (from PDF map)
	private static final String COLOR_BUS_ROUTES_BRT = "ED1C2E"; // RED (from PDF map)
	private static final String COLOR_BUS_ROUTES_SCHOOL = "E4A024"; // YELLOW (from PDF map)
	/**
	 * Returns the color for one route, keyed by its numeric route short name.
	 * A null entry means "no route-specific color" (presumably the agency
	 * color is used instead). An unknown route number is a data error and
	 * aborts the generation so the table below gets updated.
	 */
	@Override
	public String getRouteColor(GRoute gRoute) {
		if (!Utils.isDigitsOnly(gRoute.getRouteShortName())) {
			if (RSN_FLOATER.equals(gRoute.getRouteShortName())) {
				// floater route: no route-specific color
				return null;
			}
		}
		int rsn = Integer.parseInt(gRoute.getRouteShortName());
		switch (rsn) {
		// @formatter:off
		case 1: return null;
		case 2: return null;
		case 3: return null;
		case 4: return null;
		case 5: return null;
		case 6: return null;
		case 7: return null;
		case 8: return null;
		case 9: return null;
		case 10: return null;
		case 11: return null;
		case 12: return null;
		case 13: return null;
		case 14: return null;
		case 15: return null;
		case 16: return null;
		case 17: return null;
		case 18: return null;
		case 19: return null;
		case 20: return null;
		case 21: return null;
		case 22: return COLOR_BUS_ROUTES_EXPRESS;
		case 23: return COLOR_BUS_ROUTES_EXPRESS;
		case 24: return null;
		case 25: return null;
		case 26: return null;
		case 27: return COLOR_BUS_ROUTES;
		case 28: return null;
		case 29: return null;
		case 30: return COLOR_BUS_ROUTES;
		case 32: return null;
		case 33: return COLOR_BUS_ROUTES;
		case 34: return COLOR_BUS_ROUTES;
		case 35: return COLOR_BUS_ROUTES;
		case 36: return null;
		case 37: return null;
		case 38: return null;
		case 39: return COLOR_BUS_ROUTES;
		case 40: return null;
		case 41: return null;
		case 42: return null;
		case 43: return null;
		case 44: return COLOR_BUS_ROUTES;
		case 45: return null;
		case 46: return null;
		case 47: return COLOR_BUS_ROUTES;
		case 48: return null;
		case 49: return null;
		case 50: return null;
		case 51: return null;
		case 52: return null;
		case 54: return null;
		case 55: return null;
		case 56: return null;
		case 57: return null;
		case 60: return null;
		case 61: return COLOR_BUS_ROUTES;
		case 62: return COLOR_BUS_ROUTES_EXPRESS;
		case 63: return COLOR_BUS_ROUTES_EXPRESS;
		case 64: return COLOR_BUS_ROUTES_EXPRESS;
		case 66: return COLOR_BUS_ROUTES_EXPRESS;
		case 69: return COLOR_BUS_ROUTES;
		case 70: return COLOR_BUS_ROUTES_EXPRESS;
		case 71: return COLOR_BUS_ROUTES;
		case 72: return null;
		case 73: return null;
		case 74: return null;
		case 75: return COLOR_BUS_ROUTES_EXPRESS;
		case 76: return null;
		case 77: return COLOR_BUS_ROUTES;
		case 78: return null;
		case 79: return null;
		case 80: return null;
		case 81: return COLOR_BUS_ROUTES;
		case 83: return null;
		case 84: return COLOR_BUS_ROUTES;
		case 85: return null;
		case 86: return null;
		case 88: return null;
		case 89: return COLOR_BUS_ROUTES;
		case 91: return COLOR_BUS_ROUTES;
		case 92: return null;
		case 93: return COLOR_BUS_ROUTES;
		case 94: return COLOR_BUS_ROUTES;
		case 95: return COLOR_BUS_ROUTES;
		case 96: return null;
		case 98: return COLOR_BUS_ROUTES;
		case 100: return null;
		case 102: return COLOR_BUS_ROUTES_EXPRESS;
		case 103: return COLOR_BUS_ROUTES_EXPRESS;
		case 107: return COLOR_BUS_ROUTES;
		case 109: return COLOR_BUS_ROUTES_EXPRESS;
		case 110: return COLOR_BUS_ROUTES_EXPRESS;
		case 112: return null;
		case 114: return null;
		case 116: return COLOR_BUS_ROUTES_EXPRESS;
		case 117: return COLOR_BUS_ROUTES_EXPRESS;
		case 118: return null;
		case 122: return COLOR_BUS_ROUTES;
		case 125: return COLOR_BUS_ROUTES_EXPRESS;
		case 126: return COLOR_BUS_ROUTES_EXPRESS;
		case 127: return null;
		case 133: return COLOR_BUS_ROUTES_EXPRESS;
		case 136: return COLOR_BUS_ROUTES;
		case 137: return null;
		case 142: return COLOR_BUS_ROUTES_EXPRESS;
		case 143: return null;
		case 145: return COLOR_BUS_ROUTES;
		case 146: return COLOR_BUS_ROUTES;
		case 151: return COLOR_BUS_ROUTES_EXPRESS;
		case 152: return COLOR_BUS_ROUTES;
		case 153: return COLOR_BUS_ROUTES;
		case 154: return null;
		case 157: return null;
		case 158: return null;
		case 159: return COLOR_BUS_ROUTES;
		case 169: return null;
		case 174: return null;
		case 176: return COLOR_BUS_ROUTES_EXPRESS;
		case 178: return COLOR_BUS_ROUTES;
		case 181: return COLOR_BUS_ROUTES_EXPRESS;
		case 182: return COLOR_BUS_ROUTES_EXPRESS;
		case 199: return null;
		case 299: return null;
		case 300: return COLOR_BUS_ROUTES_BRT;
		case 301: return COLOR_BUS_ROUTES_BRT;
		case 302: return COLOR_BUS_ROUTES_BRT;
		case 304: return COLOR_BUS_ROUTES_BRT;
		case 305: return COLOR_BUS_ROUTES_BRT;
		case 306: return COLOR_BUS_ROUTES_BRT;
		case 308: return COLOR_BUS_ROUTES_BRT;
		case 402: return COLOR_BUS_ROUTES;
		case 404: return COLOR_BUS_ROUTES;
		case 405: return COLOR_BUS_ROUTES;
		case 406: return COLOR_BUS_ROUTES;
		case 407: return COLOR_BUS_ROUTES;
		case 408: return null;
		case 409: return COLOR_BUS_ROUTES;
		case 410: return COLOR_BUS_ROUTES;
		case 411: return COLOR_BUS_ROUTES;
		case 412: return COLOR_BUS_ROUTES;
		case 414: return COLOR_BUS_ROUTES;
		case 419: return COLOR_BUS_ROUTES;
		case 420: return COLOR_BUS_ROUTES;
		case 421: return COLOR_BUS_ROUTES;
		case 425: return COLOR_BUS_ROUTES;
		case 429: return COLOR_BUS_ROUTES;
		case 430: return COLOR_BUS_ROUTES;
		case 439: return COLOR_BUS_ROUTES;
		case 440: return COLOR_BUS_ROUTES;
		case 444: return COLOR_BUS_ROUTES;
		case 445: return COLOR_BUS_ROUTES;
		case 452: return COLOR_BUS_ROUTES;
		case 453: return COLOR_BUS_ROUTES;
		case 454: return COLOR_BUS_ROUTES;
		case 456: return COLOR_BUS_ROUTES;
		case 468: return COLOR_BUS_ROUTES;
		case 502: return null;
		case 506: return COLOR_BUS_ROUTES;
		case 555: return null;
		case 697: return COLOR_BUS_ROUTES_SCHOOL;
		case 698: return COLOR_BUS_ROUTES_SCHOOL;
		case 699: return COLOR_BUS_ROUTES_SCHOOL;
		case 703: return COLOR_BUS_ROUTES_SCHOOL;
		case 704: return COLOR_BUS_ROUTES_SCHOOL;
		case 705: return COLOR_BUS_ROUTES_SCHOOL;
		case 706: return COLOR_BUS_ROUTES_SCHOOL;
		case 710: return COLOR_BUS_ROUTES_SCHOOL;
		case 711: return COLOR_BUS_ROUTES_SCHOOL;
		case 712: return COLOR_BUS_ROUTES_SCHOOL;
		case 713: return COLOR_BUS_ROUTES_SCHOOL;
		case 714: return COLOR_BUS_ROUTES_SCHOOL;
		case 715: return COLOR_BUS_ROUTES_SCHOOL;
		case 716: return COLOR_BUS_ROUTES_SCHOOL;
		case 717: return COLOR_BUS_ROUTES_SCHOOL;
		case 718: return COLOR_BUS_ROUTES_SCHOOL;
		case 719: return COLOR_BUS_ROUTES_SCHOOL;
		case 721: return COLOR_BUS_ROUTES_SCHOOL;
		case 724: return COLOR_BUS_ROUTES_SCHOOL;
		case 725: return COLOR_BUS_ROUTES_SCHOOL;
		case 731: return COLOR_BUS_ROUTES_SCHOOL;
		case 732: return COLOR_BUS_ROUTES_SCHOOL;
		case 733: return COLOR_BUS_ROUTES_SCHOOL;
		case 734: return COLOR_BUS_ROUTES_SCHOOL;
		case 735: return COLOR_BUS_ROUTES_SCHOOL;
		case 737: return COLOR_BUS_ROUTES_SCHOOL;
		case 738: return COLOR_BUS_ROUTES_SCHOOL;
		case 739: return COLOR_BUS_ROUTES_SCHOOL;
		case 740: return COLOR_BUS_ROUTES_SCHOOL;
		case 741: return COLOR_BUS_ROUTES_SCHOOL;
		case 742: return COLOR_BUS_ROUTES_SCHOOL;
		case 743: return COLOR_BUS_ROUTES_SCHOOL;
		case 744: return COLOR_BUS_ROUTES_SCHOOL;
		case 745: return COLOR_BUS_ROUTES_SCHOOL;
		case 746: return COLOR_BUS_ROUTES_SCHOOL;
		case 747: return COLOR_BUS_ROUTES_SCHOOL;
		case 751: return COLOR_BUS_ROUTES_SCHOOL;
		case 752: return COLOR_BUS_ROUTES_SCHOOL;
		case 753: return COLOR_BUS_ROUTES_SCHOOL;
		case 754: return COLOR_BUS_ROUTES_SCHOOL;
		case 755: return COLOR_BUS_ROUTES_SCHOOL;
		case 756: return COLOR_BUS_ROUTES_SCHOOL;
		case 757: return COLOR_BUS_ROUTES_SCHOOL;
		case 758: return COLOR_BUS_ROUTES_SCHOOL;
		case 759: return COLOR_BUS_ROUTES_SCHOOL;
		case 760: return COLOR_BUS_ROUTES_SCHOOL;
		case 761: return COLOR_BUS_ROUTES_SCHOOL;
		case 762: return COLOR_BUS_ROUTES_SCHOOL;
		case 763: return COLOR_BUS_ROUTES_SCHOOL;
		case 764: return COLOR_BUS_ROUTES_SCHOOL;
		case 765: return COLOR_BUS_ROUTES_SCHOOL;
		case 766: return COLOR_BUS_ROUTES_SCHOOL;
		case 770: return COLOR_BUS_ROUTES_SCHOOL;
		case 771: return COLOR_BUS_ROUTES_SCHOOL;
		case 773: return COLOR_BUS_ROUTES_SCHOOL;
		case 774: return COLOR_BUS_ROUTES_SCHOOL;
		case 775: return COLOR_BUS_ROUTES_SCHOOL;
		case 776: return COLOR_BUS_ROUTES_SCHOOL;
		case 778: return COLOR_BUS_ROUTES_SCHOOL;
		case 779: return COLOR_BUS_ROUTES_SCHOOL;
		case 780: return COLOR_BUS_ROUTES_SCHOOL;
		case 791: return COLOR_BUS_ROUTES_SCHOOL;
		case 792: return COLOR_BUS_ROUTES_SCHOOL;
		case 795: return COLOR_BUS_ROUTES_SCHOOL;
		case 796: return COLOR_BUS_ROUTES_SCHOOL;
		case 797: return COLOR_BUS_ROUTES_SCHOOL;
		case 798: return COLOR_BUS_ROUTES_SCHOOL;
		case 799: return COLOR_BUS_ROUTES_SCHOOL;
		case 801: return COLOR_BUS_ROUTES_SCHOOL;
		case 802: return COLOR_BUS_ROUTES_SCHOOL;
		case 804: return COLOR_BUS_ROUTES_SCHOOL;
		case 805: return COLOR_BUS_ROUTES_SCHOOL;
		case 807: return COLOR_BUS_ROUTES_SCHOOL;
		case 811: return COLOR_BUS_ROUTES_SCHOOL;
		case 812: return COLOR_BUS_ROUTES_SCHOOL;
		case 813: return COLOR_BUS_ROUTES_SCHOOL;
		case 814: return COLOR_BUS_ROUTES_SCHOOL;
		case 815: return COLOR_BUS_ROUTES_SCHOOL;
		case 816: return COLOR_BUS_ROUTES_SCHOOL;
		case 817: return COLOR_BUS_ROUTES_SCHOOL;
		case 818: return COLOR_BUS_ROUTES_SCHOOL;
		case 819: return COLOR_BUS_ROUTES_SCHOOL;
		case 821: return COLOR_BUS_ROUTES_SCHOOL;
		case 822: return COLOR_BUS_ROUTES_SCHOOL;
		case 830: return COLOR_BUS_ROUTES_SCHOOL;
		case 831: return COLOR_BUS_ROUTES_SCHOOL;
		case 832: return COLOR_BUS_ROUTES_SCHOOL;
		case 834: return COLOR_BUS_ROUTES_SCHOOL;
		case 835: return COLOR_BUS_ROUTES_SCHOOL;
		case 837: return COLOR_BUS_ROUTES_SCHOOL;
		case 838: return COLOR_BUS_ROUTES_SCHOOL;
		case 841: return COLOR_BUS_ROUTES_SCHOOL;
		case 842: return COLOR_BUS_ROUTES_SCHOOL;
		case 851: return COLOR_BUS_ROUTES_SCHOOL;
		case 853: return COLOR_BUS_ROUTES_SCHOOL;
		case 857: return COLOR_BUS_ROUTES_SCHOOL;
		case 860: return COLOR_BUS_ROUTES_SCHOOL;
		case 861: return COLOR_BUS_ROUTES_SCHOOL;
		case 878: return COLOR_BUS_ROUTES_SCHOOL;
		case 880: return COLOR_BUS_ROUTES_SCHOOL;
		case 883: return COLOR_BUS_ROUTES_SCHOOL;
		case 884: return COLOR_BUS_ROUTES_SCHOOL;
		case 888: return COLOR_BUS_ROUTES_SCHOOL;
		case 889: return COLOR_BUS_ROUTES_SCHOOL;
		case 892: return COLOR_BUS_ROUTES_SCHOOL;
		// @formatter:on
		default:
			// fail fast: a new/unknown route number must be added to the table above
			System.out.println("Unexpected route color " + gRoute);
			System.exit(-1);
			return null;
		}
	}
	// Trip head-sign labels, abbreviated to fit the app UI.
	private static final String _69_ST_STATION = "69 St Sta";
	private static final String ACADIA = "Acadia";
	private static final String OAKRIDGE = "Oakridge";
	private static final String ACADIA_OAKRIDGE = ACADIA + " / " + OAKRIDGE;
	private static final String AIRPORT = "Airport";
	private static final String ANDERSON = "Anderson";
	private static final String ANDERSON_STATION = ANDERSON; // "Anderson Sta";
	private static final String ANNIE_GALE = "Annie Gale";
	private static final String APPLEWOOD = "Applewood";
	private static final String ARBOUR_LK = "Arbour Lk";
	private static final String AUBURN_BAY = "Auburn Bay";
	private static final String B_GRANDIN = "B Grandin";
	private static final String BARLOW_STATION = "Barlow Sta";
	private static final String BEAVERBROOK = "Beaverbrook";
	private static final String BEDDINGTON = "Beddington";
	private static final String BISHOP_O_BYRNE = "B O'Byrne";
	private static final String BONAVISTA = "Bonavista";
	private static final String BONAVISTA_WEST = "W " + BONAVISTA;
	private static final String BOWNESS = "Bowness";
	private static final String BREBEUF = "Brebeuf";
	private static final String BRENTWOOD = "Brentwood";
	private static final String BRENTWOOD_STATION = BRENTWOOD; // "Brentwood Sta";
	private static final String BRIDGELAND = "Bridgeland";
	private static final String CASTLERIDGE = "Castleridge";
	private static final String CENTRAL_MEMORIAL = "Central Memorial";
	private static final String CHAPARRAL = "Chaparral";
	private static final String CHATEAU_ESTS = "Chateau Ests";
	private static final String CHINOOK = "Chinook";
	private static final String CHINOOK_STATION = CHINOOK; // "Chinook Sta";
	private static final String CHURCHILL = "Churchill";
	private static final String CIRCLE_ROUTE = "Circle Route";
	private static final String CITADEL = "Citadel";
	private static final String CITY_CTR = "City Ctr";
	private static final String COACH_HL = "Coach Hl";
	private static final String COPPERFIELD = "Copperfield";
	private static final String CORAL_SPGS = "Coral Spgs";
	private static final String COUGAR_RDG = "Cougar Rdg";
	private static final String COUNTRY_HLS = "Country Hls";
	private static final String COUNTRY_VLG = "Country Vlg";
	private static final String COVENTRY = "Coventry";
	private static final String COVENTRY_HLS = COVENTRY + " Hls";
	private static final String COVENTRY_SOUTH = "S" + COVENTRY; // NOTE(review): no space after "S" -> "SCoventry"; verify intended
	private static final String CRANSTON = "Cranston";
	private static final String CRESCENT_HTS = "Crescent Hts";
	private static final String DALHOUSIE = "Dalhousie";
	private static final String DEER_RUN = "Deer Run";
	private static final String DEERFOOT_CTR = "Deerfoot Ctr";
	private static final String DIEFENBAKER = "Diefenbaker";
	private static final String DISCOVERY_RIDGE = "Discovery Rdg";
	private static final String DOUGLASDALE = "Douglasdale";
	private static final String DOUGLAS_GLEN = "Douglas Glen";
	private static final String DOWNTOWN = "Downtown";
	private static final String EDGEBROOK_RISE = "Edgebrook Rise";
	private static final String EDGEMONT = "Edgemont";
	private static final String ELBOW_DR = "Elbow Dr";
	private static final String ERIN_WOODS = "Erin Woods";
	private static final String ERINWOODS = "Erinwoods";
	private static final String EVANSTON = "Evanston";
	private static final String EVERGREEN = "Evergreen";
	private static final String SOMERSET = "Somerset";
	private static final String EVERGREEN_SOMERSET = EVERGREEN + " / " + SOMERSET;
	private static final String F_WHELIHAN = "F Whelihan";
	private static final String FALCONRIDGE = "Falconridge";
	private static final String FOOTHILLS = "Foothills";
	private static final String FOOTHILLS_IND = "Foothills Ind";
	private static final String FOREST_HTS = "Forest Hts";
	private static final String FOREST_LAWN = "Forest Lawn";
	private static final String FOWLER = "Fowler";
	private static final String FRANKLIN = "Franklin";
	private static final String GLAMORGAN = "Glamorgan";
	private static final String GREENWOOD = "Greenwood";
	private static final String HAMPTONS = "Hamptons";
	private static final String HARVEST_HLS = "Harvest Hls";
	private static final String HAWKWOOD = "Hawkwood";
	private static final String HERITAGE = "Heritage";
	private static final String HERITAGE_STATION = HERITAGE; // "Heritage Sta";
	private static final String HIDDEN_VLY = "Hidden Vly";
	private static final String HILLHURST = "Hillhurst";
	private static final String HUNTINGTON = "Huntington";
	private static final String KINCORA = "Kincora";
	private static final String LAKEVIEW = "Lakeview";
	private static final String LIONS_PARK = "Lions Park";
	private static final String LIONS_PARK_STATION = LIONS_PARK; // "Lions Park Sta";
	private static final String LYNNWOOD = "Lynnwood";
	private static final String M_D_HOUET = "M d'Houet";
	private static final String MAC_EWAN = "MacEwan";
	private static final String MARLBOROUGH = "Marlborough";
	private static final String MARTINDALE = "Martindale";
	private static final String MC_CALL_WAY = "McCall Way";
	private static final String MC_KENZIE = "McKenzie";
	private static final String MC_KENZIE_LK_WAY = MC_KENZIE + " Lk Way";
	private static final String MC_KENZIE_TOWNE = MC_KENZIE + " Towne";
	private static final String MC_KENZIE_TOWNE_DR = MC_KENZIE_TOWNE; // "McKenzie Towne Dr";
	private static final String MC_KINGHT_WESTWINDS = "McKinght-Westwinds"; // NOTE(review): "Kinght" spelling kept as-is; verify against GTFS data
	private static final String MC_KNIGHT_WESTWINDS = "McKnight-Westwinds";
	private static final String MRU = "MRU";
	private static final String MRU_NORTH = MRU + " North";
	private static final String MRU_SOUTH = MRU + " South";
	private static final String MT_ROYAL_U = MRU; // "Mt Royal U";
	private static final String MTN_PARK = "Mtn Park";
	private static final String NEW_BRIGHTON = "New Brighton";
	private static final String NORTH_HAVEN = "North Haven";
	private static final String NORTH_POINTE = "North Pte";
	private static final String NORTHLAND = "Northland";
	private static final String NORTHMOUNT_DR = "Northmount Dr";
	private static final String NORTHWEST_LOOP = "Northwest Loop";
	private static final String NOTRE_DAME = "Notre Dame";
	private static final String OAKRIDGE_ACADIA = OAKRIDGE + " / " + ACADIA;
	private static final String OGDEN = "Ogden";
	private static final String OGDEN_NORTH = "North " + OGDEN;
	private static final String PALLISER_OAKRIDGE = "Palliser / Oakridge";
	private static final String PANORAMA = "Panorama";
	private static final String PANORAMA_HLS = PANORAMA + " Hls";
	private static final String PANORAMA_HLS_NORTH = "N " + PANORAMA_HLS;
	private static final String PARKHILL = "Parkhill";
	private static final String PARKLAND = "Parkland";
	private static final String PARK_GATE_HERITAGE = "Pk Gt Heritage";
	private static final String PRESTWICK = "Prestwick";
	private static final String QUEEN_ELIZABETH = "Queen Elizabeth";
	private static final String QUEENSLAND = "Queensland";
	private static final String R_THIRSK = "R Thirsk";
	private static final String RAMSAY = "Ramsay";
	private static final String RENFREW = "Renfrew";
	private static final String RIVERBEND = "Riverbend";
	private static final String ROCKY_RIDGE = "Rocky Rdg";
	private static final String ROYAL_OAK = "Royal Oak";
	private static final String SADDLECREST = "Saddlecrest";
	private static final String SADDLE_RIDGE = "Saddle Rdg";
	private static final String SADDLETOWN = "Saddletown";
	private static final String SADDLETOWNE = "Saddletowne";
	private static final String SAGE_HILL_KINCORA = "Sage Hill / Kincora";
	private static final String SANDSTONE = "Sandstone";
	private static final String SANDSTONE_AIRPORT = "Sandstone / " + AIRPORT;
	private static final String SARCEE_RD = "Sarcee Rd";
	private static final String SCARLETT = "Scarlett";
	private static final String SCENIC_ACRES = "Scenic Acres";
	private static final String SCENIC_ACRES_SOUTH = "S " + SCENIC_ACRES;
	private static final String SCENIC_ACRES_NORTH = "N " + SCENIC_ACRES;
	private static final String SHAWVILLE = "Shawville";
	private static final String SHERWOOD = "Sherwood";
	private static final String SILVER_SPGS = "Silver Spgs";
	private static final String SKYVIEW_RANCH = "Skyview Ranch";
	private static final String SOMERSET_BRIDLEWOOD_STATION = SOMERSET + "-Bridlewood Sta";
	private static final String SOUTH_CALGARY = "South Calgary";
	private static final String SOUTH_HEALTH = "South Health";
	private static final String SOUTHCENTER = "Southcentre";
	private static final String SOUTHLAND = "Southland";
	private static final String SOUTHLAND_STATION = SOUTHLAND; // "Southland Sta";
	private static final String ST_AUGUSTINE = "St Augustine";
	private static final String ST_FRANCIS = "St Francis";
	private static final String ST_ISABELLA = "St Isabella";
	private static final String ST_MARGARET = "St Margaret";
	private static final String ST_MATTHEW = "St Matthew";
	private static final String ST_STEPHEN = "St Stephen";
	private static final String STATION_HERITAGE = "Sta Heritage";
	private static final String STRATHCONA = "Strathcona";
	private static final String TARADALE = "Taradale";
	private static final String TOM_BAINES = "Tom Baines";
	private static final String TUSCANY = "Tuscany";
	private static final String VALLEY_RIDGE = "Vly Rdg";
	private static final String VARSITY_ACRES = "Varsity Acres";
	private static final String VINCENT_MASSEY = "V Massey";
	private static final String VISTA_HTS = "Vista Hts";
	private static final String WCHS_ST_MARY_S = "WCHS / St Mary''s"; // NOTE(review): doubled apostrophe kept as-is; verify intended
	private static final String WESTBROOK = "Westbrook";
	private static final String WESTBROOK_STATION = WESTBROOK + " Sta";
	private static final String WESTERN_CANADA = "Western Canada";
	private static final String WESTGATE = "Westgate";
	private static final String WESTHILLS = "Westhills";
	private static final String WHITEHORN = "Whitehorn";
	private static final String WHITEHORN_STATION = WHITEHORN; // WHITEHORN + " Sta";
	private static final String WISE_WOOD = "Wise Wood";
	private static final String WOODBINE = "Woodbine";
	private static final String WOODLANDS = "Woodlands";
	// Hand-written trip splits (direction + ordered stop IDs) for routes whose
	// GTFS direction data cannot be used directly. Built once in the static block.
	private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
	static {
		HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
		// route 56: Anderson station <-> Southland station
		map2.put(56l, new RouteTripSpec(56l, //
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHLAND_STATION, //
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ANDERSON_STATION) //
				.addTripSort(MDirectionType.NORTH.intValue(), //
						Arrays.asList(new String[] { "6461", "6097" })) //
				.addTripSort(MDirectionType.SOUTH.intValue(), //
						Arrays.asList(new String[] { "6097", "6562", "6461" })) //
				.compileBothTripSort());
		// route 94: Westbrook station <-> Strathcona
		map2.put(94l, new RouteTripSpec(94l, //
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTBROOK_STATION) //
				.addTripSort(MDirectionType.EAST.intValue(), //
						Arrays.asList(new String[] { "3741", "5315", "8379", "6515" })) //
				.addTripSort(MDirectionType.WEST.intValue(), //
						Arrays.asList(new String[] { "6515", "3732", "7597", "3741" })) //
				.compileBothTripSort());
		// route 98: 69 St station <-> Cougar Ridge
		map2.put(98l, new RouteTripSpec(98l, //
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COUGAR_RDG, //
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _69_ST_STATION) //
				.addTripSort(MDirectionType.NORTH.intValue(), //
						Arrays.asList(new String[] { "8374", "8822" })) //
				.addTripSort(MDirectionType.SOUTH.intValue(), //
						Arrays.asList(new String[] { "8822", "8373" })) //
				.compileBothTripSort());
		// route 419: Parkhill <-> Foothills
		map2.put(419l, new RouteTripSpec(419l, //
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, FOOTHILLS, //
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKHILL) //
				.addTripSort(MDirectionType.NORTH.intValue(), //
						Arrays.asList(new String[] { "5574", "5299", "5227", "8339" })) //
				.addTripSort(MDirectionType.SOUTH.intValue(), //
						Arrays.asList(new String[] { "8339", "5108", "5580", "5574" })) //
				.compileBothTripSort());
		// route 439: Discovery Ridge <-> 69 St station
		map2.put(439l, new RouteTripSpec(439l, //
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _69_ST_STATION, //
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DISCOVERY_RIDGE) //
				.addTripSort(MDirectionType.NORTH.intValue(), //
						Arrays.asList(new String[] { "9365", "3785" })) //
				.addTripSort(MDirectionType.SOUTH.intValue(), //
						Arrays.asList(new String[] { "3785", "9365" })) //
				.compileBothTripSort());
		// route 502: Park Gate Heritage <-> Heritage station
		map2.put(502l, new RouteTripSpec(502l, //
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STATION_HERITAGE, //
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARK_GATE_HERITAGE) //
				.addTripSort(MDirectionType.EAST.intValue(), //
						Arrays.asList(new String[] { "7592", "5192", "5762" })) //
				.addTripSort(MDirectionType.WEST.intValue(), //
						Arrays.asList(new String[] { "5762", "4577", "7592" })) //
				.compileBothTripSort());
		ALL_ROUTE_TRIPS2 = map2;
	}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return ALL_ROUTE_TRIPS2.get(mRoute.id).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.id));
}
return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return; // split
}
if (mRoute.id == 1l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(FOREST_LAWN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 2l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 3l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ELBOW_DR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 4l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HUNTINGTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 5l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(NORTH_HAVEN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 6l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WESTBROOK_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 7l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOUTH_CALGARY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 9l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BRIDGELAND, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(VARSITY_ACRES, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 10l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOUTHCENTER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 13l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WESTHILLS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 15l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 17l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(RENFREW, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(RAMSAY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 18l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MT_ROYAL_U, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 19l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.EAST);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.WEST);
return;
}
} else if (mRoute.id == 20l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NORTHMOUNT_DR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HERITAGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 22l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 23l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOOTHILLS_IND, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 24l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 26l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(FRANKLIN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MARLBOROUGH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 30l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 33l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(VISTA_HTS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BARLOW_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 37l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NORTHWEST_LOOP, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 41l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(LYNNWOOD, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 49l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOREST_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 52l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(EVERGREEN_SOMERSET, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 55l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(FALCONRIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 57l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MC_CALL_WAY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ERINWOODS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 62l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 63l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(LAKEVIEW, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 64l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 66l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHINOOK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 69l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(DEERFOOT_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 70l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(VALLEY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 71l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KINGHT_WESTWINDS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 72l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CIRCLE_ROUTE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 73l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CIRCLE_ROUTE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 74l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(TUSCANY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 79l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ACADIA_OAKRIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 80l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(OAKRIDGE_ACADIA, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 81l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 85l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KNIGHT_WESTWINDS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 86l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 91l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(LIONS_PARK_STATION, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BRENTWOOD_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 92l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ANDERSON_STATION, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE_DR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 93l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WESTBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(COACH_HL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 94l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 98l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 100l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(AIRPORT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KNIGHT_WESTWINDS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 102l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DOUGLASDALE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 103l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 107l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOUTH_CALGARY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 109l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HARVEST_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 110l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 112l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SARCEE_RD, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 116l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(COVENTRY_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 117l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 125l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ERIN_WOODS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 126l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(APPLEWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 133l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 142l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(PANORAMA, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 145l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NORTHLAND, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 151l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 152l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 158l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 174l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 176l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 178l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHAPARRAL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 181l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MRU_NORTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 182l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MRU_SOUTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 300l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(AIRPORT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DOWNTOWN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 301l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(COUNTRY_VLG, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 302l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOUTH_HEALTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 305l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.EAST);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.WEST);
return;
}
} else if (mRoute.id == 306l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WESTBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HERITAGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 405l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HILLHURST, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 406l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SHAWVILLE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 407l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(GREENWOOD, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 408l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(VALLEY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 411l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 412l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WESTGATE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 425l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SAGE_HILL_KINCORA, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 430l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SANDSTONE_AIRPORT, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 439l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(DISCOVERY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 440l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CHATEAU_ESTS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FRANKLIN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 445l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SKYVIEW_RANCH, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SADDLETOWN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 697l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(EVANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 698l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WCHS_ST_MARY_S, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 699l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 703l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 704l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(COUNTRY_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 705l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(EDGEBROOK_RISE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 706l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 710l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 711l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DOUGLAS_GLEN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 712l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(PARKLAND, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 713l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DEER_RUN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 714l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(PRESTWICK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 715l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(QUEENSLAND, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 716l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 717l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(COPPERFIELD, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 718l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DOUGLASDALE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 719l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 721l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 724l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 725l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SILVER_SPGS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 731l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(RIVERBEND, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 732l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(GLAMORGAN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 733l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(LAKEVIEW, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 734l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(OGDEN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 735l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(OGDEN_NORTH, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 737l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HARVEST_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 738l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(PANORAMA_HLS_NORTH, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 739l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(PANORAMA_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 740l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 741l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLECREST, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 742l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLE_RIDGE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 743l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WHITEHORN_STATION, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 744l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(COVENTRY_SOUTH, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 745l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(VISTA_HTS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 746l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(COVENTRY_HLS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 747l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 751l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 752l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MARTINDALE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 753l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(EVANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 754l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 755l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CASTLERIDGE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 756l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MARTINDALE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 757l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CORAL_SPGS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 758l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 759l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(FALCONRIDGE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 760l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BONAVISTA_WEST, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 761l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(AUBURN_BAY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 762l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BONAVISTA, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 763l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WOODBINE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 764l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOMERSET_BRIDLEWOOD_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 765l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SOMERSET_BRIDLEWOOD_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 766l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(EVERGREEN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 770l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WESTERN_CANADA, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(OGDEN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 771l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHINOOK_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 773l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(R_THIRSK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 774l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(R_THIRSK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 775l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CITADEL, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(R_THIRSK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 776l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(PALLISER_OAKRIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 778l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WOODLANDS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 779l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(WOODBINE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 780l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(OAKRIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 791l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(QUEEN_ELIZABETH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 792l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(QUEEN_ELIZABETH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 795l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(VINCENT_MASSEY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(STRATHCONA, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 796l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(EDGEMONT, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(TOM_BAINES, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 798l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ANNIE_GALE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 799l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(CORAL_SPGS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ANNIE_GALE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 801l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 802l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HAWKWOOD, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 804l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BREBEUF, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 805l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BREBEUF, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 807l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 811l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(TUSCANY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 812l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITADEL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 813l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ARBOUR_LK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 814l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 815l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ARBOUR_LK, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 816l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CITADEL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 817l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 818l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 819l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 821l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MTN_PARK, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BISHOP_O_BYRNE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 822l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(MC_KENZIE_LK_WAY, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(BISHOP_O_BYRNE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 830l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(M_D_HOUET, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 831l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_NORTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 832l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_SOUTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 834l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(M_D_HOUET, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 835l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ANDERSON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 837l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_SOUTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 838l) {
if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_NORTH, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 841l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NOTRE_DAME, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 842l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NOTRE_DAME, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 851l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(LYNNWOOD, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ST_AUGUSTINE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 853l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(RIVERBEND, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ST_AUGUSTINE, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 857l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_STEPHEN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(EVERGREEN, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 860l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(B_GRANDIN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 861l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(B_GRANDIN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(AUBURN_BAY, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 878l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(F_WHELIHAN, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(CHAPARRAL, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 880l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_MATTHEW, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(HERITAGE_STATION, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 883l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(EVANSTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 884l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(KINCORA, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 888l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(NORTH_POINTE, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(ST_MARGARET, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 889l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(BEDDINGTON, gTrip.getDirectionId());
return;
}
} else if (mRoute.id == 892l) {
if (gTrip.getDirectionId() == 0) {
mTrip.setHeadsignString(ST_ISABELLA, gTrip.getDirectionId());
return;
} else if (gTrip.getDirectionId() == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.getDirectionId());
return;
}
}
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), gTrip.getDirectionId());
}
@Override
public String cleanTripHeadsign(String tripHeadsign) {
	// Normalize casing first, then run the shared GTFS cleaning passes and label cleanup.
	String cleaned = tripHeadsign.toLowerCase(Locale.ENGLISH);
	cleaned = CleanUtils.cleanStreetTypes(cleaned);
	cleaned = CleanUtils.cleanNumbers(cleaned);
	return CleanUtils.cleanLabel(cleaned);
}
// Trailing compass-bound token ("SB"/"EB"/"WB"/"NB") at the end of a stop name.
// Fixed: the original character class "[s|e|w|n]" also matched a literal '|' — '|' is NOT alternation inside a character class.
// NOTE(review): this pattern requires one whitespace char after the token before end-of-string ("[\\s]$") — confirm that is intended.
private static final Pattern ENDS_WITH_BOUND = Pattern.compile("([\\s]*[senw]b[\\s]$)", Pattern.CASE_INSENSITIVE);
// Leading compass-bound token at the start of a stop name (same character-class fix as above).
private static final Pattern STARTS_WITH_BOUND = Pattern.compile("(^[\\s]*[senw]b[\\s]*)", Pattern.CASE_INSENSITIVE);
// Leading "/" (with optional surrounding spaces) left over after other removals.
private static final Pattern STARTS_WITH_SLASH = Pattern.compile("(^[\\s]*/[\\s]*)", Pattern.CASE_INSENSITIVE);
// Template matching a token only when delimited (start/end of string or a non-letter neighbor on each side).
private static final String REGEX_START_END = "((^|[^A-Z]){1}(%s)([^a-zA-Z]|$){1})";
// Keeps the captured delimiters ($2/$4) and substitutes the expanded word in the middle.
private static final String REGEX_START_END_REPLACEMENT = "$2 %s $4";
// "@" used as a separator between two street names.
private static final Pattern AT_SIGN = Pattern.compile("([\\s]*@[\\s]*)", Pattern.CASE_INSENSITIVE);
private static final String AT_SIGN_REPLACEMENT = " / ";
// Abbreviated street-type / place-name tokens expanded to full words (applied in order by cleanStopName()).
private static final Pattern AV = Pattern.compile(String.format(REGEX_START_END, "AV|AVE"));
private static final String AV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Avenue");
private static final Pattern PA = Pattern.compile(String.format(REGEX_START_END, "PA"));
private static final String PA_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Park");
private static final Pattern HT = Pattern.compile(String.format(REGEX_START_END, "HT"));
private static final String HT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Heights");
private static final Pattern GV = Pattern.compile(String.format(REGEX_START_END, "GV"));
private static final String GV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Grove");
private static final Pattern PT = Pattern.compile(String.format(REGEX_START_END, "PT"));
private static final String PT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Point");
private static final Pattern TC = Pattern.compile(String.format(REGEX_START_END, "TC"));
private static final String TC_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Terrace");
private static final Pattern RI = Pattern.compile(String.format(REGEX_START_END, "RI"));
private static final String RI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Rise");
private static final Pattern MR = Pattern.compile(String.format(REGEX_START_END, "MR"));
private static final String MR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Manor");
private static final Pattern DR = Pattern.compile(String.format(REGEX_START_END, "DR"));
private static final String DR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Drive");
private static final Pattern ST = Pattern.compile(String.format(REGEX_START_END, "ST"));
private static final String ST_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Street");
private static final Pattern VI = Pattern.compile(String.format(REGEX_START_END, "VI"));
private static final String VI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Villas");
private static final Pattern PZ = Pattern.compile(String.format(REGEX_START_END, "PZ"));
private static final String PZ_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Plaza");
private static final Pattern WY = Pattern.compile(String.format(REGEX_START_END, "WY"));
private static final String WY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Way");
private static final Pattern GR = Pattern.compile(String.format(REGEX_START_END, "GR"));
private static final String GR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Green");
private static final Pattern BV = Pattern.compile(String.format(REGEX_START_END, "BV"));
private static final String BV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Boulevard");
private static final Pattern GA = Pattern.compile(String.format(REGEX_START_END, "GA"));
private static final String GA_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gate");
private static final Pattern RD = Pattern.compile(String.format(REGEX_START_END, "RD"));
private static final String RD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Road");
private static final Pattern LI = Pattern.compile(String.format(REGEX_START_END, "LI|LINK"));
private static final String LI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Link");
private static final Pattern PL = Pattern.compile(String.format(REGEX_START_END, "PL"));
private static final String PL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Place");
private static final Pattern SQ = Pattern.compile(String.format(REGEX_START_END, "SQ"));
private static final String SQ_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Square");
private static final Pattern CL = Pattern.compile(String.format(REGEX_START_END, "CL"));
private static final String CL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Close");
private static final Pattern CR = Pattern.compile(String.format(REGEX_START_END, "CR"));
private static final String CR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Crescent");
private static final Pattern GD = Pattern.compile(String.format(REGEX_START_END, "GD"));
private static final String GD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gardens");
private static final Pattern LN = Pattern.compile(String.format(REGEX_START_END, "LN"));
private static final String LN_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Lane");
private static final Pattern CO = Pattern.compile(String.format(REGEX_START_END, "CO"));
private static final String CO_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Ct");
private static final Pattern CI = Pattern.compile(String.format(REGEX_START_END, "CI"));
private static final String CI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Circle");
private static final Pattern HE = Pattern.compile(String.format(REGEX_START_END, "HE"));
private static final String HE_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Heath");
private static final Pattern ME = Pattern.compile(String.format(REGEX_START_END, "ME"));
private static final String ME_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Mews");
private static final Pattern TR = Pattern.compile(String.format(REGEX_START_END, "TR"));
private static final String TR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Trail");
private static final Pattern LD = Pattern.compile(String.format(REGEX_START_END, "LD"));
private static final String LD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Landing");
private static final Pattern HL = Pattern.compile(String.format(REGEX_START_END, "HL"));
private static final String HL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Hill");
private static final Pattern PK = Pattern.compile(String.format(REGEX_START_END, "PK"));
private static final String PK_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Park");
private static final Pattern CM = Pattern.compile(String.format(REGEX_START_END, "CM"));
private static final String CM_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Common");
private static final Pattern GT = Pattern.compile(String.format(REGEX_START_END, "GT"));
private static final String GT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gate");
private static final Pattern CV = Pattern.compile(String.format(REGEX_START_END, "CV"));
private static final String CV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Cove");
private static final Pattern VW = Pattern.compile(String.format(REGEX_START_END, "VW"));
private static final String VW_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "View");
private static final Pattern BY = Pattern.compile(String.format(REGEX_START_END, "BY|BA|BAY"));
private static final String BY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Bay");
private static final Pattern CE = Pattern.compile(String.format(REGEX_START_END, "CE"));
private static final String CE_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Center");
private static final Pattern CTR = Pattern.compile(String.format(REGEX_START_END, "CTR"));
private static final String CTR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Center");
// Long place names shortened for display.
private static final Pattern MOUNT_ROYAL_UNIVERSITY = Pattern.compile(String.format(REGEX_START_END, "Mount Royal University"));
private static final String MOUNT_ROYAL_UNIVERSITY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "MRU");
private static final Pattern MOUNT = Pattern.compile(String.format(REGEX_START_END, "Mount"));
private static final String MOUNT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Mt");
@Override
public String cleanStopName(String gStopName) {
	String name = gStopName;
	// Drop leading/trailing direction-bound tokens, then turn "@" into a street separator.
	name = STARTS_WITH_BOUND.matcher(name).replaceAll(StringUtils.EMPTY);
	name = ENDS_WITH_BOUND.matcher(name).replaceAll(StringUtils.EMPTY);
	name = AT_SIGN.matcher(name).replaceAll(AT_SIGN_REPLACEMENT);
	// Expand abbreviated street-type / place-name tokens.
	// The two tables are parallel and applied in the same order as the original sequential calls.
	final Pattern[] patterns = new Pattern[] { //
			AV, PA, HT, GV, PT, TC, RI, MR, DR, ST, //
			VI, PZ, WY, GR, BV, GA, RD, LI, PL, SQ, //
			CL, CR, GD, LN, CO, ME, TR, CI, HE, LD, //
			HL, PK, CM, GT, CV, VW, BY, CE, CTR, //
			MOUNT_ROYAL_UNIVERSITY, MOUNT };
	final String[] replacements = new String[] { //
			AV_REPLACEMENT, PA_REPLACEMENT, HT_REPLACEMENT, GV_REPLACEMENT, PT_REPLACEMENT, //
			TC_REPLACEMENT, RI_REPLACEMENT, MR_REPLACEMENT, DR_REPLACEMENT, ST_REPLACEMENT, //
			VI_REPLACEMENT, PZ_REPLACEMENT, WY_REPLACEMENT, GR_REPLACEMENT, BV_REPLACEMENT, //
			GA_REPLACEMENT, RD_REPLACEMENT, LI_REPLACEMENT, PL_REPLACEMENT, SQ_REPLACEMENT, //
			CL_REPLACEMENT, CR_REPLACEMENT, GD_REPLACEMENT, LN_REPLACEMENT, CO_REPLACEMENT, //
			ME_REPLACEMENT, TR_REPLACEMENT, CI_REPLACEMENT, HE_REPLACEMENT, LD_REPLACEMENT, //
			HL_REPLACEMENT, PK_REPLACEMENT, CM_REPLACEMENT, GT_REPLACEMENT, CV_REPLACEMENT, //
			VW_REPLACEMENT, BY_REPLACEMENT, CE_REPLACEMENT, CTR_REPLACEMENT, //
			MOUNT_ROYAL_UNIVERSITY_REPLACEMENT, MOUNT_REPLACEMENT };
	for (int i = 0; i < patterns.length; i++) {
		name = patterns[i].matcher(name).replaceAll(replacements[i]);
	}
	// Shared GTFS cleanups, then strip any leading "/" left over and finish with label cleanup.
	name = CleanUtils.cleanStreetTypes(name);
	name = CleanUtils.cleanNumbers(name);
	name = STARTS_WITH_SLASH.matcher(name).replaceAll(StringUtils.EMPTY);
	return CleanUtils.cleanLabel(name);
}
}
| src/org/mtransit/parser/ca_calgary_transit_bus/CalgaryTransitBusAgencyTools.java | package org.mtransit.parser.ca_calgary_transit_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://www.calgarytransit.com/developer-resources
// https://data.calgary.ca/OpenData/Pages/DatasetDetails.aspx?DatasetID=PDC0-99999-99999-00501-P(CITYonlineDefault)
// https://data.calgary.ca/_layouts/OpenData/DownloadDataset.ashx?Format=FILE&DatasetId=PDC0-99999-99999-00501-P(CITYonlineDefault)&VariantId=5(CITYonlineDefault)
public class CalgaryTransitBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
	if (args == null || args.length == 0) {
		// Default arguments for local runs: input GTFS zip, output directory, files prefix.
		args = new String[] { //
				"input/gtfs.zip", //
				"../../mtransitapps/ca-calgary-transit-bus-android/res/raw/", //
				"" };
	}
	new CalgaryTransitBusAgencyTools().start(args);
}
// Service IDs kept after filtering useless calendar entries; null until start() populates it.
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
	System.out.printf("\nGenerating Calgary Transit bus data...");
	final long startTimeMs = System.currentTimeMillis();
	// Collect the useful service IDs before delegating to the default generation pipeline.
	this.serviceIds = extractUsefulServiceIds(args, this);
	super.start(args);
	final long elapsedMs = System.currentTimeMillis() - startTimeMs;
	System.out.printf("\nGenerating Calgary Transit bus data... DONE in %s.\n", Utils.getPrettyDuration(elapsedMs));
}
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
	// Without a service-ID whitelist, fall back to the default behavior.
	if (this.serviceIds == null) {
		return super.excludeCalendar(gCalendar);
	}
	return excludeUselessCalendar(gCalendar, this.serviceIds);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
	// Without a service-ID whitelist, fall back to the default behavior.
	if (this.serviceIds == null) {
		return super.excludeCalendarDate(gCalendarDates);
	}
	return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
	// Without a service-ID whitelist, fall back to the default behavior.
	if (this.serviceIds == null) {
		return super.excludeTrip(gTrip);
	}
	return excludeUselessTrip(gTrip, this.serviceIds);
}
@Override
public Integer getAgencyRouteType() {
	// This parser only produces bus routes.
	return MAgency.ROUTE_TYPE_BUS;
}
// Route short name used by the "floater" shuttle route (the only non-numeric RSN handled).
private static final String RSN_FLOATER = "FLT";
// Synthetic numeric route ID for the floater route.
// Fixed: uppercase 'L' literal suffix (lowercase 'l' is easily confused with the digit '1').
private static final long RID_FLOATER = 10001L;
@Override
public long getRouteId(GRoute gRoute) {
	final String rsn = gRoute.route_short_name;
	// Only the "FLT" floater route has a non-numeric short name; map it to a synthetic ID.
	if (!Utils.isDigitsOnly(rsn) && RSN_FLOATER.equals(rsn)) {
		return RID_FLOATER;
	}
	return Long.parseLong(rsn); // using route short name as route ID
}
// Strips the trailing '.' from abbreviated "ave." / "st." / "mt." tokens, keeping the captured surrounding spaces.
private static final Pattern CLEAN_STREET_POINT = Pattern.compile("((\\s)*(ave|st|mt)\\.(\\s)*)", Pattern.CASE_INSENSITIVE);
// NOTE(review): name says "AVE" but this replacement is used for all tokens matched above.
private static final String CLEAN_AVE_POINT_REPLACEMENT = "$2$3$4";
@Override
public String getRouteLongName(GRoute gRoute) {
	// Normalize slashes and abbreviated street tokens before the shared cleanups.
	String longName = gRoute.route_long_name;
	longName = CleanUtils.CLEAN_SLASHES.matcher(longName).replaceAll(CleanUtils.CLEAN_SLASHES_REPLACEMENT);
	longName = CLEAN_STREET_POINT.matcher(longName).replaceAll(CLEAN_AVE_POINT_REPLACEMENT);
	longName = CleanUtils.cleanStreetTypes(longName);
	return CleanUtils.cleanLabel(longName);
}
// Agency branding color (hex RGB, no leading '#').
private static final String AGENCY_COLOR_RED = "B83A3F"; // LIGHT RED (from web site CSS)
private static final String AGENCY_COLOR = AGENCY_COLOR_RED;
@Override
public String getAgencyColor() {
	// Default color used when a route has no specific color.
	return AGENCY_COLOR;
}
// Route-category colors (hex RGB) taken from the official system map PDF; selected per-route in getRouteColor().
private static final String COLOR_BUS_ROUTES = "004B85"; // BLUE (from PDF map)
private static final String COLOR_BUS_ROUTES_EXPRESS = "00BBE5"; // LIGHT BLUE (from PDF map)
private static final String COLOR_BUS_ROUTES_BRT = "ED1C2E"; // RED (from PDF map)
private static final String COLOR_BUS_ROUTES_SCHOOL = "E4A024"; // YELLOW (from PDF map)
@Override
public String getRouteColor(GRoute gRoute) {
if (!Utils.isDigitsOnly(gRoute.route_short_name)) {
if (RSN_FLOATER.equals(gRoute.route_short_name)) {
return null;
}
}
int rsn = Integer.parseInt(gRoute.route_short_name);
switch (rsn) {
// @formatter:off
case 1: return null;
case 2: return null;
case 3: return null;
case 4: return null;
case 5: return null;
case 6: return null;
case 7: return null;
case 8: return null;
case 9: return null;
case 10: return null;
case 11: return null;
case 12: return null;
case 13: return null;
case 14: return null;
case 15: return null;
case 16: return null;
case 17: return null;
case 18: return null;
case 19: return null;
case 20: return null;
case 21: return null;
case 22: return COLOR_BUS_ROUTES_EXPRESS;
case 23: return COLOR_BUS_ROUTES_EXPRESS;
case 24: return null;
case 25: return null;
case 26: return null;
case 27: return COLOR_BUS_ROUTES;
case 28: return null;
case 29: return null;
case 30: return COLOR_BUS_ROUTES;
case 32: return null;
case 33: return COLOR_BUS_ROUTES;
case 34: return COLOR_BUS_ROUTES;
case 35: return COLOR_BUS_ROUTES;
case 36: return null;
case 37: return null;
case 38: return null;
case 39: return COLOR_BUS_ROUTES;
case 40: return null;
case 41: return null;
case 42: return null;
case 43: return null;
case 44: return COLOR_BUS_ROUTES;
case 45: return null;
case 46: return null;
case 47: return COLOR_BUS_ROUTES;
case 48: return null;
case 49: return null;
case 50: return null;
case 51: return null;
case 52: return null;
case 54: return null;
case 55: return null;
case 56: return null;
case 57: return null;
case 60: return null;
case 61: return COLOR_BUS_ROUTES;
case 62: return COLOR_BUS_ROUTES_EXPRESS;
case 63: return COLOR_BUS_ROUTES_EXPRESS;
case 64: return COLOR_BUS_ROUTES_EXPRESS;
case 66: return COLOR_BUS_ROUTES_EXPRESS;
case 69: return COLOR_BUS_ROUTES;
case 70: return COLOR_BUS_ROUTES_EXPRESS;
case 71: return COLOR_BUS_ROUTES;
case 72: return null;
case 73: return null;
case 74: return null;
case 75: return COLOR_BUS_ROUTES_EXPRESS;
case 76: return null;
case 77: return COLOR_BUS_ROUTES;
case 78: return null;
case 79: return null;
case 80: return null;
case 81: return COLOR_BUS_ROUTES;
case 83: return null;
case 84: return COLOR_BUS_ROUTES;
case 85: return null;
case 86: return null;
case 88: return null;
case 89: return COLOR_BUS_ROUTES;
case 91: return COLOR_BUS_ROUTES;
case 92: return null;
case 93: return COLOR_BUS_ROUTES;
case 94: return COLOR_BUS_ROUTES;
case 95: return COLOR_BUS_ROUTES;
case 96: return null;
case 98: return COLOR_BUS_ROUTES;
case 100: return null;
case 102: return COLOR_BUS_ROUTES_EXPRESS;
case 103: return COLOR_BUS_ROUTES_EXPRESS;
case 107: return COLOR_BUS_ROUTES;
case 109: return COLOR_BUS_ROUTES_EXPRESS;
case 110: return COLOR_BUS_ROUTES_EXPRESS;
case 112: return null;
case 114: return null;
case 116: return COLOR_BUS_ROUTES_EXPRESS;
case 117: return COLOR_BUS_ROUTES_EXPRESS;
case 118: return null;
case 122: return COLOR_BUS_ROUTES;
case 125: return COLOR_BUS_ROUTES_EXPRESS;
case 126: return COLOR_BUS_ROUTES_EXPRESS;
case 127: return null;
case 133: return COLOR_BUS_ROUTES_EXPRESS;
case 136: return COLOR_BUS_ROUTES;
case 137: return null;
case 142: return COLOR_BUS_ROUTES_EXPRESS;
case 143: return null;
case 145: return COLOR_BUS_ROUTES;
case 146: return COLOR_BUS_ROUTES;
case 151: return COLOR_BUS_ROUTES_EXPRESS;
case 152: return COLOR_BUS_ROUTES;
case 153: return COLOR_BUS_ROUTES;
case 154: return null;
case 157: return null;
case 158: return null;
case 159: return COLOR_BUS_ROUTES;
case 169: return null;
case 174: return null;
case 176: return COLOR_BUS_ROUTES_EXPRESS;
case 178: return COLOR_BUS_ROUTES;
case 181: return COLOR_BUS_ROUTES_EXPRESS;
case 182: return COLOR_BUS_ROUTES_EXPRESS;
case 199: return null;
case 299: return null;
case 300: return COLOR_BUS_ROUTES_BRT;
case 301: return COLOR_BUS_ROUTES_BRT;
case 302: return COLOR_BUS_ROUTES_BRT;
case 304: return COLOR_BUS_ROUTES_BRT;
case 305: return COLOR_BUS_ROUTES_BRT;
case 306: return COLOR_BUS_ROUTES_BRT;
case 308: return COLOR_BUS_ROUTES_BRT;
case 402: return COLOR_BUS_ROUTES;
case 404: return COLOR_BUS_ROUTES;
case 405: return COLOR_BUS_ROUTES;
case 406: return COLOR_BUS_ROUTES;
case 407: return COLOR_BUS_ROUTES;
case 408: return null;
case 409: return COLOR_BUS_ROUTES;
case 410: return COLOR_BUS_ROUTES;
case 411: return COLOR_BUS_ROUTES;
case 412: return COLOR_BUS_ROUTES;
case 414: return COLOR_BUS_ROUTES;
case 419: return COLOR_BUS_ROUTES;
case 420: return COLOR_BUS_ROUTES;
case 421: return COLOR_BUS_ROUTES;
case 425: return COLOR_BUS_ROUTES;
case 429: return COLOR_BUS_ROUTES;
case 430: return COLOR_BUS_ROUTES;
case 439: return COLOR_BUS_ROUTES;
case 440: return COLOR_BUS_ROUTES;
case 444: return COLOR_BUS_ROUTES;
case 445: return COLOR_BUS_ROUTES;
case 452: return COLOR_BUS_ROUTES;
case 453: return COLOR_BUS_ROUTES;
case 454: return COLOR_BUS_ROUTES;
case 456: return COLOR_BUS_ROUTES;
case 468: return COLOR_BUS_ROUTES;
case 502: return null;
case 506: return COLOR_BUS_ROUTES;
case 555: return null;
case 697: return COLOR_BUS_ROUTES_SCHOOL;
case 698: return COLOR_BUS_ROUTES_SCHOOL;
case 699: return COLOR_BUS_ROUTES_SCHOOL;
case 703: return COLOR_BUS_ROUTES_SCHOOL;
case 704: return COLOR_BUS_ROUTES_SCHOOL;
case 705: return COLOR_BUS_ROUTES_SCHOOL;
case 706: return COLOR_BUS_ROUTES_SCHOOL;
case 710: return COLOR_BUS_ROUTES_SCHOOL;
case 711: return COLOR_BUS_ROUTES_SCHOOL;
case 712: return COLOR_BUS_ROUTES_SCHOOL;
case 713: return COLOR_BUS_ROUTES_SCHOOL;
case 714: return COLOR_BUS_ROUTES_SCHOOL;
case 715: return COLOR_BUS_ROUTES_SCHOOL;
case 716: return COLOR_BUS_ROUTES_SCHOOL;
case 717: return COLOR_BUS_ROUTES_SCHOOL;
case 718: return COLOR_BUS_ROUTES_SCHOOL;
case 719: return COLOR_BUS_ROUTES_SCHOOL;
case 721: return COLOR_BUS_ROUTES_SCHOOL;
case 724: return COLOR_BUS_ROUTES_SCHOOL;
case 725: return COLOR_BUS_ROUTES_SCHOOL;
case 731: return COLOR_BUS_ROUTES_SCHOOL;
case 732: return COLOR_BUS_ROUTES_SCHOOL;
case 733: return COLOR_BUS_ROUTES_SCHOOL;
case 734: return COLOR_BUS_ROUTES_SCHOOL;
case 735: return COLOR_BUS_ROUTES_SCHOOL;
case 737: return COLOR_BUS_ROUTES_SCHOOL;
case 738: return COLOR_BUS_ROUTES_SCHOOL;
case 739: return COLOR_BUS_ROUTES_SCHOOL;
case 740: return COLOR_BUS_ROUTES_SCHOOL;
case 741: return COLOR_BUS_ROUTES_SCHOOL;
case 742: return COLOR_BUS_ROUTES_SCHOOL;
case 743: return COLOR_BUS_ROUTES_SCHOOL;
case 744: return COLOR_BUS_ROUTES_SCHOOL;
case 745: return COLOR_BUS_ROUTES_SCHOOL;
case 746: return COLOR_BUS_ROUTES_SCHOOL;
case 747: return COLOR_BUS_ROUTES_SCHOOL;
case 751: return COLOR_BUS_ROUTES_SCHOOL;
case 752: return COLOR_BUS_ROUTES_SCHOOL;
case 753: return COLOR_BUS_ROUTES_SCHOOL;
case 754: return COLOR_BUS_ROUTES_SCHOOL;
case 755: return COLOR_BUS_ROUTES_SCHOOL;
case 756: return COLOR_BUS_ROUTES_SCHOOL;
case 757: return COLOR_BUS_ROUTES_SCHOOL;
case 758: return COLOR_BUS_ROUTES_SCHOOL;
case 759: return COLOR_BUS_ROUTES_SCHOOL;
case 760: return COLOR_BUS_ROUTES_SCHOOL;
case 761: return COLOR_BUS_ROUTES_SCHOOL;
case 762: return COLOR_BUS_ROUTES_SCHOOL;
case 763: return COLOR_BUS_ROUTES_SCHOOL;
case 764: return COLOR_BUS_ROUTES_SCHOOL;
case 765: return COLOR_BUS_ROUTES_SCHOOL;
case 766: return COLOR_BUS_ROUTES_SCHOOL;
case 770: return COLOR_BUS_ROUTES_SCHOOL;
case 771: return COLOR_BUS_ROUTES_SCHOOL;
case 773: return COLOR_BUS_ROUTES_SCHOOL;
case 774: return COLOR_BUS_ROUTES_SCHOOL;
case 775: return COLOR_BUS_ROUTES_SCHOOL;
case 776: return COLOR_BUS_ROUTES_SCHOOL;
case 778: return COLOR_BUS_ROUTES_SCHOOL;
case 779: return COLOR_BUS_ROUTES_SCHOOL;
case 780: return COLOR_BUS_ROUTES_SCHOOL;
case 791: return COLOR_BUS_ROUTES_SCHOOL;
case 792: return COLOR_BUS_ROUTES_SCHOOL;
case 795: return COLOR_BUS_ROUTES_SCHOOL;
case 796: return COLOR_BUS_ROUTES_SCHOOL;
case 797: return COLOR_BUS_ROUTES_SCHOOL;
case 798: return COLOR_BUS_ROUTES_SCHOOL;
case 799: return COLOR_BUS_ROUTES_SCHOOL;
case 801: return COLOR_BUS_ROUTES_SCHOOL;
case 802: return COLOR_BUS_ROUTES_SCHOOL;
case 804: return COLOR_BUS_ROUTES_SCHOOL;
case 805: return COLOR_BUS_ROUTES_SCHOOL;
case 807: return COLOR_BUS_ROUTES_SCHOOL;
case 811: return COLOR_BUS_ROUTES_SCHOOL;
case 812: return COLOR_BUS_ROUTES_SCHOOL;
case 813: return COLOR_BUS_ROUTES_SCHOOL;
case 814: return COLOR_BUS_ROUTES_SCHOOL;
case 815: return COLOR_BUS_ROUTES_SCHOOL;
case 816: return COLOR_BUS_ROUTES_SCHOOL;
case 817: return COLOR_BUS_ROUTES_SCHOOL;
case 818: return COLOR_BUS_ROUTES_SCHOOL;
case 819: return COLOR_BUS_ROUTES_SCHOOL;
case 821: return COLOR_BUS_ROUTES_SCHOOL;
case 822: return COLOR_BUS_ROUTES_SCHOOL;
case 830: return COLOR_BUS_ROUTES_SCHOOL;
case 831: return COLOR_BUS_ROUTES_SCHOOL;
case 832: return COLOR_BUS_ROUTES_SCHOOL;
case 834: return COLOR_BUS_ROUTES_SCHOOL;
case 835: return COLOR_BUS_ROUTES_SCHOOL;
case 837: return COLOR_BUS_ROUTES_SCHOOL;
case 838: return COLOR_BUS_ROUTES_SCHOOL;
case 841: return COLOR_BUS_ROUTES_SCHOOL;
case 842: return COLOR_BUS_ROUTES_SCHOOL;
case 851: return COLOR_BUS_ROUTES_SCHOOL;
case 853: return COLOR_BUS_ROUTES_SCHOOL;
case 857: return COLOR_BUS_ROUTES_SCHOOL;
case 860: return COLOR_BUS_ROUTES_SCHOOL;
case 861: return COLOR_BUS_ROUTES_SCHOOL;
case 878: return COLOR_BUS_ROUTES_SCHOOL;
case 880: return COLOR_BUS_ROUTES_SCHOOL;
case 883: return COLOR_BUS_ROUTES_SCHOOL;
case 884: return COLOR_BUS_ROUTES_SCHOOL;
case 888: return COLOR_BUS_ROUTES_SCHOOL;
case 889: return COLOR_BUS_ROUTES_SCHOOL;
case 892: return COLOR_BUS_ROUTES_SCHOOL;
// @formatter:on
default:
System.out.println("Unexpected route color " + gRoute);
System.exit(-1);
return null;
}
}
// Canonical head-sign strings used when normalizing GTFS trip head-signs below.
// Abbreviations follow the project's convention (Sta, Ctr, Hls, Vlg, Rdg, ...).
private static final String _69_ST_STATION = "69 St Sta";
private static final String ACADIA = "Acadia";
private static final String OAKRIDGE = "Oakridge";
private static final String ACADIA_OAKRIDGE = ACADIA + " / " + OAKRIDGE;
private static final String AIRPORT = "Airport";
private static final String ANDERSON = "Anderson";
private static final String ANDERSON_STATION = ANDERSON; // "Anderson Sta";
private static final String ANNIE_GALE = "Annie Gale";
private static final String APPLEWOOD = "Applewood";
private static final String ARBOUR_LK = "Arbour Lk";
private static final String AUBURN_BAY = "Auburn Bay";
private static final String B_GRANDIN = "B Grandin";
private static final String BARLOW_STATION = "Barlow Sta";
private static final String BEAVERBROOK = "Beaverbrook";
private static final String BEDDINGTON = "Beddington";
private static final String BISHOP_O_BYRNE = "B O'Byrne";
private static final String BONAVISTA = "Bonavista";
private static final String BONAVISTA_WEST = "W " + BONAVISTA;
private static final String BOWNESS = "Bowness";
private static final String BREBEUF = "Brebeuf";
private static final String BRENTWOOD = "Brentwood";
private static final String BRENTWOOD_STATION = BRENTWOOD; // "Brentwood Sta";
private static final String BRIDGELAND = "Bridgeland";
private static final String CASTLERIDGE = "Castleridge";
private static final String CENTRAL_MEMORIAL = "Central Memorial";
private static final String CHAPARRAL = "Chaparral";
private static final String CHATEAU_ESTS = "Chateau Ests";
private static final String CHINOOK = "Chinook";
private static final String CHINOOK_STATION = CHINOOK; // "Chinook Sta";
private static final String CHURCHILL = "Churchill";
private static final String CIRCLE_ROUTE = "Circle Route";
private static final String CITADEL = "Citadel";
private static final String CITY_CTR = "City Ctr";
private static final String COACH_HL = "Coach Hl";
private static final String COPPERFIELD = "Copperfield";
private static final String CORAL_SPGS = "Coral Spgs";
private static final String COUGAR_RDG = "Cougar Rdg";
private static final String COUNTRY_HLS = "Country Hls";
private static final String COUNTRY_VLG = "Country Vlg";
private static final String COVENTRY = "Coventry";
private static final String COVENTRY_HLS = COVENTRY + " Hls";
private static final String COVENTRY_SOUTH = "S" + COVENTRY; // NOTE(review): no space after "S" (yields "SCoventry"), unlike "N " / "S " prefixes elsewhere — confirm intended
private static final String CRANSTON = "Cranston";
private static final String CRESCENT_HTS = "Crescent Hts";
private static final String DALHOUSIE = "Dalhousie";
private static final String DEER_RUN = "Deer Run";
private static final String DEERFOOT_CTR = "Deerfoot Ctr";
private static final String DIEFENBAKER = "Diefenbaker";
private static final String DISCOVERY_RIDGE = "Discovery Rdg";
private static final String DOUGLASDALE = "Douglasdale";
private static final String DOUGLAS_GLEN = "Douglas Glen";
private static final String DOWNTOWN = "Downtown";
private static final String EDGEBROOK_RISE = "Edgebrook Rise";
private static final String EDGEMONT = "Edgemont";
private static final String ELBOW_DR = "Elbow Dr";
// Both "Erin Woods" and "Erinwoods" exist — presumably matching distinct spellings in the feed; verify against GTFS data.
private static final String ERIN_WOODS = "Erin Woods";
private static final String ERINWOODS = "Erinwoods";
private static final String EVANSTON = "Evanston";
private static final String EVERGREEN = "Evergreen";
private static final String SOMERSET = "Somerset";
private static final String EVERGREEN_SOMERSET = EVERGREEN + " / " + SOMERSET;
private static final String F_WHELIHAN = "F Whelihan";
private static final String FALCONRIDGE = "Falconridge";
private static final String FOOTHILLS = "Foothills";
private static final String FOOTHILLS_IND = "Foothills Ind";
private static final String FOREST_HTS = "Forest Hts";
private static final String FOREST_LAWN = "Forest Lawn";
private static final String FOWLER = "Fowler";
private static final String FRANKLIN = "Franklin";
private static final String GLAMORGAN = "Glamorgan";
private static final String GREENWOOD = "Greenwood";
private static final String HAMPTONS = "Hamptons";
private static final String HARVEST_HLS = "Harvest Hls";
private static final String HAWKWOOD = "Hawkwood";
private static final String HERITAGE = "Heritage";
private static final String HERITAGE_STATION = HERITAGE; // "Heritage Sta";
private static final String HIDDEN_VLY = "Hidden Vly";
private static final String HILLHURST = "Hillhurst";
private static final String HUNTINGTON = "Huntington";
private static final String KINCORA = "Kincora";
private static final String LAKEVIEW = "Lakeview";
private static final String LIONS_PARK = "Lions Park";
private static final String LIONS_PARK_STATION = LIONS_PARK; // "Lions Park Sta";
private static final String LYNNWOOD = "Lynnwood";
private static final String M_D_HOUET = "M d'Houet";
private static final String MAC_EWAN = "MacEwan";
private static final String MARLBOROUGH = "Marlborough";
private static final String MARTINDALE = "Martindale";
private static final String MC_CALL_WAY = "McCall Way";
private static final String MC_KENZIE = "McKenzie";
private static final String MC_KENZIE_LK_WAY = MC_KENZIE + " Lk Way";
private static final String MC_KENZIE_TOWNE = MC_KENZIE + " Towne";
private static final String MC_KENZIE_TOWNE_DR = MC_KENZIE_TOWNE; // "McKenzie Towne Dr";
// Two spellings kept on purpose? "McKinght" looks like a feed-side misspelling of "McKnight" — TODO confirm against GTFS data before merging them.
private static final String MC_KINGHT_WESTWINDS = "McKinght-Westwinds";
private static final String MC_KNIGHT_WESTWINDS = "McKnight-Westwinds";
private static final String MRU = "MRU";
private static final String MRU_NORTH = MRU + " North";
private static final String MRU_SOUTH = MRU + " South";
private static final String MT_ROYAL_U = MRU; // "Mt Royal U";
private static final String MTN_PARK = "Mtn Park";
private static final String NEW_BRIGHTON = "New Brighton";
private static final String NORTH_HAVEN = "North Haven";
private static final String NORTH_POINTE = "North Pte";
private static final String NORTHLAND = "Northland";
private static final String NORTHMOUNT_DR = "Northmount Dr";
private static final String NORTHWEST_LOOP = "Northwest Loop";
private static final String NOTRE_DAME = "Notre Dame";
private static final String OAKRIDGE_ACADIA = OAKRIDGE + " / " + ACADIA;
private static final String OGDEN = "Ogden";
private static final String OGDEN_NORTH = "North " + OGDEN;
private static final String PALLISER_OAKRIDGE = "Palliser / Oakridge";
private static final String PANORAMA = "Panorama";
private static final String PANORAMA_HLS = PANORAMA + " Hls";
private static final String PANORAMA_HLS_NORTH = "N " + PANORAMA_HLS;
private static final String PARKHILL = "Parkhill";
private static final String PARKLAND = "Parkland";
private static final String PARK_GATE_HERITAGE = "Pk Gt Heritage";
private static final String PRESTWICK = "Prestwick";
private static final String QUEEN_ELIZABETH = "Queen Elizabeth";
private static final String QUEENSLAND = "Queensland";
private static final String R_THIRSK = "R Thirsk";
private static final String RAMSAY = "Ramsay";
private static final String RENFREW = "Renfrew";
private static final String RIVERBEND = "Riverbend";
private static final String ROCKY_RIDGE = "Rocky Rdg";
private static final String ROYAL_OAK = "Royal Oak";
private static final String SADDLECREST = "Saddlecrest";
private static final String SADDLE_RIDGE = "Saddle Rdg";
// Both "Saddletown" and "Saddletowne" are defined — presumably both spellings occur in the feed; verify.
private static final String SADDLETOWN = "Saddletown";
private static final String SADDLETOWNE = "Saddletowne";
private static final String SAGE_HILL_KINCORA = "Sage Hill / Kincora";
private static final String SANDSTONE = "Sandstone";
private static final String SANDSTONE_AIRPORT = "Sandstone / " + AIRPORT;
private static final String SARCEE_RD = "Sarcee Rd";
private static final String SCARLETT = "Scarlett";
private static final String SCENIC_ACRES = "Scenic Acres";
private static final String SCENIC_ACRES_SOUTH = "S " + SCENIC_ACRES;
private static final String SCENIC_ACRES_NORTH = "N " + SCENIC_ACRES;
private static final String SHAWVILLE = "Shawville";
private static final String SHERWOOD = "Sherwood";
private static final String SILVER_SPGS = "Silver Spgs";
private static final String SKYVIEW_RANCH = "Skyview Ranch";
private static final String SOMERSET_BRIDLEWOOD_STATION = SOMERSET + "-Bridlewood Sta";
private static final String SOUTH_CALGARY = "South Calgary";
private static final String SOUTH_HEALTH = "South Health";
private static final String SOUTHCENTER = "Southcentre"; // constant name uses US spelling, value the Canadian one (mall's actual name)
private static final String SOUTHLAND = "Southland";
private static final String SOUTHLAND_STATION = SOUTHLAND; // "Southland Sta";
private static final String ST_AUGUSTINE = "St Augustine";
private static final String ST_FRANCIS = "St Francis";
private static final String ST_ISABELLA = "St Isabella";
private static final String ST_MARGARET = "St Margaret";
private static final String ST_MATTHEW = "St Matthew";
private static final String ST_STEPHEN = "St Stephen";
private static final String STATION_HERITAGE = "Sta Heritage";
private static final String STRATHCONA = "Strathcona";
private static final String TARADALE = "Taradale";
private static final String TOM_BAINES = "Tom Baines";
private static final String TUSCANY = "Tuscany";
private static final String VALLEY_RIDGE = "Vly Rdg";
private static final String VARSITY_ACRES = "Varsity Acres";
private static final String VINCENT_MASSEY = "V Massey";
private static final String VISTA_HTS = "Vista Hts";
private static final String WCHS_ST_MARY_S = "WCHS / St Mary''s"; // NOTE(review): doubled apostrophe looks like an escaping artifact ("St Mary''s") — confirm against feed before changing
private static final String WESTBROOK = "Westbrook";
private static final String WESTBROOK_STATION = WESTBROOK + " Sta";
private static final String WESTERN_CANADA = "Western Canada";
private static final String WESTGATE = "Westgate";
private static final String WESTHILLS = "Westhills";
private static final String WHITEHORN = "Whitehorn";
private static final String WHITEHORN_STATION = WHITEHORN; // WHITEHORN + " Sta";
private static final String WISE_WOOD = "Wise Wood";
private static final String WOODBINE = "Woodbine";
private static final String WOODLANDS = "Woodlands";
// Hand-built trip definitions (direction, head-sign, ordered anchor stop ids) for
// routes whose GTFS direction data cannot be used directly. Keyed by route id and
// consulted by compareEarly() / splitTrip() / splitTripStop() below.
// Note: long literals use the uppercase 'L' suffix — lowercase 'l' is easily
// misread as the digit '1' (Java Language Specification §3.10.1 recommendation).
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
	HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
	// Route 56: north to Southland Sta, south to Anderson Sta.
	map2.put(56L, new RouteTripSpec(56L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHLAND_STATION, //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ANDERSON_STATION) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(new String[] { "6461", "6097" })) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(new String[] { "6097", "6562", "6461" })) //
			.compileBothTripSort());
	// Route 94: east to Strathcona, west to Westbrook Sta.
	map2.put(94L, new RouteTripSpec(94L, //
			MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
			MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTBROOK_STATION) //
			.addTripSort(MDirectionType.EAST.intValue(), //
					Arrays.asList(new String[] { "3741", "5315", "8379", "6515" })) //
			.addTripSort(MDirectionType.WEST.intValue(), //
					Arrays.asList(new String[] { "6515", "3732", "7597", "3741" })) //
			.compileBothTripSort());
	// Route 98: north to Cougar Rdg, south to 69 St Sta.
	map2.put(98L, new RouteTripSpec(98L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COUGAR_RDG, //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _69_ST_STATION) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(new String[] { "8374", "8822" })) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(new String[] { "8822", "8373" })) //
			.compileBothTripSort());
	// Route 419: north to Foothills, south to Parkhill.
	map2.put(419L, new RouteTripSpec(419L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, FOOTHILLS, //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKHILL) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(new String[] { "5574", "5299", "5227", "8339" })) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(new String[] { "8339", "5108", "5580", "5574" })) //
			.compileBothTripSort());
	// Route 439: north to 69 St Sta, south to Discovery Rdg.
	map2.put(439L, new RouteTripSpec(439L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _69_ST_STATION, //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DISCOVERY_RIDGE) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(new String[] { "9365", "3785" })) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(new String[] { "3785", "9365" })) //
			.compileBothTripSort());
	// Route 502: east to Sta Heritage, west to Pk Gt Heritage.
	map2.put(502L, new RouteTripSpec(502L, //
			MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STATION_HERITAGE, //
			MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARK_GATE_HERITAGE) //
			.addTripSort(MDirectionType.EAST.intValue(), //
					Arrays.asList(new String[] { "7592", "5192", "5762" })) //
			.addTripSort(MDirectionType.WEST.intValue(), //
					Arrays.asList(new String[] { "5762", "4577", "7592" })) //
			.compileBothTripSort());
	ALL_ROUTE_TRIPS2 = map2;
}
/**
 * Orders two trip stops, preferring the hand-built {@code RouteTripSpec}
 * for this route when one exists, otherwise deferring to the superclass.
 */
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
	// Single get() instead of containsKey() + get(): one hash lookup, and the
	// map never stores null values (see the static initializer above).
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(routeId);
	if (rts != null) {
		return rts.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
	}
	return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
/**
 * Returns the pre-compiled split trips for this route when a hand-built
 * {@code RouteTripSpec} exists, otherwise defers to the superclass.
 */
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
	// Single get() instead of containsKey() + get(): one hash lookup, and the
	// map never stores null values (see the static initializer above).
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(mRoute.id);
	if (rts != null) {
		return rts.getAllTrips();
	}
	return super.splitTrip(mRoute, gTrip, gtfs);
}
/**
 * Assigns a trip stop to the matching split trip(s), using the hand-built
 * {@code RouteTripSpec} anchor stops for this route when one exists,
 * otherwise deferring to the superclass.
 */
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
	// Single get() instead of containsKey() + get(): one hash lookup, and the
	// map never stores null values (see the static initializer above).
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(mRoute.id);
	if (rts != null) {
		return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, //
				rts.getBeforeAfterStopIds(0), //
				rts.getBeforeAfterStopIds(1), //
				rts.getBeforeAfterBothStopIds(0), //
				rts.getBeforeAfterBothStopIds(1), //
				rts.getTripId(0), //
				rts.getTripId(1), //
				rts.getAllBeforeAfterStopIds());
	}
	return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return; // split
}
if (mRoute.id == 1l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(FOREST_LAWN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 2l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 3l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ELBOW_DR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 4l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HUNTINGTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 5l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(NORTH_HAVEN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 6l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WESTBROOK_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 7l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOUTH_CALGARY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 9l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BRIDGELAND, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(VARSITY_ACRES, gTrip.direction_id);
return;
}
} else if (mRoute.id == 10l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOUTHCENTER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 13l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WESTHILLS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 15l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 17l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(RENFREW, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(RAMSAY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 18l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MT_ROYAL_U, gTrip.direction_id);
return;
}
} else if (mRoute.id == 19l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.EAST);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.WEST);
return;
}
} else if (mRoute.id == 20l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NORTHMOUNT_DR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HERITAGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 22l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 23l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOOTHILLS_IND, gTrip.direction_id);
return;
}
} else if (mRoute.id == 24l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 26l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(FRANKLIN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MARLBOROUGH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 30l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 33l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(VISTA_HTS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BARLOW_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 37l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NORTHWEST_LOOP, gTrip.direction_id);
return;
}
} else if (mRoute.id == 41l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(LYNNWOOD, gTrip.direction_id);
return;
}
} else if (mRoute.id == 49l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOREST_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 52l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(EVERGREEN_SOMERSET, gTrip.direction_id);
return;
}
} else if (mRoute.id == 55l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(FALCONRIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 57l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MC_CALL_WAY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ERINWOODS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 62l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 63l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(LAKEVIEW, gTrip.direction_id);
return;
}
} else if (mRoute.id == 64l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 66l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHINOOK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 69l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(DEERFOOT_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 70l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(VALLEY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 71l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KINGHT_WESTWINDS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 72l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CIRCLE_ROUTE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 73l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CIRCLE_ROUTE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 74l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(TUSCANY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 79l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ACADIA_OAKRIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 80l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(OAKRIDGE_ACADIA, gTrip.direction_id);
return;
}
} else if (mRoute.id == 81l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 85l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KNIGHT_WESTWINDS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 86l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 91l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(LIONS_PARK_STATION, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BRENTWOOD_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 92l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ANDERSON_STATION, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE_DR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 93l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WESTBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(COACH_HL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 94l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 98l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 100l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(AIRPORT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KNIGHT_WESTWINDS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 102l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DOUGLASDALE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 103l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 107l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOUTH_CALGARY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 109l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HARVEST_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 110l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 112l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SARCEE_RD, gTrip.direction_id);
return;
}
} else if (mRoute.id == 116l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(COVENTRY_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 117l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 125l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ERIN_WOODS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 126l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(APPLEWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 133l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 142l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(PANORAMA, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 145l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NORTHLAND, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 151l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 152l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 158l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 174l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 176l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 178l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHAPARRAL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 181l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MRU_NORTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 182l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MRU_SOUTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 300l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(AIRPORT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DOWNTOWN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 301l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(COUNTRY_VLG, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 302l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOUTH_HEALTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 305l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.EAST);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.WEST);
return;
}
} else if (mRoute.id == 306l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WESTBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HERITAGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 405l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HILLHURST, gTrip.direction_id);
return;
}
} else if (mRoute.id == 406l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MC_KENZIE_TOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SHAWVILLE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 407l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(GREENWOOD, gTrip.direction_id);
return;
}
} else if (mRoute.id == 408l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BRENTWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(VALLEY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 411l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITY_CTR, gTrip.direction_id);
return;
}
} else if (mRoute.id == 412l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WESTGATE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 425l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SAGE_HILL_KINCORA, gTrip.direction_id);
return;
}
} else if (mRoute.id == 430l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SANDSTONE_AIRPORT, gTrip.direction_id);
return;
}
} else if (mRoute.id == 439l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(DISCOVERY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 440l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CHATEAU_ESTS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FRANKLIN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 445l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SKYVIEW_RANCH, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SADDLETOWN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 697l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(EVANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 698l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WCHS_ST_MARY_S, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(_69_ST_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 699l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignDirection(MDirectionType.NORTH);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignDirection(MDirectionType.SOUTH);
return;
}
} else if (mRoute.id == 703l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 704l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(COUNTRY_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 705l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(EDGEBROOK_RISE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 706l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHURCHILL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 710l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 711l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DOUGLAS_GLEN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 712l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(PARKLAND, gTrip.direction_id);
return;
}
} else if (mRoute.id == 713l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DEER_RUN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 714l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(PRESTWICK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 715l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(QUEENSLAND, gTrip.direction_id);
return;
}
} else if (mRoute.id == 716l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(NEW_BRIGHTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 717l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(COPPERFIELD, gTrip.direction_id);
return;
}
} else if (mRoute.id == 718l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DOUGLASDALE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 719l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEAVERBROOK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 721l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 724l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TUSCANY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 725l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SILVER_SPGS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BOWNESS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 731l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(RIVERBEND, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 732l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(GLAMORGAN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 733l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(LAKEVIEW, gTrip.direction_id);
return;
}
} else if (mRoute.id == 734l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(OGDEN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 735l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(OGDEN_NORTH, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CENTRAL_MEMORIAL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 737l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HARVEST_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 738l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(PANORAMA_HLS_NORTH, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 739l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(PANORAMA_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(DIEFENBAKER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 740l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 741l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLECREST, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 742l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLE_RIDGE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 743l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WHITEHORN_STATION, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 744l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(COVENTRY_SOUTH, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 745l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(VISTA_HTS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 746l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(COVENTRY_HLS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 747l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRESCENT_HTS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 751l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 752l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MARTINDALE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 753l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(EVANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 754l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SADDLETOWNE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 755l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CASTLERIDGE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 756l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MARTINDALE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 757l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CORAL_SPGS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 758l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 759l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(FALCONRIDGE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(FOWLER, gTrip.direction_id);
return;
}
} else if (mRoute.id == 760l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BONAVISTA_WEST, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
}
} else if (mRoute.id == 761l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(AUBURN_BAY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 762l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BONAVISTA, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
}
} else if (mRoute.id == 763l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WOODBINE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 764l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOMERSET_BRIDLEWOOD_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 765l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SOMERSET_BRIDLEWOOD_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 766l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SCARLETT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(EVERGREEN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 770l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WESTERN_CANADA, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(OGDEN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 771l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHINOOK_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 773l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(R_THIRSK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 774l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(R_THIRSK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 775l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CITADEL, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(R_THIRSK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 776l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(PALLISER_OAKRIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 778l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WOODLANDS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 779l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(WOODBINE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 780l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(WISE_WOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(OAKRIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 791l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(QUEEN_ELIZABETH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 792l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(QUEEN_ELIZABETH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 795l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(VINCENT_MASSEY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(STRATHCONA, gTrip.direction_id);
return;
}
} else if (mRoute.id == 796l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(EDGEMONT, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(TOM_BAINES, gTrip.direction_id);
return;
}
} else if (mRoute.id == 798l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(TARADALE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ANNIE_GALE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 799l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(CORAL_SPGS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ANNIE_GALE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 801l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 802l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HAWKWOOD, gTrip.direction_id);
return;
}
} else if (mRoute.id == 804l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BREBEUF, gTrip.direction_id);
return;
}
} else if (mRoute.id == 805l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BREBEUF, gTrip.direction_id);
return;
}
} else if (mRoute.id == 807l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BREBEUF, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 811l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(TUSCANY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 812l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITADEL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 813l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ARBOUR_LK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 814l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROYAL_OAK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 815l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ARBOUR_LK, gTrip.direction_id);
return;
}
} else if (mRoute.id == 816l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CITADEL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 817l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ROCKY_RIDGE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 818l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(HAMPTONS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 819l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SHERWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
}
} else if (mRoute.id == 821l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MTN_PARK, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BISHOP_O_BYRNE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 822l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(MC_KENZIE_LK_WAY, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(BISHOP_O_BYRNE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 830l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(SANDSTONE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(M_D_HOUET, gTrip.direction_id);
return;
}
} else if (mRoute.id == 831l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_NORTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 832l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_FRANCIS, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_SOUTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 834l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(DALHOUSIE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(M_D_HOUET, gTrip.direction_id);
return;
}
} else if (mRoute.id == 835l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ANDERSON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 837l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_SOUTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 838l) {
if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(SCENIC_ACRES_NORTH, gTrip.direction_id);
return;
}
} else if (mRoute.id == 841l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NOTRE_DAME, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HIDDEN_VLY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 842l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NOTRE_DAME, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MAC_EWAN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 851l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(LYNNWOOD, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ST_AUGUSTINE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 853l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(RIVERBEND, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ST_AUGUSTINE, gTrip.direction_id);
return;
}
} else if (mRoute.id == 857l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_STEPHEN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(EVERGREEN, gTrip.direction_id);
return;
}
} else if (mRoute.id == 860l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(B_GRANDIN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CRANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 861l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(B_GRANDIN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(AUBURN_BAY, gTrip.direction_id);
return;
}
} else if (mRoute.id == 878l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(F_WHELIHAN, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(CHAPARRAL, gTrip.direction_id);
return;
}
} else if (mRoute.id == 880l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_MATTHEW, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(HERITAGE_STATION, gTrip.direction_id);
return;
}
} else if (mRoute.id == 883l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(EVANSTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 884l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(KINCORA, gTrip.direction_id);
return;
}
} else if (mRoute.id == 888l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(NORTH_POINTE, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(ST_MARGARET, gTrip.direction_id);
return;
}
} else if (mRoute.id == 889l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(BEDDINGTON, gTrip.direction_id);
return;
}
} else if (mRoute.id == 892l) {
if (gTrip.direction_id == 0) {
mTrip.setHeadsignString(ST_ISABELLA, gTrip.direction_id);
return;
} else if (gTrip.direction_id == 1) {
mTrip.setHeadsignString(MC_KENZIE, gTrip.direction_id);
return;
}
}
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.trip_headsign), gTrip.direction_id);
}
@Override
public String cleanTripHeadsign(String tripHeadsign) {
	// Normalize to lower case first so the shared cleanup passes see a
	// consistent input, then apply the standard GTFS label cleanups in order:
	// street types, numbers, and finally generic label formatting.
	String cleaned = tripHeadsign.toLowerCase(Locale.ENGLISH);
	cleaned = CleanUtils.cleanStreetTypes(cleaned);
	cleaned = CleanUtils.cleanNumbers(cleaned);
	return CleanUtils.cleanLabel(cleaned);
}
// Strips direction-bound suffixes/prefixes like "SB", "EB", "WB", "NB" from
// head-signs.
// FIX: the original character class "[s|e|w|n]" contains a literal '|'
// (inside [] the pipe is not alternation), so the patterns also matched the
// unintended token "|b". "[sewn]" matches exactly the four intended letters.
// NOTE(review): ENDS_WITH_BOUND requires exactly one trailing whitespace
// character before end-of-string ("[\\s]$", not "[\\s]*$") — confirm that is
// intended for the feed's head-sign strings.
private static final Pattern ENDS_WITH_BOUND = Pattern.compile("([\\s]*[sewn]b[\\s]$)", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_BOUND = Pattern.compile("(^[\\s]*[sewn]b[\\s]*)", Pattern.CASE_INSENSITIVE);
// Removes a leading "/" (with surrounding whitespace) left over after other removals.
private static final Pattern STARTS_WITH_SLASH = Pattern.compile("(^[\\s]*/[\\s]*)", Pattern.CASE_INSENSITIVE);
// Template for matching a street-type abbreviation as a standalone token:
// group 2 = preceding non-upper-case char (or start of string),
// group 3 = the abbreviation itself (filled in via String.format),
// group 4 = following non-letter char (or end of string).
// Note: compiled without CASE_INSENSITIVE below, so only the upper-case
// abbreviations as they appear in the feed are matched.
private static final String REGEX_START_END = "((^|[^A-Z]){1}(%s)([^a-zA-Z]|$){1})";
// Replacement keeps the surrounding characters (groups 2 and 4) around the expanded word.
private static final String REGEX_START_END_REPLACEMENT = "$2 %s $4";
// "A @ B" -> "A / B" (separator normalization for stop/head-sign names).
private static final Pattern AT_SIGN = Pattern.compile("([\\s]*@[\\s]*)", Pattern.CASE_INSENSITIVE);
private static final String AT_SIGN_REPLACEMENT = " / ";
// Street-type abbreviation expansions (Calgary Transit feed vocabulary).
// Each Pattern matches one abbreviation as a standalone upper-case token via
// REGEX_START_END; the paired String expands it via REGEX_START_END_REPLACEMENT.
private static final Pattern AV = Pattern.compile(String.format(REGEX_START_END, "AV|AVE"));
private static final String AV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Avenue");
private static final Pattern PA = Pattern.compile(String.format(REGEX_START_END, "PA"));
private static final String PA_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Park");
private static final Pattern HT = Pattern.compile(String.format(REGEX_START_END, "HT"));
private static final String HT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Heights");
private static final Pattern GV = Pattern.compile(String.format(REGEX_START_END, "GV"));
private static final String GV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Grove");
private static final Pattern PT = Pattern.compile(String.format(REGEX_START_END, "PT"));
private static final String PT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Point");
private static final Pattern TC = Pattern.compile(String.format(REGEX_START_END, "TC"));
private static final String TC_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Terrace");
private static final Pattern RI = Pattern.compile(String.format(REGEX_START_END, "RI"));
private static final String RI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Rise");
private static final Pattern MR = Pattern.compile(String.format(REGEX_START_END, "MR"));
private static final String MR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Manor");
private static final Pattern DR = Pattern.compile(String.format(REGEX_START_END, "DR"));
private static final String DR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Drive");
private static final Pattern ST = Pattern.compile(String.format(REGEX_START_END, "ST"));
private static final String ST_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Street");
private static final Pattern VI = Pattern.compile(String.format(REGEX_START_END, "VI"));
private static final String VI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Villas");
private static final Pattern PZ = Pattern.compile(String.format(REGEX_START_END, "PZ"));
private static final String PZ_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Plaza");
private static final Pattern WY = Pattern.compile(String.format(REGEX_START_END, "WY"));
private static final String WY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Way");
private static final Pattern GR = Pattern.compile(String.format(REGEX_START_END, "GR"));
private static final String GR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Green");
private static final Pattern BV = Pattern.compile(String.format(REGEX_START_END, "BV"));
private static final String BV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Boulevard");
private static final Pattern GA = Pattern.compile(String.format(REGEX_START_END, "GA"));
private static final String GA_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gate");
private static final Pattern RD = Pattern.compile(String.format(REGEX_START_END, "RD"));
private static final String RD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Road");
// Matches both the 2-letter code and the already-spelled-out form.
private static final Pattern LI = Pattern.compile(String.format(REGEX_START_END, "LI|LINK"));
private static final String LI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Link");
private static final Pattern PL = Pattern.compile(String.format(REGEX_START_END, "PL"));
private static final String PL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Place");
private static final Pattern SQ = Pattern.compile(String.format(REGEX_START_END, "SQ"));
private static final String SQ_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Square");
private static final Pattern CL = Pattern.compile(String.format(REGEX_START_END, "CL"));
private static final String CL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Close");
private static final Pattern CR = Pattern.compile(String.format(REGEX_START_END, "CR"));
private static final String CR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Crescent");
private static final Pattern GD = Pattern.compile(String.format(REGEX_START_END, "GD"));
private static final String GD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gardens");
private static final Pattern LN = Pattern.compile(String.format(REGEX_START_END, "LN"));
private static final String LN_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Lane");
// NOTE(review): "CO" expands to the abbreviation "Ct" (Court?), unlike the
// full-word expansions used elsewhere — confirm this is intentional.
private static final Pattern CO = Pattern.compile(String.format(REGEX_START_END, "CO"));
private static final String CO_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Ct");
private static final Pattern CI = Pattern.compile(String.format(REGEX_START_END, "CI"));
private static final String CI_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Circle");
private static final Pattern HE = Pattern.compile(String.format(REGEX_START_END, "HE"));
private static final String HE_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Heath");
private static final Pattern ME = Pattern.compile(String.format(REGEX_START_END, "ME"));
private static final String ME_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Mews");
private static final Pattern TR = Pattern.compile(String.format(REGEX_START_END, "TR"));
private static final String TR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Trail");
private static final Pattern LD = Pattern.compile(String.format(REGEX_START_END, "LD"));
private static final String LD_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Landing");
private static final Pattern HL = Pattern.compile(String.format(REGEX_START_END, "HL"));
private static final String HL_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Hill");
private static final Pattern PK = Pattern.compile(String.format(REGEX_START_END, "PK"));
private static final String PK_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Park");
private static final Pattern CM = Pattern.compile(String.format(REGEX_START_END, "CM"));
private static final String CM_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Common");
private static final Pattern GT = Pattern.compile(String.format(REGEX_START_END, "GT"));
private static final String GT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Gate");
private static final Pattern CV = Pattern.compile(String.format(REGEX_START_END, "CV"));
private static final String CV_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Cove");
private static final Pattern VW = Pattern.compile(String.format(REGEX_START_END, "VW"));
private static final String VW_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "View");
private static final Pattern BY = Pattern.compile(String.format(REGEX_START_END, "BY|BA|BAY"));
private static final String BY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Bay");
private static final Pattern CE = Pattern.compile(String.format(REGEX_START_END, "CE"));
private static final String CE_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Center");
private static final Pattern CTR = Pattern.compile(String.format(REGEX_START_END, "CTR"));
private static final String CTR_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Center");
private static final Pattern MOUNT_ROYAL_UNIVERSITY = Pattern.compile(String.format(REGEX_START_END, "Mount Royal University"));
private static final String MOUNT_ROYAL_UNIVERSITY_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "MRU");
private static final Pattern MOUNT = Pattern.compile(String.format(REGEX_START_END, "Mount"));
private static final String MOUNT_REPLACEMENT = String.format(REGEX_START_END_REPLACEMENT, "Mt");
	/**
	 * Cleans a raw GTFS stop name for display: strips leading/trailing
	 * travel-direction ("bound") tokens, normalizes the "@" intersection
	 * separator, expands the street-type abbreviations declared above,
	 * shortens "Mount Royal University" to "MRU" and "Mount" to "Mt", then
	 * delegates to the shared CleanUtils helpers for the final formatting.
	 *
	 * NOTE(review): the replacements run in a fixed sequence; preserve the
	 * order when editing (e.g. MOUNT_ROYAL_UNIVERSITY must run before the
	 * generic MOUNT rule, see below).
	 */
	@Override
	public String cleanStopName(String gStopName) {
		// drop direction prefixes/suffixes first so the token matching below sees clean text
		gStopName = STARTS_WITH_BOUND.matcher(gStopName).replaceAll(StringUtils.EMPTY);
		gStopName = ENDS_WITH_BOUND.matcher(gStopName).replaceAll(StringUtils.EMPTY);
		// normalize the "@" separator between cross streets
		gStopName = AT_SIGN.matcher(gStopName).replaceAll(AT_SIGN_REPLACEMENT);
		// expand the street-type abbreviations (patterns declared above this method)
		gStopName = AV.matcher(gStopName).replaceAll(AV_REPLACEMENT);
		gStopName = PA.matcher(gStopName).replaceAll(PA_REPLACEMENT);
		gStopName = HT.matcher(gStopName).replaceAll(HT_REPLACEMENT);
		gStopName = GV.matcher(gStopName).replaceAll(GV_REPLACEMENT);
		gStopName = PT.matcher(gStopName).replaceAll(PT_REPLACEMENT);
		gStopName = TC.matcher(gStopName).replaceAll(TC_REPLACEMENT);
		gStopName = RI.matcher(gStopName).replaceAll(RI_REPLACEMENT);
		gStopName = MR.matcher(gStopName).replaceAll(MR_REPLACEMENT);
		gStopName = DR.matcher(gStopName).replaceAll(DR_REPLACEMENT);
		gStopName = ST.matcher(gStopName).replaceAll(ST_REPLACEMENT);
		gStopName = VI.matcher(gStopName).replaceAll(VI_REPLACEMENT);
		gStopName = PZ.matcher(gStopName).replaceAll(PZ_REPLACEMENT);
		gStopName = WY.matcher(gStopName).replaceAll(WY_REPLACEMENT);
		gStopName = GR.matcher(gStopName).replaceAll(GR_REPLACEMENT);
		gStopName = BV.matcher(gStopName).replaceAll(BV_REPLACEMENT);
		gStopName = GA.matcher(gStopName).replaceAll(GA_REPLACEMENT);
		gStopName = RD.matcher(gStopName).replaceAll(RD_REPLACEMENT);
		gStopName = LI.matcher(gStopName).replaceAll(LI_REPLACEMENT);
		gStopName = PL.matcher(gStopName).replaceAll(PL_REPLACEMENT);
		gStopName = SQ.matcher(gStopName).replaceAll(SQ_REPLACEMENT);
		gStopName = CL.matcher(gStopName).replaceAll(CL_REPLACEMENT);
		gStopName = CR.matcher(gStopName).replaceAll(CR_REPLACEMENT);
		gStopName = GD.matcher(gStopName).replaceAll(GD_REPLACEMENT);
		gStopName = LN.matcher(gStopName).replaceAll(LN_REPLACEMENT);
		gStopName = CO.matcher(gStopName).replaceAll(CO_REPLACEMENT);
		gStopName = ME.matcher(gStopName).replaceAll(ME_REPLACEMENT);
		gStopName = TR.matcher(gStopName).replaceAll(TR_REPLACEMENT);
		gStopName = CI.matcher(gStopName).replaceAll(CI_REPLACEMENT);
		gStopName = HE.matcher(gStopName).replaceAll(HE_REPLACEMENT);
		gStopName = LD.matcher(gStopName).replaceAll(LD_REPLACEMENT);
		gStopName = HL.matcher(gStopName).replaceAll(HL_REPLACEMENT);
		gStopName = PK.matcher(gStopName).replaceAll(PK_REPLACEMENT);
		gStopName = CM.matcher(gStopName).replaceAll(CM_REPLACEMENT);
		gStopName = GT.matcher(gStopName).replaceAll(GT_REPLACEMENT);
		gStopName = CV.matcher(gStopName).replaceAll(CV_REPLACEMENT);
		gStopName = VW.matcher(gStopName).replaceAll(VW_REPLACEMENT);
		gStopName = BY.matcher(gStopName).replaceAll(BY_REPLACEMENT);
		gStopName = CE.matcher(gStopName).replaceAll(CE_REPLACEMENT);
		gStopName = CTR.matcher(gStopName).replaceAll(CTR_REPLACEMENT);
		// the university name must be shortened BEFORE the generic "Mount" -> "Mt" rule
		gStopName = MOUNT_ROYAL_UNIVERSITY.matcher(gStopName).replaceAll(MOUNT_ROYAL_UNIVERSITY_REPLACEMENT);
		gStopName = MOUNT.matcher(gStopName).replaceAll(MOUNT_REPLACEMENT);
		// shared clean-up: remaining street types, street numbers, leading slash, final label
		gStopName = CleanUtils.cleanStreetTypes(gStopName);
		gStopName = CleanUtils.cleanNumbers(gStopName);
		gStopName = STARTS_WITH_SLASH.matcher(gStopName).replaceAll(StringUtils.EMPTY);
		return CleanUtils.cleanLabel(gStopName);
	}
}
| Code cleaning...
| src/org/mtransit/parser/ca_calgary_transit_bus/CalgaryTransitBusAgencyTools.java | Code cleaning... |
|
Java | apache-2.0 | a9607971b5a51870b09419e940efea49cd20bc60 | 0 | consulo/consulo-nodejs | /*
* Copyright 2013-2014 must-be.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mustbe.consulo.nodejs.module.extension;
import javax.swing.JComponent;
import org.consulo.module.extension.MutableModuleExtensionWithSdk;
import org.consulo.module.extension.MutableModuleInheritableNamedPointer;
import org.consulo.module.extension.ui.ModuleExtensionSdkBoxBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mustbe.consulo.RequiredDispatchThread;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootLayer;
/**
* @author VISTALL
* @since 14.03.14
*/
public class NodeJSMutableModuleExtension extends NodeJSModuleExtension implements MutableModuleExtensionWithSdk<NodeJSModuleExtension>
{
	public NodeJSMutableModuleExtension(@NotNull String id, @NotNull ModuleRootLayer rootModel) {
		super(id, rootModel);
	}

	/** Exposes the inherited SDK pointer with its mutable type so callers can change it. */
	@NotNull
	@Override
	public MutableModuleInheritableNamedPointer<Sdk> getInheritableSdk() {
		return (MutableModuleInheritableNamedPointer<Sdk>) super.getInheritableSdk();
	}

	/** Builds the module-settings UI: an SDK chooser box docked to the top. */
	@Nullable
	@Override
	@RequiredDispatchThread
	public JComponent createConfigurablePanel(@Nullable Runnable runnable) {
		JComponent sdkBox = ModuleExtensionSdkBoxBuilder.createAndDefine(this, runnable).build();
		return wrapToNorth(sdkBox);
	}

	@Override
	public void setEnabled(boolean b) {
		myIsEnabled = b;
	}

	/** True when this mutable copy differs from the committed extension state. */
	@Override
	public boolean isModified(@NotNull NodeJSModuleExtension nodeJSModuleExtension) {
		return isModifiedImpl(nodeJSModuleExtension);
	}
}
| src/org/mustbe/consulo/nodejs/module/extension/NodeJSMutableModuleExtension.java | /*
* Copyright 2013-2014 must-be.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mustbe.consulo.nodejs.module.extension;
import javax.swing.JComponent;
import org.consulo.module.extension.MutableModuleExtensionWithSdk;
import org.consulo.module.extension.MutableModuleInheritableNamedPointer;
import org.consulo.module.extension.ui.ModuleExtensionWithSdkPanel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootLayer;
/**
* @author VISTALL
* @since 14.03.14
*/
public class NodeJSMutableModuleExtension extends NodeJSModuleExtension implements MutableModuleExtensionWithSdk<NodeJSModuleExtension>
{
public NodeJSMutableModuleExtension(@NotNull String id, @NotNull ModuleRootLayer rootModel)
{
super(id, rootModel);
}
@NotNull
@Override
public MutableModuleInheritableNamedPointer<Sdk> getInheritableSdk()
{
return (MutableModuleInheritableNamedPointer<Sdk>) super.getInheritableSdk();
}
@Nullable
@Override
public JComponent createConfigurablePanel(@Nullable Runnable runnable)
{
return wrapToNorth(new ModuleExtensionWithSdkPanel(this, runnable));
}
@Override
public void setEnabled(boolean b)
{
myIsEnabled = b;
}
@Override
public boolean isModified(@NotNull NodeJSModuleExtension nodeJSModuleExtension)
{
return isModifiedImpl(nodeJSModuleExtension);
}
}
| deprecation cleanup
| src/org/mustbe/consulo/nodejs/module/extension/NodeJSMutableModuleExtension.java | deprecation cleanup |
|
Java | apache-2.0 | 0c63af46cfaeb5a395dcbe3db0c364542c5831a3 | 0 | fbonzon/graphhopper,graphhopper/graphhopper,boldtrn/graphhopper,boldtrn/graphhopper,boldtrn/graphhopper,graphhopper/map-matching,fbonzon/graphhopper,fbonzon/graphhopper,graphhopper/graphhopper,graphhopper/graphhopper,fbonzon/graphhopper,graphhopper/map-matching,boldtrn/graphhopper,graphhopper/graphhopper,graphhopper/map-matching | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.matching.http;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.graphhopper.GHResponse;
import com.graphhopper.GraphHopper;
import com.graphhopper.PathWrapper;
import com.graphhopper.http.WebHelper;
import com.graphhopper.matching.*;
import com.graphhopper.routing.AlgorithmOptions;
import com.graphhopper.routing.Path;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.HintsMap;
import com.graphhopper.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.*;
import static com.graphhopper.util.Parameters.Routing.*;
/**
* Resource to use map matching of GraphHopper in a remote client application.
*
* @author Peter Karich
*/
@javax.ws.rs.Path("match")
public class MapMatchingResource {
    // request/timing log sink used at the end of doGet()
    private static final Logger logger = LoggerFactory.getLogger(MapMatchingResource.class);
    // routing engine: supplies the graph, traversal mode and path-detail builders
    private final GraphHopper graphHopper;
    // used to validate the requested "vehicle" profile before matching
    private final EncodingManager encodingManager;
    // turn-instruction translations, selected per request via the "locale" parameter
    private final TranslationMap trMap;
    // all collaborators are supplied by the DI container
    @Inject
    public MapMatchingResource(GraphHopper graphHopper, EncodingManager encodingManager,
                               TranslationMap trMap) {
        this.graphHopper = graphHopper;
        this.encodingManager = encodingManager;
        this.trMap = trMap;
    }
    /**
     * Map-matches an uploaded GPX document against the road network.
     *
     * The request body carries the GPX XML; query parameters mirror the regular
     * GraphHopper routing API plus matching-specific options (gps_accuracy,
     * traversal_keys, the gpx.* output switches). The "type" parameter selects
     * the output: "json" (default), "gpx", or "extended_json" (a per-edge debug
     * tree built by convertToTree below).
     *
     * NOTE(review): despite its name, this method handles POST requests.
     */
    @POST
    @Consumes({MediaType.APPLICATION_XML, "application/gpx+xml"})
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, "application/gpx+xml"})
    public Response doGet(
            Document body,
            @Context HttpServletRequest request,
            @QueryParam(WAY_POINT_MAX_DISTANCE) @DefaultValue("1") double minPathPrecision,
            @QueryParam("type") @DefaultValue("json") String outType,
            @QueryParam(INSTRUCTIONS) @DefaultValue("true") boolean instructions,
            @QueryParam(CALC_POINTS) @DefaultValue("true") boolean calcPoints,
            @QueryParam("elevation") @DefaultValue("false") boolean enableElevation,
            @QueryParam("points_encoded") @DefaultValue("true") boolean pointsEncoded,
            @QueryParam("vehicle") @DefaultValue("car") String vehicleStr,
            @QueryParam("locale") @DefaultValue("en") String localeStr,
            @QueryParam(Parameters.DETAILS.PATH_DETAILS) List<String> pathDetails,
            @QueryParam("gpx.route") @DefaultValue("true") boolean withRoute /* default to false for the route part in next API version, see #437 */,
            @QueryParam("gpx.track") @DefaultValue("true") boolean withTrack,
            @QueryParam("gpx.waypoints") @DefaultValue("false") boolean withWayPoints,
            @QueryParam("gpx.trackname") @DefaultValue("GraphHopper Track") String trackName,
            @QueryParam("gpx.millis") String timeString,
            @QueryParam("traversal_keys") @DefaultValue("false") boolean enableTraversalKeys,
            @QueryParam(MAX_VISITED_NODES) @DefaultValue("3000") int maxVisitedNodes,
            @QueryParam("gps_accuracy") @DefaultValue("40") double gpsAccuracy) {
        boolean writeGPX = "gpx".equalsIgnoreCase(outType);
        // reject unknown vehicle profiles early; the error format follows the requested output type
        if (!encodingManager.supports(vehicleStr)) {
            throw new WebApplicationException(WebHelper.errorResponse(new IllegalArgumentException("Vehicle not supported: " + vehicleStr), writeGPX));
        }
        GPXFile file = new GPXFile();
        GPXFile gpxFile = file.doImport(body, 20);
        // GPX output always needs instructions for its route part
        instructions = writeGPX || instructions;
        StopWatch sw = new StopWatch().start();
        AlgorithmOptions opts = AlgorithmOptions.start()
                .traversalMode(graphHopper.getTraversalMode())
                .maxVisitedNodes(maxVisitedNodes)
                .hints(new HintsMap().put("vehicle", vehicleStr))
                .build();
        MapMatching matching = new MapMatching(graphHopper, opts);
        matching.setMeasurementErrorSigma(gpsAccuracy);
        MatchResult matchRsp = matching.doWork(gpxFile.getEntries());
        // TODO: Request logging and timing should perhaps be done somewhere outside
        float took = sw.stop().getSeconds();
        String infoStr = request.getRemoteAddr() + " " + request.getLocale() + " " + request.getHeader("User-Agent");
        String logStr = request.getQueryString() + ", " + infoStr + ", took:" + took + ", entries:" + gpxFile.getEntries().size();
        logger.info(logStr);
        if ("extended_json".equals(outType)) {
            // debug output: raw per-edge matches, no route merging or instructions needed
            return Response.ok(convertToTree(matchRsp, enableElevation, pointsEncoded)).
                    header("X-GH-Took", "" + Math.round(took * 1000)).
                    build();
        } else {
            // merge the matched edges into a single route with (optional) turn instructions
            Path path = matching.calcPath(matchRsp);
            Translation tr = trMap.getWithFallBack(Helper.getLocale(localeStr));
            DouglasPeucker peucker = new DouglasPeucker().setMaxDistance(minPathPrecision);
            PathMerger pathMerger = new PathMerger().
                    setEnableInstructions(instructions).
                    setPathDetailsBuilders(graphHopper.getPathDetailsBuilderFactory(), pathDetails).
                    setDouglasPeucker(peucker).
                    setSimplifyResponse(minPathPrecision > 0);
            PathWrapper pathWrapper = new PathWrapper();
            pathMerger.doWork(pathWrapper, Collections.singletonList(path), tr);
            // GraphHopper thinks an empty path is an invalid path, and further than an invalid path is still a path but
            // marked with a non-empty list of Exception objects. I disagree, so I clear it.
            pathWrapper.getErrors().clear();
            GHResponse rsp = new GHResponse();
            rsp.add(pathWrapper);
            if (writeGPX) {
                return WebHelper.gpxSuccessResponseBuilder(rsp, timeString, trackName, enableElevation, withRoute, withTrack, withWayPoints).
                        header("X-GH-Took", "" + Math.round(took * 1000)).
                        build();
            } else {
                // regular JSON response, enriched with map-matching statistics
                ObjectNode map = WebHelper.jsonObject(rsp, instructions, calcPoints, enableElevation, pointsEncoded, took);
                Map<String, Object> matchResult = new HashMap<>();
                matchResult.put("distance", matchRsp.getMatchLength());
                matchResult.put("time", matchRsp.getMatchMillis());
                matchResult.put("original_distance", matchRsp.getGpxEntriesLength());
                matchResult.put("original_time", matchRsp.getGpxEntriesMillis());
                map.putPOJO("map_matching", matchResult);
                if (enableTraversalKeys) {
                    List<Integer> traversalKeylist = new ArrayList<>();
                    for (EdgeMatch em : matchRsp.getEdgeMatches()) {
                        EdgeIteratorState edge = em.getEdgeState();
                        // encode edges as traversal keys which includes orientation, decode simply by multiplying with 0.5
                        traversalKeylist.add(GHUtility.createEdgeKey(edge.getBaseNode(), edge.getAdjNode(), edge.getEdge(), false));
                    }
                    map.putPOJO("traversal_keys", traversalKeylist);
                }
                return Response.ok(map).
                        header("X-GH-Took", "" + Math.round(took * 1000)).
                        build();
            }
        }
    }
static JsonNode convertToTree(MatchResult result, boolean elevation, boolean pointsEncoded) {
ObjectNode root = JsonNodeFactory.instance.objectNode();
ObjectNode diary = root.putObject("diary");
ArrayNode entries = diary.putArray("entries");
ObjectNode route = entries.addObject();
ArrayNode links = route.putArray("links");
for (int emIndex = 0; emIndex < result.getEdgeMatches().size(); emIndex++) {
ObjectNode link = links.addObject();
EdgeMatch edgeMatch = result.getEdgeMatches().get(emIndex);
PointList pointList = edgeMatch.getEdgeState().fetchWayGeometry(emIndex == 0 ? 3 : 2);
final ObjectNode geometry = link.putObject("geometry");
if (pointList.size() < 2) {
geometry.putPOJO("coordinates", pointsEncoded ? WebHelper.encodePolyline(pointList, elevation) : pointList.toLineString(elevation));
geometry.put("type", "Point");
} else {
geometry.putPOJO("coordinates", pointsEncoded ? WebHelper.encodePolyline(pointList, elevation) : pointList.toLineString(elevation));
geometry.put("type", "LineString");
}
link.put("id", edgeMatch.getEdgeState().getEdge());
ArrayNode wpts = link.putArray("wpts");
for (GPXExtension extension : edgeMatch.getGpxExtensions()) {
ObjectNode wpt = wpts.addObject();
wpt.put("x", extension.getQueryResult().getSnappedPoint().lon);
wpt.put("y", extension.getQueryResult().getSnappedPoint().lat);
wpt.put("timestamp", extension.getEntry().getTime());
}
}
return root;
}
}
| matching-web/src/main/java/com/graphhopper/matching/http/MapMatchingResource.java | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.matching.http;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.graphhopper.GHResponse;
import com.graphhopper.GraphHopper;
import com.graphhopper.PathWrapper;
import com.graphhopper.http.WebHelper;
import com.graphhopper.matching.*;
import com.graphhopper.routing.AlgorithmOptions;
import com.graphhopper.routing.Path;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.HintsMap;
import com.graphhopper.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.*;
import static com.graphhopper.util.Parameters.Routing.*;
/**
* Resource to use map matching of GraphHopper in a remote client application.
*
* @author Peter Karich
*/
@javax.ws.rs.Path("match")
public class MapMatchingResource {
private static final Logger logger = LoggerFactory.getLogger(MapMatchingResource.class);
private final GraphHopper graphHopper;
private final EncodingManager encodingManager;
private final TranslationMap trMap;
@Inject
public MapMatchingResource(GraphHopper graphHopper, EncodingManager encodingManager,
TranslationMap trMap) {
this.graphHopper = graphHopper;
this.encodingManager = encodingManager;
this.trMap = trMap;
}
@POST
@Consumes({MediaType.APPLICATION_XML, "application/gpx+xml"})
@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, "application/gpx+xml"})
public Response doGet(
Document body,
@Context HttpServletRequest request,
@QueryParam(WAY_POINT_MAX_DISTANCE) @DefaultValue("1") double minPathPrecision,
@QueryParam("type") @DefaultValue("json") String outType,
@QueryParam(INSTRUCTIONS) @DefaultValue("true") boolean instructions,
@QueryParam(CALC_POINTS) @DefaultValue("true") boolean calcPoints,
@QueryParam("elevation") @DefaultValue("false") boolean enableElevation,
@QueryParam("points_encoded") @DefaultValue("true") boolean pointsEncoded,
@QueryParam("vehicle") @DefaultValue("car") String vehicleStr,
@QueryParam("locale") @DefaultValue("en") String localeStr,
@QueryParam(Parameters.DETAILS.PATH_DETAILS) List<String> pathDetails,
@QueryParam("gpx.route") @DefaultValue("true") boolean withRoute /* default to false for the route part in next API version, see #437 */,
@QueryParam("gpx.track") @DefaultValue("true") boolean withTrack,
@QueryParam("gpx.waypoints") @DefaultValue("false") boolean withWayPoints,
@QueryParam("gpx.trackname") @DefaultValue("GraphHopper Track") String trackName,
@QueryParam("gpx.millis") String timeString,
@QueryParam("traversal_keys") @DefaultValue("false") boolean enableTraversalKeys,
@QueryParam(MAX_VISITED_NODES) @DefaultValue("3000") int maxVisitedNodes,
@QueryParam("gps_accuracy") @DefaultValue("40") double gpsAccuracy) {
boolean writeGPX = "gpx".equalsIgnoreCase(outType);
if (!encodingManager.supports(vehicleStr)) {
throw new WebApplicationException(WebHelper.errorResponse(new IllegalArgumentException("Vehicle not supported: " + vehicleStr), writeGPX));
}
PathWrapper pathWrapper = new PathWrapper();
GPXFile file = new GPXFile();
GPXFile gpxFile = file.doImport(body, 20);
instructions = writeGPX || instructions;
MatchResult matchRsp = null;
StopWatch sw = new StopWatch().start();
AlgorithmOptions opts = AlgorithmOptions.start()
.traversalMode(graphHopper.getTraversalMode())
.maxVisitedNodes(maxVisitedNodes)
.hints(new HintsMap().put("vehicle", vehicleStr))
.build();
MapMatching matching = new MapMatching(graphHopper, opts);
matching.setMeasurementErrorSigma(gpsAccuracy);
matchRsp = matching.doWork(gpxFile.getEntries());
// fill GHResponse for identical structure
Path path = matching.calcPath(matchRsp);
Translation tr = trMap.getWithFallBack(Helper.getLocale(localeStr));
DouglasPeucker peucker = new DouglasPeucker().setMaxDistance(minPathPrecision);
PathMerger pathMerger = new PathMerger().
setEnableInstructions(instructions).
setPathDetailsBuilders(graphHopper.getPathDetailsBuilderFactory(), pathDetails).
setDouglasPeucker(peucker).
setSimplifyResponse(minPathPrecision > 0);
pathMerger.doWork(pathWrapper, Collections.singletonList(path), tr);
// GraphHopper thinks an empty path is an invalid path, and an invalid path is still a path but
// marked with a non-empty list of exception objects. I disagree, so I clear it.
pathWrapper.getErrors().clear();
// TODO: Request logging and timing should perhaps be done somewhere outside
float took = sw.stop().getSeconds();
String infoStr = request.getRemoteAddr() + " " + request.getLocale() + " " + request.getHeader("User-Agent");
String logStr = request.getQueryString() + ", " + infoStr + ", took:" + took + ", entries:" + gpxFile.getEntries().size() + ", " + pathWrapper.getDebugInfo();
logger.info(logStr);
if ("extended_json".equals(outType)) {
return Response.ok(convertToTree(matchRsp, enableElevation, pointsEncoded)).
header("X-GH-Took", "" + Math.round(took * 1000)).
build();
} else if (writeGPX) {
GHResponse rsp = new GHResponse();
rsp.add(pathWrapper);
return WebHelper.gpxSuccessResponseBuilder(rsp, timeString, trackName, enableElevation, withRoute, withTrack, withWayPoints).
header("X-GH-Took", "" + Math.round(took * 1000)).
build();
} else {
GHResponse rsp = new GHResponse();
rsp.add(pathWrapper);
ObjectNode map = WebHelper.jsonObject(rsp, instructions, calcPoints, enableElevation, pointsEncoded, took);
Map<String, Object> matchResult = new HashMap<>();
matchResult.put("distance", matchRsp.getMatchLength());
matchResult.put("time", matchRsp.getMatchMillis());
matchResult.put("original_distance", matchRsp.getGpxEntriesLength());
matchResult.put("original_time", matchRsp.getGpxEntriesMillis());
map.putPOJO("map_matching", matchResult);
if (enableTraversalKeys) {
List<Integer> traversalKeylist = new ArrayList<>();
for (EdgeMatch em : matchRsp.getEdgeMatches()) {
EdgeIteratorState edge = em.getEdgeState();
// encode edges as traversal keys which includes orientation, decode simply by multiplying with 0.5
traversalKeylist.add(GHUtility.createEdgeKey(edge.getBaseNode(), edge.getAdjNode(), edge.getEdge(), false));
}
map.putPOJO("traversal_keys", traversalKeylist);
}
return Response.ok(map).
header("X-GH-Took", "" + Math.round(took * 1000)).
build();
}
}
static JsonNode convertToTree(MatchResult result, boolean elevation, boolean pointsEncoded) {
ObjectNode root = JsonNodeFactory.instance.objectNode();
ObjectNode diary = root.putObject("diary");
ArrayNode entries = diary.putArray("entries");
ObjectNode route = entries.addObject();
ArrayNode links = route.putArray("links");
for (int emIndex = 0; emIndex < result.getEdgeMatches().size(); emIndex++) {
ObjectNode link = links.addObject();
EdgeMatch edgeMatch = result.getEdgeMatches().get(emIndex);
PointList pointList = edgeMatch.getEdgeState().fetchWayGeometry(emIndex == 0 ? 3 : 2);
final ObjectNode geometry = link.putObject("geometry");
if (pointList.size() < 2) {
geometry.putPOJO("coordinates", pointsEncoded ? WebHelper.encodePolyline(pointList, elevation) : pointList.toLineString(elevation));
geometry.put("type", "Point");
} else {
geometry.putPOJO("coordinates", pointsEncoded ? WebHelper.encodePolyline(pointList, elevation) : pointList.toLineString(elevation));
geometry.put("type", "LineString");
}
link.put("id", edgeMatch.getEdgeState().getEdge());
ArrayNode wpts = link.putArray("wpts");
for (GPXExtension extension : edgeMatch.getGpxExtensions()) {
ObjectNode wpt = wpts.addObject();
wpt.put("x", extension.getQueryResult().getSnappedPoint().lon);
wpt.put("y", extension.getQueryResult().getSnappedPoint().lat);
wpt.put("timestamp", extension.getEntry().getTime());
}
}
return root;
}
}
| Move stuff around
| matching-web/src/main/java/com/graphhopper/matching/http/MapMatchingResource.java | Move stuff around |
|
Java | apache-2.0 | 4b6e87dbfbce13c44f73e9b9e732117e73195fab | 0 | Hack23/cia,Hack23/cia,Hack23/cia,Hack23/cia | /*
* Copyright 2010-2019 James Pether Sörling
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id$
* $HeadURL$
*/
package com.hack23.cia.service.external.esv.impl;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.time.Month;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.codec.Charsets;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.http.client.fluent.Request;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.hack23.cia.service.external.esv.api.GovernmentBodyAnnualOutcomeSummary;
import com.hack23.cia.service.external.esv.api.GovernmentBodyAnnualSummary;
/**
* The Class EsvGovernmentBodyOperationOutcomeReaderImpl.
*/
@Component
final class EsvGovernmentBodyOperationOutcomeReaderImpl implements EsvGovernmentBodyOperationOutcomeReader {
	/** CSV column header: the government body's organisation number. */
	private static final String ORGANISATIONSNUMMER = "Organisationsnummer";
	/** CSV column header: the government body (authority) name. */
	private static final String MYNDIGHET = "Myndighet";
	/** CSV column header: the year column ("År"). */
	private static final String YEAR = "År";
	/** CSV column header: outcome for December. */
	private static final String UTFALL_DECEMBER = "Utfall december";
	/** CSV column header: outcome for November. */
	private static final String UTFALL_NOVEMBER = "Utfall november";
	/** CSV column header: outcome for October. */
	private static final String UTFALL_OKTOBER = "Utfall oktober";
	/** CSV column header: outcome for September. */
	private static final String UTFALL_SEPTEMBER = "Utfall september";
	/** CSV column header: outcome for August. */
	private static final String UTFALL_AUGUSTI = "Utfall augusti";
	/** CSV column header: outcome for July. */
	private static final String UTFALL_JULI = "Utfall juli";
	/** CSV column header: outcome for June. */
	private static final String UTFALL_JUNI = "Utfall juni";
	/** CSV column header: outcome for May. */
	private static final String UTFALL_MAJ = "Utfall maj";
	/** CSV column header: outcome for April. */
	private static final String UTFALL_APRIL = "Utfall april";
	/** CSV column header: outcome for March. */
	private static final String UTFALL_MARS = "Utfall mars";
	/** CSV column header: outcome for February. */
	private static final String UTFALL_FEBRUARI = "Utfall februari";
	/** CSV column header: outcome for January. */
	private static final String UTFALL_JANUARI = "Utfall januari";
	/**
	 * Income-specific ("Inkomst…") CSV columns.
	 * NOTE(review): despite the OUTGOING name, these are income column headers and
	 * are passed to readIncomeCsv() below — the OUTGOING/INCOMING names appear
	 * swapped; verify before renaming (the constants are also used elsewhere).
	 */
	private static final String[] SPECIFIC_OUTGOING_FIELDS = new String[] { "Inkomsttyp", "Inkomsttypsnamn", "Inkomsthuvudgrupp", "Inkomsthuvudgruppsnamn", "Inkomsttitelgrupp", "Inkomsttitelgruppsnamn", "Inkomsttitel", "Inkomsttitelsnamn", "Inkomstundertitel", "Inkomstundertitelsnamn"};
	/**
	 * Expenditure-specific ("Utgift…"/"Anslag…") CSV columns.
	 * NOTE(review): see the naming remark on SPECIFIC_OUTGOING_FIELDS above.
	 */
	private static final String[] SPECIFIC_INCOMING_FIELDS = new String[] { "Utgiftsområde", "Utgiftsområdesnamn", "Anslag", "Anslagsnamn", "Anslagspost", "Anslagspostsnamn", "Anslagsdelpost", "Anslagsdelpostsnamn"};
	/** The esv excel reader. */
	@Autowired
	private EsvExcelReader esvExcelReader;
	// lazily populated cache of the downloaded income CSV (see readIncomeCsv)
	private List<GovernmentBodyAnnualOutcomeSummary> incomeCsvValues;
	// lazily populated cache of the downloaded expenditure CSV — presumably filled by a
	// readOutgoingCsv() counterpart outside this chunk; TODO confirm
	private List<GovernmentBodyAnnualOutcomeSummary> outGoingCsvValues;
	/**
	 * Instantiates a new esv government body operation outcome reader impl.
	 */
	public EsvGovernmentBodyOperationOutcomeReaderImpl() {
		super();
	}
@Override
public synchronized List<GovernmentBodyAnnualOutcomeSummary> readIncomeCsv() throws IOException {
if (incomeCsvValues == null) {
incomeCsvValues = readUsingZipInputStream(Request.Get(
"https://www.esv.se/psidata/manadsutfall/GetFile/?documentType=Inkomst&fileType=Zip&fileName=M%C3%A5nadsutfall%20inkomster%20januari%202006%20-%20maj%202019,%20definitivt.zip&year=2019&month=5&status=Definitiv")
.execute().returnContent().asStream(),SPECIFIC_OUTGOING_FIELDS);
}
return Collections.unmodifiableList(incomeCsvValues);
}
@Override
public synchronized List<GovernmentBodyAnnualOutcomeSummary> readOutgoingCsv() throws IOException {
if (outGoingCsvValues == null) {
outGoingCsvValues = readUsingZipInputStream(Request.Get(
"https://www.esv.se/psidata/manadsutfall/GetFile/?documentType=Utgift&fileType=Zip&fileName=M%C3%A5nadsutfall%20utgifter%20januari%202006%20-%20maj%202019,%20definitivt.zip&year=2019&month=5&status=Definitiv")
.execute().returnContent().asStream(),SPECIFIC_INCOMING_FIELDS);
}
return Collections.unmodifiableList(outGoingCsvValues);
}
/**
* Read using zip input stream.
*
* @param inputStream
* the input stream
* @param specificFields
* the specific fields
* @return the list
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private List<GovernmentBodyAnnualOutcomeSummary> readUsingZipInputStream(final InputStream inputStream,final String[] specificFields) throws IOException {
final BufferedInputStream bis = new BufferedInputStream(inputStream);
final ZipInputStream is = new ZipInputStream(bis);
final List<GovernmentBodyAnnualOutcomeSummary> list = new ArrayList<>();
try {
ZipEntry entry;
while ((entry = is.getNextEntry()) != null) {
list.addAll(readCsvContent(is,specificFields));
}
} finally {
is.close();
}
return list;
}
/**
* Read csv content.
*
* @param is
* the is
* @param specificFields
* the specific fields
* @return the list
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private List<GovernmentBodyAnnualOutcomeSummary> readCsvContent(final InputStream is,final String[] specificFields) throws IOException {
final CSVParser parser = CSVParser.parse(new InputStreamReader(is,Charsets.UTF_8), CSVFormat.EXCEL.withHeader().withDelimiter(';'));
final List<CSVRecord> records = parser.getRecords();
records.remove(0);
final Map<Integer, Map<String,String>> orgMinistryMap = createOrgMinistryMap(esvExcelReader.getDataPerMinistry(null));
final List<GovernmentBodyAnnualOutcomeSummary> list = new ArrayList<>();
for (final CSVRecord csvRecord : records) {
final GovernmentBodyAnnualOutcomeSummary governmentBodyAnnualOutcomeSummary = new GovernmentBodyAnnualOutcomeSummary(csvRecord.get(MYNDIGHET), csvRecord.get(ORGANISATIONSNUMMER), orgMinistryMap.get(Integer.valueOf(csvRecord.get(YEAR))).get(csvRecord.get(ORGANISATIONSNUMMER).replaceAll("-", "")), Integer.parseInt(csvRecord.get(YEAR)));
for (final String field : specificFields) {
governmentBodyAnnualOutcomeSummary.addDescriptionField(field,csvRecord.get(field));
}
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JANUARY.getValue(),csvRecord.get(UTFALL_JANUARI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.FEBRUARY.getValue(),csvRecord.get(UTFALL_FEBRUARI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.MARCH.getValue(),csvRecord.get(UTFALL_MARS));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.APRIL.getValue(),csvRecord.get(UTFALL_APRIL));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.MAY.getValue(),csvRecord.get(UTFALL_MAJ));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JUNE.getValue(),csvRecord.get(UTFALL_JUNI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JULY.getValue(),csvRecord.get(UTFALL_JULI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.AUGUST.getValue(),csvRecord.get(UTFALL_AUGUSTI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.SEPTEMBER.getValue(),csvRecord.get(UTFALL_SEPTEMBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.OCTOBER.getValue(),csvRecord.get(UTFALL_OKTOBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.NOVEMBER.getValue(),csvRecord.get(UTFALL_NOVEMBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.DECEMBER.getValue(),csvRecord.get(UTFALL_DECEMBER));
list.add(governmentBodyAnnualOutcomeSummary);
}
return list;
}
/**
* Creates the org ministry map.
*
* @param data the data
* @return the map
*/
private static Map<Integer, Map<String, String>> createOrgMinistryMap(
final Map<Integer, List<GovernmentBodyAnnualSummary>> data) {
final Map<Integer, Map<String,String>> orgMinistryMap = new HashMap<>();
final Set<Entry<Integer, List<GovernmentBodyAnnualSummary>>> entrySet = data.entrySet();
for (final Entry<Integer, List<GovernmentBodyAnnualSummary>> entry : entrySet) {
orgMinistryMap.put(entry.getKey(), entry.getValue().stream().collect(Collectors.groupingBy(t -> t.getOrgNumber().replaceAll("-","") ,Collectors.collectingAndThen(
Collectors.toList(),
values -> values.get(0).getMinistry()))));
}
return orgMinistryMap;
}
/**
* Adds the result for month.
*
* @param governmentBodyAnnualOutcomeSummary
* the government body annual outcome summary
* @param month
* the month
* @param value
* the value
*/
private static void addResultForMonth(final GovernmentBodyAnnualOutcomeSummary governmentBodyAnnualOutcomeSummary, final int month,
final String value) {
if (value != null && value.length() >0 ) {
governmentBodyAnnualOutcomeSummary.addData(month,Double.valueOf(value.replaceAll(",", ".")));
}
}
}
| service.external.esv/src/main/java/com/hack23/cia/service/external/esv/impl/EsvGovernmentBodyOperationOutcomeReaderImpl.java | /*
* Copyright 2010-2019 James Pether Sörling
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id$
* $HeadURL$
*/
package com.hack23.cia.service.external.esv.impl;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.time.Month;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.codec.Charsets;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.http.client.fluent.Request;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.hack23.cia.service.external.esv.api.GovernmentBodyAnnualOutcomeSummary;
import com.hack23.cia.service.external.esv.api.GovernmentBodyAnnualSummary;
/**
* The Class EsvGovernmentBodyOperationOutcomeReaderImpl.
*/
@Component
final class EsvGovernmentBodyOperationOutcomeReaderImpl implements EsvGovernmentBodyOperationOutcomeReader {
/** The Constant ORGANISATIONSNUMMER. */
private static final String ORGANISATIONSNUMMER = "Organisationsnummer";
/** The Constant MYNDIGHET. */
private static final String MYNDIGHET = "Myndighet";
/** The Constant ÅR. */
private static final String YEAR = "År";
/** The Constant UTFALL_DECEMBER. */
private static final String UTFALL_DECEMBER = "Utfall december";
/** The Constant UTFALL_NOVEMBER. */
private static final String UTFALL_NOVEMBER = "Utfall november";
/** The Constant UTFALL_OKTOBER. */
private static final String UTFALL_OKTOBER = "Utfall oktober";
/** The Constant UTFALL_SEPTEMBER. */
private static final String UTFALL_SEPTEMBER = "Utfall september";
/** The Constant UTFALL_AUGUSTI. */
private static final String UTFALL_AUGUSTI = "Utfall augusti";
/** The Constant UTFALL_JULI. */
private static final String UTFALL_JULI = "Utfall juli";
/** The Constant UTFALL_JUNI. */
private static final String UTFALL_JUNI = "Utfall juni";
/** The Constant UTFALL_MAJ. */
private static final String UTFALL_MAJ = "Utfall maj";
/** The Constant UTFALL_APRIL. */
private static final String UTFALL_APRIL = "Utfall april";
/** The Constant UTFALL_MARS. */
private static final String UTFALL_MARS = "Utfall mars";
/** The Constant UTFALL_FEBRUARI. */
private static final String UTFALL_FEBRUARI = "Utfall februari";
/** The Constant UTFALL_JANUARI. */
private static final String UTFALL_JANUARI = "Utfall januari";
/** The Constant SPECIFIC_OUTGOING_FIELDS. */
private static final String[] SPECIFIC_OUTGOING_FIELDS = new String[] { "Inkomsttyp", "Inkomsttypsnamn", "Inkomsthuvudgrupp", "Inkomsthuvudgruppsnamn", "Inkomsttitelgrupp", "Inkomsttitelgruppsnamn", "Inkomsttitel", "Inkomsttitelsnamn", "Inkomstundertitel", "Inkomstundertitelsnamn"};
/** The Constant SPECIFIC_INCOMING_FIELDS. */
private static final String[] SPECIFIC_INCOMING_FIELDS = new String[] { "Utgiftsområde", "Utgiftsområdesnamn", "Anslag", "Anslagsnamn", "Anslagspost", "Anslagspostsnamn", "Anslagsdelpost", "Anslagsdelpostsnamn"};
/** The esv excel reader. */
@Autowired
private EsvExcelReader esvExcelReader;
private List<GovernmentBodyAnnualOutcomeSummary> incomeCsvValues;
private List<GovernmentBodyAnnualOutcomeSummary> outGoingCsvValues;
/**
* Instantiates a new esv government body operation outcome reader impl.
*/
public EsvGovernmentBodyOperationOutcomeReaderImpl() {
super();
}
@Override
public synchronized List<GovernmentBodyAnnualOutcomeSummary> readIncomeCsv() throws IOException {
if (incomeCsvValues == null) {
incomeCsvValues = readUsingZipInputStream(Request.Get(
"https://www.esv.se/psidata/manadsutfall/GetFile/?documentType=Inkomst&fileType=Zip&fileName=M%C3%A5nadsutfall%20inkomster%20januari%202006%20-%20februari%202019,%20definitivt.zip&year=2019&month=2&status=Definitiv")
.execute().returnContent().asStream(),SPECIFIC_OUTGOING_FIELDS);
}
return Collections.unmodifiableList(incomeCsvValues);
}
@Override
public synchronized List<GovernmentBodyAnnualOutcomeSummary> readOutgoingCsv() throws IOException {
if (outGoingCsvValues == null) {
outGoingCsvValues = readUsingZipInputStream(Request.Get(
"https://www.esv.se/psidata/manadsutfall/GetFile/?documentType=Utgift&fileType=Zip&fileName=M%C3%A5nadsutfall%20utgifter%20januari%202006%20-%20februari%202019,%20definitivt.zip&year=2019&month=2&status=Definitiv")
.execute().returnContent().asStream(),SPECIFIC_INCOMING_FIELDS);
}
return Collections.unmodifiableList(outGoingCsvValues);
}
/**
* Read using zip input stream.
*
* @param inputStream
* the input stream
* @param specificFields
* the specific fields
* @return the list
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private List<GovernmentBodyAnnualOutcomeSummary> readUsingZipInputStream(final InputStream inputStream,final String[] specificFields) throws IOException {
final BufferedInputStream bis = new BufferedInputStream(inputStream);
final ZipInputStream is = new ZipInputStream(bis);
final List<GovernmentBodyAnnualOutcomeSummary> list = new ArrayList<>();
try {
ZipEntry entry;
while ((entry = is.getNextEntry()) != null) {
list.addAll(readCsvContent(is,specificFields));
}
} finally {
is.close();
}
return list;
}
/**
* Read csv content.
*
* @param is
* the is
* @param specificFields
* the specific fields
* @return the list
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private List<GovernmentBodyAnnualOutcomeSummary> readCsvContent(final InputStream is,final String[] specificFields) throws IOException {
final CSVParser parser = CSVParser.parse(new InputStreamReader(is,Charsets.UTF_8), CSVFormat.EXCEL.withHeader().withDelimiter(';'));
final List<CSVRecord> records = parser.getRecords();
records.remove(0);
final Map<Integer, Map<String,String>> orgMinistryMap = createOrgMinistryMap(esvExcelReader.getDataPerMinistry(null));
final List<GovernmentBodyAnnualOutcomeSummary> list = new ArrayList<>();
for (final CSVRecord csvRecord : records) {
final GovernmentBodyAnnualOutcomeSummary governmentBodyAnnualOutcomeSummary = new GovernmentBodyAnnualOutcomeSummary(csvRecord.get(MYNDIGHET), csvRecord.get(ORGANISATIONSNUMMER), orgMinistryMap.get(Integer.valueOf(csvRecord.get(YEAR))).get(csvRecord.get(ORGANISATIONSNUMMER).replaceAll("-", "")), Integer.parseInt(csvRecord.get(YEAR)));
for (final String field : specificFields) {
governmentBodyAnnualOutcomeSummary.addDescriptionField(field,csvRecord.get(field));
}
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JANUARY.getValue(),csvRecord.get(UTFALL_JANUARI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.FEBRUARY.getValue(),csvRecord.get(UTFALL_FEBRUARI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.MARCH.getValue(),csvRecord.get(UTFALL_MARS));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.APRIL.getValue(),csvRecord.get(UTFALL_APRIL));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.MAY.getValue(),csvRecord.get(UTFALL_MAJ));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JUNE.getValue(),csvRecord.get(UTFALL_JUNI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.JULY.getValue(),csvRecord.get(UTFALL_JULI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.AUGUST.getValue(),csvRecord.get(UTFALL_AUGUSTI));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.SEPTEMBER.getValue(),csvRecord.get(UTFALL_SEPTEMBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.OCTOBER.getValue(),csvRecord.get(UTFALL_OKTOBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.NOVEMBER.getValue(),csvRecord.get(UTFALL_NOVEMBER));
addResultForMonth(governmentBodyAnnualOutcomeSummary,Month.DECEMBER.getValue(),csvRecord.get(UTFALL_DECEMBER));
list.add(governmentBodyAnnualOutcomeSummary);
}
return list;
}
/**
* Creates the org ministry map.
*
* @param data the data
* @return the map
*/
private static Map<Integer, Map<String, String>> createOrgMinistryMap(
final Map<Integer, List<GovernmentBodyAnnualSummary>> data) {
final Map<Integer, Map<String,String>> orgMinistryMap = new HashMap<>();
final Set<Entry<Integer, List<GovernmentBodyAnnualSummary>>> entrySet = data.entrySet();
for (final Entry<Integer, List<GovernmentBodyAnnualSummary>> entry : entrySet) {
orgMinistryMap.put(entry.getKey(), entry.getValue().stream().collect(Collectors.groupingBy(t -> t.getOrgNumber().replaceAll("-","") ,Collectors.collectingAndThen(
Collectors.toList(),
values -> values.get(0).getMinistry()))));
}
return orgMinistryMap;
}
/**
* Adds the result for month.
*
* @param governmentBodyAnnualOutcomeSummary
* the government body annual outcome summary
* @param month
* the month
* @param value
* the value
*/
private static void addResultForMonth(final GovernmentBodyAnnualOutcomeSummary governmentBodyAnnualOutcomeSummary, final int month,
final String value) {
if (value != null && value.length() >0 ) {
governmentBodyAnnualOutcomeSummary.addData(month,Double.valueOf(value.replaceAll(",", ".")));
}
}
}
| update https://www.esv.se/psidata/manadsutfall/ | service.external.esv/src/main/java/com/hack23/cia/service/external/esv/impl/EsvGovernmentBodyOperationOutcomeReaderImpl.java | update https://www.esv.se/psidata/manadsutfall/ |
|
Java | apache-2.0 | 1709a3ad96aa16ba50ab23ffa67571d8adc485a8 | 0 | eddumelendez/spring-boot,bjornlindstrom/spring-boot,Buzzardo/spring-boot,jayarampradhan/spring-boot,aahlenst/spring-boot,javyzheng/spring-boot,chrylis/spring-boot,deki/spring-boot,pvorb/spring-boot,habuma/spring-boot,isopov/spring-boot,mbenson/spring-boot,linead/spring-boot,royclarkson/spring-boot,tsachev/spring-boot,pvorb/spring-boot,hello2009chen/spring-boot,xiaoleiPENG/my-project,sebastiankirsch/spring-boot,vpavic/spring-boot,linead/spring-boot,joshthornhill/spring-boot,mdeinum/spring-boot,Buzzardo/spring-boot,philwebb/spring-boot,bclozel/spring-boot,ptahchiev/spring-boot,tsachev/spring-boot,Buzzardo/spring-boot,drumonii/spring-boot,spring-projects/spring-boot,michael-simons/spring-boot,ilayaperumalg/spring-boot,michael-simons/spring-boot,drumonii/spring-boot,vakninr/spring-boot,kamilszymanski/spring-boot,mbenson/spring-boot,vpavic/spring-boot,aahlenst/spring-boot,aahlenst/spring-boot,felipeg48/spring-boot,dreis2211/spring-boot,wilkinsona/spring-boot,shakuzen/spring-boot,kdvolder/spring-boot,jxblum/spring-boot,mosoft521/spring-boot,jxblum/spring-boot,ptahchiev/spring-boot,felipeg48/spring-boot,joshiste/spring-boot,Buzzardo/spring-boot,bclozel/spring-boot,bclozel/spring-boot,chrylis/spring-boot,shangyi0102/spring-boot,scottfrederick/spring-boot,vpavic/spring-boot,zhanhb/spring-boot,htynkn/spring-boot,philwebb/spring-boot,scottfrederick/spring-boot,mbenson/spring-boot,yangdd1205/spring-boot,tsachev/spring-boot,zhanhb/spring-boot,wilkinsona/spring-boot,ptahchiev/spring-boot,lexandro/spring-boot,mosoft521/spring-boot,Nowheresly/spring-boot,kdvolder/spring-boot,lexandro/spring-boot,eddumelendez/spring-boot,tiarebalbi/spring-boot,philwebb/spring-boot,ihoneymon/spring-boot,kdvolder/spring-boot,dreis2211/spring-boot,mbenson/spring-boot,eddumelendez/spring-boot,donhuvy/spring-boot,htynkn/spring-boot,habuma/spring-boot,shakuzen/spring-boot,sebastiankirsch/spring-boot,ihoneymon/spring-boot,zhanhb/spring-boot
,scottfrederick/spring-boot,NetoDevel/spring-boot,ptahchiev/spring-boot,donhuvy/spring-boot,bjornlindstrom/spring-boot,shangyi0102/spring-boot,bbrouwer/spring-boot,linead/spring-boot,joshiste/spring-boot,spring-projects/spring-boot,hello2009chen/spring-boot,shangyi0102/spring-boot,royclarkson/spring-boot,hello2009chen/spring-boot,lburgazzoli/spring-boot,jbovet/spring-boot,dreis2211/spring-boot,shakuzen/spring-boot,joshthornhill/spring-boot,tiarebalbi/spring-boot,jayarampradhan/spring-boot,royclarkson/spring-boot,habuma/spring-boot,yhj630520/spring-boot,wilkinsona/spring-boot,zhanhb/spring-boot,aahlenst/spring-boot,kamilszymanski/spring-boot,xiaoleiPENG/my-project,zhanhb/spring-boot,mosoft521/spring-boot,jayarampradhan/spring-boot,shakuzen/spring-boot,Buzzardo/spring-boot,bbrouwer/spring-boot,mosoft521/spring-boot,jxblum/spring-boot,xiaoleiPENG/my-project,chrylis/spring-boot,shakuzen/spring-boot,bjornlindstrom/spring-boot,rweisleder/spring-boot,Nowheresly/spring-boot,javyzheng/spring-boot,mbenson/spring-boot,olivergierke/spring-boot,linead/spring-boot,rweisleder/spring-boot,jayarampradhan/spring-boot,deki/spring-boot,yangdd1205/spring-boot,scottfrederick/spring-boot,drumonii/spring-boot,wilkinsona/spring-boot,tsachev/spring-boot,deki/spring-boot,felipeg48/spring-boot,tiarebalbi/spring-boot,linead/spring-boot,javyzheng/spring-boot,yangdd1205/spring-boot,dreis2211/spring-boot,lburgazzoli/spring-boot,philwebb/spring-boot,rweisleder/spring-boot,joshthornhill/spring-boot,olivergierke/spring-boot,mdeinum/spring-boot,ilayaperumalg/spring-boot,lburgazzoli/spring-boot,joshiste/spring-boot,spring-projects/spring-boot,ilayaperumalg/spring-boot,tiarebalbi/spring-boot,bbrouwer/spring-boot,donhuvy/spring-boot,lburgazzoli/spring-boot,lexandro/spring-boot,philwebb/spring-boot,spring-projects/spring-boot,lexandro/spring-boot,felipeg48/spring-boot,donhuvy/spring-boot,sebastiankirsch/spring-boot,bjornlindstrom/spring-boot,drumonii/spring-boot,donhuvy/spring-boot,scottfrederick/spring-b
oot,kdvolder/spring-boot,eddumelendez/spring-boot,felipeg48/spring-boot,dreis2211/spring-boot,pvorb/spring-boot,DeezCashews/spring-boot,lexandro/spring-boot,jxblum/spring-boot,DeezCashews/spring-boot,DeezCashews/spring-boot,yhj630520/spring-boot,vakninr/spring-boot,ptahchiev/spring-boot,zhanhb/spring-boot,NetoDevel/spring-boot,mosoft521/spring-boot,olivergierke/spring-boot,bjornlindstrom/spring-boot,eddumelendez/spring-boot,DeezCashews/spring-boot,dreis2211/spring-boot,mdeinum/spring-boot,kdvolder/spring-boot,bclozel/spring-boot,eddumelendez/spring-boot,ilayaperumalg/spring-boot,spring-projects/spring-boot,kamilszymanski/spring-boot,DeezCashews/spring-boot,vakninr/spring-boot,shakuzen/spring-boot,kamilszymanski/spring-boot,isopov/spring-boot,joshiste/spring-boot,hello2009chen/spring-boot,olivergierke/spring-boot,Nowheresly/spring-boot,jbovet/spring-boot,NetoDevel/spring-boot,bbrouwer/spring-boot,chrylis/spring-boot,javyzheng/spring-boot,aahlenst/spring-boot,sebastiankirsch/spring-boot,royclarkson/spring-boot,javyzheng/spring-boot,yhj630520/spring-boot,ilayaperumalg/spring-boot,habuma/spring-boot,jbovet/spring-boot,ihoneymon/spring-boot,deki/spring-boot,joshiste/spring-boot,tiarebalbi/spring-boot,bclozel/spring-boot,jbovet/spring-boot,jxblum/spring-boot,vakninr/spring-boot,joshthornhill/spring-boot,drumonii/spring-boot,michael-simons/spring-boot,sebastiankirsch/spring-boot,jayarampradhan/spring-boot,mdeinum/spring-boot,chrylis/spring-boot,donhuvy/spring-boot,vpavic/spring-boot,wilkinsona/spring-boot,Buzzardo/spring-boot,rweisleder/spring-boot,yhj630520/spring-boot,isopov/spring-boot,vpavic/spring-boot,htynkn/spring-boot,olivergierke/spring-boot,royclarkson/spring-boot,aahlenst/spring-boot,Nowheresly/spring-boot,mbenson/spring-boot,spring-projects/spring-boot,rweisleder/spring-boot,drumonii/spring-boot,lburgazzoli/spring-boot,NetoDevel/spring-boot,shangyi0102/spring-boot,bbrouwer/spring-boot,ihoneymon/spring-boot,philwebb/spring-boot,rweisleder/spring-boot,habuma/spri
ng-boot,Nowheresly/spring-boot,ptahchiev/spring-boot,michael-simons/spring-boot,joshthornhill/spring-boot,htynkn/spring-boot,shangyi0102/spring-boot,vakninr/spring-boot,jxblum/spring-boot,tsachev/spring-boot,mdeinum/spring-boot,NetoDevel/spring-boot,joshiste/spring-boot,mdeinum/spring-boot,deki/spring-boot,tiarebalbi/spring-boot,pvorb/spring-boot,michael-simons/spring-boot,isopov/spring-boot,scottfrederick/spring-boot,habuma/spring-boot,kdvolder/spring-boot,chrylis/spring-boot,kamilszymanski/spring-boot,ilayaperumalg/spring-boot,jbovet/spring-boot,hello2009chen/spring-boot,isopov/spring-boot,ihoneymon/spring-boot,htynkn/spring-boot,ihoneymon/spring-boot,bclozel/spring-boot,michael-simons/spring-boot,isopov/spring-boot,yhj630520/spring-boot,xiaoleiPENG/my-project,wilkinsona/spring-boot,htynkn/spring-boot,vpavic/spring-boot,pvorb/spring-boot,felipeg48/spring-boot,tsachev/spring-boot,xiaoleiPENG/my-project | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.orm.jpa;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceUnitUtil;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.orm.jpa.EntityManagerHolder;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
/**
 * Tests for {@link TestEntityManager}.
 *
 * @author Phillip Webb
 */
public class TestEntityManagerTests {

	@Rule
	public ExpectedException thrown = ExpectedException.none();

	@Mock
	private EntityManagerFactory entityManagerFactory;

	@Mock
	private EntityManager entityManager;

	@Mock
	private PersistenceUnitUtil persistenceUnitUtil;

	private TestEntityManager testEntityManager;

	@Before
	public void setup() {
		MockitoAnnotations.initMocks(this);
		this.testEntityManager = new TestEntityManager(this.entityManagerFactory);
		// The factory hands out the mocked PersistenceUnitUtil used for id lookups.
		given(this.entityManagerFactory.getPersistenceUnitUtil())
				.willReturn(this.persistenceUnitUtil);
	}

	@Test
	public void createWhenEntityManagerIsNullShouldThrowException() throws Exception {
		this.thrown.expect(IllegalArgumentException.class);
		this.thrown.expectMessage("EntityManagerFactory must not be null");
		new TestEntityManager(null);
	}

	@Test
	public void persistAndGetIdShouldPersistAndGetId() throws Exception {
		bindEntityManager();
		TestEntity source = new TestEntity();
		// Stub the identifier lookup so the persisted entity reports id 123.
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		Object id = this.testEntityManager.persistAndGetId(source);
		verify(this.entityManager).persist(source);
		assertThat(id).isEqualTo(123);
	}

	@Test
	public void persistAndGetIdForTypeShouldPersistAndGetId() throws Exception {
		bindEntityManager();
		TestEntity source = new TestEntity();
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		// The typed variant should cast the identifier to the requested type.
		Integer id = this.testEntityManager.persistAndGetId(source, Integer.class);
		verify(this.entityManager).persist(source);
		assertThat(id).isEqualTo(123);
	}

	@Test
	public void persistShouldPersist() throws Exception {
		bindEntityManager();
		TestEntity source = new TestEntity();
		TestEntity returned = this.testEntityManager.persist(source);
		verify(this.entityManager).persist(source);
		// persist() hands back the very same instance it was given.
		assertThat(returned).isSameAs(source);
	}

	@Test
	public void persistAndFlushShouldPersistAndFlush() throws Exception {
		bindEntityManager();
		TestEntity source = new TestEntity();
		TestEntity returned = this.testEntityManager.persistAndFlush(source);
		verify(this.entityManager).persist(source);
		verify(this.entityManager).flush();
		assertThat(returned).isSameAs(source);
	}

	@Test
	public void persistFlushFindShouldPersistAndFlushAndFind() throws Exception {
		bindEntityManager();
		TestEntity source = new TestEntity();
		TestEntity reloaded = new TestEntity();
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		given(this.entityManager.find(TestEntity.class, 123)).willReturn(reloaded);
		// The returned value must be the freshly-found entity, not the original.
		TestEntity returned = this.testEntityManager.persistFlushFind(source);
		verify(this.entityManager).persist(source);
		verify(this.entityManager).flush();
		assertThat(returned).isSameAs(reloaded);
	}

	@Test
	public void mergeShouldMerge() throws Exception {
		bindEntityManager();
		TestEntity detached = new TestEntity();
		given(this.entityManager.merge(detached)).willReturn(detached);
		TestEntity merged = this.testEntityManager.merge(detached);
		verify(this.entityManager).merge(detached);
		assertThat(merged).isSameAs(detached);
	}

	@Test
	public void removeShouldRemove() throws Exception {
		bindEntityManager();
		TestEntity managed = new TestEntity();
		this.testEntityManager.remove(managed);
		verify(this.entityManager).remove(managed);
	}

	@Test
	public void findShouldFind() throws Exception {
		bindEntityManager();
		TestEntity stored = new TestEntity();
		given(this.entityManager.find(TestEntity.class, 123)).willReturn(stored);
		TestEntity found = this.testEntityManager.find(TestEntity.class, 123);
		assertThat(found).isSameAs(stored);
	}

	@Test
	public void flushShouldFlush() throws Exception {
		bindEntityManager();
		this.testEntityManager.flush();
		verify(this.entityManager).flush();
	}

	@Test
	public void refreshShouldRefresh() throws Exception {
		bindEntityManager();
		TestEntity managed = new TestEntity();
		this.testEntityManager.refresh(managed);
		verify(this.entityManager).refresh(managed);
	}

	@Test
	public void clearShouldClear() throws Exception {
		bindEntityManager();
		this.testEntityManager.clear();
		verify(this.entityManager).clear();
	}

	@Test
	public void detachShouldDetach() throws Exception {
		bindEntityManager();
		TestEntity managed = new TestEntity();
		this.testEntityManager.detach(managed);
		verify(this.entityManager).detach(managed);
	}

	@Test
	public void getIdForTypeShouldGetId() throws Exception {
		TestEntity source = new TestEntity();
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		Integer id = this.testEntityManager.getId(source, Integer.class);
		assertThat(id).isEqualTo(123);
	}

	@Test
	public void getIdForTypeWhenTypeIsWrongShouldThrowException() throws Exception {
		TestEntity source = new TestEntity();
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		// Asking for a Long when the actual id is an Integer must fail fast.
		this.thrown.expectMessage("ID mismatch: java.lang.Integer");
		this.testEntityManager.getId(source, Long.class);
	}

	@Test
	public void getIdShouldGetId() throws Exception {
		TestEntity source = new TestEntity();
		given(this.persistenceUnitUtil.getIdentifier(source)).willReturn(123);
		Object id = this.testEntityManager.getId(source);
		assertThat(id).isEqualTo(123);
	}

	@Test
	public void getEntityManagerShouldGetEntityManager() throws Exception {
		bindEntityManager();
		assertThat(this.testEntityManager.getEntityManager())
				.isEqualTo(this.entityManager);
	}

	@Test
	public void getEntityManagerWhenNotSetShouldThrowException() throws Exception {
		this.thrown.expect(IllegalStateException.class);
		this.thrown.expectMessage("No transactional EntityManager found");
		this.testEntityManager.getEntityManager();
	}

	/**
	 * Simulates an active transaction by binding an {@link EntityManagerHolder}
	 * for our factory to the current thread.
	 */
	private void bindEntityManager() {
		TransactionSynchronizationManager.bindResource(this.entityManagerFactory,
				new EntityManagerHolder(this.entityManager));
	}

	static class TestEntity {

	}

}
| spring-boot-test-autoconfigure/src/test/java/org/springframework/boot/test/autoconfigure/orm/jpa/TestEntityManagerTests.java | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.orm.jpa;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceUnitUtil;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.orm.jpa.EntityManagerHolder;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
/**
* Tests for {@link TestEntityManager}.
*
* @author Phillip Webb
*/
public class TestEntityManagerTests {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Mock
private EntityManagerFactory entityManagerFactory;
@Mock
private EntityManager entityManager;
@Mock
private PersistenceUnitUtil persistenceUnitUtil;
private TestEntityManager testEntityManager;
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
this.testEntityManager = new TestEntityManager(this.entityManagerFactory);
given(this.entityManagerFactory.getPersistenceUnitUtil())
.willReturn(this.persistenceUnitUtil);
}
@Test
public void createWhenEntityManagerIsNullShouldThrowException() throws Exception {
this.thrown.expect(IllegalArgumentException.class);
this.thrown.expectMessage("EntityManagerFactory must not be null");
new TestEntityManager(null);
}
@Test
public void persistAndGetIdShouldPersistAndGetId() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
Object result = this.testEntityManager.persistAndGetId(entity);
verify(this.entityManager).persist(entity);
assertThat(result).isEqualTo(123);
}
@Test
public void persistAndGetIdForTypeShouldPersistAndGetId() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
Integer result = this.testEntityManager.persistAndGetId(entity, Integer.class);
verify(this.entityManager).persist(entity);
assertThat(result).isEqualTo(123);
}
@Test
public void persistShouldPersist() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
TestEntity result = this.testEntityManager.persist(entity);
verify(this.entityManager).persist(entity);
assertThat(result).isSameAs(entity);
}
@Test
public void persistAndFlushShouldPersistAndFlush() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
TestEntity result = this.testEntityManager.persistAndFlush(entity);
verify(this.entityManager).persist(entity);
verify(this.entityManager).flush();
assertThat(result).isSameAs(entity);
}
@Test
public void persistFlushFindShouldPersistAndFlushAndFind() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
TestEntity found = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
given(this.entityManager.find(TestEntity.class, 123)).willReturn(found);
TestEntity result = this.testEntityManager.persistFlushFind(entity);
verify(this.entityManager).persist(entity);
verify(this.entityManager).flush();
assertThat(result).isSameAs(found);
}
@Test
public void mergeShouldMerge() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
given(this.entityManager.merge(entity)).willReturn(entity);
TestEntity result = this.testEntityManager.merge(entity);
verify(this.entityManager).merge(entity);
assertThat(result).isSameAs(entity);
}
@Test
public void removeShouldRemove() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
this.testEntityManager.remove(entity);
verify(this.entityManager).remove(entity);
}
@Test
public void findShouldFind() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
given(this.entityManager.find(TestEntity.class, 123)).willReturn(entity);
TestEntity result = this.testEntityManager.find(TestEntity.class, 123);
assertThat(result).isSameAs(entity);
}
@Test
public void flushShouldFlush() throws Exception {
bindEntityManager();
this.testEntityManager.flush();
verify(this.entityManager).flush();
}
@Test
public void refreshShouldRefresh() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
this.testEntityManager.refresh(entity);
verify(this.entityManager).refresh(entity);
}
@Test
public void clearShouldClear() throws Exception {
bindEntityManager();
this.testEntityManager.clear();
verify(this.entityManager).clear();
}
@Test
public void detachShouldDetach() throws Exception {
bindEntityManager();
TestEntity entity = new TestEntity();
this.testEntityManager.detach(entity);
verify(this.entityManager).detach(entity);
}
@Test
public void getIdForTypeShouldGetId() throws Exception {
TestEntity entity = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
Integer result = this.testEntityManager.getId(entity, Integer.class);
assertThat(result).isEqualTo(123);
}
@Test
public void getIdForTypeWhenTypeIsWrongShouldThrowException() throws Exception {
TestEntity entity = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
this.thrown.expectMessage("ID mismatch Object of class [java.lang.Integer] "
+ "must be an instance of class java.lang.Long");
this.testEntityManager.getId(entity, Long.class);
}
@Test
public void getIdShouldGetId() throws Exception {
TestEntity entity = new TestEntity();
given(this.persistenceUnitUtil.getIdentifier(entity)).willReturn(123);
Object result = this.testEntityManager.getId(entity);
assertThat(result).isEqualTo(123);
}
@Test
public void getEntityManagerShouldGetEntityManager() throws Exception {
bindEntityManager();
assertThat(this.testEntityManager.getEntityManager())
.isEqualTo(this.entityManager);
}
@Test
public void getEntityManagerWhenNotSetShouldThrowException() throws Exception {
this.thrown.expect(IllegalStateException.class);
this.thrown.expectMessage("No transactional EntityManager found");
this.testEntityManager.getEntityManager();
}
private void bindEntityManager() {
EntityManagerHolder holder = new EntityManagerHolder(this.entityManager);
TransactionSynchronizationManager.bindResource(this.entityManagerFactory, holder);
}
static class TestEntity {
}
}
| Adapt to Assert.instanceOf changes
Since SPR-15196, the behavior of `Assert` methods changed and some were
deprecated. This commit adapts to the new error mechanism implemented in
`Assert.instanceOf`.
Fixes gh-8140
| spring-boot-test-autoconfigure/src/test/java/org/springframework/boot/test/autoconfigure/orm/jpa/TestEntityManagerTests.java | Adapt to Assert.instanceOf changes |
|
Java | apache-2.0 | b711a90177cf3a7756d74c59f9eabecb42d4b146 | 0 | leleuj/cas,philliprower/cas,apereo/cas,pdrados/cas,fogbeam/cas_mirror,Jasig/cas,leleuj/cas,philliprower/cas,fogbeam/cas_mirror,fogbeam/cas_mirror,philliprower/cas,philliprower/cas,rkorn86/cas,apereo/cas,leleuj/cas,Jasig/cas,philliprower/cas,apereo/cas,leleuj/cas,philliprower/cas,fogbeam/cas_mirror,pdrados/cas,leleuj/cas,fogbeam/cas_mirror,rkorn86/cas,apereo/cas,Jasig/cas,rkorn86/cas,pdrados/cas,pdrados/cas,Jasig/cas,pdrados/cas,philliprower/cas,apereo/cas,leleuj/cas,apereo/cas,apereo/cas,fogbeam/cas_mirror,pdrados/cas,rkorn86/cas | package org.apereo.cas.config;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.ticket.TicketCatalog;
import org.apereo.cas.ticket.TicketDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* This is {@link IgniteTicketRegistryTicketCatalogConfiguration}.
*
* @author Misagh Moayyed
* @since 5.1.0
*/
@Configuration("igniteTicketRegistryTicketCatalogConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class IgniteTicketRegistryTicketCatalogConfiguration extends CasCoreTicketCatalogConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(IgniteTicketRegistryTicketCatalogConfiguration.class);
@Autowired
private CasConfigurationProperties casProperties;
@Override
protected void buildAndRegisterServiceTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("serviceTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getSt().getTimeToKillInSeconds());
super.buildAndRegisterServiceTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterProxyTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("proxyTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getPt().getTimeToKillInSeconds());
super.buildAndRegisterServiceTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterTicketGrantingTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("ticketGrantingTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getTgt().getMaxTimeToLiveInSeconds());
super.buildAndRegisterTicketGrantingTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterProxyGrantingTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("proxyGrantingTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getTgt().getMaxTimeToLiveInSeconds());
super.buildAndRegisterTicketGrantingTicketDefinition(plan, metadata);
}
}
| support/cas-server-support-ignite-ticket-registry/src/main/java/org/apereo/cas/config/IgniteTicketRegistryTicketCatalogConfiguration.java | package org.apereo.cas.config;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.ticket.TicketCatalog;
import org.apereo.cas.ticket.TicketDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* This is {@link IgniteTicketRegistryTicketCatalogConfiguration}.
*
* @author Misagh Moayyed
* @since 5.1.0
*/
@Configuration("igniteTicketRegistryTicketCatalogConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class IgniteTicketRegistryTicketCatalogConfiguration extends CasCoreTicketCatalogConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(IgniteTicketRegistryTicketCatalogConfiguration.class);
@Autowired
private CasConfigurationProperties casProperties;
@Override
protected void buildAndRegisterServiceTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("serviceTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getSt().getTimeToKillInSeconds());
super.buildAndRegisterServiceTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterProxyTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("serviceTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getPt().getTimeToKillInSeconds());
super.buildAndRegisterServiceTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterTicketGrantingTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("ticketGrantingTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getTgt().getMaxTimeToLiveInSeconds());
super.buildAndRegisterTicketGrantingTicketDefinition(plan, metadata);
}
@Override
protected void buildAndRegisterProxyGrantingTicketDefinition(final TicketCatalog plan, final TicketDefinition metadata) {
metadata.getProperties().setStorageName("proxyGrantingTicketsCache");
metadata.getProperties().setStorageTimeout(casProperties.getTicket().getTgt().getMaxTimeToLiveInSeconds());
super.buildAndRegisterTicketGrantingTicketDefinition(plan, metadata);
}
}
| Update IgniteTicketRegistryTicketCatalogConfiguration.java (#3131)
Change on ProxyTicketDefinition storage name from "serviceTicketsCache" to "proxyTicketsCache" to fix duplicate cache name issue which raise IgniteCheckException on startup.
Best Regards,
Alexandre de Pellegrin | support/cas-server-support-ignite-ticket-registry/src/main/java/org/apereo/cas/config/IgniteTicketRegistryTicketCatalogConfiguration.java | Update IgniteTicketRegistryTicketCatalogConfiguration.java (#3131) |
|
Java | apache-2.0 | 8a0efdb5d9d933259f3c4164fdf18b4295790e69 | 0 | felipecsl/okreplay,felipecsl/okreplay,betamaxteam/betamax,nuxusr/walkman,felipecsl/walkman,robfletcher/betamax,felipecsl/walkman,betamaxteam/betamax,robfletcher/betamax,nuxusr/walkman,nuxusr/walkman | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gneoxsolutions.betamax;
import com.gneoxsolutions.betamax.message.Request;
import com.google.common.io.ByteStreams;
import java.io.IOException;
/**
* Standard {@link MatchRule} implementations.
*/
public enum MatchRules implements MatchRule {
method {
@Override
public boolean isMatch(Request a, Request b) {
return a.getMethod().equalsIgnoreCase(b.getMethod());
}
}, uri {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().equals(b.getUri());
}
}, host {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getHost().equals(b.getUri().getHost());
}
}, path {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getPath().equals(b.getUri().getPath());
}
}, port {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getPort() == b.getUri().getPort();
}
}, query {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getQuery().equals(b.getUri().getQuery());
}
}, authorization {
@Override
public boolean isMatch(Request a, Request b) {
return a.getHeader("Authorization").equals(b.getHeader("Authorization"));
}
}, accept {
@Override
public boolean isMatch(Request a, Request b) {
return a.getHeader("Accept").equals(b.getHeader("Accept"));
}
}, body {
@Override
public boolean isMatch(Request a, Request b) {
try {
return ByteStreams.equal(a.getBodyAsBinary(), b.getBodyAsBinary());
} catch (IOException e) {
// TODO: better exception type
throw new RuntimeException(e);
}
}
}
}
| betamax-core/src/main/java/com/gneoxsolutions/betamax/MatchRules.java | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gneoxsolutions.betamax;
import com.gneoxsolutions.betamax.message.Request;
import com.google.common.io.ByteStreams;
import java.io.IOException;
/**
* Standard {@link MatchRule} implementations.
*/
public enum MatchRules implements MatchRule {
method {
@Override
public boolean isMatch(Request a, Request b) {
return a.getMethod().equalsIgnoreCase(b.getMethod());
}
}, uri {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().equals(b.getUri());
}
}, host {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getHost().equals(b.getUri().getHost());
}
}, path {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getPath().equals(b.getUri().getPath());
}
}, port {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getPort() == b.getUri().getPort();
}
}, query {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getQuery().equals(b.getUri().getQuery());
}
}, fragment {
@Override
public boolean isMatch(Request a, Request b) {
return a.getUri().getFragment().equals(b.getUri().getFragment());
}
}, authorization {
@Override
public boolean isMatch(Request a, Request b) {
return a.getHeader("Authorization").equals(b.getHeader("Authorization"));
}
}, accept {
@Override
public boolean isMatch(Request a, Request b) {
return a.getHeader("Accept").equals(b.getHeader("Accept"));
}
}, body {
@Override
public boolean isMatch(Request a, Request b) {
try {
return ByteStreams.equal(a.getBodyAsBinary(), b.getBodyAsBinary());
} catch (IOException e) {
// TODO: better exception type
throw new RuntimeException(e);
}
}
}
}
| Also removing 'fragment' from the MatchRules because it's untested.
| betamax-core/src/main/java/com/gneoxsolutions/betamax/MatchRules.java | Also removing 'fragment' from the MatchRules because it's untested. |
|
Java | apache-2.0 | f0444718209848db6b512b2adff0d87537e3b121 | 0 | InfoSec812/vertx-web,vert-x3/vertx-web,sibay/vertx-web,InfoSec812/vertx-web,sibay/vertx-web,InfoSec812/vertx-web,sibay/vertx-web,sibay/vertx-web,vert-x3/vertx-web,aesteve/vertx-web,sibay/vertx-web,mystdeim/vertx-web,InfoSec812/vertx-web,vert-x3/vertx-web,aesteve/vertx-web,mystdeim/vertx-web,InfoSec812/vertx-web,sibay/vertx-web,mystdeim/vertx-web,aesteve/vertx-web,mystdeim/vertx-web,vert-x3/vertx-web,InfoSec812/vertx-web,vert-x3/vertx-web,aesteve/vertx-web,aesteve/vertx-web,mystdeim/vertx-web,mystdeim/vertx-web | package io.vertx.webclient.impl;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.streams.WriteStream;
import io.vertx.webclient.BodyCodec;
import io.vertx.webclient.spi.BodyStream;
/**
* @author <a href="mailto:[email protected]">Julien Viet</a>
*/
public class StreamingBodyCodec implements BodyCodec<Void> {
private final WriteStream<Buffer> stream;
public StreamingBodyCodec(WriteStream<Buffer> stream) {
this.stream = stream;
}
@Override
public void writeStream(Handler<AsyncResult<BodyStream<Void>>> handler) {
handler.handle(Future.succeededFuture(new BodyStream<Void>() {
Future<Void> fut = Future.future();
@Override
public Future<Void> state() {
return fut;
}
@Override
public void handle(Throwable cause) {
if (!fut.isComplete()) {
fut.fail(cause);
}
}
@Override
public WriteStream<Buffer> exceptionHandler(Handler<Throwable> handler) {
stream.exceptionHandler(handler);
return this;
}
@Override
public WriteStream<Buffer> write(Buffer data) {
stream.write(data);
return this;
}
@Override
public void end() {
stream.end();
if (!fut.isComplete()) {
fut.complete();
}
}
@Override
public WriteStream<Buffer> setWriteQueueMaxSize(int maxSize) {
stream.setWriteQueueMaxSize(maxSize);
return this;
}
@Override
public boolean writeQueueFull() {
return stream.writeQueueFull();
}
@Override
public WriteStream<Buffer> drainHandler(Handler<Void> handler) {
stream.drainHandler(handler);
return this;
}
}));
}
}
| vertx-web-client/src/main/java/io/vertx/webclient/impl/StreamingBodyCodec.java | package io.vertx.webclient.impl;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.streams.WriteStream;
import io.vertx.webclient.BodyCodec;
import io.vertx.webclient.spi.BodyStream;
/**
* @author <a href="mailto:[email protected]">Julien Viet</a>
*/
public class StreamingBodyCodec implements BodyCodec<Void> {
private final WriteStream<Buffer> stream;
public StreamingBodyCodec(WriteStream<Buffer> stream) {
this.stream = stream;
}
@Override
public void writeStream(Handler<AsyncResult<BodyStream<Void>>> handler) {
handler.handle(Future.succeededFuture(new BodyStream<Void>() {
Future<Void> fut = Future.future();
@Override
public Future<Void> state() {
return fut;
}
@Override
public void handle(Throwable cause) {
if (!fut.isComplete()) {
fut.fail(cause);
}
}
@Override
public WriteStream<Buffer> exceptionHandler(Handler<Throwable> handler) {
stream.exceptionHandler(handler);
return this;
}
@Override
public WriteStream<Buffer> write(Buffer data) {
stream.write(data);
return this;
}
@Override
public void end() {
stream.end();
if (!fut.isComplete()) {
fut.complete();
}
}
@Override
public WriteStream<Buffer> setWriteQueueMaxSize(int maxSize) {
return this;
}
@Override
public boolean writeQueueFull() {
return stream.writeQueueFull();
}
@Override
public WriteStream<Buffer> drainHandler(Handler<Void> handler) {
stream.drainHandler(handler);
return this;
}
}));
}
}
| Missing setWriteQueueMaxSize implementation on StreamingBodyCodec
| vertx-web-client/src/main/java/io/vertx/webclient/impl/StreamingBodyCodec.java | Missing setWriteQueueMaxSize implementation on StreamingBodyCodec |
|
Java | apache-2.0 | bb3b4e9660bb3f902f29922c623a25d9210dd098 | 0 | rimolive/rhiot,krissman/rhiot,finiteloopme/rhiot,rhiot/rhiot,rimolive/rhiot,krissman/rhiot,krissman/rhiot,jasonchaffee/camel-labs,lasombra/rhiot,krissman/rhiot,jasonchaffee/camel-labs,finiteloopme/rhiot,lasombra/rhiot,rhiot/rhiot,rimolive/rhiot,rimolive/rhiot,lasombra/rhiot,rhiot/rhiot,finiteloopme/rhiot,krissman/rhiot,rimolive/rhiot,krissman/rhiot,jasonchaffee/camel-labs,lasombra/rhiot,finiteloopme/rhiot,rimolive/rhiot,krissman/rhiot,rhiot/rhiot,rhiot/rhiot,rhiot/rhiot,rhiot/rhiot,rimolive/rhiot | /**
* Licensed to the Camel Labs under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.camellabs.component.pi4j.i2c.driver;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import com.github.camellabs.component.pi4j.i2c.I2CConsumer;
import com.github.camellabs.component.pi4j.i2c.I2CEndpoint;
import com.pi4j.io.i2c.I2CDevice;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Code from Marcus Hirt, 2015, Code From http://hirt.se/blog/?p=652
*/
public class BMP180Consumer extends I2CConsumer {
private static final transient Logger LOG = LoggerFactory.getLogger(BMP180Consumer.class);
// Calibration data
private static final int CALIBRATION_START = 0xAA;
private static final int CALIBRATION_END = 0xBF;
private final static short BMP180_CONTROL = 0xF4;
private final static short BMP180_TEMPDATA = 0xF6;
private final static short BMP180_PRESSUREDATA = 0xF6;
private final static byte BMP180_READTEMPCMD = 0x2E;
private final static byte BMP180_READPRESSURECMD = 0x34;
private BMP180OperatingMode operatingMode = BMP180OperatingMode.STANDARD;
// Calibration variables
private short AC1;
private short AC2;
private short AC3;
private int AC4;
private int AC5;
private int AC6;
private short B1;
private short B2;
private short MC;
private short MD;
public BMP180Consumer(I2CEndpoint endpoint, Processor processor, I2CDevice device) {
super(endpoint, processor, device);
}
/**
* Returns the temperature in degrees Celcius.
*
* @return the temperature in degrees Celcius.
* @throws IOException if there was communication problem
*/
private float readTemperature() throws IOException {
int UT = readRawTemp();
int X1 = ((UT - AC6) * AC5) >> 15;
int X2 = (MC << 11) / (X1 + MD);
int B5 = X1 + X2;
return ((B5 + 8) >> 4) / 10.0f;
}
/**
* Returns the pressure in Pascal.
*
* @return the pressure in Pascal.
* @throws IOException if there was communication problem
*/
private int readPressure() throws IOException {
long p = 0;
int UT = readRawTemp();
int UP = readRawPressure();
int X1 = ((UT - AC6) * AC5) >> 15;
int X2 = (MC << 11) / (X1 + MD);
int B5 = X1 + X2;
int B6 = B5 - 4000;
X1 = (B2 * ((B6 * B6) >> 12)) >> 11;
X2 = (AC2 * B6) >> 11;
int X3 = X1 + X2;
int B3 = (((AC1 * 4 + X3) << operatingMode.getOverSamplingSetting()) + 2) / 4;
X1 = (AC3 * B6) >> 13;
X2 = (B1 * ((B6 * B6) >> 12)) >> 16;
X3 = ((X1 + X2) + 2) >> 2;
long B4 = (AC4 * ((long)(X3 + 32768))) >> 15;
long B7 = ((long)UP - B3) * (50000 >> operatingMode.getOverSamplingSetting());
if (B7 < 0x80000000) {
p = (B7 * 2) / B4;
} else {
p = (B7 / B4) * 2;
}
X1 = (int)((p >> 8) * (p >> 8));
X1 = (X1 * 3038) >> 16;
X2 = (int)(-7357 * p) >> 16;
p = p + ((X1 + X2 + 3791) >> 4);
return (int)p;
}
/**
* Returns the raw temperature sensor data. Mostly for debugging.
*
* @return the raw temperature sensor data.
* @throws IOException if there was a communication problem
*/
private int readRawTemp() throws IOException {
write(BMP180_CONTROL, BMP180_READTEMPCMD);
sleep(50);
return readU16BigEndian(BMP180_TEMPDATA);
}
/**
* Returns the raw pressure sensor data. Mostly for debugging.
*
* @return the raw pressure sensor data.
* @throws IOException if there was a communication problem
*/
private int readRawPressure() throws IOException {
write(BMP180_CONTROL, BMP180_READPRESSURECMD);
sleep(operatingMode.getWaitTime());
return readU3(BMP180_PRESSUREDATA) >> (8 - operatingMode.getOverSamplingSetting());
}
public BMP180OperatingMode getOperatingMode() {
return operatingMode;
}
public void setOperatingMode(BMP180OperatingMode operatingMode) {
this.operatingMode = operatingMode;
}
@Override
protected void createBody(Exchange exchange) throws IOException {
BMP180Value body = new BMP180Value();
body.setPressure(readPressure());
body.setTemperature(readTemperature());
LOG.debug("" + body);
exchange.getIn().setBody(body);
}
/**
* Read 3 bytes unsigned.
*/
private int readU3(int address) throws IOException {
// TODO: Check if there is any potential performance benefit to reading
// them all at once into a byte array. It's probably translated to
// to consecutive byte reads anyways, so probably not.
int msb = read(address);
int lsb = read(address + 1);
int xlsb = read(address + 2);
return (msb << 16) + (lsb << 8) + xlsb;
}
protected void doStart() throws Exception {
super.doStart();
int totalBytes = CALIBRATION_END - CALIBRATION_START + 1;
byte[] bytes = new byte[totalBytes];
int bytesRead = read(CALIBRATION_START, bytes, 0, totalBytes);
if (bytesRead != totalBytes) {
throw new IOException("Could not read calibration data. Read " + bytes + " of " + totalBytes);
}
DataInputStream calibrationData = new DataInputStream(new ByteArrayInputStream(bytes));
AC1 = calibrationData.readShort();
AC2 = calibrationData.readShort();
AC3 = calibrationData.readShort();
AC4 = calibrationData.readUnsignedShort();
AC5 = calibrationData.readUnsignedShort();
AC6 = calibrationData.readUnsignedShort();
B1 = calibrationData.readShort();
B2 = calibrationData.readShort();
calibrationData.readShort(); // MB not used for anything it seems...
MC = calibrationData.readShort();
MD = calibrationData.readShort();
LOG.info("AC1:" + AC1 + ", AC2:" + AC2 + ", AC3:" + AC3 + ", AC4:" + AC4 + ", AC5:" + AC5 + ", AC6:" + AC6 + ", B1:" + B1 + ", B2:" + B2 + ", MC:" + MC + ", MD:" + MD);
}
}
| iot/components/camel-pi4j/src/main/java/com/github/camellabs/component/pi4j/i2c/driver/BMP180Consumer.java | /**
* Licensed to the Camel Labs under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.camellabs.component.pi4j.i2c.driver;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import com.github.camellabs.component.pi4j.i2c.I2CConsumer;
import com.github.camellabs.component.pi4j.i2c.I2CEndpoint;
import com.pi4j.io.i2c.I2CDevice;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Code from Marcus Hirt, 2015, Code From http://hirt.se/blog/?p=652
*/
public class BMP180Consumer extends I2CConsumer {
private static final transient Logger LOG = LoggerFactory.getLogger(BMP180Consumer.class);
// Calibration data
private static final int CALIBRATION_START = 0xAA;
private static final int CALIBRATION_END = 0xBF;
private final static short BMP180_CONTROL = 0xF4;
private final static short BMP180_TEMPDATA = 0xF6;
private final static short BMP180_PRESSUREDATA = 0xF6;
private final static byte BMP180_READTEMPCMD = 0x2E;
private final static byte BMP180_READPRESSURECMD = 0x34;
private BMP180OperatingMode mode = BMP180OperatingMode.STANDARD;
// Calibration variables
private short AC1;
private short AC2;
private short AC3;
private int AC4;
private int AC5;
private int AC6;
private short B1;
private short B2;
private short MC;
private short MD;
public BMP180Consumer(I2CEndpoint endpoint, Processor processor, I2CDevice device) {
super(endpoint, processor, device);
}
/**
* Returns the temperature in degrees Celcius.
*
* @return the temperature in degrees Celcius.
* @throws IOException if there was communication problem
*/
public float readTemperature() throws IOException {
int UT = readRawTemp();
int X1 = ((UT - AC6) * AC5) >> 15;
int X2 = (MC << 11) / (X1 + MD);
int B5 = X1 + X2;
return ((B5 + 8) >> 4) / 10.0f;
}
/**
* Returns the pressure in Pascal.
*
* @return the pressure in Pascal.
* @throws IOException if there was communication problem
*/
public int readPressure() throws IOException {
long p = 0;
int UT = readRawTemp();
int UP = readRawPressure();
int X1 = ((UT - AC6) * AC5) >> 15;
int X2 = (MC << 11) / (X1 + MD);
int B5 = X1 + X2;
int B6 = B5 - 4000;
X1 = (B2 * ((B6 * B6) >> 12)) >> 11;
X2 = (AC2 * B6) >> 11;
int X3 = X1 + X2;
int B3 = (((AC1 * 4 + X3) << mode.getOverSamplingSetting()) + 2) / 4;
X1 = (AC3 * B6) >> 13;
X2 = (B1 * ((B6 * B6) >> 12)) >> 16;
X3 = ((X1 + X2) + 2) >> 2;
long B4 = (AC4 * ((long)(X3 + 32768))) >> 15;
long B7 = ((long)UP - B3) * (50000 >> mode.getOverSamplingSetting());
if (B7 < 0x80000000) {
p = (B7 * 2) / B4;
} else {
p = (B7 / B4) * 2;
}
X1 = (int)((p >> 8) * (p >> 8));
X1 = (X1 * 3038) >> 16;
X2 = (int)(-7357 * p) >> 16;
p = p + ((X1 + X2 + 3791) >> 4);
return (int)p;
}
/**
* Returns the raw temperature sensor data. Mostly for debugging.
*
* @return the raw temperature sensor data.
* @throws IOException if there was a communication problem
*/
public int readRawTemp() throws IOException {
write(BMP180_CONTROL, BMP180_READTEMPCMD);
sleep(50);
return readU16BigEndian(BMP180_TEMPDATA);
}
/**
 * Returns the raw pressure sensor data. Mostly for debugging.
 *
 * @return the raw pressure sensor data.
 * @throws IOException if there was a communication problem
 */
public int readRawPressure() throws IOException {
// Trigger a pressure conversion. Both the wait time and the shift
// applied to the 3-byte result depend on the configured oversampling
// mode.
write(BMP180_CONTROL, BMP180_READPRESSURECMD);
sleep(mode.getWaitTime());
return readU3(BMP180_PRESSUREDATA) >> (8 - mode.getOverSamplingSetting());
}
/**
 * Builds the exchange body from a fresh pressure and temperature reading.
 * NOTE(review): readPressure() internally performs another raw temperature
 * read, so each invocation triggers two temperature conversions — confirm
 * whether B5 could be shared between the two readings.
 */
@Override
protected void createBody(Exchange exchange) throws IOException {
BMP180Value body = new BMP180Value();
body.setPressure(readPressure());
body.setTemperature(readTemperature());
// NOTE(review): "" + body concatenates eagerly even when debug logging is
// disabled; consider parameterized logging if LOG supports it.
LOG.debug("" + body);
exchange.getIn().setBody(body);
}
/**
 * Reads three consecutive registers and combines them into a single
 * unsigned 24-bit value (big-endian byte order).
 *
 * TODO: check whether a single bulk read of 3 bytes would be faster; it
 * is probably translated to consecutive byte reads anyway.
 */
private int readU3(int address) throws IOException {
    int high = read(address);
    int mid = read(address + 1);
    int low = read(address + 2);
    return (high << 16) + (mid << 8) + low;
}
/**
 * Reads the calibration EEPROM of the BMP180 and caches the compensation
 * coefficients used by {@code readTemperature()} and {@code readPressure()}.
 *
 * @throws Exception if the calibration data cannot be read completely
 */
@Override
protected void doStart() throws Exception {
    super.doStart();
    int totalBytes = CALIBRATION_END - CALIBRATION_START + 1;
    byte[] bytes = new byte[totalBytes];
    int bytesRead = read(CALIBRATION_START, bytes, 0, totalBytes);
    if (bytesRead != totalBytes) {
        // Report the number of bytes actually read; the previous message
        // concatenated the byte[] itself, printing an array reference.
        throw new IOException("Could not read calibration data. Read " + bytesRead + " of " + totalBytes);
    }
    DataInputStream calibrationData = new DataInputStream(new ByteArrayInputStream(bytes));
    AC1 = calibrationData.readShort();
    AC2 = calibrationData.readShort();
    AC3 = calibrationData.readShort();
    // AC4..AC6 are unsigned 16-bit values, hence readUnsignedShort()
    AC4 = calibrationData.readUnsignedShort();
    AC5 = calibrationData.readUnsignedShort();
    AC6 = calibrationData.readUnsignedShort();
    B1 = calibrationData.readShort();
    B2 = calibrationData.readShort();
    calibrationData.readShort(); // MB not used for anything it seems...
    MC = calibrationData.readShort();
    MD = calibrationData.readShort();
    LOG.info("AC1:" + AC1 + ", AC2:" + AC2 + ", AC3:" + AC3 + ", AC4:" + AC4 + ", AC5:" + AC5 + ", AC6:" + AC6 + ", B1:" + B1 + ", B2:" + B2 + ", MC:" + MC + ", MD:" + MD);
}
}
| add operationMode param | iot/components/camel-pi4j/src/main/java/com/github/camellabs/component/pi4j/i2c/driver/BMP180Consumer.java | add operationMode param |
|
Java | apache-2.0 | 2b57a33a61450029271fa42dcae5308a11e97727 | 0 | MZaratin-Larus/neo4art,MZaratin-Larus/neo4art,nico-Fritz/neo4art,neo4art/neo4art,MZaratin-Larus/neo4art,MZaratin-Larus/neo4art,nico-Fritz/neo4art,nico-Fritz/neo4art,MZaratin-Larus/neo4art,neo4art/neo4art,neo4art/neo4art,neo4art/neo4art,MZaratin-Larus/neo4art,nico-Fritz/neo4art,neo4art/neo4art,nico-Fritz/neo4art,neo4art/neo4art,nico-Fritz/neo4art | /**
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4art.graphdb.connection;
/**
 * Holds the Neo4j connection settings for neo4art. Every value can be
 * overridden via a system property of the same name; the second argument to
 * {@code System.getProperty} is the default used when the property is unset.
 *
 * NOTE(review): this is a pure constant holder with an implicit public
 * constructor; consider adding a private constructor to prevent
 * instantiation (would remove the default public one, so verify no caller
 * instantiates it).
 *
 * @author Lorenzo Speranzoni
 * @since 29 Mar 2015
 */
public class Neo4ArtGraphDatabase {
// Embedded graph store directory; "target" keeps builds machine-independent.
public static final String NEO4J_STORE_DIR = System.getProperty("NEO4J_STORE_DIR", "target");
// JDBC-style file URL derived from the store directory above.
public static final String NEO4J_PATH = System.getProperty("NEO4J_PATH", "jdbc:neo4j:file:" + NEO4J_STORE_DIR);
// REST endpoint and credentials for a Neo4j server instance.
public static final String NEO4J_URL = System.getProperty("NEO4J_URL", "http://localhost:7474");
public static final String NEO4J_USR = System.getProperty("NEO4J_USR", "neo4j");
public static final String NEO4J_PWD = System.getProperty("NEO4J_PWD", "neo4art");
}
| neo4art-commons/src/main/java/org/neo4art/graphdb/connection/Neo4ArtGraphDatabase.java | /**
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4art.graphdb.connection;
/**
* @author Lorenzo Speranzoni
* @since 29 Mar 2015
*/
public class Neo4ArtGraphDatabase {
public static final String NEO4J_STORE_DIR = System.getProperty("NEO4J_STORE_DIR", "/Users/lorenzo/Progetti/Neo4j/projects/neo4art/application/database/neo4j-community-2.2.0/data/graph.db");
public static final String NEO4J_PATH = System.getProperty("NEO4J_PATH", "jdbc:neo4j:file:" + NEO4J_STORE_DIR);
public static final String NEO4J_URL = System.getProperty("NEO4J_URL", "http://localhost:7474");
public static final String NEO4J_USR = System.getProperty("NEO4J_USR", "neo4j");
public static final String NEO4J_PWD = System.getProperty("NEO4J_PWD", "neo4art");
}
| modified wrong store dir
| neo4art-commons/src/main/java/org/neo4art/graphdb/connection/Neo4ArtGraphDatabase.java | modified wrong store dir |
|
Java | apache-2.0 | 1762b82c41ea4b56bf87678311d154c8defed960 | 0 | TheRealRasu/arx,bitraten/arx,fstahnke/arx,RaffaelBild/arx,kbabioch/arx,RaffaelBild/arx,COWYARD/arx,fstahnke/arx,bitraten/arx,kbabioch/arx,TheRealRasu/arx,jgaupp/arx,kentoa/arx,arx-deidentifier/arx,kentoa/arx,arx-deidentifier/arx,COWYARD/arx,tijanat/arx,tijanat/arx,jgaupp/arx | /*
* ARX: Powerful Data Anonymization
* Copyright (C) 2012 - 2014 Florian Kohlmayer, Fabian Prasser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.deidentifier.arx.gui.view.impl.common;
import java.util.Arrays;
import org.deidentifier.arx.gui.view.def.IComponent;
import org.eclipse.nebula.widgets.nattable.NatTable;
import org.eclipse.nebula.widgets.nattable.config.AbstractRegistryConfiguration;
import org.eclipse.nebula.widgets.nattable.config.CellConfigAttributes;
import org.eclipse.nebula.widgets.nattable.config.IConfigRegistry;
import org.eclipse.nebula.widgets.nattable.data.IColumnAccessor;
import org.eclipse.nebula.widgets.nattable.data.IDataProvider;
import org.eclipse.nebula.widgets.nattable.data.ListDataProvider;
import org.eclipse.nebula.widgets.nattable.grid.GridRegion;
import org.eclipse.nebula.widgets.nattable.grid.data.DefaultCornerDataProvider;
import org.eclipse.nebula.widgets.nattable.grid.layer.ColumnHeaderLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.CornerLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.GridLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.RowHeaderLayer;
import org.eclipse.nebula.widgets.nattable.layer.AbstractLayerTransform;
import org.eclipse.nebula.widgets.nattable.layer.CompositeLayer;
import org.eclipse.nebula.widgets.nattable.layer.DataLayer;
import org.eclipse.nebula.widgets.nattable.painter.cell.ICellPainter;
import org.eclipse.nebula.widgets.nattable.painter.cell.TextPainter;
import org.eclipse.nebula.widgets.nattable.painter.cell.decorator.BeveledBorderDecorator;
import org.eclipse.nebula.widgets.nattable.selection.SelectionLayer;
import org.eclipse.nebula.widgets.nattable.style.BorderStyle;
import org.eclipse.nebula.widgets.nattable.style.CellStyleAttributes;
import org.eclipse.nebula.widgets.nattable.style.DisplayMode;
import org.eclipse.nebula.widgets.nattable.style.HorizontalAlignmentEnum;
import org.eclipse.nebula.widgets.nattable.style.Style;
import org.eclipse.nebula.widgets.nattable.style.VerticalAlignmentEnum;
import org.eclipse.nebula.widgets.nattable.util.GUIHelper;
import org.eclipse.nebula.widgets.nattable.viewport.ViewportLayer;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.widgets.Composite;
/**
* A virtual table implemented with NatTable
*
* @author Fabian Prasser
*/
public class ComponentTable implements IComponent {
/**
 * Registers the visual style (painter, colors, font, alignment) for one
 * header region of the grid, and mirrors every setting onto the corner
 * region so the corner matches the headers.
 * @author Fabian Prasser
 */
private static class DefaultHeaderStyleConfiguration extends AbstractRegistryConfiguration {
// Font is taken from the parent composite so headers match the application
private final Font font;
private final Color bgColor = GUIHelper.COLOR_WIDGET_BACKGROUND;
private final Color fgColor = GUIHelper.COLOR_WIDGET_FOREGROUND;
private final Color gradientBgColor = GUIHelper.COLOR_WHITE;
private final Color gradientFgColor = GUIHelper.getColor(136, 212, 215);
private final HorizontalAlignmentEnum hAlign = HorizontalAlignmentEnum.CENTER;
private final VerticalAlignmentEnum vAlign = VerticalAlignmentEnum.MIDDLE;
// No explicit border: the beveled decorator below draws the cell edges
private final BorderStyle borderStyle = null;
private final ICellPainter cellPainter = new BeveledBorderDecorator(new TextPainter());
private final Boolean renderGridLines = Boolean.FALSE;
// Grid region this configuration applies to (row or column header)
private final String region;
/**
 * Creates a new instance
 * @param parent composite whose font is reused for the header cells
 * @param region the grid region, e.g. GridRegion.ROW_HEADER or
 *            GridRegion.COLUMN_HEADER
 */
public DefaultHeaderStyleConfiguration(Composite parent, String region){
this.font = parent.getFont();
this.region = region;
}
@Override
public void configureRegistry(IConfigRegistry configRegistry) {
//configure the painter
configRegistry.registerConfigAttribute(
CellConfigAttributes.CELL_PAINTER,
cellPainter,
DisplayMode.NORMAL,
region);
// ... and the same painter for the corner region
configRegistry.registerConfigAttribute(
CellConfigAttributes.CELL_PAINTER,
cellPainter,
DisplayMode.NORMAL,
GridRegion.CORNER);
//configure whether to render grid lines or not
//e.g. for the BeveledBorderDecorator the rendering of the grid lines should be disabled
configRegistry.registerConfigAttribute(
CellConfigAttributes.RENDER_GRID_LINES,
renderGridLines,
DisplayMode.NORMAL,
region);
configRegistry.registerConfigAttribute(
CellConfigAttributes.RENDER_GRID_LINES,
renderGridLines,
DisplayMode.NORMAL,
GridRegion.CORNER);
//configure the normal style
Style cellStyle = new Style();
cellStyle.setAttributeValue(CellStyleAttributes.BACKGROUND_COLOR, bgColor);
cellStyle.setAttributeValue(CellStyleAttributes.FOREGROUND_COLOR, fgColor);
cellStyle.setAttributeValue(CellStyleAttributes.GRADIENT_BACKGROUND_COLOR, gradientBgColor);
cellStyle.setAttributeValue(CellStyleAttributes.GRADIENT_FOREGROUND_COLOR, gradientFgColor);
cellStyle.setAttributeValue(CellStyleAttributes.HORIZONTAL_ALIGNMENT, hAlign);
cellStyle.setAttributeValue(CellStyleAttributes.VERTICAL_ALIGNMENT, vAlign);
cellStyle.setAttributeValue(CellStyleAttributes.BORDER_STYLE, borderStyle);
cellStyle.setAttributeValue(CellStyleAttributes.FONT, font);
configRegistry.registerConfigAttribute(
CellConfigAttributes.CELL_STYLE,
cellStyle,
DisplayMode.NORMAL,
region);
// Mirror the style onto the corner as well
configRegistry.registerConfigAttribute(
CellConfigAttributes.CELL_STYLE,
cellStyle,
DisplayMode.NORMAL,
GridRegion.CORNER);
}
}
/**
 * The body layer stack: data layer wrapped by a selection layer wrapped by
 * a viewport layer (which provides scrolling over the virtual table).
 * @author Fabian Prasser
 */
private static class BodyLayerStack extends AbstractLayerTransform {
/** Selection layer*/
private SelectionLayer selectionLayer;
/** Data layer*/
private DataLayer dataLayer;
/**
 * Creates a new instance
 * @param dataProvider supplies the body cell values
 */
public BodyLayerStack(IDataProvider dataProvider) {
dataLayer = new DataLayer(dataProvider);
selectionLayer = new SelectionLayer(dataLayer);
ViewportLayer viewportLayer = new ViewportLayer(selectionLayer);
setUnderlyingLayer(viewportLayer);
}
/**
 * Returns the selection layer (needed by the header layers so selection
 * state is reflected in the headers)
 * @return
 */
public SelectionLayer getSelectionLayer() {
return selectionLayer;
}
/**
 * Returns the data layer (needed for programmatic column resizing)
 * @return
 */
public DataLayer getDataLayer(){
return dataLayer;
}
}
/**
 * The column header layer stack, styled with DefaultHeaderStyleConfiguration.
 * NOTE(review): this nested class is public and non-static (it keeps an
 * implicit reference to the enclosing ComponentTable) although it uses no
 * enclosing state; it could be private static like the other stacks.
 * @author Fabian Prasser
 */
public class ColumnHeaderLayerStack extends AbstractLayerTransform {
/**
 * Creates a new instance
 * @param parent composite whose font is used for the header style
 * @param dataProvider supplies the header labels
 * @param bodyLayer body stack the header is attached to
 */
public ColumnHeaderLayerStack(Composite parent,
IDataProvider dataProvider,
BodyLayerStack bodyLayer) {
DataLayer dataLayer = new DataLayer(dataProvider);
// Last constructor argument false: do not apply NatTable's default
// configuration — presumably so only our custom style applies; confirm
// against the ColumnHeaderLayer API.
ColumnHeaderLayer colHeaderLayer = new ColumnHeaderLayer(dataLayer,
bodyLayer,
bodyLayer.getSelectionLayer(),
false);
colHeaderLayer.addConfiguration(new DefaultHeaderStyleConfiguration(parent, GridRegion.COLUMN_HEADER));
setUnderlyingLayer(colHeaderLayer);
}
}
/**
 * The row header layer stack, styled with DefaultHeaderStyleConfiguration.
 * @author Fabian Prasser
 */
private static class RowHeaderLayerStack extends AbstractLayerTransform {
/**
 * Creates a new instance
 * @param parent composite whose font is used for the header style
 * @param dataProvider supplies the row header labels
 * @param bodyLayer body stack the header is attached to
 */
public RowHeaderLayerStack(Composite parent,
IDataProvider dataProvider,
BodyLayerStack bodyLayer) {
// 50/20 are presumably the default cell width/height in pixels —
// confirm against the DataLayer constructor documentation.
DataLayer dataLayer = new DataLayer(dataProvider, 50, 20);
// false: skip the default configuration, only the custom style applies
RowHeaderLayer rowHeaderLayer = new RowHeaderLayer(dataLayer, bodyLayer, bodyLayer.getSelectionLayer(), false);
rowHeaderLayer.addConfiguration(new DefaultHeaderStyleConfiguration(parent, GridRegion.ROW_HEADER));
setUnderlyingLayer(rowHeaderLayer);
}
}
/** The parent composite the NatTable is created in */
private final Composite parent;
/** The underlying nattable instance; recreated by setTable()/setData(), null until first use */
private NatTable table = null;
/** The layout data, remembered so it can be re-applied when the table is recreated */
private Object layoutData = null;
/** The column layout policy used by the resize handler */
private ComponentTableLayout layout = new ComponentTableLayout(true, 100);
/**
 * Creates a new instance. No table widget is created until
 * setTable()/setData() is called.
 * @param parent the composite to create the table in
 */
public ComponentTable(Composite parent) {
this.parent = parent;
}
/**
 * Updates the underlying table. Hides the row header: only a body and a
 * column header are composed, no row header or corner.
 * @param dataProvider supplies the body cell values
 * @param columns column header labels; may be null to show column indices
 */
public void setTable(IDataProvider dataProvider, String[] columns) {
// Disable redrawing while the old widget is torn down and rebuilt
this.parent.setRedraw(false);
// Dispose
if (table != null && !table.isDisposed()) {
table.dispose();
}
// Create data providers
IDataProvider columnHeaderDataProvider = getHeaderDataProvider(dataProvider, columns, false);
// Create layers: a 1x2 composite with the column header above the body
BodyLayerStack bodyLayer = new BodyLayerStack(dataProvider);
ColumnHeaderLayerStack columnHeaderLayer = new ColumnHeaderLayerStack(parent, columnHeaderDataProvider, bodyLayer);
CompositeLayer compositeLayer = new CompositeLayer(1, 2);
compositeLayer.setChildLayer(GridRegion.BODY, bodyLayer, 0, 1);
compositeLayer.setChildLayer(GridRegion.COLUMN_HEADER, columnHeaderLayer, 0, 0);
// Create table; no corner layer is passed to the width handler since
// there is no row header to account for
table = new NatTable(parent, compositeLayer);
addColumnWidthHandler(table, dataProvider, bodyLayer.getDataLayer(), null);
// Re-apply remembered layout data to the new widget
if (this.layoutData != null) {
table.setLayoutData(layoutData);
}
// Redraw
this.parent.setRedraw(true);
this.parent.layout(true);
}
/**
 * Sets the column layout policy. When a live table exists, the parent is
 * laid out again so the new policy takes effect on the next resize.
 *
 * @param layout the layout policy to apply
 */
public void setLayout(ComponentTableLayout layout) {
    this.layout = layout;
    NatTable current = this.table;
    if (current == null || current.isDisposed()) {
        return;
    }
    parent.layout(true);
}
/**
 * Updates the underlying table from a 2D string array. Hides the row
 * header. Convenience overload delegating to
 * setTable(IDataProvider, String[]).
 * @param data the body cell values, row-major
 * @param columns column header labels; may be null to show column indices
 */
public void setTable(String[][] data, String[] columns) {
setTable(getDataProvider(data), columns);
}
/**
 * Updates the underlying table with default (index-based) row and column
 * headers.
 * @param dataProvider supplies the body cell values
 */
public void setData(IDataProvider dataProvider) {
this.setData(dataProvider, null, null);
}
/**
 * Updates the underlying table from a 2D string array with default
 * (index-based) row and column headers.
 * @param data the body cell values, row-major
 */
public void setData(String[][] data) {
this.setData(data, null, null);
}
/**
 * Updates the underlying data, building a full grid with row header,
 * column header and corner.
 * @param dataProvider supplies the body cell values
 * @param rows row header labels; may be null to show row indices
 * @param columns column header labels; may be null to show column indices
 */
public void setData(IDataProvider dataProvider, String[] rows, String[] columns) {
// Disable redrawing while the old widget is torn down and rebuilt
this.parent.setRedraw(false);
// Dispose
if (table != null && !table.isDisposed()) {
table.dispose();
}
// Create data providers for both headers and the corner cell
IDataProvider rowHeaderDataProvider = getHeaderDataProvider(dataProvider, rows, true);
IDataProvider columnHeaderDataProvider = getHeaderDataProvider(dataProvider, columns, false);
IDataProvider cornerDataProvider = new DefaultCornerDataProvider(columnHeaderDataProvider, rowHeaderDataProvider);
// Create layers and compose them into a grid
BodyLayerStack bodyLayer = new BodyLayerStack(dataProvider);
ColumnHeaderLayerStack columnHeaderLayer = new ColumnHeaderLayerStack(parent, columnHeaderDataProvider, bodyLayer);
RowHeaderLayerStack rowHeaderLayer = new RowHeaderLayerStack(parent, rowHeaderDataProvider, bodyLayer);
CornerLayer cornerLayer = new CornerLayer(new DataLayer(cornerDataProvider), rowHeaderLayer, columnHeaderLayer);
GridLayer gridLayer = new GridLayer(bodyLayer, columnHeaderLayer, rowHeaderLayer, cornerLayer);
// Create table; the corner layer lets the width handler subtract the
// row header width from the available space
table = new NatTable(parent, gridLayer);
addColumnWidthHandler(table, dataProvider, bodyLayer.getDataLayer(), cornerLayer);
// Re-apply remembered layout data to the new widget
if (this.layoutData != null) {
table.setLayoutData(layoutData);
}
// Redraw
this.parent.setRedraw(true);
this.parent.layout(true);
}
/**
 * Attaches a resize listener that keeps the body columns sized: columns
 * keep the configured fixed width when they would overflow the table,
 * otherwise they are stretched to fill the available width.
 *
 * @param table the table to listen on
 * @param dataProvider provides the number of body columns
 * @param dataLayer the layer whose column widths are adjusted
 * @param cornerLayer corner layer whose width (the row header width) is
 *            subtracted from the available space; may be null when no row
 *            header is shown
 */
private void addColumnWidthHandler(final NatTable table,
                                   final IDataProvider dataProvider,
                                   final DataLayer dataLayer,
                                   final CornerLayer cornerLayer) {
    table.addControlListener(new ControlAdapter() {
        @Override
        public void controlResized(ControlEvent arg0) {
            parent.setRedraw(false);
            // Prepare
            int width = table.getSize().x;
            int columns = dataProvider.getColumnCount();
            // Width consumed by the row header, if any
            int cornerWidth = (cornerLayer != null) ? cornerLayer.getColumnWidthByPosition(0) : 0;
            // Check if content is larger than the table
            int total = layout.columnWidth * columns + cornerWidth;
            if (columns == 0 || total >= width) {
                // Wider than the table: keep the fixed column width
                for (int i = 0; i < columns; i++) {
                    dataLayer.setColumnWidthByPosition(i, layout.columnWidth, i == columns - 1);
                }
            } else {
                // If not, extend the columns to cover the whole area
                int columnWidth = (int) Math.round((double) (width - cornerWidth) / (double) columns);
                total = cornerWidth;
                for (int i = 0; i < columns; i++) {
                    if (total + columnWidth > width) {
                        // Clamp so the accumulated width never exceeds the table
                        columnWidth = width - total;
                    }
                    dataLayer.setColumnWidthByPosition(i, columnWidth, i == columns - 1);
                    total += columnWidth;
                }
            }
            parent.setRedraw(true);
            table.redraw();
        }
    });
}
/**
 * Updates the underlying data from a 2D string array, building a full grid
 * with row header, column header and corner.
 * @param data the body cell values, row-major
 * @param rows row header labels; may be null to show row indices
 * @param columns column header labels; may be null to show column indices
 */
public void setData(String[][] data, String[] rows, String[] columns) {
setData(getDataProvider(data), rows, columns);
}
/**
 * Stores the layout data and applies it to the current table, if any.
 * The data is remembered so it can be re-applied when the table is
 * recreated by setTable(...)/setData(...).
 *
 * @param data the SWT layout data object
 */
public void setLayoutData(Object data) {
    this.layoutData = data;
    // Also guard against a disposed widget: setEmpty() disposes the table
    // without clearing the field, and SWT widgets throw once disposed.
    if (table != null && !table.isDisposed()) {
        table.setLayoutData(data);
    }
}
/**
 * Creates a header data provider for either the row or the column header.
 * When no explicit labels are given, the cell index itself is used as the
 * label.
 *
 * @param data the body data provider (determines row/column counts)
 * @param header explicit header labels; may be null to fall back to indices
 * @param row true for the row header, false for the column header
 */
private IDataProvider getHeaderDataProvider(final IDataProvider data,
                                            final String[] header,
                                            final boolean row) {
    // One anonymous class replaces the two near-identical ones: only
    // getDataValue differed between the null- and non-null-header cases.
    return new IDataProvider() {
        @Override
        public int getColumnCount() {
            // A row header has exactly one column
            return row ? 1 : data.getColumnCount();
        }

        @Override
        public Object getDataValue(int columnIndex, int rowIndex) {
            // A row header varies by row, a column header by column. The
            // previous null-header fallback returned the opposite index
            // (always 0 for every cell); the non-null branch already
            // indexed header[] this way.
            int index = row ? rowIndex : columnIndex;
            return (header == null) ? Integer.valueOf(index) : header[index];
        }

        @Override
        public int getRowCount() {
            // A column header has exactly one row
            return row ? data.getRowCount() : 1;
        }

        @Override
        public void setDataValue(int columnIndex, int rowIndex, Object value) {
            // Headers are read-only; ignore
        }
    };
}
/**
 * Wraps a row-major 2D string array in a NatTable data provider. The
 * column count is taken from the first row; an empty or null array yields
 * zero columns.
 * @param data the body cell values, row-major
 */
private IDataProvider getDataProvider(final String[][] data) {
return new ListDataProvider<String[]>(Arrays.asList(data), new IColumnAccessor<String[]>(){
@Override
public int getColumnCount() {
// Guard against null/empty data; assumes all rows have the length
// of the first row
return data==null || data.length==0 || data[0]==null ? 0 : data[0].length;
}
@Override
public Object getDataValue(String[] arg0, int arg1) {
return arg0[arg1];
}
@Override
public void setDataValue(String[] arg0, int arg1, Object arg2) {
// Edits write straight back into the caller's array
arg0[arg1] = arg2.toString();
}
});
}
/**
 * Disposes the current table, leaving the component empty. A subsequent
 * call to setTable(...)/setData(...) recreates it.
 */
public void setEmpty() {
    if (this.table == null || this.table.isDisposed()) return;
    this.parent.setRedraw(false);
    this.table.dispose();
    // Drop the stale reference so later calls (e.g. setLayoutData) see
    // "no table" instead of a disposed widget.
    this.table = null;
    this.parent.setRedraw(true);
    this.parent.layout(true);
}
} | src/gui/org/deidentifier/arx/gui/view/impl/common/ComponentTable.java | /*
* ARX: Powerful Data Anonymization
* Copyright (C) 2012 - 2014 Florian Kohlmayer, Fabian Prasser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.deidentifier.arx.gui.view.impl.common;
import java.util.Arrays;
import org.deidentifier.arx.gui.view.def.IComponent;
import org.eclipse.nebula.widgets.nattable.NatTable;
import org.eclipse.nebula.widgets.nattable.data.IColumnAccessor;
import org.eclipse.nebula.widgets.nattable.data.IDataProvider;
import org.eclipse.nebula.widgets.nattable.data.ListDataProvider;
import org.eclipse.nebula.widgets.nattable.grid.GridRegion;
import org.eclipse.nebula.widgets.nattable.grid.data.DefaultCornerDataProvider;
import org.eclipse.nebula.widgets.nattable.grid.layer.ColumnHeaderLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.CornerLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.GridLayer;
import org.eclipse.nebula.widgets.nattable.grid.layer.RowHeaderLayer;
import org.eclipse.nebula.widgets.nattable.layer.AbstractLayerTransform;
import org.eclipse.nebula.widgets.nattable.layer.CompositeLayer;
import org.eclipse.nebula.widgets.nattable.layer.DataLayer;
import org.eclipse.nebula.widgets.nattable.selection.SelectionLayer;
import org.eclipse.nebula.widgets.nattable.viewport.ViewportLayer;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.widgets.Composite;
/**
* A virtual table implemented with NatTable
*
* @author Fabian Prasser
*/
public class ComponentTable implements IComponent {
/**
* The body layer
* @author Fabian Prasser
*/
private static class BodyLayerStack extends AbstractLayerTransform {
/** Selection layer*/
private SelectionLayer selectionLayer;
/** Data layer*/
private DataLayer dataLayer;
/**
* Creates a new instance
* @param dataProvider
*/
public BodyLayerStack(IDataProvider dataProvider) {
dataLayer = new DataLayer(dataProvider);
selectionLayer = new SelectionLayer(dataLayer);
ViewportLayer viewportLayer = new ViewportLayer(selectionLayer);
setUnderlyingLayer(viewportLayer);
}
/**
* Returns the selection layer
* @return
*/
public SelectionLayer getSelectionLayer() {
return selectionLayer;
}
/**
* Returns the data layer
* @return
*/
public DataLayer getDataLayer(){
return dataLayer;
}
}
/**
* The column layer
* @author Fabian Prasser
*/
public class ColumnHeaderLayerStack extends AbstractLayerTransform {
/**
* Creates a new instance
* @param dataProvider
* @param bodyLayer
*/
public ColumnHeaderLayerStack(IDataProvider dataProvider,
BodyLayerStack bodyLayer) {
DataLayer dataLayer = new DataLayer(dataProvider);
ColumnHeaderLayer colHeaderLayer = new ColumnHeaderLayer(dataLayer,
bodyLayer,
bodyLayer.getSelectionLayer());
setUnderlyingLayer(colHeaderLayer);
}
}
/**
* The row layer
* @author Fabian Prasser
*/
private static class RowHeaderLayerStack extends AbstractLayerTransform {
/**
* Creates a new instance
* @param dataProvider
* @param bodyLayer
*/
public RowHeaderLayerStack(IDataProvider dataProvider,
BodyLayerStack bodyLayer) {
DataLayer dataLayer = new DataLayer(dataProvider, 50, 20);
RowHeaderLayer rowHeaderLayer = new RowHeaderLayer(dataLayer, bodyLayer, bodyLayer.getSelectionLayer());
setUnderlyingLayer(rowHeaderLayer);
}
}
/** The parent*/
private final Composite parent;
/** The underlying nattable instance*/
private NatTable table = null;
/** The layout data*/
private Object layoutData = null;
/** The layout*/
private ComponentTableLayout layout = new ComponentTableLayout(true, 100);
/**
* Creates a new instance
* @param parent
*/
public ComponentTable(Composite parent) {
this.parent = parent;
}
/**
* Updates the underlying table. Hides the row header.
* @param dataProvider
* @param columns
*/
public void setTable(IDataProvider dataProvider, String[] columns) {
// Disable redrawing
this.parent.setRedraw(false);
// Dispose
if (table != null && !table.isDisposed()) {
table.dispose();
}
// Create data providers
IDataProvider columnHeaderDataProvider = getHeaderDataProvider(dataProvider, columns, false);
// Create layers
BodyLayerStack bodyLayer = new BodyLayerStack(dataProvider);
ColumnHeaderLayerStack columnHeaderLayer = new ColumnHeaderLayerStack(columnHeaderDataProvider, bodyLayer);
CompositeLayer compositeLayer = new CompositeLayer(1, 2);
compositeLayer.setChildLayer(GridRegion.BODY, bodyLayer, 0, 1);
compositeLayer.setChildLayer(GridRegion.COLUMN_HEADER, columnHeaderLayer, 0, 0);
// Create table
table = new NatTable(parent, compositeLayer);
addColumnWidthHandler(table, dataProvider, bodyLayer.getDataLayer(), null);
// Set layout
if (this.layoutData != null) {
table.setLayoutData(layoutData);
}
// Redraw
this.parent.setRedraw(true);
this.parent.layout(true);
}
/**
* Sets the layout
* @param layout
*/
public void setLayout(ComponentTableLayout layout){
this.layout = layout;
if (this.table != null && !this.table.isDisposed()) {
parent.layout(true);
}
}
/**
* Updates the underlying table. Hides the row header.
* @param data
* @param columns
*/
public void setTable(String[][] data, String[] columns) {
setTable(getDataProvider(data), columns);
}
/**
* Updates the underlying table
* @param data
*/
public void setData(IDataProvider dataProvider) {
this.setData(dataProvider, null, null);
}
/**
* Updates the underlying table
* @param data
*/
public void setData(String[][] data) {
this.setData(data, null, null);
}
/**
* Updates the underlying data
* @param dataProvider
* @param rows May be null
* @param columns May be null
*/
public void setData(IDataProvider dataProvider, String[] rows, String[] columns) {
// Disable redrawing
this.parent.setRedraw(false);
// Dispose
if (table != null && !table.isDisposed()) {
table.dispose();
}
// Create data providers
IDataProvider rowHeaderDataProvider = getHeaderDataProvider(dataProvider, rows, true);
IDataProvider columnHeaderDataProvider = getHeaderDataProvider(dataProvider, columns, false);
IDataProvider cornerDataProvider = new DefaultCornerDataProvider(columnHeaderDataProvider, rowHeaderDataProvider);
// Create layers
BodyLayerStack bodyLayer = new BodyLayerStack(dataProvider);
ColumnHeaderLayerStack columnHeaderLayer = new ColumnHeaderLayerStack(columnHeaderDataProvider, bodyLayer);
RowHeaderLayerStack rowHeaderLayer = new RowHeaderLayerStack(rowHeaderDataProvider, bodyLayer);
CornerLayer cornerLayer = new CornerLayer(new DataLayer(cornerDataProvider), rowHeaderLayer, columnHeaderLayer);
GridLayer gridLayer = new GridLayer(bodyLayer, columnHeaderLayer, rowHeaderLayer, cornerLayer);
// Create table
table = new NatTable(parent, gridLayer);
addColumnWidthHandler(table, dataProvider, bodyLayer.getDataLayer(), cornerLayer);
// Set layout
if (this.layoutData != null) {
table.setLayoutData(layoutData);
}
// Redraw
this.parent.setRedraw(true);
this.parent.layout(true);
}
/**
* Adds a handler for automatically resizing columns
* @param table
* @param dataProvider
* @param dataLayer
* @param cornerLayer
*/
private void addColumnWidthHandler(final NatTable table,
final IDataProvider dataProvider,
final DataLayer dataLayer,
final CornerLayer cornerLayer) {
table.addControlListener(new ControlAdapter(){
public void controlResized(ControlEvent arg0) {
parent.setRedraw(false);
// Prepare
int width = table.getSize().x;
// Check if larger than parent
int columns = dataProvider.getColumnCount();
int total = layout.columnWidth * columns;
total += cornerLayer != null ? cornerLayer.getColumnWidthByPosition(0) : 0;
if (total >= width) {
for (int i=0; i<columns; i++){
dataLayer.setColumnWidthByPosition(i, layout.columnWidth, i==columns-1);
}
} else {
// If not, extend to cover the whole area
int columnWidth = width;
columnWidth -= (cornerLayer != null) ? cornerLayer.getColumnWidthByPosition(0) : 0;
columnWidth = (int)Math.round((double)columnWidth / (double)dataProvider.getColumnCount());
total = (cornerLayer != null) ? cornerLayer.getColumnWidthByPosition(0) : 0;
for (int i=0; i<columns; i++){
if (total + columnWidth > width) {
columnWidth = width - total;
}
dataLayer.setColumnWidthByPosition(i, columnWidth, i==columns-1);
total += columnWidth;
}
}
parent.setRedraw(true);
table.redraw();
}
});
}
/**
* Updates the underlying data
* @param data
* @param rows May be null
* @param columns May be null
*/
public void setData(String[][] data, String[] rows, String[] columns) {
setData(getDataProvider(data), rows, columns);
}
/**
* Sets the layout data
* @param data
*/
public void setLayoutData(Object data){
this.layoutData = data;
if (table != null) table.setLayoutData(data);
}
private IDataProvider getHeaderDataProvider(final IDataProvider data,
final String[] header,
final boolean row) {
if (header==null) {
return new IDataProvider(){
@Override
public int getColumnCount() {
return row ? 1 : data.getColumnCount();
}
@Override
public Object getDataValue(int arg0, int arg1) {
return row ? arg0 : arg1;
}
@Override
public int getRowCount() {
return row ? data.getRowCount() : 1;
}
@Override
public void setDataValue(int arg0, int arg1, Object arg2) {
// Ignore
}
};
} else {
return new IDataProvider(){
@Override
public int getColumnCount() {
return row ? 1 : data.getColumnCount();
}
@Override
public Object getDataValue(int arg0, int arg1) {
return row ? header[arg1] : header[arg0];
}
@Override
public int getRowCount() {
return row ? data.getRowCount() : 1;
}
@Override
public void setDataValue(int arg0, int arg1, Object arg2) {
// Ignore
}
};
}
}
private IDataProvider getDataProvider(final String[][] data) {
return new ListDataProvider<String[]>(Arrays.asList(data), new IColumnAccessor<String[]>(){
@Override
public int getColumnCount() {
return data==null || data.length==0 || data[0]==null ? 0 : data[0].length;
}
@Override
public Object getDataValue(String[] arg0, int arg1) {
return arg0[arg1];
}
@Override
public void setDataValue(String[] arg0, int arg1, Object arg2) {
arg0[arg1] = arg2.toString();
}
});
}
/**
* Empties the table
*/
public void setEmpty() {
if (this.table == null || this.table.isDisposed()) return;
this.parent.setRedraw(false);
this.table.dispose();
this.parent.setRedraw(true);
this.parent.layout(true);
}
} | Harmonize design of column and row header | src/gui/org/deidentifier/arx/gui/view/impl/common/ComponentTable.java | Harmonize design of column and row header |
|
Java | apache-2.0 | 221832eda3f86ade3eb8829fbe30ba4078c29388 | 0 | openfurther/further-open-core,openfurther/further-open-core | /**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.fqe.ds.model.further.export;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.base.Joiner;
import edu.utah.further.core.api.collections.CollectionUtil;
import edu.utah.further.core.api.exception.ApplicationException;
import edu.utah.further.dts.api.domain.concept.DtsConcept;
import edu.utah.further.dts.api.domain.namespace.DtsNamespace;
import edu.utah.further.dts.api.service.DtsOperationService;
import edu.utah.further.fqe.ds.api.domain.AbstractQueryContext;
import edu.utah.further.fqe.ds.api.domain.ExportContext;
import edu.utah.further.fqe.ds.api.domain.Exporter;
import edu.utah.further.ds.further.model.impl.domain.Observation;
import edu.utah.further.ds.further.model.impl.domain.Person;
/**
* A comma separated value implementation of an {@link Exporter}
* <p>
* -----------------------------------------------------------------------------------<br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author Rich Hansen {@code <[email protected]>}
* @version Jul 31, 2014
*/
@Service("csvExporter")
@Transactional
public final class CsvExporterImpl implements Exporter
{
/**
* A logger that helps identify this class' printouts.
*/
private static final Logger log = getLogger(AbstractQueryContext.class);
/**
* String used to designate that a particular code was not found in DTS
*/
private static final String NOT_FOUND = "NOT_FOUND";
/**
* Strings used to find Demographic map entries
*/
public static final String GENDER_PERSON_SOURCE_CD = "gender";
public static final String BIRTH_DATE_PERSON_SOURCE_CD = "birthdate";
public static final String BIRTH_YEAR_PERSON_SOURCE_CD = "birthyear";
public static final String BIRTH_MONTH_PERSON_SOURCE_CD = "birthmonth";
public static final String BIRTH_DAY_PERSON_SOURCE_CD = "birthday";
public static final String EDUCATION_PERSON_SOURCE_CD = "education";
public static final String MULTI_BIRTH_IND_PERSON_SOURCE_CD = "multibirthind";
public static final String MULTI_BIRTH_NUM_PERSON_SOURCE_CD = "multibirthnum";
public static final String DEATH_DATE_PERSON_SOURCE_CD = "deathdate";
public static final String DEATH_YEAR_PERSON_SOURCE_CD = "deathyear";
public static final String PEDIGREE_PERSON_SOURCE_CD = "pedigree";
public static final String ETHNICITY_PERSON_SOURCE_CD = "ethnicity";
public static final String RACE_PERSON_SOURCE_CD = "race";
public static final String RELIGION_PERSON_SOURCE_CD = "religion";
public static final String PRIMARYLANGUAGE_PERSON_SOURCE_CD = "primarylanguage";
public static final String MARITAL_PERSON_SOURCE_CD = "marital";
public static final String CAUSEOFDEATH_PERSON_SOURCE_CD = "causeofdeath";
public static final String VITALSTATUS_PERSON_SOURCE_CD = "vitalstatus";
// ========================= DEPENDENCIES ==============================
/**
* Terminology services
*/
@Autowired
private DtsOperationService dos;
/**
* A prefix to namespace mapper. Prefixes that do not require a namespace -1.
*/
@Resource(name = "prefixMapper")
private Map<String, Integer> prefixMapper;
// ========================= IMPLEMENTATION: Exporter =======
/*
* (non-Javadoc)
*
* @see edu.utah.further.fqe.ds.api.domain.Exporter#format(java.util.List,
* edu.utah.further.fqe.ds.api.domain.ExportContext)
*/
@Override
@SuppressWarnings("unchecked")
public <F> F format(final List<?> results, final ExportContext exportContext)
{
if (results == null || results.size() == 0)
{
throw new ApplicationException(
"No results found. Your query may have returned zero results. "
+ "If you think this is an error, ensure that you are "
+ "running not running a count only query.");
}
final Class<?> resultClazz = results.get(0).getClass();
// Handle Person results
if (resultClazz.equals(Person.class))
{
// We've already checked the type
final List<Person> persons = (List<Person>) results;
final Map<String, String> nameMapper = getCodeToNameMap(persons);
// Build the CSV header
final StringBuilder sb = new StringBuilder();
sb.append(Joiner.on(",").join(createPersonHeaderList())
+ System.getProperty("line.separator"));
// Build the CSV data
for (final Person person : persons)
{
sb.append(new PersonStringAdapter(person, nameMapper)
+ System.getProperty("line.separator"));
}
log.debug("Header is: " + sb.toString());
return (F) sb.toString();
}
// handle other result types here
// blow up otherwise
throw new ApplicationException("Unsupported result type: "
+ resultClazz.getCanonicalName());
}
// ========================= GET/SET METHODS ===========================
/**
* Return the prefixMapper property.
*
* @return the prefixMapper
*/
public Map<String, Integer> getPrefixMapper()
{
return prefixMapper;
}
/**
* Set a new value for the prefixMapper property.
*
* @param prefixMapper
* the prefixMapper to set
*/
public void setPrefixMapper(final Map<String, Integer> prefixMapper)
{
this.prefixMapper = prefixMapper;
}
/**
* Return the dos property.
*
* @return the dos
*/
public DtsOperationService getDos()
{
return dos;
}
/**
* Set a new value for the dos property.
*
* @param dos
* the dos to set
*/
public void setDos(final DtsOperationService dos)
{
this.dos = dos;
}
// ========================= PRIVATE METHODS/CLASSES ===========================
/**
* Returns a map of the concept_cd to it's named value.
*
* E.g. SNOMED:248152002 -> Female
*
* @param persons
* @return
*/
private Map<String, String> getCodeToNameMap(
final List<Person> persons)
{
final Map<String, String> terminologyNameMap = CollectionUtil.newMap();
Map<DtsNamespace, Set<String>> translationErrors = null;
DtsNamespace dtsNamespace = null;
String code = null;
for (final Person person : persons)
{
log.debug("Processing person: " + person.getId());
// Lookup the Gender name
if(person.getAdministrativeGenderNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getAdministrativeGenderNamespaceId().intValue());
code = person.getAdministrativeGender();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getAdministrativeGenderNamespaceId().toString(), code);
}
// Lookup the Ethnicity name
if(person.getEthnicityNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getEthnicityNamespaceId().intValue());
code = person.getEthnicity();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getEthnicityNamespaceId().toString(), code);
}
// Lookup the Race name
if(person.getRaceNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getRaceNamespaceId().intValue());
code = person.getRace();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getRaceNamespaceId().toString(), code);
}
// Lookup the Religion name
if(person.getReligionNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getReligionNamespaceId().intValue());
code = person.getReligion();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getReligionNamespaceId().toString(), code);
}
// Lookup the PrimaryLanguage name
if(person.getPrimaryLanguageNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getPrimaryLanguageNamespaceId().intValue());
code = person.getPrimaryLanguage();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getPrimaryLanguageNamespaceId().toString(), code);
}
// Lookup the MaritalStatus name
if(person.getMaritalStatusNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getMaritalStatusNamespaceId().intValue());
code = person.getMaritalStatus();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getMaritalStatusNamespaceId().toString(), code);
}
// Lookup the CauseOfDeath name
if(person.getCauseOfDeathNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getCauseOfDeathNamespaceId().intValue());
code = person.getCauseOfDeath();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getCauseOfDeathNamespaceId().toString(), code);
}
// Lookup the VitalStatus name
if(person.getVitalStatusNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getVitalStatusNamespaceId().intValue());
code = person.getVitalStatus();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getVitalStatusNamespaceId().toString(), code);
}
}
return terminologyNameMap;
}
/**
* Lookup logic supporting creation of map of the concept_cd to it's named value.
*
*
* @param terminologyNameMap
* @param translationErrors
* @param dtsNamespace
* @param code
* @return
*/
private void codeToNameLookup(
final Map<String, String> terminologyNameMap,
Map<DtsNamespace, Set<String>> translationErrors,
final DtsNamespace dtsNamespace, final String namespaceId, final String code) {
final DtsConcept dtsConcept = dtsNamespace.isLocal() ? dos
.findConceptByLocalCode(dtsNamespace, code) : dos
.findConceptByCodeInSource(dtsNamespace, code);
String name = (dtsConcept == null) ? "" : dtsConcept.getName();
// Replace all commas in names.
name = name.replace(",", ";");
// Keep track of all untranslated codes
if (dtsConcept == null)
{
if (translationErrors == null)
{
translationErrors = CollectionUtil.newMap();
}
Set<String> untranslatedCodes = translationErrors
.get(dtsNamespace);
if (untranslatedCodes == null)
{
untranslatedCodes = CollectionUtil.newSet();
}
untranslatedCodes.add(code);
translationErrors.put(dtsNamespace, untranslatedCodes);
}
// Put the <namespace id + concept_cd,name> into the terminologyNameMap
terminologyNameMap.put(namespaceId + ":" + code, name);
}
/**
* Creates the list of headers (attribute names) to put at the top of the CSV
*
* @return
*/
private List<String> createPersonHeaderList()
{
// Create the header
final List<String> headerValues = CollectionUtil.newList();
headerValues.add("PERSON NUM");
for (final DemographicExportAttribute attribute : DemographicExportAttribute
.values())
{
if (attribute.isIgnored()) {
continue;
}
headerValues.add(attribute.getDisplayName());
if (attribute.isValueCoded())
{
headerValues.add(attribute.getDisplayName() + " CODE");
}
}
return headerValues;
}
/**
* This class maps observations to person attributes and provides a toString in a
* comma separated value format. In the i2b2 model, we chose to store all demographic
* data as observations, therefore the observations need to be mapped back to person
* attributes.
* <p>
* -----------------------------------------------------------------------------------
* <br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author N. Dustin Schultz {@code <[email protected]>}
* @version Oct 8, 2012
*/
private static final class PersonStringAdapter
{
/**
* The person to adapt
*/
private final Person person;
/**
* Holds the mapping between the code of the value and the attribute
*/
private final Map<DemographicExportAttribute, AttributeValue> attributeValueMapper = CollectionUtil
.newMap();
/**
* Constructor
*
* @param person
*/
public PersonStringAdapter(final Person person,
final Map<String, String> nameMapper)
{
this.person = person;
log.debug("Adapting person: " + person.getId());
// Adapt the Gender
String source = GENDER_PERSON_SOURCE_CD;
DemographicExportAttribute attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
String concept =
(person.getAdministrativeGenderNamespaceId() == null ? "" : person.getAdministrativeGenderNamespaceId())
+ ":"
+ (person.getAdministrativeGender() == null ? "" : person.getAdministrativeGender());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Ethnicity
source = ETHNICITY_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getEthnicityNamespaceId() == null ? "" : person.getEthnicityNamespaceId())
+ ":"
+ (person.getEthnicity() == null ? "" : person.getEthnicity());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DateOfBirth
source = BIRTH_DATE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getDateOfBirth() == null ? "" : person.getDateOfBirth());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthYear
source = BIRTH_YEAR_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getBirthYear() == null ? "" : person.getBirthYear());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthMonth
source = BIRTH_MONTH_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getBirthMonth() == null ? "" : person.getBirthMonth());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthDay
source = BIRTH_DAY_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getBirthDay() == null ? "" : person.getBirthDay());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the EducationLevel
source = EDUCATION_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getEducationLevel() == null ? "" : person.getEducationLevel());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MultipleBirthIndicator
source = MULTI_BIRTH_IND_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getMultipleBirthIndicator() == null ? "" : person.getMultipleBirthIndicator());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MultipleBirthIndicatorOrderNumber
source = MULTI_BIRTH_NUM_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getMultipleBirthIndicatorOrderNumber() == null ? "" : person.getMultipleBirthIndicatorOrderNumber());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DateOfDeath
source = DEATH_DATE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getDateOfDeath() == null ? "" : person.getDateOfDeath());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DeathYear
source = DEATH_YEAR_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getDeathYear() == null ? "" : person.getDeathYear());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the PedigreeQuality
source = PEDIGREE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
source
+ ":"
+ (person.getPedigreeQuality() == null ? "" : person.getPedigreeQuality());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Race
source = RACE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getRaceNamespaceId() == null ? "" : person.getRaceNamespaceId())
+ ":"
+ (person.getRace() == null ? "" : person.getRace());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Religion
source = RELIGION_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getReligionNamespaceId() == null ? "" : person.getReligionNamespaceId())
+ ":"
+ (person.getReligion() == null ? "" : person.getReligion());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the PrimaryLanguage
source = PRIMARYLANGUAGE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getPrimaryLanguageNamespaceId() == null ? "" : person.getPrimaryLanguageNamespaceId())
+ ":"
+ (person.getPrimaryLanguage() == null ? "" : person.getPrimaryLanguage());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MaritalStatus
source = MARITAL_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getMaritalStatusNamespaceId() == null ? "" : person.getMaritalStatusNamespaceId())
+ ":"
+ (person.getMaritalStatus() == null ? "" : person.getMaritalStatus());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the CauseOfDeath
source = CAUSEOFDEATH_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getCauseOfDeathNamespaceId() == null ? "" : person.getCauseOfDeathNamespaceId())
+ ":"
+ (person.getCauseOfDeath() == null ? "" : person.getCauseOfDeath());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the VitalStatus
source = VITALSTATUS_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getVitalStatusNamespaceId() == null ? "" : person.getVitalStatusNamespaceId())
+ ":"
+ (person.getVitalStatus() == null ? "" : person.getVitalStatus());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString()
{
final List<String> values = CollectionUtil.newList();
// Always add the person id as the first value
values.add(person.getId().getId().toString());
for (final DemographicExportAttribute exportAttribute : DemographicExportAttribute
.values())
{
if (exportAttribute.isIgnored()) {
continue;
}
final AttributeValue value = attributeValueMapper.get(exportAttribute);
if (value == null)
{
// they don't have this export attribute
values.add("");
if (exportAttribute.isValueCoded())
{
values.add("");
}
}
else
{
if(value.getName() != null)
{
if (exportAttribute.isValueCoded())
{
values.add(value.getName());
} else {
// FUR-2481 - replace colons with underscore
if(value.getCode() != null)
{
String newValue = value.getCode().replace(":", "_");
values.add(newValue);
} else {
values.add("");
}
}
} else {
values.add("");
}
if (exportAttribute.isValueCoded())
{
if ("".equals(value.getName())) {
// FUR-2482 - replace codes that aren't in DTS with NOT_FOUND
values.add(NOT_FOUND);
} else {
// FUR-2481 - replace colons with underscore
if(value.getCode() != null)
{
String newValue = value.getCode().replace(":", "_");
values.add(newValue);
} else {
values.add("");
}
}
}
}
}
return Joiner.on(",").join(values);
}
}
/**
* Holds a given codes attribute key, name, and code
* <p>
* -----------------------------------------------------------------------------------
* <br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author N. Dustin Schultz {@code <[email protected]>}
* @version Oct 9, 2012
*/
private static final class AttributeValue
{
/**
* The name of this attribute
*/
private final String name;
/**
* The code of this attribute
*/
private final String code;
/**
* @param key
* @param name
*/
public AttributeValue(final String code, final String name)
{
super();
this.code = code;
this.name = name;
}
/**
* Return the code property.
*
* @return the code
*/
public String getCode()
{
return code;
}
/**
* Return the name property.
*
* @return the name
*/
public String getName()
{
return name;
}
}
}
| fqe/fqe-ds-model/fqe-ds-further-model/src/main/java/edu/utah/further/fqe/ds/model/further/export/CsvExporterImpl.java | /**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.fqe.ds.model.further.export;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.base.Joiner;
import edu.utah.further.core.api.collections.CollectionUtil;
import edu.utah.further.core.api.exception.ApplicationException;
import edu.utah.further.dts.api.domain.concept.DtsConcept;
import edu.utah.further.dts.api.domain.namespace.DtsNamespace;
import edu.utah.further.dts.api.service.DtsOperationService;
import edu.utah.further.fqe.ds.api.domain.AbstractQueryContext;
import edu.utah.further.fqe.ds.api.domain.ExportContext;
import edu.utah.further.fqe.ds.api.domain.Exporter;
import edu.utah.further.ds.further.model.impl.domain.Observation;
import edu.utah.further.ds.further.model.impl.domain.Person;
/**
* A comma separated value implementation of an {@link Exporter}
* <p>
* -----------------------------------------------------------------------------------<br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author Rich Hansen {@code <[email protected]>}
* @version Jul 31, 2014
*/
@Service("csvExporter")
@Transactional
public final class CsvExporterImpl implements Exporter
{
/**
* A logger that helps identify this class' printouts.
*/
private static final Logger log = getLogger(AbstractQueryContext.class);
/**
* String used to designate that a particular code was not found in DTS
*/
private static final String NOT_FOUND = "NOT_FOUND";
/**
* Strings used to find Demographic map entries
*/
public static final String GENDER_PERSON_SOURCE_CD = "gender";
public static final String BIRTH_DATE_PERSON_SOURCE_CD = "birthdate";
public static final String BIRTH_YEAR_PERSON_SOURCE_CD = "birthyear";
public static final String BIRTH_MONTH_PERSON_SOURCE_CD = "birthmonth";
public static final String BIRTH_DAY_PERSON_SOURCE_CD = "birthday";
public static final String EDUCATION_PERSON_SOURCE_CD = "education";
public static final String MULTI_BIRTH_IND_PERSON_SOURCE_CD = "multibirthind";
public static final String MULTI_BIRTH_NUM_PERSON_SOURCE_CD = "multibirthnum";
public static final String DEATH_DATE_PERSON_SOURCE_CD = "deathdate";
public static final String DEATH_YEAR_PERSON_SOURCE_CD = "deathyear";
public static final String PEDIGREE_PERSON_SOURCE_CD = "pedigree";
public static final String ETHNICITY_PERSON_SOURCE_CD = "ethnicity";
public static final String RACE_PERSON_SOURCE_CD = "race";
public static final String RELIGION_PERSON_SOURCE_CD = "religion";
public static final String PRIMARYLANGUAGE_PERSON_SOURCE_CD = "primarylanguage";
public static final String MARITAL_PERSON_SOURCE_CD = "marital";
public static final String CAUSEOFDEATH_PERSON_SOURCE_CD = "causeofdeath";
public static final String VITALSTATUS_PERSON_SOURCE_CD = "vitalstatus";
// ========================= DEPENDENCIES ==============================
/**
* Terminology services
*/
@Autowired
private DtsOperationService dos;
/**
* A prefix to namespace mapper. Prefixes that do not require a namespace -1.
*/
@Resource(name = "prefixMapper")
private Map<String, Integer> prefixMapper;
// ========================= IMPLEMENTATION: Exporter =======
/*
* (non-Javadoc)
*
* @see edu.utah.further.fqe.ds.api.domain.Exporter#format(java.util.List,
* edu.utah.further.fqe.ds.api.domain.ExportContext)
*/
@Override
@SuppressWarnings("unchecked")
public <F> F format(final List<?> results, final ExportContext exportContext)
{
if (results == null || results.size() == 0)
{
throw new ApplicationException(
"No results found. Your query may have returned zero results. "
+ "If you think this is an error, ensure that you are "
+ "running not running a count only query.");
}
final Class<?> resultClazz = results.get(0).getClass();
// Handle Person results
if (resultClazz.equals(Person.class))
{
// We've already checked the type
final List<Person> persons = (List<Person>) results;
final Map<String, String> nameMapper = getCodeToNameMap(persons);
// Build the CSV header
final StringBuilder sb = new StringBuilder();
sb.append(Joiner.on(",").join(createPersonHeaderList())
+ System.getProperty("line.separator"));
// Build the CSV data
for (final Person person : persons)
{
sb.append(new PersonStringAdapter(person, nameMapper)
+ System.getProperty("line.separator"));
}
log.debug("Header is: " + sb.toString());
return (F) sb.toString();
}
// handle other result types here
// blow up otherwise
throw new ApplicationException("Unsupported result type: "
+ resultClazz.getCanonicalName());
}
// ========================= GET/SET METHODS ===========================
/**
* Return the prefixMapper property.
*
* @return the prefixMapper
*/
public Map<String, Integer> getPrefixMapper()
{
return prefixMapper;
}
/**
* Set a new value for the prefixMapper property.
*
* @param prefixMapper
* the prefixMapper to set
*/
public void setPrefixMapper(final Map<String, Integer> prefixMapper)
{
this.prefixMapper = prefixMapper;
}
/**
* Return the dos property.
*
* @return the dos
*/
public DtsOperationService getDos()
{
return dos;
}
/**
* Set a new value for the dos property.
*
* @param dos
* the dos to set
*/
public void setDos(final DtsOperationService dos)
{
this.dos = dos;
}
// ========================= PRIVATE METHODS/CLASSES ===========================
/**
* Returns a map of the concept_cd to it's named value.
*
* E.g. SNOMED:248152002 -> Female
*
* @param persons
* @return
*/
private Map<String, String> getCodeToNameMap(
final List<Person> persons)
{
final Map<String, String> terminologyNameMap = CollectionUtil.newMap();
Map<DtsNamespace, Set<String>> translationErrors = null;
DtsNamespace dtsNamespace = null;
String code = null;
for (final Person person : persons)
{
log.debug("Processing person: " + person.getId());
// Lookup the Gender name
if(person.getAdministrativeGenderNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getAdministrativeGenderNamespaceId().intValue());
code = person.getAdministrativeGender();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getAdministrativeGenderNamespaceId().toString(), code);
}
// Lookup the Ethnicity name
if(person.getEthnicityNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getEthnicityNamespaceId().intValue());
code = person.getEthnicity();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getEthnicityNamespaceId().toString(), code);
}
// Lookup the Race name
if(person.getRaceNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getRaceNamespaceId().intValue());
code = person.getRace();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getRaceNamespaceId().toString(), code);
}
// Lookup the Religion name
if(person.getReligionNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getReligionNamespaceId().intValue());
code = person.getReligion();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getReligionNamespaceId().toString(), code);
}
// Lookup the PrimaryLanguage name
if(person.getPrimaryLanguageNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getPrimaryLanguageNamespaceId().intValue());
code = person.getPrimaryLanguage();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getPrimaryLanguageNamespaceId().toString(), code);
}
// Lookup the MaritalStatus name
if(person.getMaritalStatusNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getMaritalStatusNamespaceId().intValue());
code = person.getMaritalStatus();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getMaritalStatusNamespaceId().toString(), code);
}
// Lookup the CauseOfDeath name
if(person.getCauseOfDeathNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getCauseOfDeathNamespaceId().intValue());
code = person.getCauseOfDeath();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getCauseOfDeathNamespaceId().toString(), code);
}
// Lookup the VitalStatus name
if(person.getVitalStatusNamespaceId() != null)
{
dtsNamespace = dos.findNamespaceById(person.getVitalStatusNamespaceId().intValue());
code = person.getVitalStatus();
codeToNameLookup(terminologyNameMap, translationErrors, dtsNamespace, person.getVitalStatusNamespaceId().toString(), code);
}
}
return terminologyNameMap;
}
/**
* Lookup logic supporting creation of map of the concept_cd to it's named value.
*
*
* @param terminologyNameMap
* @param translationErrors
* @param dtsNamespace
* @param code
* @return
*/
private void codeToNameLookup(
final Map<String, String> terminologyNameMap,
Map<DtsNamespace, Set<String>> translationErrors,
final DtsNamespace dtsNamespace, final String namespaceId, final String code) {
final DtsConcept dtsConcept = dtsNamespace.isLocal() ? dos
.findConceptByLocalCode(dtsNamespace, code) : dos
.findConceptByCodeInSource(dtsNamespace, code);
String name = (dtsConcept == null) ? "" : dtsConcept.getName();
// Replace all commas in names.
name = name.replace(",", ";");
// Keep track of all untranslated codes
if (dtsConcept == null)
{
if (translationErrors == null)
{
translationErrors = CollectionUtil.newMap();
}
Set<String> untranslatedCodes = translationErrors
.get(dtsNamespace);
if (untranslatedCodes == null)
{
untranslatedCodes = CollectionUtil.newSet();
}
untranslatedCodes.add(code);
translationErrors.put(dtsNamespace, untranslatedCodes);
}
// Put the <namespace id + concept_cd,name> into the terminologyNameMap
terminologyNameMap.put(namespaceId + ":" + code, name);
}
/**
* Creates the list of headers (attribute names) to put at the top of the CSV
*
* @return
*/
private List<String> createPersonHeaderList()
{
// Create the header
final List<String> headerValues = CollectionUtil.newList();
headerValues.add("PERSON NUM");
for (final DemographicExportAttribute attribute : DemographicExportAttribute
.values())
{
if (attribute.isIgnored()) {
continue;
}
headerValues.add(attribute.getDisplayName());
if (attribute.isValueCoded())
{
headerValues.add(attribute.getDisplayName() + " CODE");
}
}
return headerValues;
}
/**
* This class maps observations to person attributes and provides a toString in a
* comma separated value format. In the i2b2 model, we chose to store all demographic
* data as observations, therefore the observations need to be mapped back to person
* attributes.
* <p>
* -----------------------------------------------------------------------------------
* <br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author N. Dustin Schultz {@code <[email protected]>}
* @version Oct 8, 2012
*/
private static final class PersonStringAdapter
{
/**
* The person to adapt
*/
private final Person person;
/**
* Holds the mapping between the code of the value and the attribute
*/
private final Map<DemographicExportAttribute, AttributeValue> attributeValueMapper = CollectionUtil
.newMap();
/**
* Constructor
*
* @param person
*/
public PersonStringAdapter(final Person person,
final Map<String, String> nameMapper)
{
this.person = person;
log.debug("Adapting person: " + person.getId());
// Adapt the Gender
String source = GENDER_PERSON_SOURCE_CD;
DemographicExportAttribute attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
String concept =
(person.getAdministrativeGenderNamespaceId() == null ? "" : person.getAdministrativeGenderNamespaceId())
+ ":"
+ (person.getAdministrativeGender() == null ? "" : person.getAdministrativeGender());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Ethnicity
source = ETHNICITY_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getEthnicityNamespaceId() == null ? "" : person.getEthnicityNamespaceId())
+ ":"
+ (person.getEthnicity() == null ? "" : person.getEthnicity());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DateOfBirth
source = BIRTH_DATE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = "" +
(person.getDateOfBirth() == null ? "" : person.getDateOfBirth());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthYear
source = BIRTH_YEAR_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getBirthYear() == null ? "" : person.getBirthYear());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthMonth
source = BIRTH_MONTH_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getBirthMonth() == null ? "" : person.getBirthMonth());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the BirthDay
source = BIRTH_DAY_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getBirthDay() == null ? "" : person.getBirthDay());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the EducationLevel
source = EDUCATION_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getEducationLevel() == null ? "" : person.getEducationLevel());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MultipleBirthIndicator
source = MULTI_BIRTH_IND_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getMultipleBirthIndicator() == null ? "" : person.getMultipleBirthIndicator());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MultipleBirthIndicatorOrderNumber
source = MULTI_BIRTH_NUM_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getMultipleBirthIndicatorOrderNumber() == null ? "" : person.getMultipleBirthIndicatorOrderNumber());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DateOfDeath
source = DEATH_DATE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getDateOfDeath() == null ? "" : person.getDateOfDeath());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the DeathYear
source = DEATH_YEAR_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getDeathYear() == null ? "" : person.getDeathYear());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the PedigreeQuality
source = PEDIGREE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept = ""
+ (person.getPedigreeQuality() == null ? "" : person.getPedigreeQuality());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Race
source = RACE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getRaceNamespaceId() == null ? "" : person.getRaceNamespaceId())
+ ":"
+ (person.getRace() == null ? "" : person.getRace());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the Religion
source = RELIGION_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getReligionNamespaceId() == null ? "" : person.getReligionNamespaceId())
+ ":"
+ (person.getReligion() == null ? "" : person.getReligion());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the PrimaryLanguage
source = PRIMARYLANGUAGE_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getPrimaryLanguageNamespaceId() == null ? "" : person.getPrimaryLanguageNamespaceId())
+ ":"
+ (person.getPrimaryLanguage() == null ? "" : person.getPrimaryLanguage());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the MaritalStatus
source = MARITAL_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getMaritalStatusNamespaceId() == null ? "" : person.getMaritalStatusNamespaceId())
+ ":"
+ (person.getMaritalStatus() == null ? "" : person.getMaritalStatus());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the CauseOfDeath
source = CAUSEOFDEATH_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getCauseOfDeathNamespaceId() == null ? "" : person.getCauseOfDeathNamespaceId())
+ ":"
+ (person.getCauseOfDeath() == null ? "" : person.getCauseOfDeath());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
// Adapt the VitalStatus
source = VITALSTATUS_PERSON_SOURCE_CD;
attribute = DemographicExportAttribute
.getAttributeBySourceCode(source);
concept =
(person.getVitalStatusNamespaceId() == null ? "" : person.getVitalStatusNamespaceId())
+ ":"
+ (person.getVitalStatus() == null ? "" : person.getVitalStatus());
attributeValueMapper.put(attribute, new AttributeValue(concept,
nameMapper.get(concept)));
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString()
{
final List<String> values = CollectionUtil.newList();
// Always add the person id as the first value
values.add(person.getId().getId().toString());
for (final DemographicExportAttribute exportAttribute : DemographicExportAttribute
.values())
{
if (exportAttribute.isIgnored()) {
continue;
}
final AttributeValue value = attributeValueMapper.get(exportAttribute);
if (value == null)
{
// they don't have this export attribute
values.add("");
if (exportAttribute.isValueCoded())
{
values.add("");
}
}
else
{
if(value.getName() != null)
{
values.add(value.getName());
} else {
values.add("");
}
if (exportAttribute.isValueCoded())
{
if ("".equals(value.getName())) {
// FUR-2482 - replace codes that aren't in DTS with NOT_FOUND
values.add(NOT_FOUND);
} else {
// FUR-2481 - replace colons with underscore
if(value.getCode() != null)
{
String newValue = value.getCode().replace(":", "_");
values.add(newValue);
} else {
values.add("");
}
}
} else {
// FUR-2481 - replace colons with underscore
if(value.getCode() != null)
{
String newValue = value.getCode().replace(":", "_");
values.add(newValue);
} else {
values.add("");
}
}
}
}
return Joiner.on(",").join(values);
}
}
/**
* Holds a given codes attribute key, name, and code
* <p>
* -----------------------------------------------------------------------------------
* <br>
* (c) 2008-2012 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <[email protected]>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author N. Dustin Schultz {@code <[email protected]>}
* @version Oct 9, 2012
*/
private static final class AttributeValue
{
/**
* The name of this attribute
*/
private final String name;
/**
* The code of this attribute
*/
private final String code;
/**
* @param key
* @param name
*/
public AttributeValue(final String code, final String name)
{
super();
this.code = code;
this.name = name;
}
/**
* Return the code property.
*
* @return the code
*/
public String getCode()
{
return code;
}
/**
* Return the name property.
*
* @return the name
*/
public String getName()
{
return name;
}
}
}
| CSV Export, Person refactor: change to non-coded attrs | fqe/fqe-ds-model/fqe-ds-further-model/src/main/java/edu/utah/further/fqe/ds/model/further/export/CsvExporterImpl.java | CSV Export, Person refactor: change to non-coded attrs |
|
Java | apache-2.0 | e8631f61a6424e4ccca407f6c7a1e8fb267f5e85 | 0 | anujbhan/airavata,gouravshenoy/airavata,apache/airavata,jjj117/airavata,dogless/airavata,hasinitg/airavata,glahiru/airavata,glahiru/airavata,apache/airavata,hasinitg/airavata,gouravshenoy/airavata,jjj117/airavata,machristie/airavata,machristie/airavata,anujbhan/airavata,gouravshenoy/airavata,hasinitg/airavata,gouravshenoy/airavata,machristie/airavata,jjj117/airavata,hasinitg/airavata,gouravshenoy/airavata,apache/airavata,hasinitg/airavata,anujbhan/airavata,glahiru/airavata,apache/airavata,jjj117/airavata,jjj117/airavata,dogless/airavata,glahiru/airavata,apache/airavata,anujbhan/airavata,dogless/airavata,anujbhan/airavata,dogless/airavata,apache/airavata,dogless/airavata,gouravshenoy/airavata,glahiru/airavata,machristie/airavata,machristie/airavata,dogless/airavata,hasinitg/airavata,gouravshenoy/airavata,apache/airavata,machristie/airavata,anujbhan/airavata,anujbhan/airavata,machristie/airavata,apache/airavata,jjj117/airavata | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.provenance.impl.jpa;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import org.apache.airavata.common.registry.api.exception.RegistryException;
import org.apache.airavata.commons.gfac.type.ActualParameter;
import org.apache.airavata.provenance.model.Experiment_Data;
import org.apache.airavata.provenance.model.Node_Data;
import org.apache.airavata.provenance.model.Workflow_Data;
import org.apache.airavata.registry.api.AiravataProvenanceRegistry;
import org.apache.airavata.registry.api.workflow.*;
import org.apache.airavata.registry.api.workflow.WorkflowInstanceStatus.ExecutionStatus;
/**
 * JPA-backed implementation of {@link AiravataProvenanceRegistry}. Experiment,
 * workflow-instance and node provenance records are persisted through the
 * "airavata_provenance" persistence unit. Read operations other than
 * {@link #getWorkflowExecutionStatus(String)} are unimplemented stubs that
 * return {@code null} or {@code false}.
 */
public class AiravataJPAProvenanceRegistry extends AiravataProvenanceRegistry{
    /** Persistence unit name as declared in persistence.xml. */
    private static final String PERSISTENCE_UNIT_NAME = "airavata_provenance";

    /** Shared factory; a fresh EntityManager is created per operation. */
    private EntityManagerFactory factory;

    /**
     * Constructor.
     *
     * @param user registry user this provenance registry acts on behalf of
     */
    public AiravataJPAProvenanceRegistry(String user) {
        super(user);
        this.factory = Persistence.createEntityManagerFactory(PERSISTENCE_UNIT_NAME);
    }

    public List<ActualParameter> loadOutput(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    public String saveOutput(String arg0, List<ActualParameter> arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public WorkflowExecution getWorkflowExecution(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<WorkflowExecution> getWorkflowExecutionByUser(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<WorkflowExecution> getWorkflowExecutionByUser(String arg0,
            int arg1, int arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<String> getWorkflowExecutionIdByUser(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getWorkflowExecutionMetadata(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getWorkflowExecutionName(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<WorkflowIOData> getWorkflowExecutionOutput(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public WorkflowIOData getWorkflowExecutionOutput(String arg0, String arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String[] getWorkflowExecutionOutputNames(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * Loads the latest status of a workflow instance.
     *
     * @param instanceID workflow instance identifier
     * @return status, template and last-update time of the instance
     * @throws RegistryException on registry-level failures
     */
    @Override
    public WorkflowInstanceStatus getWorkflowExecutionStatus(String instanceID)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            // Read-only lookup; no transaction is needed. Entity attribute is
            // "workflow_instanceID" (lower-case 'i'), consistent with every
            // other query in this class - JPQL path expressions are
            // case-sensitive.
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", instanceID);
            Workflow_Data singleResult = (Workflow_Data) q.getSingleResult();
            return new WorkflowInstanceStatus(
                    new WorkflowInstance(singleResult.getExperiment_Data().getExperiment_ID(),
                            singleResult.getTemplate_name()),
                    ExecutionStatus.valueOf(singleResult.getStatus()),
                    new Date(singleResult.getLast_update_time().getTime()));
        } finally {
            // Always release the EntityManager (the previous version leaked it
            // and left a transaction open).
            em.close();
        }
    }

    @Override
    public String getWorkflowExecutionUser(String arg0)
            throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * Creates a Workflow_Data row for a new workflow run under its experiment.
     *
     * @param arg0 run-time data of the workflow instance
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowData(WorkflowRunTimeData arg0)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            // Locate the parent experiment row
            Query q = em.createQuery("SELECT p FROM Experiment_Data p WHERE p.experiment_ID = :exp_ID");
            q.setParameter("exp_ID", arg0.getExperimentID());
            Experiment_Data eData = (Experiment_Data) q.getSingleResult();

            Workflow_Data wData = new Workflow_Data();
            wData.setExperiment_Data(eData);
            wData.setTemplate_name(arg0.getTemplateID());
            wData.setWorkflow_instanceID(arg0.getWorkflowInstanceID());
            wData.setStatus(arg0.getWorkflowStatus().toString());
            wData.setStart_time(arg0.getStartTime());
            em.persist(wData);
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    @Override
    public boolean saveWorkflowExecutionMetadata(String arg0, String arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return true;
    }

    /**
     * Creates the Experiment_Data row holding the experiment id and name.
     *
     * @param arg0 experiment identifier
     * @param arg1 experiment name
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowExecutionName(String arg0, String arg1)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Experiment_Data expData = new Experiment_Data();
            expData.setExperiment_ID(arg0);
            expData.setName(arg1);
            em.persist(expData);
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    @Override
    public boolean saveWorkflowExecutionOutput(String arg0, WorkflowIOData arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return true;
    }

    @Override
    public boolean saveWorkflowExecutionOutput(String arg0, String arg1,
            String arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    /**
     * Creates the Node_Data row for a service node, recording its inputs,
     * type and initial status. The node row is created lazily when its first
     * input arrives.
     *
     * @param arg0 node input data
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowExecutionServiceInput(WorkflowServiceIOData arg0)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", arg0.getWorkflowInstanceId());
            Workflow_Data wData = (Workflow_Data) q.getSingleResult();

            Node_Data nData = new Node_Data();
            nData.setWorkflow_Data(wData);
            nData.setNode_id(arg0.getNodeId());
            nData.setInputs(arg0.getValue());
            nData.setNode_type((arg0.getNodeType().getNodeType().toString()));
            nData.setStatus(arg0.getNodeStatus().getExecutionStatus().toString());
            em.persist(nData);
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    /**
     * Records the outputs on an existing Node_Data row.
     *
     * @param arg0 node output data
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowExecutionServiceOutput(WorkflowServiceIOData arg0)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", arg0.getWorkflowInstanceId());
            Workflow_Data wData = (Workflow_Data) q.getSingleResult();

            q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
            q.setParameter("workflow_data", wData);
            q.setParameter("node_ID", arg0.getNodeId());
            Node_Data nData = (Node_Data) q.getSingleResult();
            // nData is managed; the update is flushed on commit
            nData.setOutputs(arg0.getValue());
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    /**
     * Updates the status column of a workflow instance.
     *
     * @param arg0 workflow instance identifier
     * @param arg1 new execution status
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowExecutionStatus(String arg0, ExecutionStatus arg1)
            throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", arg0);
            Workflow_Data wData = (Workflow_Data) q.getSingleResult();
            // wData is managed; the status change is flushed on commit
            wData.setStatus(arg1.toString());
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    @Override
    public boolean saveWorkflowExecutionUser(String arg0, String arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public boolean saveWorkflowLastUpdateTime(String arg0, Timestamp arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public boolean saveWorkflowNodeGramData(WorkflowNodeGramData arg0)
            throws RegistryException {
        // FIXME(review): the whole WorkflowNodeGramData object is bound as the
        // :workflow_ID parameter (not its workflow instance id), and neither
        // query result carries the gram data anywhere - confirm the intended
        // behavior before relying on this method.
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", arg0);
            Workflow_Data wData = (Workflow_Data) q.getSingleResult();
            q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
            q.setParameter("workflow_data", wData);
            q.setParameter("node_ID", arg0.getNodeID());
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    @Override
    public boolean saveWorkflowNodeGramLocalJobID(String arg0, String arg1,
            String arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public boolean saveWorkflowNodeLastUpdateTime(String arg0, String arg1,
            Timestamp arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    /**
     * Updates the status column of a single node within a workflow instance.
     *
     * @param arg0 workflow instance identifier
     * @param arg1 node identifier
     * @param arg2 new execution status
     * @return {@code true} on success
     */
    @Override
    public boolean saveWorkflowNodeStatus(String arg0, String arg1,
            ExecutionStatus arg2) throws RegistryException {
        EntityManager em = factory.createEntityManager();
        try {
            em.getTransaction().begin();
            Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
            q.setParameter("workflow_ID", arg0);
            Workflow_Data wData = (Workflow_Data) q.getSingleResult();
            q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
            q.setParameter("workflow_data", wData);
            q.setParameter("node_ID", arg1);
            Node_Data nData = (Node_Data) q.getSingleResult();
            // nData is managed; the status change is flushed on commit
            nData.setStatus(arg2.toString());
            em.getTransaction().commit();
            return true;
        } finally {
            em.close();
        }
    }

    @Override
    public boolean saveWorkflowStatus(String arg0, WorkflowInstanceStatus arg1)
            throws RegistryException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public List<WorkflowServiceIOData> searchWorkflowExecutionServiceInput(
            String arg0, String arg1, String arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<WorkflowServiceIOData> searchWorkflowExecutionServiceOutput(
            String arg0, String arg1, String arg2) throws RegistryException {
        // TODO Auto-generated method stub
        return null;
    }
}
| modules/commons/provenance-registry/src/main/java/org/apache/airavata/provenance/impl/jpa/AiravataJPAProvenanceRegistry.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.provenance.impl.jpa;
import java.sql.Timestamp;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import org.apache.airavata.common.registry.api.exception.RegistryException;
import org.apache.airavata.commons.gfac.type.ActualParameter;
import org.apache.airavata.provenance.model.Experiment_Data;
import org.apache.airavata.provenance.model.Node_Data;
import org.apache.airavata.provenance.model.Workflow_Data;
import org.apache.airavata.registry.api.AiravataProvenanceRegistry;
import org.apache.airavata.registry.api.workflow.WorkflowExecution;
import org.apache.airavata.registry.api.workflow.WorkflowIOData;
import org.apache.airavata.registry.api.workflow.WorkflowInstanceStatus;
import org.apache.airavata.registry.api.workflow.WorkflowInstanceStatus.ExecutionStatus;
import org.apache.airavata.registry.api.workflow.WorkflowNodeGramData;
import org.apache.airavata.registry.api.workflow.WorkflowRunTimeData;
import org.apache.airavata.registry.api.workflow.WorkflowServiceIOData;
public class AiravataJPAProvenanceRegistry extends AiravataProvenanceRegistry{
private static final String PERSISTENCE_UNIT_NAME = "airavata_provenance";
private EntityManagerFactory factory;
public AiravataJPAProvenanceRegistry(String user) {
super(user);
this.factory = Persistence.createEntityManagerFactory(PERSISTENCE_UNIT_NAME);
}
public List<ActualParameter> loadOutput(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
public String saveOutput(String arg0, List<ActualParameter> arg1)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public WorkflowExecution getWorkflowExecution(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public List<WorkflowExecution> getWorkflowExecutionByUser(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public List<WorkflowExecution> getWorkflowExecutionByUser(String arg0,
int arg1, int arg2) throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public List<String> getWorkflowExecutionIdByUser(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getWorkflowExecutionMetadata(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getWorkflowExecutionName(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public List<WorkflowIOData> getWorkflowExecutionOutput(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public WorkflowIOData getWorkflowExecutionOutput(String arg0, String arg1)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public String[] getWorkflowExecutionOutputNames(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public WorkflowInstanceStatus getWorkflowExecutionStatus(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getWorkflowExecutionUser(String arg0)
throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean saveWorkflowData(WorkflowRunTimeData arg0)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT p FROM Experiment_Data p WHERE p.experiment_ID = :exp_ID");
q.setParameter("exp_ID", arg0.getExperimentID());
Experiment_Data eData = (Experiment_Data) q.getSingleResult();
Workflow_Data wData = new Workflow_Data();
wData.setExperiment_Data(eData);
wData.setExperiment_Data(eData);
wData.setTemplate_name(arg0.getTemplateID());
wData.setWorkflow_instanceID(arg0.getWorkflowInstanceID());
wData.setStatus(arg0.getWorkflowStatus().toString());
wData.setStart_time(arg0.getStartTime());
em.persist(wData);
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowExecutionMetadata(String arg0, String arg1)
throws RegistryException {
// TODO Auto-generated method stub
return true;
}
@Override
public boolean saveWorkflowExecutionName(String arg0, String arg1)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Experiment_Data expData = new Experiment_Data();
expData.setExperiment_ID(arg0);
expData.setName(arg1);
em.persist(expData);
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowExecutionOutput(String arg0, WorkflowIOData arg1)
throws RegistryException {
// TODO Auto-generated method stub
return true;
}
@Override
public boolean saveWorkflowExecutionOutput(String arg0, String arg1,
String arg2) throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean saveWorkflowExecutionServiceInput(WorkflowServiceIOData arg0)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
q.setParameter("workflow_ID", arg0.getWorkflowInstanceId());
Workflow_Data wData = (Workflow_Data) q.getSingleResult();
Node_Data nData = new Node_Data();
nData.setWorkflow_Data(wData);
nData.setNode_id(arg0.getNodeId());
nData.setInputs(arg0.getValue());
nData.setNode_type((arg0.getNodeType().getNodeType().toString()));
nData.setStatus(arg0.getNodeStatus().getExecutionStatus().toString());
em.persist(nData);
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowExecutionServiceOutput(WorkflowServiceIOData arg0)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
q.setParameter("workflow_ID", arg0.getWorkflowInstanceId());
Workflow_Data wData = (Workflow_Data) q.getSingleResult();
q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
q.setParameter("workflow_data", wData);
q.setParameter("node_ID", arg0.getNodeId());
Node_Data nData = (Node_Data) q.getSingleResult();
nData.setOutputs(arg0.getValue());
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowExecutionStatus(String arg0, ExecutionStatus arg1)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
q.setParameter("workflow_ID", arg0);
Workflow_Data wData = (Workflow_Data) q.getSingleResult();
wData.setStatus(arg1.toString());
em.persist(wData);
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowExecutionUser(String arg0, String arg1)
throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean saveWorkflowLastUpdateTime(String arg0, Timestamp arg1)
throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean saveWorkflowNodeGramData(WorkflowNodeGramData arg0)
throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
q.setParameter("workflow_ID", arg0);
Workflow_Data wData = (Workflow_Data) q.getSingleResult();
q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
q.setParameter("workflow_data", wData);
q.setParameter("node_ID", arg0.getNodeID());
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowNodeGramLocalJobID(String arg0, String arg1,
String arg2) throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean saveWorkflowNodeLastUpdateTime(String arg0, String arg1,
Timestamp arg2) throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean saveWorkflowNodeStatus(String arg0, String arg1,
ExecutionStatus arg2) throws RegistryException {
// TODO Auto-generated method stub
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
Query q = em.createQuery("SELECT w FROM Workflow_Data w WHERE w.workflow_instanceID = :workflow_ID");
q.setParameter("workflow_ID", arg0);
Workflow_Data wData = (Workflow_Data) q.getSingleResult();
q = em.createQuery("SELECT p FROM Node_Data p WHERE p.workflow_Data = :workflow_data AND p.node_id = :node_ID");
q.setParameter("workflow_data", wData);
q.setParameter("node_ID", arg1);
Node_Data nData = (Node_Data) q.getSingleResult();
nData.setStatus(arg2.toString());
em.getTransaction().commit();
em.close();
return true;
}
@Override
public boolean saveWorkflowStatus(String arg0, WorkflowInstanceStatus arg1)
throws RegistryException {
// TODO Auto-generated method stub
return false;
}
@Override
public List<WorkflowServiceIOData> searchWorkflowExecutionServiceInput(
String arg0, String arg1, String arg2) throws RegistryException {
// TODO Auto-generated method stub
return null;
}
@Override
public List<WorkflowServiceIOData> searchWorkflowExecutionServiceOutput(
        String arg0, String arg1, String arg2) throws RegistryException {
    // TODO Auto-generated method stub -- not implemented. Note this returns
    // null rather than an empty list, so callers must null-check.
    return null;
}
}
| implementing the getWorkflowStatus method.
git-svn-id: 64c7115bac0e45f25b6ef7317621bf38f6d5f89e@1380862 13f79535-47bb-0310-9956-ffa450edef68
| modules/commons/provenance-registry/src/main/java/org/apache/airavata/provenance/impl/jpa/AiravataJPAProvenanceRegistry.java | implementing the getWorkflowStatus method. |
|
Java | apache-2.0 | d07062bdf098957b67ca72bf7fd486b00f8956fe | 0 | mythguided/hydra,mythguided/hydra,mythguided/hydra,mythguided/hydra | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.data.filter.value;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.addthis.basis.collect.HotMap;
import com.addthis.basis.net.HttpUtil;
import com.addthis.basis.net.http.HttpResponse;
import com.addthis.basis.util.Bytes;
import com.addthis.basis.util.Files;
import com.addthis.basis.util.Multidict;
import com.addthis.bundle.value.ValueObject;
import com.addthis.codec.Codec;
import com.addthis.codec.annotations.FieldConfig;
import com.addthis.codec.codables.Codable;
import com.addthis.codec.codables.SuperCodable;
import com.addthis.codec.json.CodecJSON;
import com.addthis.hydra.common.hash.MD5HashFunction;
import com.google.common.annotations.VisibleForTesting;
import org.apache.http.client.methods.HttpGet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ValueFilterHttpGet extends StringFilter implements SuperCodable {
private static final Logger log = LoggerFactory.getLogger(ValueFilterHttpGet.class);
private static final Codec codec = CodecJSON.INSTANCE;
@FieldConfig(codable = true)
private int cacheSize = 1000;
@FieldConfig(codable = true)
private long cacheAge;
@FieldConfig(codable = true)
private int timeout = 60000;
@FieldConfig(codable = true)
private int retry = 1;
@FieldConfig(codable = true)
private long retryTimeout = 1000;
@FieldConfig(codable = true, required = true)
private String template;
@FieldConfig(codable = true)
private String missValue;
@FieldConfig(codable = true)
private boolean trace;
@FieldConfig(codable = true)
private boolean emptyOk = true;
@FieldConfig(codable = true)
private boolean persist;
@FieldConfig(codable = true)
private String persistDir = ".";
private HotMap<String, CacheObject> cache =
new HotMap<>(new ConcurrentHashMap());
private AtomicBoolean init = new AtomicBoolean(false);
private File persistTo;
@VisibleForTesting
ValueFilterHttpGet() {}
/**
 * One cached key/value pair. Codable so it can be round-tripped through the
 * on-disk persistence directory; ordered by creation time so restored entries
 * can be replayed oldest-first into the hot map.
 */
public static class CacheObject implements Codable, Comparable<CacheObject> {
    /** Creation time in epoch millis; drives age-based ordering and expiry. */
    @FieldConfig(codable = true)
    private long time;
    /** The lookup key (the raw filter input string). */
    @FieldConfig(codable = true)
    private String key;
    /** The cached value (HTTP response body or the configured missValue). */
    @FieldConfig(codable = true)
    private String data;
    /** MD5 of {@link #key}; doubles as the on-disk file name. Not persisted. */
    private String hash;

    @Override
    public int compareTo(CacheObject o) {
        // Long.compare avoids the overflow/truncation bug of casting
        // (time - o.time) to int, which misorders entries whose timestamps
        // differ by more than Integer.MAX_VALUE milliseconds.
        return Long.compare(time, o.time);
    }
}
@Override
public void postDecode() {
    // When persistence is enabled, rebuild the in-memory cache from the
    // persistence directory so previously fetched values survive restarts.
    if (persist) {
        persistTo = Files.initDirectory(persistDir);
        LinkedList<CacheObject> list = new LinkedList<>();
        for (File file : persistTo.listFiles()) {
            if (file.isFile()) {
                try {
                    CacheObject cached = codec.decode(CacheObject.class, Files.read(file));
                    cached.hash = file.getName();
                    list.add(cached);
                    if (log.isDebugEnabled()) {
                        log.debug("restored " + cached.hash + " as " + cached.key);
                    }
                } catch (Exception e) {
                    // A corrupt/unreadable cache file is non-fatal: log it via
                    // slf4j (was printStackTrace, invisible in normal logs).
                    log.warn("failed to restore cache entry from " + file, e);
                }
            }
        }
        // sort so that hot map has the most recent inserted last
        CacheObject[] sort = new CacheObject[list.size()];
        list.toArray(sort);
        Arrays.sort(sort);
        for (CacheObject cached : sort) {
            if (log.isDebugEnabled()) {
                log.debug("insert into hot " + cached.hash + " as " + cached.key);
            }
            cache.put(cached.key, cached);
        }
    }
}
@Override public void preEncode() {}
/**
 * Variant used purely to validate construction/decoding of the configuration:
 * postDecode is a no-op (no persistence directory is touched) and actually
 * filtering values is forbidden.
 */
private static final class ValidationOnly extends ValueFilterHttpGet {
    @Override public void postDecode() {
        // intentionally do nothing
    }
    @Override public ValueObject filter(ValueObject value) {
        throw new UnsupportedOperationException("This class is only intended for use in construction validation.");
    }
}
// Synchronized together with cachePut so the HotMap's access ordering and
// size bookkeeping are never mutated concurrently.
private synchronized CacheObject cacheGet(String key) {
    return cache.get(key);
}
/**
 * Insert a key/value pair into the hot cache, optionally persisting it to
 * disk, and evict (and delete from disk) the eldest entries beyond cacheSize.
 */
private synchronized CacheObject cachePut(String key, String value) {
    CacheObject cached = new CacheObject();
    cached.time = System.currentTimeMillis();
    cached.key = key;
    cached.data = value;
    cached.hash = MD5HashFunction.hashAsString(key);
    cache.put(cached.key, cached);
    // Only touch the filesystem when persistence is configured. Previously
    // persistTo was null when persist=false, and new File((File) null, hash)
    // silently wrote/deleted cache files in the working directory.
    if (persistTo != null) {
        try {
            Files.write(new File(persistTo, cached.hash), codec.encode(cached), false);
            if (log.isDebugEnabled()) {
                log.debug("creating " + cached.hash + " for " + cached.key);
            }
        } catch (Exception ex) {
            log.warn("", ex);
        }
    }
    while (cache.size() > cacheSize) {
        CacheObject old = cache.removeEldest();
        if (persistTo != null) {
            new File(persistTo, old.hash).delete();
        }
        if (log.isDebugEnabled()) {
            log.debug("deleted " + old.hash + " containing " + old.key);
        }
    }
    return cached;
}
/**
 * Resolve {@code sv} by substituting it into the URL template and issuing an
 * HTTP GET, serving from (and refreshing) the local cache. Returns null for
 * null input, or when nothing could be fetched and no missValue is configured.
 */
@Override
public String filter(String sv) {
    if (sv == null) {
        return null;
    }
    CacheObject cached = cacheGet(sv);
    boolean expired = cached != null && cacheAge > 0
            && System.currentTimeMillis() - cached.time > cacheAge;
    if (cached == null || expired) {
        if (expired && log.isDebugEnabled()) {
            log.debug("aging out, replacing " + cached.hash + " or " + cached.key);
        }
        // The URL is loop-invariant; build it once.
        String replacement = template.replace("{{}}", sv);
        int retries = retry;
        while (retries-- > 0) {
            try {
                byte[] val = httpGet(replacement, null, null, timeout, trace);
                if (val != null && (emptyOk || val.length > 0)) {
                    cached = cachePut(sv, Bytes.toString(val));
                    break;
                } else if (trace) {
                    log.error("{} returned {} retries left = {}", replacement, (val != null ? val.length : -1), retries);
                }
            } catch (IOException e) {
                // Log via slf4j (was printStackTrace) so failures are visible.
                log.warn("http get failed for " + replacement, e);
            }
            try {
                Thread.sleep(retryTimeout);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop retrying promptly;
                // previously the interrupt was swallowed.
                Thread.currentThread().interrupt();
                break;
            }
        }
        if (cached == null && missValue != null) {
            // Cache AND return the miss value. Previously the first miss
            // stored missValue but still returned null for this call.
            cached = cachePut(sv, missValue);
        }
    }
    // If an expired entry could not be refreshed, fall back to the stale data.
    return cached != null ? cached.data : null;
}
/**
 * Issue a simple HTTP GET and return the response body on a 200 status,
 * or null otherwise (optionally logging the failure).
 *
 * @param url             target URL
 * @param requestHeaders  optional headers to send with the request
 * @param responseHeaders optional map to receive the response headers
 * @param timeoutms       request timeout in milliseconds
 * @param traceError      if true, log non-200 responses at error level
 * @return the response body bytes, or null for any non-200 status
 * @throws IOException on transport-level failure
 */
public static byte[] httpGet(String url, Map<String, String> requestHeaders,
        Map<String, String> responseHeaders, int timeoutms,
        boolean traceError) throws IOException {
    HttpGet request = new HttpGet(url);
    if (requestHeaders != null) {
        // addHeader(String, String) matches BiConsumer, so forEach suffices.
        requestHeaders.forEach(request::addHeader);
    }
    HttpResponse response = HttpUtil.execute(request, timeoutms);
    Multidict headers = response.getHeaders();
    if (responseHeaders != null && headers != null) {
        for (Map.Entry<String, String> header : headers.entries()) {
            responseHeaders.put(header.getKey(), header.getValue());
        }
    }
    if (response.getStatus() != 200) {
        if (traceError) {
            log.error("{} returned {}, {}", url, response.getStatus(), response.getReason());
        }
        return null;
    }
    return response.getBody();
}
}
| hydra-filters/src/main/java/com/addthis/hydra/data/filter/value/ValueFilterHttpGet.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.data.filter.value;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.addthis.basis.collect.HotMap;
import com.addthis.basis.net.HttpUtil;
import com.addthis.basis.net.http.HttpResponse;
import com.addthis.basis.util.Bytes;
import com.addthis.basis.util.Files;
import com.addthis.basis.util.Multidict;
import com.addthis.bundle.value.ValueObject;
import com.addthis.codec.Codec;
import com.addthis.codec.annotations.FieldConfig;
import com.addthis.codec.codables.Codable;
import com.addthis.codec.codables.SuperCodable;
import com.addthis.codec.json.CodecJSON;
import com.addthis.hydra.common.hash.MD5HashFunction;
import org.apache.http.client.methods.HttpGet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ValueFilterHttpGet extends StringFilter implements SuperCodable {
private static final Logger log = LoggerFactory.getLogger(ValueFilterHttpGet.class);
private static final Codec codec = CodecJSON.INSTANCE;
@FieldConfig(codable = true)
private int cacheSize = 1000;
@FieldConfig(codable = true)
private long cacheAge;
@FieldConfig(codable = true)
private int timeout = 60000;
@FieldConfig(codable = true)
private int retry = 1;
@FieldConfig(codable = true)
private long retryTimeout = 1000;
@FieldConfig(codable = true, required = true)
private String template;
@FieldConfig(codable = true)
private String missValue;
@FieldConfig(codable = true)
private boolean trace;
@FieldConfig(codable = true)
private boolean emptyOk = true;
@FieldConfig(codable = true)
private boolean persist;
@FieldConfig(codable = true)
private String persistDir = ".";
private HotMap<String, CacheObject> cache =
new HotMap<>(new ConcurrentHashMap());
private AtomicBoolean init = new AtomicBoolean(false);
private File persistTo;
private ValueFilterHttpGet() {}
public static class CacheObject implements Codable, Comparable<CacheObject> {
@FieldConfig(codable = true)
private long time;
@FieldConfig(codable = true)
private String key;
@FieldConfig(codable = true)
private String data;
private String hash;
@Override
public int compareTo(CacheObject o) {
return (int) (time - o.time);
}
}
@Override
public void postDecode() {
if (persist) {
persistTo = Files.initDirectory(persistDir);
LinkedList<CacheObject> list = new LinkedList<>();
for (File file : persistTo.listFiles()) {
if (file.isFile()) {
try {
CacheObject cached = codec.decode(CacheObject.class, Files.read(file));
cached.hash = file.getName();
list.add(cached);
if (log.isDebugEnabled()) {
log.debug("restored " + cached.hash + " as " + cached.key);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
// sort so that hot map has the most recent inserted last
CacheObject[] sort = new CacheObject[list.size()];
list.toArray(sort);
Arrays.sort(sort);
for (CacheObject cached : sort) {
if (log.isDebugEnabled()) {
log.debug("insert into hot " + cached.hash + " as " + cached.key);
}
cache.put(cached.key, cached);
}
}
}
@Override public void preEncode() {}
private static final class ValidationOnly extends ValueFilterHttpGet {
@Override public void postDecode() {
// intentionally do nothing
}
@Override public ValueObject filter(ValueObject value) {
throw new UnsupportedOperationException("This class is only intended for use in construction validation.");
}
}
private synchronized CacheObject cacheGet(String key) {
return cache.get(key);
}
private synchronized CacheObject cachePut(String key, String value) {
CacheObject cached = new CacheObject();
cached.time = System.currentTimeMillis();
cached.key = key;
cached.data = value;
cached.hash = MD5HashFunction.hashAsString(key);
cache.put(cached.key, cached);
try {
Files.write(new File(persistTo, cached.hash), codec.encode(cached), false);
if (log.isDebugEnabled()) {
log.debug("creating " + cached.hash + " for " + cached.key);
}
} catch (Exception ex) {
log.warn("", ex);
}
while (cache.size() > cacheSize) {
CacheObject old = cache.removeEldest();
new File(persistTo, old.hash).delete();
if (log.isDebugEnabled()) {
log.debug("deleted " + old.hash + " containing " + old.key);
}
}
return cached;
}
@Override
public String filter(String sv) {
if (sv == null) {
return null;
}
CacheObject cached = cacheGet(sv);
if (cached == null || (cacheAge > 0 && System.currentTimeMillis() - cached.time > cacheAge)) {
if (log.isDebugEnabled() && cached != null && cacheAge > 0 && System.currentTimeMillis() - cached.time > cacheAge) {
log.debug("aging out, replacing " + cached.hash + " or " + cached.key);
}
int retries = retry;
while (retries-- > 0) {
try {
String replacement = template.replace("{{}}", sv);
byte[] val = httpGet(replacement, null, null, timeout, trace);
if (val != null && (emptyOk || val.length > 0)) {
cached = cachePut(sv, Bytes.toString(val));
break;
} else if (trace) {
log.error("{} returned {} retries left = {}", replacement, (val != null ? val.length : -1), retries);
}
} catch (IOException e) {
e.printStackTrace();
}
try {
Thread.sleep(retryTimeout);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (cached == null && missValue != null) {
cachePut(sv, missValue);
}
}
return cached != null ? cached.data : null;
}
public static byte[] httpGet(String url, Map<String, String> requestHeaders,
Map<String, String> responseHeaders, int timeoutms,
boolean traceError) throws IOException {
HttpGet get = new HttpGet(url);
if (requestHeaders != null) {
for (Map.Entry<String, String> entry : requestHeaders.entrySet()) {
get.addHeader(entry.getKey(), entry.getValue());
}
}
HttpResponse response = HttpUtil.execute(get, timeoutms);
Multidict resHeaders = response.getHeaders();
if (responseHeaders != null && resHeaders != null) {
for (Map.Entry<String, String> entry : resHeaders.entries()) {
responseHeaders.put(entry.getKey(), entry.getValue());
}
}
if (response.getStatus() == 200) {
return response.getBody();
} else {
if (traceError) {
log.error("{} returned {}, {}", url, response.getStatus(), response.getReason());
}
return null;
}
}
}
| package private scope for http get
some of the tests use the construct-then-decode
strategy
| hydra-filters/src/main/java/com/addthis/hydra/data/filter/value/ValueFilterHttpGet.java | package private scope for http get |
|
Java | bsd-2-clause | 06b7d4c1312496ac15eab9c01f1733210793c4d7 | 0 | conveyal/gtfs-lib | package com.conveyal.gtfs.validator;
import com.conveyal.gtfs.PatternFinder;
import com.conveyal.gtfs.TripPatternKey;
import com.conveyal.gtfs.error.SQLErrorStorage;
import com.conveyal.gtfs.loader.Feed;
import com.conveyal.gtfs.loader.JdbcGtfsLoader;
import com.conveyal.gtfs.loader.Requirement;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.Pattern;
import com.conveyal.gtfs.model.PatternStop;
import com.conveyal.gtfs.model.Route;
import com.conveyal.gtfs.model.Stop;
import com.conveyal.gtfs.model.StopTime;
import com.conveyal.gtfs.model.Trip;
import org.apache.commons.dbutils.DbUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.conveyal.gtfs.model.Entity.setIntParameter;
/**
* Groups trips together into "patterns" that share the same sequence of stops.
* This is not a normal validator in the sense that it does not check for bad data.
* It's taking advantage of the fact that we're already iterating over the trips one by one to build up the patterns.
*/
public class PatternFinderValidator extends TripValidator {
private static final Logger LOG = LoggerFactory.getLogger(PatternFinderValidator.class);
PatternFinder patternFinder;
public PatternFinderValidator(Feed feed, SQLErrorStorage errorStorage) {
    super(feed, errorStorage);
    // Accumulates each trip's stop sequence as validateTrip() is called;
    // patterns are materialized from it in complete().
    patternFinder = new PatternFinder();
}
@Override
public void validateTrip (Trip trip, Route route, List<StopTime> stopTimes, List<Stop> stops) {
    // As we hit each trip, accumulate them into the wrapped PatternFinder object.
    // No validation errors are produced at this stage.
    patternFinder.processTrip(trip, stopTimes);
}
/**
 * Store patterns and pattern stops in the database. Also, update the trips table with a pattern_id column.
 */
@Override
public void complete(ValidationResult validationResult) {
    LOG.info("Updating trips with pattern IDs...");
    // FIXME: There may be a better way to handle getting the full list of stops
    Map<String, Stop> stopById = new HashMap<>();
    for (Stop stop : feed.stops) {
        stopById.put(stop.stop_id, stop);
    }
    // FIXME In the editor we need patterns to exist separately from and before trips themselves, so we make another table.
    Map<TripPatternKey, Pattern> patterns = patternFinder.createPatternObjects(stopById, errorStorage);
    Connection connection = null;
    try {
        // TODO this assumes gtfs-lib is using an SQL database and not a MapDB.
        // Maybe we should just create patterns in a separate step, but that would mean iterating over the stop_times twice.
        LOG.info("Storing pattern ID for each trip.");
        connection = feed.getConnection();
        Statement statement = connection.createStatement();
        String tripsTableName = feed.tablePrefix + "trips";
        String patternsTableName = feed.tablePrefix + "patterns";
        String patternStopsTableName = feed.tablePrefix + "pattern_stops";
        statement.execute(String.format("alter table %s add column pattern_id varchar", tripsTableName));
        // FIXME: Here we're creating a pattern table that has an integer ID field (similar to the other GTFS tables)
        // AND a varchar pattern_id with essentially the same value cast to a string. Perhaps the pattern ID should
        // be a UUID or something, just to better distinguish it from the int ID?
        statement.execute(String.format("create table %s (id serial, pattern_id varchar primary key, " +
                "route_id varchar, name varchar, shape_id varchar)", patternsTableName));
        Table patternStopsTable = new Table(patternStopsTableName, PatternStop.class, Requirement.EDITOR,
                Table.PATTERN_STOP.fields);
        String insertPatternStopSql = patternStopsTable.generateInsertSql(true);
        // Create pattern stops table with serial ID and primary key on pattern ID and stop sequence
        patternStopsTable.createSqlTable(connection, null, true, new String[]{"pattern_id", "stop_sequence"});
        PreparedStatement updateTripStatement = connection.prepareStatement(
                String.format("update %s set pattern_id = ? where trip_id = ?", tripsTableName));
        PreparedStatement insertPatternStatement = connection.prepareStatement(
                String.format("insert into %s values (DEFAULT, ?, ?, ?, ?)", patternsTableName));
        PreparedStatement insertPatternStopStatement = connection.prepareStatement(insertPatternStopSql);
        int batchSize = 0;
        // TODO update to use batch trackers
        for (Map.Entry<TripPatternKey, Pattern> entry : patterns.entrySet()) {
            Pattern pattern = entry.getValue();
            TripPatternKey key = entry.getKey();
            // First, create a pattern relation.
            insertPatternStatement.setString(1, pattern.pattern_id);
            insertPatternStatement.setString(2, pattern.route_id);
            insertPatternStatement.setString(3, pattern.name);
            // A pattern may have no associated shapes (e.g. trips without a
            // shape_id); iterator().next() on an empty set would throw
            // NoSuchElementException, so store SQL NULL in that case.
            String shapeId = pattern.associatedShapes.isEmpty()
                    ? null
                    : pattern.associatedShapes.iterator().next();
            insertPatternStatement.setString(4, shapeId);
            insertPatternStatement.addBatch();
            // Construct pattern stops based on values in trip pattern key.
            // FIXME: Use pattern stops table here?
            for (int i = 0; i < key.stops.size(); i++) {
                int travelTime = 0;
                String stopId = key.stops.get(i);
                if (i > 0) travelTime = key.arrivalTimes.get(i) - key.departureTimes.get(i - 1);
                insertPatternStopStatement.setString(1, pattern.pattern_id);
                setIntParameter(insertPatternStopStatement, 2, i);
                insertPatternStopStatement.setString(3, stopId);
                setIntParameter(insertPatternStopStatement, 4, travelTime);
                setIntParameter(insertPatternStopStatement, 5, key.departureTimes.get(i) - key.arrivalTimes.get(i));
                setIntParameter(insertPatternStopStatement, 6, key.dropoffTypes.get(i));
                setIntParameter(insertPatternStopStatement, 7, key.pickupTypes.get(i));
                insertPatternStopStatement.setDouble(8, key.shapeDistances.get(i));
                setIntParameter(insertPatternStopStatement, 9, key.timepoints.get(i));
                insertPatternStopStatement.addBatch();
                // FIXME: should each pattern stop be incrementing the batch size?
                batchSize += 1;
            }
            // Finally, update all trips on this pattern to reference this pattern's ID.
            for (String tripId : pattern.associatedTrips) {
                updateTripStatement.setString(1, pattern.pattern_id);
                updateTripStatement.setString(2, tripId);
                updateTripStatement.addBatch();
                batchSize += 1;
            }
            // If we've accumulated a lot of prepared statement calls, pass them on to the database backend.
            if (batchSize > JdbcGtfsLoader.INSERT_BATCH_SIZE) {
                updateTripStatement.executeBatch();
                insertPatternStatement.executeBatch();
                insertPatternStopStatement.executeBatch();
                batchSize = 0;
            }
        }
        // Send any remaining prepared statement calls to the database backend.
        updateTripStatement.executeBatch();
        insertPatternStatement.executeBatch();
        insertPatternStopStatement.executeBatch();
        // Release statement resources now that all batches have been flushed.
        updateTripStatement.close();
        insertPatternStatement.close();
        insertPatternStopStatement.close();
        // Index new pattern_id column on trips. The other tables are already indexed because they have primary keys.
        statement.execute(String.format("create index trips_pattern_id_idx on %s (pattern_id)", tripsTableName));
        statement.close();
        connection.commit();
        connection.close();
        LOG.info("Done storing pattern IDs.");
    } catch (SQLException e) {
        // Close transaction if failure occurs on creating patterns.
        DbUtils.closeQuietly(connection);
        // This exception will be stored as a validator failure.
        throw new RuntimeException(e);
    }
}
}
| src/main/java/com/conveyal/gtfs/validator/PatternFinderValidator.java | package com.conveyal.gtfs.validator;
import com.conveyal.gtfs.PatternFinder;
import com.conveyal.gtfs.TripPatternKey;
import com.conveyal.gtfs.error.SQLErrorStorage;
import com.conveyal.gtfs.loader.Feed;
import com.conveyal.gtfs.loader.JdbcGtfsLoader;
import com.conveyal.gtfs.loader.Requirement;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.Pattern;
import com.conveyal.gtfs.model.PatternStop;
import com.conveyal.gtfs.model.Route;
import com.conveyal.gtfs.model.Stop;
import com.conveyal.gtfs.model.StopTime;
import com.conveyal.gtfs.model.Trip;
import org.apache.commons.dbutils.DbUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Groups trips together into "patterns" that share the same sequence of stops.
* This is not a normal validator in the sense that it does not check for bad data.
* It's taking advantage of the fact that we're already iterating over the trips one by one to build up the patterns.
*/
public class PatternFinderValidator extends TripValidator {
private static final Logger LOG = LoggerFactory.getLogger(PatternFinderValidator.class);
PatternFinder patternFinder;
public PatternFinderValidator(Feed feed, SQLErrorStorage errorStorage) {
super(feed, errorStorage);
patternFinder = new PatternFinder();
}
@Override
public void validateTrip (Trip trip, Route route, List<StopTime> stopTimes, List<Stop> stops) {
// As we hit each trip, accumulate them into the wrapped PatternFinder object.
patternFinder.processTrip(trip, stopTimes);
}
/**
* Store patterns and pattern stops in the database. Also, update the trips table with a pattern_id column.
*/
@Override
public void complete(ValidationResult validationResult) {
LOG.info("Updating trips with pattern IDs...");
// FIXME: There may be a better way to handle getting the full list of stops
Map<String, Stop> stopById = new HashMap<>();
for (Stop stop : feed.stops) {
stopById.put(stop.stop_id, stop);
}
// FIXME In the editor we need patterns to exist separately from and before trips themselves, so me make another table.
Map<TripPatternKey, Pattern> patterns = patternFinder.createPatternObjects(stopById, errorStorage);
Connection connection = null;
try {
// TODO this assumes gtfs-lib is using an SQL database and not a MapDB.
// Maybe we should just create patterns in a separate step, but that would mean iterating over the stop_times twice.
LOG.info("Storing pattern ID for each trip.");
connection = feed.getConnection();
Statement statement = connection.createStatement();
String tripsTableName = feed.tablePrefix + "trips";
String patternsTableName = feed.tablePrefix + "patterns";
String patternStopsTableName = feed.tablePrefix + "pattern_stops";
statement.execute(String.format("alter table %s add column pattern_id varchar", tripsTableName));
// FIXME: Here we're creating a pattern table that has an integer ID field (similar to the other GTFS tables)
// AND a varchar pattern_id with essentially the same value cast to a string. Perhaps the pattern ID should
// be a UUID or something, just to better distinguish it from the int ID?
statement.execute(String.format("create table %s (id serial, pattern_id varchar primary key, " +
"route_id varchar, name varchar, shape_id varchar)", patternsTableName));
// FIXME: Use patterns table?
// Table patternsTable = new Table(patternsTableName, Pattern.class, Requirement.EDITOR, Table.PATTERNS.fields);
Table patternStopsTable = new Table(patternStopsTableName, PatternStop.class, Requirement.EDITOR,
Table.PATTERN_STOP.fields);
String insertPatternStopSql = patternStopsTable.generateInsertSql(true);
// Create pattern stops table with serial ID and primary key on pattern ID and stop sequence
patternStopsTable.createSqlTable(connection, null, true, new String[]{"pattern_id", "stop_sequence"});
PreparedStatement updateTripStatement = connection.prepareStatement(
String.format("update %s set pattern_id = ? where trip_id = ?", tripsTableName));
PreparedStatement insertPatternStatement = connection.prepareStatement(
String.format("insert into %s values (DEFAULT, ?, ?, ?, ?)", patternsTableName));
PreparedStatement insertPatternStopStatement = connection.prepareStatement(insertPatternStopSql);
int batchSize = 0;
// TODO update to use batch trackers
for (Map.Entry<TripPatternKey, Pattern> entry : patterns.entrySet()) {
Pattern pattern = entry.getValue();
TripPatternKey key = entry.getKey();
// First, create a pattern relation.
insertPatternStatement.setString(1, pattern.pattern_id);
insertPatternStatement.setString(2, pattern.route_id);
insertPatternStatement.setString(3, pattern.name);
insertPatternStatement.setString(4, pattern.associatedShapes.iterator().next());
insertPatternStatement.addBatch();
// Construct pattern stops based on values in trip pattern key.
// FIXME: Use pattern stops table here?
for (int i = 0; i < key.stops.size(); i++) {
int travelTime = 0;
String stopId = key.stops.get(i);
if (i > 0) travelTime = key.arrivalTimes.get(i) - key.departureTimes.get(i - 1);
insertPatternStopStatement.setString(1, pattern.pattern_id);
insertPatternStopStatement.setInt(2, i);
insertPatternStopStatement.setString(3, stopId);
insertPatternStopStatement.setInt(4, travelTime);
insertPatternStopStatement.setInt(5, key.departureTimes.get(i) - key.arrivalTimes.get(i));
insertPatternStopStatement.setInt(6, key.dropoffTypes.get(i));
insertPatternStopStatement.setInt(7, key.pickupTypes.get(i));
insertPatternStopStatement.setDouble(8, key.shapeDistances.get(i));
insertPatternStopStatement.setInt(9, key.timepoints.get(i));
insertPatternStopStatement.addBatch();
// FIXME: should each pattern stop be incrementing the batch size?
batchSize += 1;
}
// Finally, update all trips on this pattern to reference this pattern's ID.
for (String tripId : pattern.associatedTrips) {
updateTripStatement.setString(1, pattern.pattern_id);
updateTripStatement.setString(2, tripId);
updateTripStatement.addBatch();
batchSize += 1;
}
// If we've accumulated a lot of prepared statement calls, pass them on to the database backend.
if (batchSize > JdbcGtfsLoader.INSERT_BATCH_SIZE) {
updateTripStatement.executeBatch();
insertPatternStatement.executeBatch();
insertPatternStopStatement.executeBatch();
batchSize = 0;
}
}
// Send any remaining prepared statement calls to the database backend.
updateTripStatement.executeBatch();
insertPatternStatement.executeBatch();
insertPatternStopStatement.executeBatch();
// Index new pattern_id column on trips. The other tables are already indexed because they have primary keys.
statement.execute(String.format("create index trips_pattern_id_idx on %s (pattern_id)", tripsTableName));
connection.commit();
connection.close();
LOG.info("Done storing pattern IDs.");
} catch (SQLException e) {
// Close transaction if failure occurs on creating patterns.
DbUtils.closeQuietly(connection);
// This exception will be stored as a validator failure.
throw new RuntimeException(e);
}
}
}
| fix setInt on pattern stop insertion
| src/main/java/com/conveyal/gtfs/validator/PatternFinderValidator.java | fix setInt on pattern stop insertion |
|
Java | bsd-3-clause | 381ab58686b381c9fff3b0a5ef0252ed64eb24ef | 0 | kduske-n4/snuggletex,kduske-n4/snuggletex | /* $Id$
*
* Copyright 2008 University of Edinburgh.
* All Rights Reserved
*/
package uk.ac.ed.ph.snuggletex.conversion;
import uk.ac.ed.ph.snuggletex.SnuggleInput;
import uk.ac.ed.ph.snuggletex.SnuggleLogicException;
import java.util.ArrayList;
import java.util.List;
/**
* Represents the "working view" of the current {@link SnuggleInput} document. This is
* slightly non-trivial in that user-defined command and environments generally work as
* simple text substitutions, so the state of the document changes during tokenisation.
* However, we also need to be able to keep track of where each character in the document
* comes from so as to enable decent error reporting. hence the complexity within here.
*
* @author David McKain
* @version $Revision$
*/
public final class WorkingDocument {
/**
 * Marker interface that can be applied to classes that provide information for a
 * {@link CharacterSource}.
 *
 * <p>The interface declares no methods; implementations exist only to carry
 * provenance information (stored in {@link CharacterSource#context}).
 *
 * @author David McKain
 * @version $Revision$
 */
public static interface SourceContext {
    /* (Marker interface) */
}
/**
 * Represents a block of text that has been imported into this document,
 * recording where it came from and (if it was substituted in) what it replaced.
 */
public static final class CharacterSource {
    /** Contextual information about where this data came from */
    public final SourceContext context;
    /**
     * The block that this block was initially substituted into, or null to indicate that it
     * is "top level" or was appended rather than substituted.
     */
    public final CharacterSource substitutedSource;
    /**
     * The offset in {@link #substitutedSource} where the substitution occurred, or 0
     * if {@link #substitutedSource} is null.
     */
    public final int substitutionOffset;
    /**
     * The text in the parent component that this data replaced, or null
     * if {@link #substitutedSource} is null.
     */
    public final CharSequence substitutedText;
    // Lazily-built cache for toString(); computed on first call. NOTE(review):
    // not synchronized -- a race merely recomputes the same value, which is benign.
    private transient String stringRepresentation;
    /** Creates a top-level (non-substituted) source with the given context. */
    public CharacterSource(final SourceContext context) {
        this(context, null, 0, null);
    }
    /** Creates a source substituted into {@code substitutedSource} at the given offset. */
    public CharacterSource(final SourceContext context, final CharacterSource substitutedSource,
            final int substitutionOffset, final CharSequence substitutedText) {
        this.context = context;
        this.substitutedSource = substitutedSource;
        this.substitutionOffset = substitutionOffset;
        this.substitutedText = substitutedText;
    }
    @Override
    public String toString() {
        // Build once and reuse; substitution details are included only when present.
        if (stringRepresentation==null) {
            StringBuilder resultBuilder = new StringBuilder(getClass().getSimpleName())
                    .append("(context=")
                    .append(context);
            if (substitutedSource!=null) {
                resultBuilder.append(",substituted=").append(substitutedSource)
                        .append(",offset=").append(substitutionOffset)
                        .append(",substitutedText=").append(substitutedText);
            }
            resultBuilder.append(")");
            stringRepresentation = resultBuilder.toString();
        }
        return stringRepresentation;
    }
}
/**
 * Represents a contiguous portion of the document that comes from the same {@link CharacterSource}.
 * During parsing, {@link Slice}s will be broken up to create new Slices whenever user-defined
 * commands and environments are substituted.
 */
public static final class Slice {
    // Start index of this slice within the working document (inclusive).
    public final int startIndex;
    // End index of this slice within the working document (exclusive, per toString's "[start,end)").
    public final int endIndex;
    // The source the characters in this slice were resolved from.
    public final CharacterSource resolvedComponent;
    // Offset translating document indices into indices within resolvedComponent.
    public final int componentIndexOffset;
    public Slice(final int startIndex, final int endIndex, final CharacterSource resolvedComponent, final int componentIndexOffset) {
        this.startIndex = startIndex;
        this.endIndex = endIndex;
        this.resolvedComponent = resolvedComponent;
        this.componentIndexOffset = componentIndexOffset;
    }
    @Override
    public String toString() {
        return getClass().getSimpleName()
                + "(span=[" + startIndex + "," + endIndex + ") => " + resolvedComponent + "; offset " + componentIndexOffset + ")";
    }
}
/**
 * Holds information about how a particular index in the document is resolved against the {@link Slice}
 * in which it is contained.
 *
 * @author David McKain
 * @version $Revision$
 */
public static class IndexResolution {

    /** Position of {@link #slice} within the scoreboard */
    public final int scoreboardIndex;

    /** Slice containing the resolved index */
    public final Slice slice;

    /** Equivalent index within the slice's source component */
    public final int indexInComponent;

    public IndexResolution(final int scoreboardIndex, final Slice slice, final int indexInComponent) {
        this.scoreboardIndex = scoreboardIndex;
        this.slice = slice;
        this.indexInComponent = indexInComponent;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(getClass().getSimpleName());
        sb.append("(sbIndex=").append(scoreboardIndex);
        sb.append(", slice=").append(slice);
        sb.append(", indexInComponent=").append(indexInComponent);
        sb.append(")");
        return sb.toString();
    }
}
//----------------------------------------------------------------
/** Underlying character data for the current state of the document */
private final StringBuilder buffer;

/** "Scoreboard" of {@link Slice}s, in document order, mapping regions of the document to their sources */
private final List<Slice> scoreBoard;

/** Current length of document (kept synced with buffer) */
private int length;

/** Index below which the document is frozen and may no longer be substituted (see {@link #freezeSlice(int, int)}) */
private int freezeIndex;
/**
 * Creates a working document holding the given initial data, recording the given
 * {@link SourceContext} as the provenance of all of that data.
 */
public WorkingDocument(final CharSequence initialData, final SourceContext context) {
    this.scoreBoard = new ArrayList<Slice>();
    this.freezeIndex = 0;

    /* Import data into buffer */
    this.buffer = new StringBuilder(initialData);

    /* Set current length based on the length of buffer */
    this.length = buffer.length();

    /* Set up scoreboard to contain the initial data only */
    CharacterSource initialComponent = new CharacterSource(context);
    scoreBoard.add(new Slice(0, length, initialComponent, 0));
}
/**
 * Freezes the [startIndex, endIndex) region of the document, advancing the freeze index to
 * at least endIndex so that later {@link #substitute} calls cannot modify anything before it,
 * and returns a {@link FrozenSlice} handle for the region.
 *
 * @throws IndexOutOfBoundsException if either index is outside the document
 * @throws IllegalArgumentException if startIndex &gt; endIndex
 */
public FrozenSlice freezeSlice(final int startIndex, final int endIndex) {
    checkRange(startIndex, endIndex);
    /* The freeze index only ever moves right here; unfreeze() can move it back */
    freezeIndex = Math.max(freezeIndex, endIndex);
    return new FrozenSlice(this, startIndex, endIndex);
}
/**
 * This is a rather nasty method which resets the "freeze index" of this document back to the
 * given index. Any {@link FrozenSlice}s that may previously have spanned this index are no
 * longer valid as their contents may change.
 * <p>
 * This class does NOT attempt to enforce this in any way - it is up to the caller to make
 * sure he/she understands the implications of doing this.
 * <p>
 * The main requirement for having this is when evaluating single-argument user-defined commands
 * like <tt>\\udc \\alpha</tt> where the argument is implicitly taken to be the next
 * valid token.
 *
 * @param newFreezeIndex new value for the freeze index; no validation is performed
 */
public void unfreeze(final int newFreezeIndex) {
    this.freezeIndex = newFreezeIndex;
}
/** Returns the current length of the document (kept in sync with the underlying buffer). */
public int length() {
    return length;
}
/** Index in scoreboard of last successful resolution used in {@link #resolveIndex(int, boolean)} below */
int lastResolvedSliceIndex = 0;

/**
 * Resolves the given document index against the scoreboard, returning an
 * {@link IndexResolution} describing the {@link Slice} containing it, or null if no slice
 * contains it (e.g. index==length() with fallLeftOnBoundaries false, or index==0 with it true).
 *
 * @param index document index to resolve (must lie within [0, length])
 * @param fallLeftOnBoundaries if true, an index sitting exactly on the boundary between two
 *   slices resolves into the slice on its left; if false, into the slice on its right
 */
public IndexResolution resolveIndex(final int index, final boolean fallLeftOnBoundaries) {
    checkIndex(index, "Index");
    if (fallLeftOnBoundaries && index==0) {
        /* If falling to left at index 0, then we miss the first component completely. This is
         * not useful in practice!
         */
        return null;
    }
    /* Search for the slice containing this index. We'll search from the index of the last
     * successful call to here (0 on first call) since most calls to this method are made on
     * indices quite close together.
     */
    IndexResolution result = null;
    Slice mapping;
    int sliceIndex = lastResolvedSliceIndex;
    int numSlices = scoreBoard.size();
    while (sliceIndex>=0 && sliceIndex<numSlices) {
        mapping = scoreBoard.get(sliceIndex);
        if (mapping.startIndex>index) {
            /* We're too far to the right, so let's go back one place. */
            sliceIndex--;
        }
        else if (index<mapping.endIndex || (fallLeftOnBoundaries && index==mapping.endIndex)) {
            /* Success! */
            result = new IndexResolution(sliceIndex, mapping, index + mapping.componentIndexOffset);
            break;
        }
        else {
            /* Move to the right */
            sliceIndex++;
        }
    }
    /* Remember where we found it to speed up the next (likely nearby) resolution.
     * NOTE(review): this unsynchronized mutable cache assumes single-threaded use - confirm.
     */
    lastResolvedSliceIndex = result!=null ? sliceIndex : lastResolvedSliceIndex;
    return result;
}
/**
 * Returns the character at the given document index, or -1 if the index lies outside
 * the current document bounds.
 */
public int charAt(final int index) {
    if (index < 0 || index >= length) {
        return -1;
    }
    return buffer.charAt(index);
}
/** Returns true if the character at the given index is c. Out-of-range indices never match. */
public boolean matchesAt(final int index, final char c) {
    return charAt(index)==c;
}
/**
 * Returns true if the document content starting at the given index matches the given
 * String exactly, character for character.
 */
public boolean matchesAt(final int index, final String s) {
    if (length()-index < s.length()) {
        /* Can't possibly match if we don't have enough characters */
        return false;
    }
    /* Use contentEquals() rather than equals() here: extract() is only guaranteed to return
     * a CharSequence, and String.equals() returns false for any non-String argument
     * regardless of its character content. contentEquals() compares characters directly.
     */
    return s.contentEquals(extract(index, index+s.length()));
}
/**
 * Searches forwards from startSearchIndex for the next occurrence of the character c,
 * returning its index or -1 if it is not found.
 */
public int indexOf(final int startSearchIndex, final char c) {
    int searchIndex = startSearchIndex;
    while (searchIndex < length()) {
        if (charAt(searchIndex) == c) {
            return searchIndex;
        }
        searchIndex++;
    }
    return -1;
}
/**
 * Searches forwards from startSearchIndex for the next occurrence of the String s,
 * returning the index of its first character or -1 if it is not found.
 */
public int indexOf(final int startSearchIndex, final String s) {
    /* No match can begin beyond this index, as there would be too few characters left */
    final int lastPossibleIndex = length() - s.length();
    for (int searchIndex = startSearchIndex; searchIndex <= lastPossibleIndex; searchIndex++) {
        if (matchesAt(searchIndex, s)) {
            return searchIndex;
        }
    }
    return -1;
}
/**
 * Returns true if the [startIndex, endIndex) region is non-empty and consists entirely of
 * whitespace characters.
 * <p>
 * Note that an empty region (startIndex==endIndex) deliberately returns false.
 */
public boolean isRegionWhitespace(final int startIndex, final int endIndex) {
    if (startIndex==endIndex) {
        return false;
    }
    for (int i=startIndex; i<endIndex; i++) {
        /* charAt() yields -1 for out-of-range indices, which is not whitespace */
        if (!Character.isWhitespace(charAt(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the entire current document content.
 * <p>
 * NOTE(review): this exposes the internal mutable buffer directly, so the returned
 * CharSequence will reflect any subsequent substitutions - confirm callers expect that.
 */
public CharSequence extract() {
    return buffer;
}
/**
 * Returns the content of the [startIndex, endIndex) region of the document.
 *
 * @throws IndexOutOfBoundsException if either index is outside the document
 * @throws IllegalArgumentException if startIndex &gt; endIndex
 */
public CharSequence extract(final int startIndex, final int endIndex) {
    checkRange(startIndex, endIndex);
    return buffer.subSequence(startIndex, endIndex);
}
/**
 * Substitutes the [startIndex, endIndex) region of the document with the given replacement
 * data, recording the given {@link SourceContext} as its provenance, and returns the
 * {@link CharacterSource} created for the newly inserted data. If startIndex==endIndex==length
 * this becomes an append. The scoreboard of {@link Slice}s is rebuilt so that every document
 * index still resolves to the correct source component afterwards.
 *
 * @throws IllegalArgumentException if the substitution would modify the frozen part of the document
 * @throws IndexOutOfBoundsException if either index is outside the document
 */
public CharacterSource substitute(final int startIndex, final int endIndex,
        final CharSequence data, final SourceContext context) {
    checkRange(startIndex, endIndex);

    /* Make sure we are not trying to change the frozen part of the document */
    if (startIndex < freezeIndex) {
        throw new IllegalArgumentException("Cannot modify frozen part of document (startIndex=" + startIndex
                + ",freezeIndex=" + freezeIndex
                + ",attemptedText=" + buffer.substring(freezeIndex, Math.min(freezeIndex+20, length))
                + ")");
    }

    /* Work out which components the start of the substitution lies in */
    IndexResolution startResolution = resolveIndex(startIndex, false);
    if (startResolution==null) {
        /* Special case - we are *appending* to the document, rather than substituting
         * existing content. This case is somewhat simpler to deal with so we will do it
         * all now.
         */
        CharacterSource toAppend = new CharacterSource(context);
        int newStartIndex = length;
        buffer.append(data);
        int newLength = buffer.length();
        Slice newEndSlice = new Slice(newStartIndex, newLength, toAppend, -newStartIndex);
        scoreBoard.add(newEndSlice);
        length = newLength;
        return toAppend;
    }
    /* If still here, then we're doing a genuine substitution (or insertion). The first
     * thing we do is modify the StringBuilder.
     */
    CharSequence beingReplaced = buffer.subSequence(startIndex, endIndex);
    buffer.delete(startIndex, endIndex);
    buffer.insert(startIndex, data);

    /* Next up, we have to rebuild the scoreboard. This is much more complicated so is best
     * demonstrated with some examples.
     *
     * First note that the scoreboard will change from the slice containing to the startIndex
     * all the way to the end.
     *
     * Let's assume for convenience that there are 4 entries in the scoreboard and that
     * the resolved startIndex lies within the entry with index 1 (i.e. the second entry).
     *
     * [..............)[...............)[..................)[..............)
     *
     * Expl 1: [XXXXXXXXX) is being replaced with [YYYYYYY) to give
     * [..............)[YYYYYYY)[...)[..................)[..............)
     * This stays repl left- next component similarly
     * the same over has offset shifted
     *
     * Here the replacement's left boundary sits nicely against an existing slice.
     *
     * Expl 2: [XXXXXXXXX) is being replaced with [YYYYYYY) to give
     * [..............)[)[YYYYYYY)[.)[..................)[..............)
     * Stays same cut repl left shift shift
     *
     * Here we need to create a slice out of the second component before continuing
     * in the same way.
     *
     * Expl 3: [XXXXXXXXXXXXXXXXXXXXX) is being replaced with [Y) to give
     * [..............)[Y)[..........)[..............)
     * Stays same repl slice shift left
     *
     * Here, the replacement spans more than 1 slice. The original 3rd slice needs
     * to be cut down and shifted left. The 4th slice is just shifted left. Phew!!!
     */
    /* Anyway, in all cases we start off by keeping the slices that lie before the
     * one containing the start of the substitution.
     */
    int startSliceIndex = startResolution.scoreboardIndex;
    List<Slice> newScoreBoard = new ArrayList<Slice>();
    for (int i=0; i<startSliceIndex; i++) {
        newScoreBoard.add(scoreBoard.get(i));
    }
    /* Next up, we look at the slice containing the start of the substitution, making a
     * new slice to reflect "the bit before" if it is required. The offset of this component
     * does not change.
     */
    Slice startSlice = startResolution.slice;
    if (startIndex > startSlice.startIndex) {
        Slice bitBefore = new Slice(startSlice.startIndex, startIndex,
                startSlice.resolvedComponent, startSlice.componentIndexOffset);
        newScoreBoard.add(bitBefore);
    }
    /* Now we create a slice corresponding to the substitution. The offset of this slice is
     * set to ensure that currentIndex corresponds to index 0 in the substitution - i.e.
     * shift left by currentIndex.
     */
    int currentIndex = startIndex;
    int substitutionSize = data.length();
    CharacterSource result = new CharacterSource(context, startResolution.slice.resolvedComponent,
            startResolution.indexInComponent, beingReplaced);
    Slice substitutionSlice = new Slice(currentIndex, currentIndex + substitutionSize, result, -currentIndex);
    currentIndex += substitutionSize;
    newScoreBoard.add(substitutionSlice);
    /* Next we need to find out where the end of the substitution is. All slices before this
     * one will be removed from the scoreboard. From the current end slice, we will re-slice
     * it if required to include whatever comes immediately after the substitution end point.
     *
     * To calculate what happens to the offset, we look at 'endIndex' (before) and
     * 'currentIndex' (after). Both must map to the same point in the original character source.
     * Thus:
     *
     * endIndex + before offset comp = currentIndex + after offset
     *
     * i.e. after offset = before offset + before index in comp + endIndex - currentIndex;
     */
    IndexResolution endResolution = resolveIndex(endIndex, true);
    Slice endSlice = endResolution.slice;
    if (endIndex < endSlice.endIndex) {
        int bitAfterSize = endSlice.endIndex - endIndex;
        int resultOffset = endSlice.componentIndexOffset + endIndex - currentIndex;
        Slice bitAfter = new Slice(currentIndex, currentIndex + bitAfterSize,
                endSlice.resolvedComponent, resultOffset);
        newScoreBoard.add(bitAfter);
        currentIndex += bitAfterSize;
    }
    /* Finally, we include shifted versions of all slices that followed the end index in the
     * original scoreboard. The change in offset is calculated by looking at the left
     * endpoint of the slice before and after shifted, which must map to the same point
     * in the original character source. Thus we have:
     *
     * before startIndex + before offset = after startIndex + after offset
     *
     * i.e. after offset = before offset + before startIndex - after startIndex
     */
    Slice trailingSlice, shiftedTrailingSlice;
    for (int i=endResolution.scoreboardIndex+1, size=scoreBoard.size(); i<size; i++) {
        trailingSlice = scoreBoard.get(i);
        int afterSliceLength = trailingSlice.endIndex - trailingSlice.startIndex;
        int shiftedOffset = trailingSlice.componentIndexOffset + trailingSlice.startIndex - currentIndex;
        shiftedTrailingSlice = new Slice(currentIndex, currentIndex + afterSliceLength,
                trailingSlice.resolvedComponent, shiftedOffset);
        newScoreBoard.add(shiftedTrailingSlice);
        currentIndex += afterSliceLength;
    }
    /* Replace scoreboard */
    scoreBoard.clear();
    scoreBoard.addAll(newScoreBoard);

    /* Finally sync up lengths */
    if (buffer.length()!=currentIndex) {
        throw new SnuggleLogicException("Failed sanity check: buffer length is " + buffer.length() + ", last board index=" + currentIndex);
    }
    length = currentIndex;
    return result;
}
/** Validates that [startIndex, endIndex) is a well-formed range within the current document bounds. */
private void checkRange(final int startIndex, final int endIndex) {
    checkIndex(startIndex, "Start Index");
    checkIndex(endIndex, "End Index");
    if (endIndex < startIndex) {
        throw new IllegalArgumentException("Start index " + startIndex + " must be <= end index " + endIndex);
    }
}
/**
 * Validates that the given index lies within the current document bounds [0, length],
 * throwing an {@link IndexOutOfBoundsException} prefixed with errorStart if not.
 * <p>
 * Note that index==length is accepted, which permits appends and exclusive end indices.
 */
private void checkIndex(final int index, final String errorStart) {
    if (index < 0 || index > length) {
        /* The accepted range is inclusive at both ends, so report "[0,length]" rather than
         * the previous, misleading "[0,length)".
         */
        throw new IndexOutOfBoundsException(errorStart + " " + index
                + " is outwith the current bounds [0," + length + "]");
    }
}
//---------------------------------------------------
// For debugging during development

/** Prints each {@link Slice} in the scoreboard to stdout, one per line (debugging aid only). */
public void dumpScoreboard() {
    for (Slice mapping : scoreBoard) {
        System.out.println(mapping);
    }
}
/** Ad hoc manual test harness used during development; the commented-out lines are earlier experiments. */
public static void main(String[] args) {
    SourceContext ctx = null;
    WorkingDocument d = new WorkingDocument("\\mycommand blah blah", ctx);
    System.out.println("INITIAL STATE\n");
    // d.dumpScoreboard();
    //
    d.substitute(0, 10, "This expands to \\bob", ctx);
    System.out.println("AFTER FIRST");
    d.dumpScoreboard();
    //
    d.substitute(16, 20, "[Bob expanded]", ctx);
    System.out.println("\nAFTER SECOND");
    d.dumpScoreboard();
    // d.substitute(d.length(), d.length(), "You", ctx);
    //
    // System.out.println("\nAFTER FIRST SUBS\n");
    // d.dumpScoreboard();
    //
    // d.substitute(1,1, "INSERTED", ctx);
    // d.substitute(14,15, "a", ctx);
    // d.substitute(2,3, "stuff", ctx);
    ////
    // System.out.println("\nAFTER SECOND SUBS\n");
    // d.dumpScoreboard();
    // System.out.println("STATE IS NOW " + d.extract());
    // d.freezeSlice(0, 10);
    // d.substitute(10, 12, "bob", ctx);
    System.out.println(d.extract());
}
}
| src/uk/ac/ed/ph/snuggletex/conversion/WorkingDocument.java | /* $Id$
*
* Copyright 2008 University of Edinburgh.
* All Rights Reserved
*/
package uk.ac.ed.ph.snuggletex.conversion;
import uk.ac.ed.ph.snuggletex.SnuggleInput;
import uk.ac.ed.ph.snuggletex.SnuggleLogicException;
import java.util.ArrayList;
import java.util.List;
/**
* Represents the "working view" of the current {@link SnuggleInput} document. This is
* slightly non-trivial in that user-defined command and environments generally work as
* simple text substitutions, so the state of the document changes during tokenisation.
* However, we also need to be able to keep track of where each character in the document
* comes from so as to enable decent error reporting. hence the complexity within here.
*
* @author David McKain
* @version $Revision$
*/
public final class WorkingDocument {
/**
* Marker interface that can be applied to classes that provide information for a
* {@link CharacterSource}.
*
* @author David McKain
* @version $Revision$
*/
public static interface SourceContext {
/* (Marker interface) */
}
/**
* Represents a block of text that has been imported into this document.
*/
public static final class CharacterSource {
/** Contextual information about where this data came from */
public final SourceContext context;
/**
* The block that this block was initially substituted into, or null to indicate that it
* is "top level" or was appended rather than substituted.
*/
public final CharacterSource substitutedSource;
/**
* The offset in {@link #substitutedSource} where the substitution occurred, or 0
* if {@link #substitutedSource} is null.
*/
public final int substitutionOffset;
/**
* The text in the parent component that this data replaced, or null
* if {@link #substitutedSource} is null.
*/
public final CharSequence substitutedText;
private transient String stringRepresentation;
public CharacterSource(final SourceContext context) {
this(context, null, 0, null);
}
public CharacterSource(final SourceContext context, final CharacterSource substitutedSource,
final int substitutionOffset, final CharSequence substitutedText) {
this.context = context;
this.substitutedSource = substitutedSource;
this.substitutionOffset = substitutionOffset;
this.substitutedText = substitutedText;
}
@Override
public String toString() {
if (stringRepresentation==null) {
StringBuilder resultBuilder = new StringBuilder(getClass().getSimpleName())
.append("(context=")
.append(context);
if (substitutedSource!=null) {
resultBuilder.append(",substituted=").append(substitutedSource)
.append(",offset=").append(substitutionOffset)
.append(",substitutedText=").append(substitutedText);
}
resultBuilder.append(")");
stringRepresentation = resultBuilder.toString();
}
return stringRepresentation;
}
}
/**
* Represents a contiguous portion of the document hat comes from the same {@link CharacterSource}.
* During parsing, {@link Slice}s will be broken up to create new Slices whenever user-defined
* commands and environments are substituted.
*/
public static final class Slice {
public final int startIndex;
public final int endIndex;
public final CharacterSource resolvedComponent;
public final int componentIndexOffset;
public Slice(final int startIndex, final int endIndex, final CharacterSource resolvedComponent, final int componentIndexOffset) {
this.startIndex = startIndex;
this.endIndex = endIndex;
this.resolvedComponent = resolvedComponent;
this.componentIndexOffset = componentIndexOffset;
}
@Override
public String toString() {
return getClass().getSimpleName()
+ "(span=[" + startIndex + "," + endIndex + ") => " + resolvedComponent + "; offset " + componentIndexOffset + ")";
}
}
/**
* Holds information about how a particular index in the document is resolved against the {@link Slice}
* in which it is contained.
*
* @author David McKain
* @version $Revision$
*/
public static class IndexResolution {
public final int scoreboardIndex;
public final Slice slice;
public final int indexInComponent;
public IndexResolution(final int scoreboardIndex, final Slice slice, final int indexInComponent) {
this.scoreboardIndex = scoreboardIndex;
this.slice = slice;
this.indexInComponent = indexInComponent;
}
@Override
public String toString() {
return getClass().getSimpleName()
+ "(sbIndex=" + scoreboardIndex
+ ", slice=" + slice
+ ", indexInComponent=" + indexInComponent
+ ")";
}
}
//----------------------------------------------------------------
private final StringBuilder buffer;
private final List<Slice> scoreBoard;
/** Current length of document (kept synced with buffer) */
private int length;
private int freezeIndex;
public WorkingDocument(final CharSequence initialData, final SourceContext context) {
this.scoreBoard = new ArrayList<Slice>();
this.freezeIndex = 0;
/* Import data into buffer */
this.buffer = new StringBuilder(initialData);
/* Set current length based on the length of buffer */
this.length = buffer.length();
/* Set up scoreboard to contain the initial data only */
CharacterSource initialComponent = new CharacterSource(context);
scoreBoard.add(new Slice(0, length, initialComponent, 0));
}
public FrozenSlice freezeSlice(final int startIndex, final int endIndex) {
checkRange(startIndex, endIndex);
freezeIndex = Math.max(freezeIndex, endIndex);
return new FrozenSlice(this, startIndex, endIndex);
}
/**
* This is a rather nasty method which resets the "freeze index" of this document back to the
* given index. Any {@link FrozenSlice}s that may previously have spanned this index are no
* longer valid as their contents may change.
* <p>
* This class does NOT attempt to enforce this in any way - it is up to the caller to make
* sure he/she understands the implications of doing this.
* <p>
* The main requirement for having this is when evaluating single-argument user-defined commands
* like <tt>\\udc \\alpha</tt> where the argument is implicitly taken to be the next
* valid token.
*
* @param newFreezeIndex
*/
public void unfreeze(final int newFreezeIndex) {
this.freezeIndex = newFreezeIndex;
}
public int length() {
return length;
}
/** Index in scoreboard of last successful resolution used in {@link #resolveIndex(int, boolean)} below */
int lastResolvedSliceIndex = 0;
public IndexResolution resolveIndex(final int index, final boolean fallLeftOnBoundaries) {
checkIndex(index, "Index");
if (fallLeftOnBoundaries && index==0) {
/* If falling to left at index 0, then we miss the first component completely. This is
* not useful in practice!
*/
return null;
}
/* Search for the slice containing this index. We'll search from the index of the last
* successful call to here (0 on first call) since most calls to this method are made on
* indices quite close together.
*/
IndexResolution result = null;
Slice mapping;
int sliceIndex = lastResolvedSliceIndex;
int numSlices = scoreBoard.size();
while (sliceIndex>=0 && sliceIndex<numSlices) {
mapping = scoreBoard.get(sliceIndex);
if (mapping.startIndex>index) {
/* We're too far to the right, so let's go back one place. */
sliceIndex--;
}
else if (index<mapping.endIndex || (fallLeftOnBoundaries && index==mapping.endIndex)) {
/* Success! */
result = new IndexResolution(sliceIndex, mapping, index + mapping.componentIndexOffset);
break;
}
else {
/* Move to the right */
sliceIndex++;
}
}
lastResolvedSliceIndex = result!=null ? sliceIndex : lastResolvedSliceIndex;
return result;
}
public int charAt(final int index) {
if (index>=0 && index<length) {
return buffer.charAt(index);
}
return -1;
}
public int indexOf(final int startSearchIndex, final char c) {
for (int i=startSearchIndex; i<length(); i++) {
if (charAt(i)==c) {
return i;
}
}
return -1;
}
public boolean matchesAt(final int index, final char c) {
return charAt(index)==c;
}
public boolean matchesAt(final int index, final String s) {
if (length()-index < s.length()) {
/* Can't possibly match if we don't have enough characters */
return false;
}
return s.equals(extract(index, index+s.length()));
}
public boolean isRegionWhitespace(final int startIndex, final int endIndex) {
if (startIndex==endIndex) {
return false;
}
for (int i=startIndex; i<endIndex; i++) {
if (!Character.isWhitespace(charAt(i))) {
return false;
}
}
return true;
}
public CharSequence extract() {
return buffer;
}
public CharSequence extract(final int startIndex, final int endIndex) {
checkRange(startIndex, endIndex);
return buffer.subSequence(startIndex, endIndex);
}
public CharacterSource substitute(final int startIndex, final int endIndex,
final CharSequence data, final SourceContext context) {
checkRange(startIndex, endIndex);
/* Make sure we are not trying to change the frozen part of the document */
if (startIndex < freezeIndex) {
throw new IllegalArgumentException("Cannot modify frozen part of document (startIndex=" + startIndex
+ ",freezeIndex=" + freezeIndex
+ ",attemptedText=" + buffer.substring(freezeIndex, Math.min(freezeIndex+20, length))
+ ")");
}
/* Work out which components the start of the substitution lies in */
IndexResolution startResolution = resolveIndex(startIndex, false);
if (startResolution==null) {
/* Special case - we are *appending* to the document, rather than substituting
* existing content. This case is somewhat simpler to deal with so we will do it
* all now.
*/
CharacterSource toAppend = new CharacterSource(context);
int newStartIndex = length;
buffer.append(data);
int newLength = buffer.length();
Slice newEndSlice = new Slice(newStartIndex, newLength, toAppend, -newStartIndex);
scoreBoard.add(newEndSlice);
length = newLength;
return toAppend;
}
/* If still here, then we're doing a genuine substitution (or insertion). The first
* thing we do is modify the StringBuilder.
*/
CharSequence beingReplaced = buffer.subSequence(startIndex, endIndex);
buffer.delete(startIndex, endIndex);
buffer.insert(startIndex, data);
/* Next up, we have to rebuild the scoreboard. This is much more complicated so is best
* demonstrated with some examples.
*
* First note that the scoreboard will change from the slice containing to the startIndex
* all the way to the end.
*
* Let's assume for convenience that there are 4 entries in the scoreboard and that
* the resolved startIndex lies within the entry with index 1 (i.e. the second entry).
*
* [..............)[...............)[..................)[..............)
*
* Expl 1: [XXXXXXXXX) is being replaced with [YYYYYYY) to give
* [..............)[YYYYYYY)[...)[..................)[..............)
* This stays repl left- next component similarly
* the same over has offset shifted
*
* Here the replacement's left boundary sits nicely against an existing slice.
*
* Expl 2: [XXXXXXXXX) is being replaced with [YYYYYYY) to give
* [..............)[)[YYYYYYY)[.)[..................)[..............)
* Stays same cut repl left shift shift
*
* Here we need to create a slice out of the second component before continuing
* in the same way.
*
* Expl 3: [XXXXXXXXXXXXXXXXXXXXX) is being replaced with [Y) to give
* [..............)[Y)[..........)[..............)
* Stays same repl slice shift left
*
* Here, the replacement spans more than 1 slice. The original 3rd slice needs
* to be cut down and shifted left. The 4th slice is just shifted left. Phew!!!
*/
/* Anyway, in all cases we start off by keeping the slices that lie before the
* one containing the start of the substitution.
*/
int startSliceIndex = startResolution.scoreboardIndex;
List<Slice> newScoreBoard = new ArrayList<Slice>();
for (int i=0; i<startSliceIndex; i++) {
newScoreBoard.add(scoreBoard.get(i));
}
/* Next up, we look at the slice containing the start of the substitution, making a
* new slice to reflect "the bit before" if it is required. The offset of this component
* does not change.
*/
Slice startSlice = startResolution.slice;
if (startIndex > startSlice.startIndex) {
Slice bitBefore = new Slice(startSlice.startIndex, startIndex,
startSlice.resolvedComponent, startSlice.componentIndexOffset);
newScoreBoard.add(bitBefore);
}
/* Now we create a slice corresponding to the substitution. The offset of this slice is
* set to ensure that currentIndex corresponds to index 0 in the substitution - i.e.
* shift left by currentIndex.
*/
int currentIndex = startIndex;
int substitutionSize = data.length();
CharacterSource result = new CharacterSource(context, startResolution.slice.resolvedComponent,
startResolution.indexInComponent, beingReplaced);
Slice substitutionSlice = new Slice(currentIndex, currentIndex + substitutionSize, result, -currentIndex);
currentIndex += substitutionSize;
newScoreBoard.add(substitutionSlice);
/* Next we need to find out where the end of the substitution is. All slices before this
* one will be removed from the scoreboard. From the current end slice, we will re-slice
* it if required to include whatever comes immediately after the substitution end point.
*
* To calculate what happens to the offset, we look at 'endIndex' (before) and
* 'currentIndex' (after). Both must map to the same point in the original character source.
* Thus:
*
* endIndex + before offset comp = currentIndex + after offset
*
* i.e. after offset = before offset + before index in comp + endIndex - currentIndex;
*/
IndexResolution endResolution = resolveIndex(endIndex, true);
Slice endSlice = endResolution.slice;
if (endIndex < endSlice.endIndex) {
int bitAfterSize = endSlice.endIndex - endIndex;
int resultOffset = endSlice.componentIndexOffset + endIndex - currentIndex;
Slice bitAfter = new Slice(currentIndex, currentIndex + bitAfterSize,
endSlice.resolvedComponent, resultOffset);
newScoreBoard.add(bitAfter);
currentIndex += bitAfterSize;
}
/* Finally, we include shifted versions of all slices that followed the end index in the
* original scoreboard. The change in offset is calculated by looking at the left
* endpoint of the slice before and after shifted, which must map to the same point
* in the original character source. Thus we have:
*
* before startIndex + before offset = after startIndex + after offset
*
* i.e. after offset = before offset + before startIndex - after startIndex
*/
Slice trailingSlice, shiftedTrailingSlice;
for (int i=endResolution.scoreboardIndex+1, size=scoreBoard.size(); i<size; i++) {
trailingSlice = scoreBoard.get(i);
int afterSliceLength = trailingSlice.endIndex - trailingSlice.startIndex;
int shiftedOffset = trailingSlice.componentIndexOffset + trailingSlice.startIndex - currentIndex;
shiftedTrailingSlice = new Slice(currentIndex, currentIndex + afterSliceLength,
trailingSlice.resolvedComponent, shiftedOffset);
newScoreBoard.add(shiftedTrailingSlice);
currentIndex += afterSliceLength;
}
/* Replace scoreboard */
scoreBoard.clear();
scoreBoard.addAll(newScoreBoard);
/* Finally sync up lengths */
if (buffer.length()!=currentIndex) {
throw new SnuggleLogicException("Failed sanity check: buffer length is " + buffer.length() + ", last board index=" + currentIndex);
}
length = currentIndex;
return result;
}
private void checkRange(final int startIndex, final int endIndex) {
checkIndex(startIndex, "Start Index");
checkIndex(endIndex, "End Index");
if (startIndex>endIndex) {
throw new IllegalArgumentException("Start index " + startIndex + " must be <= end index " + endIndex);
}
}
private void checkIndex(final int index, final String errorStart) {
if (!(index>=0 && index<=length)) {
throw new IndexOutOfBoundsException(errorStart + " " + index
+ " is outwith the current bounds [0," + length + ")");
}
}
//---------------------------------------------------
// For debugging during development
public void dumpScoreboard() {
for (Slice mapping : scoreBoard) {
System.out.println(mapping);
}
}
public static void main(String[] args) {
SourceContext ctx = null;
WorkingDocument d = new WorkingDocument("\\mycommand blah blah", ctx);
System.out.println("INITIAL STATE\n");
// d.dumpScoreboard();
//
d.substitute(0, 10, "This expands to \\bob", ctx);
System.out.println("AFTER FIRST");
d.dumpScoreboard();
//
d.substitute(16, 20, "[Bob expanded]", ctx);
System.out.println("\nAFTER SECOND");
d.dumpScoreboard();
// d.substitute(d.length(), d.length(), "You", ctx);
//
// System.out.println("\nAFTER FIRST SUBS\n");
// d.dumpScoreboard();
//
// d.substitute(1,1, "INSERTED", ctx);
// d.substitute(14,15, "a", ctx);
// d.substitute(2,3, "stuff", ctx);
////
// System.out.println("\nAFTER SECOND SUBS\n");
// d.dumpScoreboard();
// System.out.println("STATE IS NOW " + d.extract());
// d.freezeSlice(0, 10);
// d.substitute(10, 12, "bob", ctx);
System.out.println(d.extract());
}
}
| Added indexOf() method taking a String as second parameter. | src/uk/ac/ed/ph/snuggletex/conversion/WorkingDocument.java | Added indexOf() method taking a String as second parameter. |
|
Java | bsd-3-clause | 96bfbd7998e9b1239f9ffc253e6deb831d70544d | 0 | RockinRoel/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jamming/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkB
enchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks | package org.smartboot.servlet;
import org.smartboot.aio.EnhanceAsynchronousChannelProvider;
import org.smartboot.http.HttpRequest;
import org.smartboot.http.HttpResponse;
import org.smartboot.http.server.HttpMessageProcessor;
import org.smartboot.http.server.HttpRequestProtocol;
import org.smartboot.http.server.Request;
import org.smartboot.http.server.handle.HttpHandle;
import org.smartboot.servlet.conf.ServletInfo;
import org.smartboot.socket.StateMachineEnum;
import org.smartboot.socket.buffer.BufferFactory;
import org.smartboot.socket.buffer.BufferPagePool;
import org.smartboot.socket.extension.plugins.MonitorPlugin;
import org.smartboot.socket.extension.processor.AbstractMessageProcessor;
import org.smartboot.socket.transport.AioQuickServer;
import org.smartboot.socket.transport.AioSession;
import java.io.IOException;
/**
 * Bootstraps the smart-servlet container and exposes the TechEmpower
 * benchmark endpoints over a smart-socket HTTP server.
 *
 * @author 三刀([email protected])
 * @version V1.0 , 2020/12/22
 */
public class Bootstrap {

    /**
     * Registers the plaintext and json servlets with the servlet container,
     * starts it, and wires incoming HTTP requests into the container.
     */
    public static void main(String[] args) {
        // Swap in smart-socket's enhanced AIO channel provider before any channel is opened.
        System.setProperty("java.nio.channels.spi.AsynchronousChannelProvider",
                EnhanceAsynchronousChannelProvider.class.getName());

        ContainerRuntime container = new ContainerRuntime();
        ApplicationRuntime application = new ApplicationRuntime("/");

        // /plaintext -> HelloWorldServlet
        ServletInfo plainText = new ServletInfo();
        plainText.setServletName("plaintext");
        plainText.setServletClass(HelloWorldServlet.class.getName());
        plainText.addMapping("/plaintext");
        application.getDeploymentInfo().addServlet(plainText);

        // /json -> JsonServlet
        ServletInfo json = new ServletInfo();
        json.setServletName("json");
        json.setServletClass(JsonServlet.class.getName());
        json.addMapping("/json");
        application.getDeploymentInfo().addServlet(json);

        container.addRuntime(application);
        container.start();

        // Route every decoded HTTP exchange into the servlet container.
        HttpMessageProcessor processor = new HttpMessageProcessor();
        processor.pipeline(new HttpHandle() {
            @Override
            public void doHandle(HttpRequest request, HttpResponse response) throws IOException {
                container.doHandle(request, response);
            }
        });
        http(processor);
    }

    /**
     * Starts an {@link AioQuickServer} on port 8080 that feeds decoded
     * {@link Request} messages into the supplied HTTP message processor.
     *
     * @param processor the HTTP pipeline to delegate messages and state events to
     */
    public static void http(final HttpMessageProcessor processor) {
        AbstractMessageProcessor<Request> adapter = new AbstractMessageProcessor<Request>() {
            @Override
            public void process0(AioSession session, Request msg) {
                processor.process(session, msg);
            }

            @Override
            public void stateEvent0(AioSession session, StateMachineEnum stateMachineEnum, Throwable throwable) {
                processor.stateEvent(session, stateMachineEnum, throwable);
            }
        };
        adapter.addPlugin(new MonitorPlugin(5));
        // messageProcessor.addPlugin(new SocketOptionPlugin());

        final int cpuNum = Runtime.getRuntime().availableProcessors();
        // Bind the HTTP request protocol and the processor to the asynchronous server.
        AioQuickServer<Request> server = new AioQuickServer<>(8080, new HttpRequestProtocol(), adapter);
        server.setThreadNum(cpuNum + 2)
                .setReadBufferSize(1024 * 4)
                .setBufferFactory(new BufferFactory() {
                    @Override
                    public BufferPagePool create() {
                        return new BufferPagePool(10 * 1024 * 1024, cpuNum + 2, 64 * 1024 * 1024, true);
                    }
                })
                .setBannerEnabled(false)
                .setWriteBuffer(1024 * 4, 8);
        // messageProcessor.addPlugin(new BufferPageMonitorPlugin(server, 6));
        try {
            server.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
| frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java | package org.smartboot.servlet;
import org.smartboot.aio.EnhanceAsynchronousChannelProvider;
import org.smartboot.http.HttpRequest;
import org.smartboot.http.HttpResponse;
import org.smartboot.http.server.HttpMessageProcessor;
import org.smartboot.http.server.HttpRequestProtocol;
import org.smartboot.http.server.Request;
import org.smartboot.http.server.handle.HttpHandle;
import org.smartboot.servlet.conf.ServletInfo;
import org.smartboot.socket.StateMachineEnum;
import org.smartboot.socket.buffer.BufferFactory;
import org.smartboot.socket.buffer.BufferPagePool;
import org.smartboot.socket.extension.plugins.MonitorPlugin;
import org.smartboot.socket.extension.processor.AbstractMessageProcessor;
import org.smartboot.socket.transport.AioQuickServer;
import org.smartboot.socket.transport.AioSession;
import java.io.IOException;
/**
 * Bootstraps the smart-servlet container and exposes the TechEmpower
 * benchmark endpoints over a smart-socket HTTP server.
 *
 * @author 三刀([email protected])
 * @version V1.0 , 2020/12/22
 */
public class Bootstrap {

    /**
     * Registers the plaintext and json servlets with the servlet container,
     * starts it, and wires incoming HTTP requests into the container.
     */
    public static void main(String[] args) {
        // Swap in smart-socket's enhanced AIO channel provider before any channel is opened.
        System.setProperty("java.nio.channels.spi.AsynchronousChannelProvider", EnhanceAsynchronousChannelProvider.class.getName());
        ContainerRuntime containerRuntime = new ContainerRuntime();
        // plaintext
        ApplicationRuntime applicationRuntime = new ApplicationRuntime("/");
        ServletInfo plainTextServletInfo = new ServletInfo();
        plainTextServletInfo.setServletName("plaintext");
        plainTextServletInfo.setServletClass(HelloWorldServlet.class.getName());
        plainTextServletInfo.addMapping("/plaintext");
        applicationRuntime.getDeploymentInfo().addServlet(plainTextServletInfo);
        // json
        ServletInfo jsonServletInfo = new ServletInfo();
        jsonServletInfo.setServletName("json");
        // Fixed: the /json endpoint must be backed by JsonServlet; it was mistakenly
        // registered with HelloWorldServlet, the plaintext servlet.
        jsonServletInfo.setServletClass(JsonServlet.class.getName());
        jsonServletInfo.addMapping("/json");
        applicationRuntime.getDeploymentInfo().addServlet(jsonServletInfo);
        containerRuntime.addRuntime(applicationRuntime);
        containerRuntime.start();
        // Route every decoded HTTP exchange into the servlet container.
        HttpMessageProcessor processor = new HttpMessageProcessor();
        processor.pipeline(new HttpHandle() {
            @Override
            public void doHandle(HttpRequest request, HttpResponse response) throws IOException {
                containerRuntime.doHandle(request, response);
            }
        });
        http(processor);
    }

    /**
     * Starts an {@link AioQuickServer} on port 8080 that feeds decoded
     * {@link Request} messages into the supplied HTTP message processor.
     *
     * @param processor the HTTP pipeline to delegate messages and state events to
     */
    public static void http(final HttpMessageProcessor processor) {
        AbstractMessageProcessor<Request> messageProcessor = new AbstractMessageProcessor<Request>() {
            @Override
            public void process0(AioSession session, Request msg) {
                processor.process(session, msg);
            }

            @Override
            public void stateEvent0(AioSession session, StateMachineEnum stateMachineEnum, Throwable throwable) {
                processor.stateEvent(session, stateMachineEnum, throwable);
            }
        };
        messageProcessor.addPlugin(new MonitorPlugin(5));
        // messageProcessor.addPlugin(new SocketOptionPlugin());
        final int cpuNum = Runtime.getRuntime().availableProcessors();
        // Bind the HTTP request protocol and the processor to the asynchronous server.
        AioQuickServer<Request> server = new AioQuickServer<>(8080, new HttpRequestProtocol(), messageProcessor);
        server.setThreadNum(cpuNum + 2)
                .setReadBufferSize(1024 * 4)
                .setBufferFactory(new BufferFactory() {
                    @Override
                    public BufferPagePool create() {
                        return new BufferPagePool(10 * 1024 * 1024, cpuNum + 2, 64 * 1024 * 1024, true);
                    }
                })
                .setBannerEnabled(false)
                .setWriteBuffer(1024 * 4, 8);
        // messageProcessor.addPlugin(new BufferPageMonitorPlugin(server, 6));
        try {
            server.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
| smart-servlet bugfix (#6317)
* update smart-servlet to 0.1.3-SNAPSHOT
* update aio-enhance to 1.0.3-SNAPSHOT
* smart-servlet bugfix | frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java | smart-servlet bugfix (#6317) |
|
Java | bsd-3-clause | 4b9b2103da072edda86b6ef81e1b8557732bd0ae | 0 | ursjoss/scipamato,ursjoss/scipamato,ursjoss/scipamato,ursjoss/scipamato | package ch.difty.sipamato.entity.filter;
import ch.difty.sipamato.entity.IdSipamatoEntity;
import ch.difty.sipamato.lib.AssertAs;
/**
 * Implementations of {@link SearchTerm} accept a <code>fieldName</code> as key and a <code>rawSearchTerm</code> as value.
 * The rawSearchTerm holds a comparison specification holding a value and some meta information on how to compare the field
 * with the provided value.<p/>
 *
 * <b>Note:</b> Identity is based on <code>fieldName</code> and <code>rawSearchTerm</code> only, thus ignoring <code>id</code>
 * or <code>searchConditionId</code>. This might be an issue in some use cases in the future!
 *
 * @author u.joss
 */
public abstract class SearchTerm<T extends SearchTerm<?>> extends IdSipamatoEntity<Long> {

    private static final long serialVersionUID = 1L;

    // id of the search condition this term belongs to (may be null for detached terms)
    private final Long searchConditionId;
    private final SearchTermType searchTermType;
    private final String fieldName;
    private final String rawSearchTerm;

    SearchTerm(final SearchTermType type, final String fieldName, final String rawSearchTerm) {
        this(null, type, null, fieldName, rawSearchTerm);
    }

    SearchTerm(final Long id, final SearchTermType type, final Long searchConditionId, final String fieldName, final String rawSearchTerm) {
        super(id);
        this.searchConditionId = searchConditionId;
        // consistently pass the argument name for better assertion messages (was AssertAs.notNull(type))
        this.searchTermType = AssertAs.notNull(type, "type");
        this.fieldName = AssertAs.notNull(fieldName, "fieldName");
        this.rawSearchTerm = AssertAs.notNull(rawSearchTerm, "rawSearchTerm");
    }

    /**
     * Factory method instantiating the concrete {@link SearchTerm} implementation
     * matching the given search term type id.
     *
     * @throws UnsupportedOperationException for types without a dedicated implementation
     */
    public static SearchTerm<?> of(final long id, final int searchTermTypeId, final long searchConditionId, final String fieldName, final String rawSearchTerm) {
        final SearchTermType type = SearchTermType.byId(searchTermTypeId);
        switch (type) {
        case BOOLEAN:
            return new BooleanSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case INTEGER:
            return new IntegerSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case STRING:
            return new StringSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case AUDIT:
            return new AuditSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        default:
            throw new UnsupportedOperationException("SearchTermType." + type + " is not supported");
        }
    }

    public SearchTermType getSearchTermType() {
        return searchTermType;
    }

    public Long getSearchConditionId() {
        return searchConditionId;
    }

    public String getFieldName() {
        return fieldName;
    }

    public String getRawSearchTerm() {
        return rawSearchTerm;
    }

    @Override
    public String getDisplayValue() {
        return rawSearchTerm;
    }

    // hashCode/equals deliberately consider only fieldName and rawSearchTerm - see class javadoc.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + fieldName.hashCode();
        result = prime * result + rawSearchTerm.hashCode();
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        // Fixed: compare the field names via equals() instead of comparing their hash codes,
        // which would have considered two different fieldNames equal on a hash collision.
        // Casting to SearchTerm<?> (instead of the unchecked cast to T) is safe after the
        // getClass() check above and avoids the @SuppressWarnings.
        final SearchTerm<?> other = (SearchTerm<?>) obj;
        if (!fieldName.equals(other.getFieldName()))
            return false;
        if (!rawSearchTerm.equals(other.getRawSearchTerm()))
            return false;
        return true;
    }
}
| implementation/sipamato/src/main/java/ch/difty/sipamato/entity/filter/SearchTerm.java | package ch.difty.sipamato.entity.filter;
import ch.difty.sipamato.entity.IdSipamatoEntity;
import ch.difty.sipamato.lib.AssertAs;
/**
 * Implementations of {@link SearchTerm} accept a <code>fieldName</code> as key and a <code>rawSearchTerm</code> as value.
 * The rawSearchTerm holds a comparison specification holding a value and some meta information on how to compare the field
 * with the provided value.<p/>
 *
 * <b>Note:</b> Identity is based on <code>fieldName</code> and <code>rawSearchTerm</code> only, thus ignoring <code>id</code>
 * or <code>searchConditionId</code>. This might be an issue in some use cases in the future!
 *
 * @author u.joss
 */
public abstract class SearchTerm<T extends SearchTerm<?>> extends IdSipamatoEntity<Long> {

    private static final long serialVersionUID = 1L;

    // id of the search condition this term belongs to (may be null for detached terms)
    private final Long searchConditionId;
    private final SearchTermType searchTermType;
    private final String fieldName;
    // renamed from 'searchTerm' to match the accessor getRawSearchTerm()
    private final String rawSearchTerm;

    SearchTerm(final SearchTermType type, final String fieldName, final String rawSearchTerm) {
        this(null, type, null, fieldName, rawSearchTerm);
    }

    SearchTerm(final Long id, final SearchTermType type, final Long searchConditionId, final String fieldName, final String rawSearchTerm) {
        super(id);
        this.searchConditionId = searchConditionId;
        // consistently pass the argument name for better assertion messages (was AssertAs.notNull(type))
        this.searchTermType = AssertAs.notNull(type, "type");
        this.fieldName = AssertAs.notNull(fieldName, "fieldName");
        this.rawSearchTerm = AssertAs.notNull(rawSearchTerm, "rawSearchTerm");
    }

    /**
     * Factory method instantiating the concrete {@link SearchTerm} implementation
     * matching the given search term type id.
     *
     * @throws UnsupportedOperationException for types without a dedicated implementation
     */
    public static SearchTerm<?> of(final long id, final int searchTermTypeId, final long searchConditionId, final String fieldName, final String rawSearchTerm) {
        final SearchTermType type = SearchTermType.byId(searchTermTypeId);
        switch (type) {
        case BOOLEAN:
            return new BooleanSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case INTEGER:
            return new IntegerSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case STRING:
            return new StringSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        case AUDIT:
            return new AuditSearchTerm(id, searchConditionId, fieldName, rawSearchTerm);
        default:
            throw new UnsupportedOperationException("SearchTermType." + type + " is not supported");
        }
    }

    public SearchTermType getSearchTermType() {
        return searchTermType;
    }

    public Long getSearchConditionId() {
        return searchConditionId;
    }

    public String getFieldName() {
        return fieldName;
    }

    public String getRawSearchTerm() {
        return rawSearchTerm;
    }

    @Override
    public String getDisplayValue() {
        return rawSearchTerm;
    }

    // hashCode/equals deliberately consider only fieldName and rawSearchTerm - see class javadoc.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + fieldName.hashCode();
        result = prime * result + rawSearchTerm.hashCode();
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        // Fixed: compare the field names via equals() instead of comparing their hash codes,
        // which would have considered two different fieldNames equal on a hash collision.
        // Casting to SearchTerm<?> (instead of the unchecked cast to T) is safe after the
        // getClass() check above and avoids the @SuppressWarnings.
        final SearchTerm<?> other = (SearchTerm<?>) obj;
        if (!fieldName.equals(other.getFieldName()))
            return false;
        if (!rawSearchTerm.equals(other.getRawSearchTerm()))
            return false;
        return true;
    }
}
| Rename variable
| implementation/sipamato/src/main/java/ch/difty/sipamato/entity/filter/SearchTerm.java | Rename variable |
|
Java | bsd-3-clause | 268d38706af8d71fc9e0f87407df43b78d61d809 | 0 | mjlaali/cleartk,mjlaali/cleartk,mjlaali/cleartk,mjlaali/cleartk | /**
* Copyright (c) 2014, Regents of the University of Colorado
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the University of Colorado at Boulder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.cleartk.util.cr.linereader;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.fit.factory.CollectionReaderFactory;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.cleartk.test.util.DefaultTestBase;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that {@link LineReaderXmiWriter} emits one XMI file per input line.
 *
 * <br>
 * Copyright (c) 2014, Regents of the University of Colorado <br>
 * All rights reserved.
 *
 * @author Philip Ogren
 */
public class LineReaderXmiWriterTest extends DefaultTestBase {

    private String inputDir = "src/test/resources/linereader";

    /**
     * Runs the line reader over the sample .txt/.dat files, writes each line
     * as an XMI file, and compares the produced file names against the
     * expected ones (sorted, so the comparison is order-independent).
     */
    @Test
    public void testXmiWriter() throws Exception {
        File outputDir = new File(outputDirectory, "line-reader-xmi");
        outputDir.mkdirs();

        CollectionReaderDescription reader = CollectionReaderFactory.createReaderDescription(
            LineReader.class,
            null,
            LineReader.PARAM_FILE_OR_DIRECTORY_NAME, inputDir,
            LineReader.PARAM_LANGUAGE, "en-us",
            LineReader.PARAM_SUFFIXES, new String[] { ".txt", ".dat" });
        AnalysisEngineDescription xmiWriter = LineReaderXmiWriter.getDescription(outputDir);
        SimplePipeline.runPipeline(reader, xmiWriter);

        List<String> expected = Arrays.asList(
            "a-test1.txt.1.xmi", "a-test1.txt.2.xmi", "a-test1.txt.3.xmi", "a-test1.txt.4.xmi",
            "a-test1.txt.5.xmi", "a-test1.txt.6.xmi", "a-test1.txt.7.xmi", "a-test1.txt.8.xmi",
            "b-test2.dat.9.xmi", "b-test2.dat.10.xmi", "b-test2.dat.11.xmi", "b-test2.dat.12.xmi");
        List<String> actual = Arrays.asList(outputDir.list());
        Collections.sort(expected);
        Collections.sort(actual);
        Assert.assertEquals(expected, actual);
    }
}
| cleartk-util/src/test/java/org/cleartk/util/cr/linereader/LineReaderXmiWriterTest.java | /**
* Copyright (c) 2014, Regents of the University of Colorado
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the University of Colorado at Boulder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.cleartk.util.cr.linereader;
import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.fit.factory.CollectionReaderFactory;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.cleartk.test.util.DefaultTestBase;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that {@link LineReaderXmiWriter} emits one XMI file per input line.
 *
 * <br>
 * Copyright (c) 2014, Regents of the University of Colorado <br>
 * All rights reserved.
 *
 * @author Philip Ogren
 */
public class LineReaderXmiWriterTest extends DefaultTestBase {

    private String inputDir = "src/test/resources/linereader";

    /**
     * Runs the line reader over the sample .txt/.dat files, writes each line
     * as an XMI file, and compares the produced file names against the
     * expected ones (as sets, so the comparison is order-independent).
     */
    @Test
    public void testXmiWriter() throws Exception {
        File outputDir = new File(outputDirectory, "line-reader-xmi");
        outputDir.mkdirs();

        CollectionReaderDescription reader = CollectionReaderFactory.createReaderDescription(
            LineReader.class,
            null,
            LineReader.PARAM_FILE_OR_DIRECTORY_NAME, inputDir,
            LineReader.PARAM_LANGUAGE, "en-us",
            LineReader.PARAM_SUFFIXES, new String[] { ".txt", ".dat" });
        AnalysisEngineDescription xmiWriter = LineReaderXmiWriter.getDescription(outputDir);
        SimplePipeline.runPipeline(reader, xmiWriter);

        Set<String> expected = new HashSet<>(Arrays.asList(
            "a-test1.txt.1.xmi", "a-test1.txt.2.xmi", "a-test1.txt.3.xmi", "a-test1.txt.4.xmi",
            "a-test1.txt.5.xmi", "a-test1.txt.6.xmi", "a-test1.txt.7.xmi", "a-test1.txt.8.xmi",
            "b-test2.dat.9.xmi", "b-test2.dat.10.xmi", "b-test2.dat.11.xmi", "b-test2.dat.12.xmi"));
        Set<String> actual = new HashSet<>(Arrays.asList(outputDir.list()));
        Assert.assertEquals(expected, actual);
    }
}
| Sorts files in LineReaderXmiWriterTest to make comparison easier
| cleartk-util/src/test/java/org/cleartk/util/cr/linereader/LineReaderXmiWriterTest.java | Sorts files in LineReaderXmiWriterTest to make comparison easier |
|
Java | isc | ccbb4796f26e57164bbb72abf66d50c78f447108 | 0 | joshuawarner32/avian,MaartenR/avian,ucdseniordesign/avian,lwahlmeier/avian,bigfatbrowncat/avian-pack.avian,marcinolawski/avian,bgould/avian,lwahlmeier/avian,getlantern/avian,lostdj/avian,dicej/avian,bgould/avian,getlantern/avian,bgould/avian,lwahlmeier/avian,lwahlmeier/avian,minor-jason/avian,minor-jason/avian,ucdseniordesign/avian,dicej/avian,joshuawarner32/avian,lostdj/avian,bigfatbrowncat/avian-pack.avian,badlogic/avian,ucdseniordesign/avian,lostdj/avian,MaartenR/avian,badlogic/avian,badlogic/avian,ucdseniordesign/avian,lostdj/avian,marcinolawski/avian,bigfatbrowncat/avian-pack.avian,marcinolawski/avian,MaartenR/avian,dicej/avian,bgould/avian,badlogic/avian,joshuawarner32/avian,getlantern/avian,MaartenR/avian,bigfatbrowncat/avian-pack.avian,dicej/avian,minor-jason/avian,minor-jason/avian,joshuawarner32/avian,marcinolawski/avian,getlantern/avian | /* Copyright (c) 2009, Avian Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice appear
in all copies.
There is NO WARRANTY for this software. See license.txt for
details. */
package avian;
import java.util.concurrent.Callable;
/**
* This class provides methods to capture continuations and manage
* control flow when calling continuations.
*
* <p>A continuation is a snapshot of a thread's call stack which can
* be captured via <code>callWithCurrentContinuation</code> and later
* restored any number of times. The program may restore this
* snapshot by either feeding it a result (to be returned by
* <code>callWithCurrentContinuation</code>) or feeding it an
* exception (to be thrown by
* <code>callWithCurrentContinuation</code>). Continuations may be
* used to implement features such as coroutines, generators, and
* cooperative multitasking.
*
* <p>This class provides two static methods,
* <code>callWithCurrentContinuation</code> and
* <code>dynamicWind</code>, with similar semantics to the Scheme
* functions <code>call-with-current-continuation</code> and
* <code>dynamic-wind</code>, respectively. In addition, we define
* how continuations work with respect to native code, exceptions,
* try/finally blocks, synchronized blocks, and multithreading.
*
* <h3>Continuations and Continuation Contexts</h3>
*
* <p>A continuation can be thought of as a singly-linked list of
* stack frames representing the call trace, where the head of the
* list is the frame of the method most recently called (i.e. the top
* of the stack). However, this trace only extends as far as the most
* recent chain of Java frames - it ends just prior to the most recent
* native frame in the stack. The reason for this is that the VM
* cannot, in general, safely capture and restore native frames.
* Therefore, each call from native code to Java (including the
* original invocation of <code>main(String[])</code> or
* <code>Thread.run()</code>) represents a new continuation context in
* which continuations may be captured, and these will only contain
* frames from within that context.
*
* <p>Calling a continuation (i.e. feeding it a result or exception)
* causes the current continuation to be replaced with the calling
* continuation. When the last method in this new continuation
* returns, it returns to the native frame which created the current
* context, which may not be the same as the context in which that
* continuation was created.
*
* <p>We define the return type of a continuation context as the
* return type of the first method called in that context. A
* continuation may be called from a different context than the one in
* which it was created, provided the return type of the latter is
* compatible with the current context.
*
* <p>Given a thread executing in context "A" which wants to call a
* continuation created in context "B", the following rules apply:
*
* <ul>
*
* <li>If the return type of "A" is <code>void</code>, the return
* type of "B" may be anything, including <code>void</code></li>
*
* <li>If the return type of "A" is a primitive type, the return
* type of "B" must match exactly</li>
*
* <li>If the return type of "A" is an object type, that type must
* assignable from the return type of "B" (i.e. the latter must
* either be the same as the former or a superclass or
* superinterface of it)</li>
*
* </ul>
*
* <p>A thread may call a continuation created by a different thread
* provided the return types are compatible. Multiple threads may
* safely call the same continuation simultaneously without
* synchronization. Any attempt to call a continuation from a context
* with an incompatible return type will throw an {@link
* avian.IncompatibleContinuationException}.
*
* <h3>Winding, Unwinding, and Rewinding</h3>
*
* <p>Traditionally, Java provides one way to wind the execution stack
* (recursive method calls) and two ways to unwind it (normal returns
* and exception unwinding). With continuations, we add a new way to
* rewind the stack and a new way to unwind it.
*
* <p>The call stack of a continuation may share frames with other
* continuations - in which case they share a common history. When
* calling a continuation "B" from the current continuation "A", the
* VM must unwind past any frames which are in "A" but not in "B" and
* rewind past any frames in "B" but not in "A". During this
* unwinding and rewinding, control may pass through synchronized and
* try/finally blocks while going down the old stack and up the new
* stack.
*
* <p>However, unlike the traditional processes of winding and
* unwinding, the VM will ignore these blocks - monitors will not be
* released or acquired and finally blocks will not execute. This is
* by design. The purpose of such a block is to acquire a resource,
* such as a file handle or monitor, once before executing a task and
* release it after the task is finished, regardless of how often the
* task might temporarily yield control to other continuations.
*
* <p>Conversely, one might wish to acquire and release a resource
* each time control (re)winds to or unwinds from a continuation,
* respectively. In this case, we use <code>dynamicWind</code> to
* register functions which will run every time that frame is passed,
* regardless of how the stack is wound or unwound.
*/
public class Continuations {
  // Static utility class; not instantiable.
  private Continuations() { }

  /**
   * Captures the current continuation, passing a reference to the
   * specified receiver.
   *
   * <p>This method will either return the result returned by
   * <code>receiver.receive(Callback)</code>, propagate the exception
   * thrown by that method, return the result passed to the
   * handleResult(T) method of the continuation, or throw the
   * exception passed to the handleException(Throwable) of the
   * continuation.
   */
  public static native <T> T callWithCurrentContinuation
    (CallbackReceiver<T> receiver) throws Exception;

  /**
   * Calls the specified "before" and "after" tasks each time a
   * continuation containing the call is wound or unwound,
   * respectively.
   *
   * <p>This method first calls <code>before.run()</code>, then
   * <code>thunk.call()</code>, and finally <code>after.run()</code>,
   * returning the result of the second call.  If
   * <code>before.run()</code> does not return normally, the second
   * and third calls will not happen.  If <code>thunk.call()</code>
   * throws an exception, <code>after.run()</code> will be called
   * before the exception is propagated.
   *
   * <p>If <code>thunk.call()</code> calls a continuation (directly or
   * via a subroutine) which does not include the current call to
   * <code>dynamicWind</code>, <code>after.run()</code> will be called
   * before control passes to that continuation.  If this call throws
   * an exception, the exception will propagate to the current caller
   * of <code>dynamicWind</code>.
   *
   * <p>If <code>thunk.call()</code> creates a continuation which is
   * later called from a continuation which does not include the
   * current call to <code>dynamicWind</code>,
   * <code>before.run()</code> will be called before control passes to
   * that continuation.  As above, if this call throws an exception,
   * the exception will propagate to the current caller of
   * <code>dynamicWind</code>.
   */
  public static <T> T dynamicWind(Runnable before,
                                  Callable<T> thunk,
                                  Runnable after)
    throws Exception
  {
    // Delegate to the native entry point, which presumably installs the
    // wind frame and runs before/thunk/after (see wind() below) -- verify
    // against the VM sources.
    UnwindResult result = dynamicWind2(before, thunk, after);
    if (result.continuation != null) {
      // The thunk exited by invoking a continuation that does not contain
      // this dynamicWind frame: run the "after" task here, then transfer
      // control to that continuation with the pending result or exception.
      after.run();
      if (result.exception != null) {
        result.continuation.handleException(result.exception);
      } else {
        result.continuation.handleResult(result.result);
      }
      // The continuation calls above are expected to transfer control and
      // never return normally; this line documents that expectation.
      throw new AssertionError();
    } else {
      // Normal completion: result.result holds the thunk's return value
      // (see wind(), which packages it with a null continuation).
      // Unchecked cast -- the erased Object is trusted to be a T.
      return (T) result.result;
    }
  }

  // Native counterpart of dynamicWind; returns either the thunk's result
  // (continuation == null) or a pending continuation transfer.
  private static native UnwindResult dynamicWind2(Runnable before,
                                                  Callable thunk,
                                                  Runnable after)
    throws Exception;

  // NOTE(review): wind() and rewind() are never referenced from Java code in
  // this file; presumably they are invoked by name from the VM's native
  // continuation machinery -- do not rename or change their signatures
  // without checking the VM sources.

  // Runs before/thunk/after in order; "after" runs even if the thunk throws
  // (try/finally).  The thunk's result is wrapped with a null continuation,
  // which dynamicWind interprets as normal completion.
  private static UnwindResult wind(Runnable before,
                                   Callable thunk,
                                   Runnable after)
    throws Exception
  {
    before.run();
    try {
      return new UnwindResult(null, thunk.call(), null);
    } finally {
      after.run();
    }
  }

  // Runs the "before" task while rewinding into a continuation, then feeds
  // the continuation its pending result or exception.
  private static void rewind(Runnable before,
                             Callback continuation,
                             Object result,
                             Throwable exception)
    throws Exception
  {
    before.run();
    if (exception != null) {
      continuation.handleException(exception);
    } else {
      continuation.handleResult(result);
    }
    // The continuation calls above are expected to transfer control and
    // never return normally.
    throw new AssertionError();
  }

  // NOTE(review): also unreferenced from Java code; presumably instantiated
  // by the VM to represent a captured continuation.  The native methods
  // restore the captured stack and therefore do not return normally.
  private static class Continuation<T> implements Callback<T> {
    public native void handleResult(T result);

    public native void handleException(Throwable exception);
  }

  // Value object describing how a dynamicWind frame was exited: either a
  // normal result (continuation == null) or a pending transfer to
  // "continuation" carrying "result" or "exception".
  private static class UnwindResult {
    public final Callback continuation;
    public final Object result;
    public final Throwable exception;

    public UnwindResult(Callback continuation, Object result,
                        Throwable exception)
    {
      this.continuation = continuation;
      this.result = result;
      this.exception = exception;
    }
  }
}
| classpath/avian/Continuations.java | /* Copyright (c) 2009, Avian Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice appear
in all copies.
There is NO WARRANTY for this software. See license.txt for
details. */
package avian;
import java.util.concurrent.Callable;
/**
* This class provides methods to capture continuations and manage
* control flow when calling continuations.
*
* <p>A continuation is a snapshot of a thread's call stack which can
* be captured via <code>callWithCurrentContinuation</code> and later
* restored any number of times. The program may restore this
* snapshot by either feeding it a result (to be returned by
* <code>callWithCurrentContinuation</code>) or feeding it an
* exception (to be thrown by
* <code>callWithCurrentContinuation</code>). Continuations may be
* used to implement features such as coroutines, generators, and
* cooperative multitasking.
*
* <p>This class provides two static methods,
* <code>callWithCurrentContinuation</code> and
* <code>dynamicWind</code>, with similar semantics to the Scheme
* functions <code>call-with-current-continuation</code> and
* <code>dynamic-wind</code>, respectively. In addition, we define
* how continuations work with respect to native code, exceptions,
* try/finally blocks, synchronized blocks, and multithreading.
*
* <h3>Continuations and Continuation Contexts</h3>
*
* <p>A continuation can be thought of as a singly-linked list of
* stack frames representing the call trace, where the head of the
* list is the frame of the method most recently called (i.e. the top
* of the stack). However, this trace only extends as far as the most
* recent chain of Java frames - it ends just prior to the most recent
* native frame in the stack. The reason for this is that the VM
* cannot, in general, safely capture and restore native frames.
* Therefore, each call from native code to Java (including the
* original invocation of <code>main(String[])</code> or
* <code>Thread.run()</code>) represents a new continuation context in
* which continuations may be captured, and these will only contain
* frames from within that context.
*
* <p>Calling a continuation (i.e. feeding it a result or exception)
* causes the current continuation to be replaced with the calling
* continuation. When the last method in this new continuation
* returns, it returns to the native frame which created the current
* context, which may not be the same as the context in which that
* continuation was created.
*
* <p>We define the return type of a continuation context as the
* return type of the first method called in that context. A
* continuation may be called from a different context than the one in
* which it was created, provided the return type of the latter is
* compatible with the current context.
*
* <p>Given a thread executing in context "A" which wants to call a
* continuation created in context "B", the following rules apply:
*
* <ul>
*
* <li>If the return type of "A" is <code>void</code>, the return
* type of "B" may be anything, including <code>void</code></li>
*
* <li>If the return type of "A" is a primitive type, the return
* type of "B" must match exactly</li>
*
 * <li>If the return type of "A" is an object type, that type must be
 * assignable from the return type of "B" (i.e. the latter must
* either be the same as the former or a superclass or
* superinterface of it)</li>
*
* </ul>
*
* <p>A thread may call a continuation created by a different thread
* provided the return types are compatible. Multiple threads may
* safely call the same continuation simultaneously without
* synchronization. Any attempt to call a continuation from a context
* with an incompatible return type will throw an {@link
* avian.IncompatibleContinuationException}.
*
* <h3>Winding, Unwinding, and Rewinding</h3>
*
* <p>Traditionally, Java provides one way to wind the execution stack
* (recursive method calls) and two ways to unwind it (normal returns
* and exception unwinding). With continuations, we add a new way to
* rewind the stack and a new way to unwind it.
*
* <p>The call stack of a continuation may share frames with other
* continuations - in which case they share a common history. When
* calling a continuation "B" from the current continuation "A", the
* VM must unwind past any frames which are in "A" but not in "B" and
* rewind past any frames in "B" but not in "A". During this
* unwinding and rewinding, control may pass through synchronized and
* try/finally blocks while going down the old stack and up the new
* stack.
*
* <p>However, unlike the traditional processes of winding and
* unwinding, the VM will ignore these blocks - monitors will not be
* released or acquired and finally blocks will not execute. This is
* by design. The purpose of such a block is to acquire a resource,
* such as a file handle or monitor, once before executing a task and
* release it after the task is finished, regardless of how often the
* task might temporarily yield control to other continuations.
*
* <p>Conversely, one might wish to acquire and release a resource
* each time control (re)winds to or unwinds from a continuation,
* respectively. In this case, we use <code>dynamicWind</code> to
* register functions which will run every time that frame is passed,
* regardless of how the stack is wound or unwound.
*/
public class Continuations {
private Continuations() { }
/**
* Captures the current continuation, passing a reference to the
* specified receiver.
*
* <p>This method will either return the result returned by
* <code>receiver.receive(Callback)</code>, propagate the exception
* thrown by that method, return the result passed to the
* handleResult(T) method of the continuation, or throw the
* exception passed to the handleException(Throwable) of the
* continuation.
*/
public static native <T> T callWithCurrentContinuation
(CallbackReceiver<T> receiver) throws Exception;
/**
* Calls the specified "before" and "after" tasks each time a
* continuation containing the call is wound or unwound,
* respectively.
*
* <p>This method first calls <code>before.run()</code>, then
* <code>thunk.call()</code>, and finally <code>after.run()</code>,
* returning the result of the second call. If
* <code>before.run()</code> does not return normally, the second
* and third calls will not happen. If <code>thunk.call()</code>
* throws an exception, <code>after.run()</code>, will be called
* before the exception is propagated.
*
* <p>If <code>thunk.call()</code> calls a continuation (directly or
* via a subroutine) which does not include the current call to
 * <code>dynamicWind</code>, <code>after.run()</code> will be called
* before control passes to that continuation. If this call throws
* an exception, the exception will propagate to the current caller
* of <code>dynamicWind</code>.
*
* <p>If <code>thunk.call()</code> creates a continuation which is
* later called from a continuation which does not include the
* current call to <code>dynamicWind</code>,
* <code>before.run()</code> will be called before control passes to
* that continuation. As above, if this call throws an exception,
* the exception will propagate to the current caller of
* <code>dynamicWind</code>.
*/
public static <T> T dynamicWind(Runnable before,
Callable<T> thunk,
Runnable after)
throws Exception
{
UnwindResult result = dynamicWind2(before, thunk, after);
if (result.continuation != null) {
after.run();
if (result.exception != null) {
result.continuation.handleException(result.exception);
} else {
result.continuation.handleResult(result.result);
}
throw new AssertionError();
} else {
return (T) result.result;
}
}
private static native UnwindResult dynamicWind2(Runnable before,
Callable thunk,
Runnable after)
throws Exception;
private static UnwindResult wind(Runnable before,
Callable thunk,
Runnable after)
throws Exception
{
before.run();
try {
return new UnwindResult(null, thunk.call(), null);
} finally {
after.run();
}
}
private static void rewind(Runnable before,
Callback continuation,
Object result,
Throwable exception)
throws Exception
{
before.run();
if (exception != null) {
continuation.handleException(exception);
} else {
continuation.handleResult(result);
}
throw new AssertionError();
}
private static class Continuation<T> implements Callback<T> {
public native void handleResult(T result);
public native void handleException(Throwable exception);
}
private static class UnwindResult {
public final Callback continuation;
public final Object result;
public final Throwable exception;
public UnwindResult(Callback continuation, Object result,
Throwable exception)
{
this.continuation = continuation;
this.result = result;
this.exception = exception;
}
}
}
| fix typo in JavaDoc comment
| classpath/avian/Continuations.java | fix typo in JavaDoc comment |
|
Java | mit | b2153a5234fcf381e4939e2224e1ad7ed6ed3c29 | 0 | dielsonsales/OpenPomodoro | package me.dielsonsales.app.openpomodoro.controllers;
import android.content.Context;
import android.media.MediaPlayer;
import android.util.Log;
import me.dielsonsales.app.openpomodoro.R;
/**
 * Lazily-created singleton that plays the pomodoro alarm sound through a
 * pre-loaded {@link MediaPlayer}.
 */
public class PomodoroSoundManager {
    private static final String TAG = "PomodoroSoundManager";

    private MediaPlayer mPlayer;
    private static PomodoroSoundManager mInstance;

    /**
     * Returns the shared instance, creating it on first use.
     * Synchronized so two threads racing on the first call cannot create
     * (and leak) two MediaPlayer-backed instances.
     */
    public static synchronized PomodoroSoundManager getInstance(Context context) {
        if (mInstance == null) {
            mInstance = new PomodoroSoundManager(context);
        }
        return mInstance;
    }

    private PomodoroSoundManager() {}

    private PomodoroSoundManager(Context context) {
        // Pre-load the alarm clip once; playAlarm() just (re)starts it.
        mPlayer = MediaPlayer.create(context, R.raw.alarm);
    }

    /** Plays the alarm from the beginning, rewinding it if already playing. */
    public void playAlarm() {
        if (mPlayer.isPlaying()) {
            mPlayer.pause();
            mPlayer.seekTo(0);
        }
        // Use the class-wide TAG (previously a mismatched hard-coded tag)
        // so log filtering is consistent.
        Log.i(TAG, "Playing sound");
        mPlayer.start();
    }
}
| app/src/main/java/me/dielsonsales/app/openpomodoro/controllers/PomodoroSoundManager.java | package me.dielsonsales.app.openpomodoro.controllers;
import android.content.Context;
import android.media.MediaPlayer;
import me.dielsonsales.app.openpomodoro.R;
/**
 * Lazily-created singleton that plays the pomodoro alarm sound through a
 * pre-loaded {@link MediaPlayer}.
 */
public class PomodoroSoundManager {
    // NOTE(review): TAG is declared but never used in this version.
    private static final String TAG = "PomodoroSoundManager";
    private MediaPlayer mPlayer;
    // NOTE(review): held alive by the static singleton below; if callers
    // pass an Activity this leaks it — prefer the application context.
    private Context mContext;
    private static PomodoroSoundManager mInstance;
    // Lazily creates the shared instance.  Not synchronized: concurrent
    // first calls could each construct an instance — confirm single-thread use.
    public static PomodoroSoundManager getInstance(Context context) {
        if (mInstance == null) {
            mInstance = new PomodoroSoundManager(context);
        }
        return mInstance;
    }
    private PomodoroSoundManager() {}
    private PomodoroSoundManager(Context context) {
        mContext = context;
        // Pre-load the alarm clip once; playAlarm() just (re)starts it.
        mPlayer = MediaPlayer.create(mContext, R.raw.alarm);
    }
    /** Plays the alarm from the beginning, rewinding it if already playing. */
    public void playAlarm() {
        if (mPlayer.isPlaying()) {
            mPlayer.pause();
            mPlayer.seekTo(0);
        }
        mPlayer.start();
    }
}
| Change context to local variable in PomodoroSoundManager
| app/src/main/java/me/dielsonsales/app/openpomodoro/controllers/PomodoroSoundManager.java | Change context to local variable in PomodoroSoundManager |
|
Java | mit | 222f67990f74999ebbc7d3eea7029fccb339081f | 0 | Barteks2x/CubicChunksConverter,Barteks2x/CubicChunksConverter | /*
* This file is part of CubicChunksConverter, licensed under the MIT License (MIT).
*
* Copyright (c) 2017 contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package cubicchunks.converter.lib;
import com.flowpowered.nbt.ByteArrayTag;
import com.flowpowered.nbt.ByteTag;
import com.flowpowered.nbt.CompoundMap;
import com.flowpowered.nbt.CompoundTag;
import com.flowpowered.nbt.DoubleTag;
import com.flowpowered.nbt.IntArrayTag;
import com.flowpowered.nbt.IntTag;
import com.flowpowered.nbt.ListTag;
import com.flowpowered.nbt.StringTag;
import com.flowpowered.nbt.Tag;
import com.flowpowered.nbt.stream.NBTInputStream;
import com.flowpowered.nbt.stream.NBTOutputStream;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.InflaterInputStream;
import cubicchunks.regionlib.impl.EntryLocation2D;
import cubicchunks.regionlib.impl.EntryLocation3D;
import cubicchunks.regionlib.impl.MinecraftChunkLocation;
import cubicchunks.regionlib.impl.SaveCubeColumns;
import cubicchunks.regionlib.impl.save.MinecraftSaveSection;
import cubicchunks.regionlib.impl.save.SaveSection2D;
import cubicchunks.regionlib.impl.save.SaveSection3D;
import cubicchunks.regionlib.util.WrappedException;
import static cubicchunks.regionlib.impl.save.MinecraftSaveSection.MinecraftRegionType.MCA;
public class AnvilToCubicChunksConverter implements ISaveConverter {
private static final BiFunction<Dimension, Path, Path> LOCATION_FUNC_SRC = (d, p) -> {
if (!d.getDirectory().isEmpty()) {
p = p.resolve(d.getDirectory());
}
return p.resolve("region");
};
private static final BiFunction<Dimension, Path, Path> LOCATION_FUNC_DST = (d, p) -> {
if (!d.getDirectory().isEmpty()) {
p = p.resolve(d.getDirectory());
}
return p;
};
private volatile int chunkCount = -1;
private volatile int fileCount = -1;
private int copyChunks = -1;
private int copiedFiles = -1;
private Map<Dimension, MinecraftSaveSection> saves = new ConcurrentHashMap<>();
private boolean countingFiles;
private boolean countingChunks;
    /**
     * Runs the full Anvil -> CubicChunks conversion: resets counters,
     * opens every dimension's vanilla save, then converts level.dat,
     * chunk data, the remaining files, and finally pads the vanilla
     * height range with empty cubes.
     */
    @Override
    public void convert(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        saves.clear();
        chunkCount = 0;
        countingChunks = true;
        fileCount = 0;
        countingFiles = true;
        copyChunks = 0;
        copiedFiles = 0;
        initDimensions(srcDir);
        startCounting(srcDir); // chunk/file totals are computed on a background thread
        progress.setProgress(new ConvertProgress("Converting level information", 1, 4, 0, 1));
        convertLevelInfo(progress, srcDir, dstDir);
        progress.setProgress(new ConvertProgress("Converting chunk data (counting chunks)", 2, 4, 0, 1));
        convertChunkData(progress, srcDir, dstDir);
        progress.setProgress(new ConvertProgress("Copying other files (counting files)", 3, 4, 0, 1));
        copyAllOtherData(progress, srcDir, dstDir);
        // close the source region files before the final fill pass
        for (MinecraftSaveSection save : saves.values()) {
            save.close();
        }
        fillVanillaRangeEmpty(progress, dstDir);
    }
    /**
     * For every converted column, writes an empty cube into each y=0..15
     * slot that has no cube yet, so the whole vanilla height range
     * (0..255 blocks) exists in the destination save.
     */
    private void fillVanillaRangeEmpty(IProgressListener progress, Path dstDir) throws IOException {
        progress.setProgress(new ConvertProgress("Filling vanilla height range with empty cubes", 3, 4, 0, 1));
        copyChunks = 0; // reuse the chunk counter for this phase's progress
        for (Dimension d : Dimensions.getDimensions()) {
            Path dimLoc = LOCATION_FUNC_DST.apply(d, dstDir);
            if (!Files.exists(dimLoc)) {
                continue; // dimension was not present in the source save
            }
            Files.createDirectories(dimLoc.resolve("region2d"));
            Files.createDirectories(dimLoc.resolve("region3d"));
            try (SaveSection2D section2d = SaveSection2D.createAt(dimLoc.resolve("region2d"));
                 SaveSection3D section3d = SaveSection3D.createAt(dimLoc.resolve("region3d"))) {
                section2d.forAllKeys(pos -> {
                    // vanilla height range = 16 cubes per column
                    for (int y = 0; y < 16; y++) {
                        EntryLocation3D cPos = new EntryLocation3D(pos.getEntryX(), y, pos.getEntryZ());
                        if (!section3d.load(cPos).isPresent()) {
                            section3d.save(cPos, writeCompressed(emptyCube(cPos)));
                        }
                    }
                    copyChunks++;
                    String msg = "Filling vanilla height range with empty cubes" + (countingChunks ? " (counting chunks)" : "");
                    progress.setProgress(new ConvertProgress(msg, 4, 4, copyChunks, countingChunks ? -1 : (chunkCount == 0 ? 100 : chunkCount)));
                });
            } catch (WrappedException e) {
                // unwrap the IOException smuggled out of the lambda
                throw (IOException) e.get();
            }
        }
    }
    /**
     * Builds the NBT for a completely empty (all-air) cube at the given
     * position: it has no Sections and is marked populated and
     * surface-tracked so CubicChunks won't generate or re-track it.
     */
    private CompoundTag emptyCube(EntryLocation3D loc) {
        int x = loc.getEntryX();
        int y = loc.getEntryY();
        int z = loc.getEntryZ();
        CompoundMap root = new CompoundMap();
        {
            CompoundMap level = new CompoundMap();
            {
                level.put(new ByteTag("v", (byte) 1));
                level.put(new IntTag("x", x));
                level.put(new IntTag("y", y));
                level.put(new IntTag("z", z));
                level.put(new ByteTag("populated", true));
                level.put(new ByteTag("fullyPopulated", true)); // TODO: handle this properly
                level.put(new ByteTag("isSurfaceTracked", true)); // it's empty, no need to re-track
                // no need for Sections, CC has isEmpty check for that
                level.put(new ByteTag("initLightDone", false));
                // empty (placeholder) entity/tile-entity lists
                level.put(new ListTag<>("Entities", CompoundTag.class, Collections.singletonList(new CompoundTag("", new CompoundMap()))));
                level.put(new ListTag<>("TileEntities", CompoundTag.class, Collections.singletonList(new CompoundTag("", new CompoundMap()))));
                level.put(makeEmptyLightingInfo());
            }
            root.put(new CompoundTag("Level", level));
        }
        return new CompoundTag("", root);
    }
private CompoundTag makeEmptyLightingInfo() {
IntArrayTag heightmap = new IntArrayTag("LastHeightMap", new int[256]);
CompoundMap lightingInfoMap = new CompoundMap();
lightingInfoMap.put(heightmap);
return new CompoundTag("LightingInfo", lightingInfoMap);
}
private void initDimensions(Path src) {
for (Dimension d : Dimensions.getDimensions()) {
Path srcLoc = LOCATION_FUNC_SRC.apply(d, src);
if (!Files.exists(srcLoc)) {
continue;
}
MinecraftSaveSection vanillaSave = MinecraftSaveSection.createAt(LOCATION_FUNC_SRC.apply(d, src), MCA);
saves.put(d, vanillaSave);
}
}
private void convertLevelInfo(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
NBTInputStream nbtIn = new NBTInputStream(new FileInputStream(srcDir.resolve("level.dat").toFile()));
CompoundTag root = (CompoundTag) nbtIn.readTag();
CompoundMap newRoot = new CompoundMap();
for (Tag<?> tag : root.getValue()) {
if (tag.getName().equals("Data")) {
CompoundMap data = ((CompoundTag) root.getValue().get("Data")).getValue();
CompoundMap newData = new CompoundMap();
for (Tag<?> dataTag : data) {
if (dataTag.getName().equals("generatorName")) {
String value = (String) dataTag.getValue();
String newValue;
if (value.equalsIgnoreCase("default")) {
newValue = "VanillaCubic";
} else {
newValue = value;
}
newData.put(new StringTag(dataTag.getName(), newValue));
} else {
newData.put(dataTag);
}
}
// put isCubicWorld at the end to overwrite previously existing data, if any
newData.put("isCubicWorld", new ByteTag("isCubicWorld", (byte) 1));
newRoot.put(new CompoundTag(tag.getName(), newData));
} else {
newRoot.put(tag);
}
}
Files.createDirectories(dstDir);
NBTOutputStream nbtOut = new NBTOutputStream(new FileOutputStream(dstDir.resolve("level.dat").toFile()));
nbtOut.writeTag(new CompoundTag(root.getName(), newRoot));
nbtOut.close();
}
    /**
     * Converts the chunk data of every dimension that has a vanilla
     * "region" directory; step/maxSteps track per-dimension progress.
     */
    private void convertChunkData(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        int step = 0;
        int maxSteps = Dimensions.getDimensions().size();
        for (Dimension d : Dimensions.getDimensions()) {
            Path srcLoc = LOCATION_FUNC_SRC.apply(d, srcDir);
            if (!Files.exists(srcLoc)) {
                continue; // dimension not present in this save
            }
            convertDimension(progress, d, LOCATION_FUNC_DST.apply(d, dstDir), step, maxSteps);
            step++;
        }
    }
    /**
     * Copies everything that is not converted elsewhere (playerdata,
     * stats, ...) into the destination save verbatim, skipping level.dat
     * and each dimension's "region" directory.
     */
    private void copyAllOtherData(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        Utils.copyEverythingExcept(srcDir, srcDir, dstDir, file ->
            // exclude files handled by convertLevelInfo/convertChunkData
            file.toString().contains("level.dat") ||
                Dimensions.getDimensions().stream().anyMatch(dim ->
                    srcDir.resolve(dim.getDirectory()).resolve("region").equals(file)
                ),
            f -> {
                copiedFiles++;
                String msg = "Copying other files" + (countingFiles ? " (counting files)" : "");
                double p = copiedFiles;
                // NOTE(review): this phase reports 3 of 3 while convert() uses
                // 4 phases — confirm the intended phase numbering.
                progress.setProgress(new ConvertProgress(msg, 3, 3, p, countingFiles ? -1 : fileCount));
            }
        );
    }
    /**
     * Streams every chunk of the given dimension's vanilla save into the
     * cubic-chunks save created at {@code dstParent}.
     * NOTE(review): step/maxSteps are currently unused here (phase
     * numbers are hard-coded in convertRegion) — confirm intended.
     */
    private void convertDimension(IProgressListener progress, Dimension dim, Path dstParent, int step, int maxSteps) throws IOException {
        MinecraftSaveSection vanillaSave = saves.get(dim);
        try(SaveCubeColumns saveCubic = SaveCubeColumns.create(dstParent)) {
            vanillaSave.forAllKeys(mcPos -> {
                try {
                    this.convertRegion(progress, mcPos, vanillaSave, saveCubic);
                } catch (IOException e) {
                    // the forAllKeys callback cannot throw IOException directly
                    throw new WrappedException(e);
                }
            });
        } catch (WrappedException e) {
            // unwrap the IOException smuggled out of the lambda
            throw (IOException) e.get();
        }
    }
    /**
     * Converts one vanilla chunk: splits it into up to 16 cubes (one per
     * 16-block section) plus a single 2D column entry, and writes them
     * to the cubic-chunks save.
     */
    private void convertRegion(IProgressListener progress, MinecraftChunkLocation entryLoc,
                               MinecraftSaveSection vanillaSave,
                               SaveCubeColumns saveCubic) throws IOException {
        // .get() without a presence check: entryLoc came from forAllKeys on this same save
        ByteBuffer vanillaData = vanillaSave.load(entryLoc).get();
        ByteBuffer[] cubes = extractCubeData(vanillaData);
        ByteBuffer column = extractColumnData(vanillaData);
        for (int y = 0; y < cubes.length; y++) {
            if (cubes[y] == null) {
                continue; // all-air section: no cube written for this y
            }
            EntryLocation3D l = new EntryLocation3D(entryLoc.getEntryX(), y, entryLoc.getEntryZ());
            saveCubic.save3d(l, cubes[y]);
        }
        if (column != null) {
            saveCubic.save2d(new EntryLocation2D(entryLoc.getEntryX(), entryLoc.getEntryZ()), column);
        }
        copyChunks++;
        String msg = "Converting chunk data" + (countingChunks ? " (counting chunks)" : "");
        progress.setProgress(new ConvertProgress(msg, 2, 3, copyChunks, countingChunks ? -1 : chunkCount));
    }
private ByteBuffer extractColumnData(ByteBuffer vanillaData) throws IOException {
ByteArrayInputStream in = new ByteArrayInputStream(vanillaData.array());
CompoundTag tag = readCompressed(in);
CompoundTag columnTag = extractColumnData(tag);
return writeCompressed(columnTag);
}
    /**
     * Builds a CubicChunks column tag from a vanilla chunk tag: keeps
     * x/z, InhabitedTime, and Biomes, and synthesizes an OpacityIndex
     * from the vanilla HeightMap.
     */
    private CompoundTag extractColumnData(CompoundTag tag) throws IOException {
        /**
         *
         * Vanilla Chunk NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- xPos
         *  |- zPos
         *  |- LastUpdate
         *  |- TerrainPopulated
         *  |- LightPopulated
         *  |- InhabitedTime
         *  |- Biomes
         *  |- HeightMap
         *  |- Sections
         *  ||* Section list:
         *  | |- Y
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *
         * CubicChunks Column format:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- x
         *  |- z
         *  |- InhabitedTime
         *  |- Biomes
         *  |- OpacityIndex
         */
        CompoundMap levelMap = new CompoundMap();
        CompoundMap srcLevel = (CompoundMap) tag.getValue().get("Level").getValue();
        // fixHeightmap mutates and returns the same array (vanilla stores
        // "one above top block", CC stores the top block itself)
        int[] srcHeightMap = fixHeightmap((int[]) srcLevel.get("HeightMap").getValue());
        levelMap.put(new IntTag("v", 1));
        levelMap.put(new IntTag("x", (Integer) srcLevel.get("xPos").getValue()));
        levelMap.put(new IntTag("z", (Integer) srcLevel.get("zPos").getValue()));
        // Default to 0 because WorldPainter chunks don't have this tag
        levelMap.put(srcLevel.getOrDefault("InhabitedTime", new IntTag("InhabitedTime", 0)));
        levelMap.put(srcLevel.get("Biomes"));
        levelMap.put(new ByteArrayTag("OpacityIndex", makeDummyOpacityIndex(srcHeightMap)));
        CompoundMap rootMap = new CompoundMap();
        rootMap.put(new CompoundTag("Level", levelMap));
        if (tag.getValue().containsKey("DataVersion")) {
            rootMap.put(tag.getValue().get("DataVersion"));
        }
        CompoundTag root = new CompoundTag("", rootMap);
        return root;
    }
private int[] fixHeightmap(int[] heights) {
for (int i = 0; i < heights.length; i++) {
heights[i]--; // vanilla = 1 above top, cc = top block
}
return heights;
}
private byte[] makeDummyOpacityIndex(int[] heightMap) throws IOException {
ByteArrayOutputStream buf = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(buf);
for (int i = 0; i < 256; i++) { // 256 segment arrays
out.writeInt(0); // minY
out.writeInt(heightMap[i]); // maxY
out.writeShort(0); // no segments - write zero
}
out.close();
return buf.toByteArray();
}
private ByteBuffer[] extractCubeData(ByteBuffer vanillaData) throws IOException {
ByteArrayInputStream in = new ByteArrayInputStream(vanillaData.array());
CompoundTag[] tags = extractCubeData(readCompressed(in));
ByteBuffer[] buffers = new ByteBuffer[tags.length];
for (int i = 0; i < tags.length; i++) {
CompoundTag tag = tags[i];
if (tag == null) {
continue;
}
buffers[i] = writeCompressed(tag);
}
return buffers;
}
    /**
     * Splits one vanilla chunk tag into up to 16 CubicChunks cube tags,
     * one per vanilla section; array slots stay null for missing
     * (all-air) sections.  Entities, tile entities, and tile ticks are
     * partitioned into the cube whose 16-block Y range contains them.
     */
    private CompoundTag[] extractCubeData(CompoundTag srcRootTag) {
        /**
         *
         * Vanilla Chunk NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- xPos
         *  |- zPos
         *  |- LastUpdate
         *  |- TerrainPopulated
         *  |- LightPopulated
         *  |- InhabitedTime
         *  |- Biomes
         *  |- HeightMap
         *  |- Sections
         *  ||* Section list:
         *  | |- Y
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *
         * CubicChunks Cube NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- x
         *  |- y
         *  |- z
         *  |- populated
         *  |- fullyPopulated
         *  |- initLightDone
         *  |- Sections
         *  ||* A single section
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *  |- LightingInfo
         *   |- LastHeightMap
         */
        CompoundMap srcRoot = srcRootTag.getValue();
        CompoundTag[] tags = new CompoundTag[16];
        for (int y = 0; y < 16; y++) {
            CompoundMap srcLevel = ((CompoundTag) srcRoot.get("Level")).getValue();
            CompoundTag srcSection = getSection(srcLevel, y);
            if (srcSection == null) {
                continue; // no vanilla section here -> no cube
            }
            CompoundMap root = new CompoundMap();
            {
                if (srcRoot.containsKey("DataVersion")) {
                    root.put(srcRoot.get("DataVersion"));
                }
                CompoundMap level = new CompoundMap();
                {
                    level.put(new ByteTag("v", (byte) 1));
                    level.put(new IntTag("x", (Integer) srcLevel.get("xPos").getValue()));
                    level.put(new IntTag("y", y));
                    level.put(new IntTag("z", (Integer) srcLevel.get("zPos").getValue()));
                    level.put(new ByteTag("populated", (Byte) srcLevel.get("TerrainPopulated").getValue()));
                    level.put(new ByteTag("fullyPopulated", (Byte) srcLevel.get("TerrainPopulated").getValue())); // TODO: handle this properly
                    level.put(new ByteTag("isSurfaceTracked", (byte) 0)); // so that cubic chunks can re-make surface tracking data on it's own
                    level.put(new ByteTag("initLightDone", (Byte) srcLevel.get("LightPopulated").getValue()));
                    // the vanilla section has additional Y tag, it will be ignored by cubic chunks
                    level.put(new ListTag<>("Sections", CompoundTag.class, Arrays.asList(fixSection(srcSection))));
                    level.put(filterEntities((ListTag<CompoundTag>) srcLevel.get("Entities"), y));
                    level.put(filterTileEntities((ListTag<?>) srcLevel.get("TileEntities"), y));
                    if (srcLevel.containsKey("TileTicks")) {
                        level.put(filterTileTicks((ListTag<CompoundTag>) srcLevel.get("TileTicks"), y));
                    }
                    level.put(makeLightingInfo(srcLevel));
                }
                root.put(new CompoundTag("Level", level));
            }
            tags[y] = new CompoundTag("", root);
        }
        return tags;
    }
    /**
     * Replaces bedrock (id 7) with stone (id 1) in the section's block
     * id array, mutating the tag in place, and returns the same tag.
     * NOTE(review): only the base "Blocks" byte array is scanned — the
     * "Add" extension nibbles are not consulted; confirm ids > 255
     * cannot alias id 7 here.
     */
    private CompoundTag fixSection(CompoundTag srcSection) {
        ByteArrayTag data = (ByteArrayTag) srcSection.getValue().get("Blocks");
        byte[] ids = data.getValue();
        // TODO: handle it the forge way
        for (int i = 0; i < ids.length; i++) {
            if (ids[i] == 7) { // bedrock
                ids[i] = 1; // stone
            }
        }
        return srcSection;
    }
private CompoundTag makeLightingInfo(CompoundMap srcLevel) {
IntArrayTag heightmap = new IntArrayTag("LastHeightMap", (int[]) srcLevel.get("HeightMap").getValue());
CompoundMap lightingInfoMap = new CompoundMap();
lightingInfoMap.put(heightmap);
CompoundTag lightingInfo = new CompoundTag("LightingInfo", lightingInfoMap);
return lightingInfo;
}
private CompoundTag getSection(CompoundMap srcLevel, int y) {
ListTag<CompoundTag> sections = (ListTag<CompoundTag>) srcLevel.get("Sections");
for (CompoundTag tag : sections.getValue()) {
if (((ByteTag) tag.getValue().get("Y")).getValue().equals((byte) (y))) {
return tag;
}
}
return null;
}
private ListTag<CompoundTag> filterEntities(ListTag<CompoundTag> entities, int cubeY) {
double yMin = cubeY*16;
double yMax = yMin + 16;
List<CompoundTag> cubeEntities = new ArrayList<>();
for (CompoundTag entityTag : entities.getValue()) {
List<DoubleTag> pos = ((ListTag<DoubleTag>) entityTag.getValue().get("Pos")).getValue();
double y = pos.get(1).getValue();
if (y >= yMin && y < yMax) {
cubeEntities.add(entityTag);
}
}
return new ListTag<>(entities.getName(), CompoundTag.class, cubeEntities);
}
private ListTag<?> filterTileEntities(ListTag<?> tileEntities, int cubeY) {
// empty list is list of EndTags
if (tileEntities.getValue().isEmpty()) {
return tileEntities;
}
int yMin = cubeY*16;
int yMax = yMin + 16;
List<CompoundTag> cubeTEs = new ArrayList<>();
for (CompoundTag teTag : ((ListTag<CompoundTag>) tileEntities).getValue()) {
int y = ((IntTag) teTag.getValue().get("y")).getValue();
if (y >= yMin && y < yMax) {
cubeTEs.add(teTag);
}
}
return new ListTag<>(tileEntities.getName(), CompoundTag.class, cubeTEs);
}
private ListTag<CompoundTag> filterTileTicks(ListTag<CompoundTag> tileTicks, int cubeY) {
int yMin = cubeY*16;
int yMax = yMin + 16;
List<CompoundTag> cubeTicks = new ArrayList<>();
for (CompoundTag tileTick : tileTicks.getValue()) {
int y = ((IntTag) tileTick.getValue().get("y")).getValue();
if (y >= yMin && y < yMax) {
cubeTicks.add(tileTick);
}
}
return new ListTag<>(tileTicks.getName(), CompoundTag.class, cubeTicks);
}
private static CompoundTag readCompressed(InputStream is) throws IOException {
int i = is.read();
BufferedInputStream data;
if (i == 1) {
data = new BufferedInputStream(new GZIPInputStream(is));
} else if (i == 2) {
data = new BufferedInputStream(new InflaterInputStream(is));
} else {
throw new UnsupportedOperationException();
}
return (CompoundTag) new NBTInputStream(data, false).readTag();
}
private static ByteBuffer writeCompressed(CompoundTag tag) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
NBTOutputStream nbtOut = new NBTOutputStream(new GZIPOutputStream(bytes), false);
nbtOut.writeTag(tag);
nbtOut.close();
bytes.flush();
return ByteBuffer.wrap(bytes.toByteArray());
}
private void startCounting(Path src) {
new Thread(() -> {
for (MinecraftSaveSection save : saves.values()) {
try {
// increment is non-atomic but it's safe here because we don't need it to be anywhere close to correct while counting
save.forAllKeys(loc -> chunkCount++);
} catch (IOException e) {
e.printStackTrace();
}
try {
fileCount = Utils.countFiles(src);
} catch (IOException e) {
e.printStackTrace();
}
}
countingChunks = false;
}, "Chunk and File counting thread").start();
}
}
| src/main/java/cubicchunks/converter/lib/AnvilToCubicChunksConverter.java | /*
* This file is part of CubicChunksConverter, licensed under the MIT License (MIT).
*
* Copyright (c) 2017 contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package cubicchunks.converter.lib;
import com.flowpowered.nbt.ByteArrayTag;
import com.flowpowered.nbt.ByteTag;
import com.flowpowered.nbt.CompoundMap;
import com.flowpowered.nbt.CompoundTag;
import com.flowpowered.nbt.DoubleTag;
import com.flowpowered.nbt.IntArrayTag;
import com.flowpowered.nbt.IntTag;
import com.flowpowered.nbt.ListTag;
import com.flowpowered.nbt.StringTag;
import com.flowpowered.nbt.Tag;
import com.flowpowered.nbt.stream.NBTInputStream;
import com.flowpowered.nbt.stream.NBTOutputStream;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.InflaterInputStream;
import cubicchunks.regionlib.impl.EntryLocation2D;
import cubicchunks.regionlib.impl.EntryLocation3D;
import cubicchunks.regionlib.impl.MinecraftChunkLocation;
import cubicchunks.regionlib.impl.SaveCubeColumns;
import cubicchunks.regionlib.impl.save.MinecraftSaveSection;
import cubicchunks.regionlib.impl.save.SaveSection2D;
import cubicchunks.regionlib.impl.save.SaveSection3D;
import cubicchunks.regionlib.util.WrappedException;
import static cubicchunks.regionlib.impl.save.MinecraftSaveSection.MinecraftRegionType.MCA;
public class AnvilToCubicChunksConverter implements ISaveConverter {

    /** Resolves the vanilla "region" directory for a dimension inside the source save. */
    private static final BiFunction<Dimension, Path, Path> LOCATION_FUNC_SRC = (d, p) -> {
        if (!d.getDirectory().isEmpty()) {
            p = p.resolve(d.getDirectory());
        }
        return p.resolve("region");
    };

    /** Resolves the root directory for a dimension inside the destination save. */
    private static final BiFunction<Dimension, Path, Path> LOCATION_FUNC_DST = (d, p) -> {
        if (!d.getDirectory().isEmpty()) {
            p = p.resolve(d.getDirectory());
        }
        return p;
    };

    // Totals produced asynchronously by the counting thread (see startCounting);
    // -1 until convert() resets them.
    private volatile int chunkCount = -1;
    private volatile int fileCount = -1;

    // Progress counters, only updated by the conversion thread.
    private int copyChunks = -1;
    private int copiedFiles = -1;

    private Map<Dimension, MinecraftSaveSection> saves = new ConcurrentHashMap<>();

    // While true the corresponding total is still being computed and progress is
    // reported as indeterminate (-1). Volatile because they are written by the
    // counting thread and read by the conversion thread.
    private volatile boolean countingFiles;
    private volatile boolean countingChunks;

    /**
     * Converts a vanilla (anvil) save at {@code srcDir} into a CubicChunks save at
     * {@code dstDir}, reporting progress in four steps: level info, chunk data,
     * other files, and filling the vanilla height range with empty cubes.
     */
    @Override
    public void convert(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        saves.clear();
        chunkCount = 0;
        countingChunks = true;
        fileCount = 0;
        countingFiles = true;
        copyChunks = 0;
        copiedFiles = 0;
        initDimensions(srcDir);
        startCounting(srcDir);
        progress.setProgress(new ConvertProgress("Converting level information", 1, 4, 0, 1));
        convertLevelInfo(progress, srcDir, dstDir);
        progress.setProgress(new ConvertProgress("Converting chunk data (counting chunks)", 2, 4, 0, 1));
        convertChunkData(progress, srcDir, dstDir);
        progress.setProgress(new ConvertProgress("Copying other files (counting files)", 3, 4, 0, 1));
        copyAllOtherData(progress, srcDir, dstDir);
        for (MinecraftSaveSection save : saves.values()) {
            save.close();
        }
        fillVanillaRangeEmpty(progress, dstDir);
    }

    /**
     * Fills cube Y positions 0..15 of every converted column with empty cubes so
     * the vanilla height range is fully populated in the destination save.
     */
    private void fillVanillaRangeEmpty(IProgressListener progress, Path dstDir) throws IOException {
        // This is step 4 of 4; the initial message previously claimed step 3,
        // inconsistent with the per-cube updates below.
        progress.setProgress(new ConvertProgress("Filling vanilla height range with empty cubes", 4, 4, 0, 1));
        copyChunks = 0;
        for (Dimension d : Dimensions.getDimensions()) {
            Path dimLoc = LOCATION_FUNC_DST.apply(d, dstDir);
            if (!Files.exists(dimLoc)) {
                continue;
            }
            Files.createDirectories(dimLoc.resolve("region2d"));
            Files.createDirectories(dimLoc.resolve("region3d"));
            try (SaveSection2D section2d = SaveSection2D.createAt(dimLoc.resolve("region2d"));
                 SaveSection3D section3d = SaveSection3D.createAt(dimLoc.resolve("region3d"))) {
                section2d.forAllKeys(pos -> {
                    for (int y = 0; y < 16; y++) {
                        EntryLocation3D cPos = new EntryLocation3D(pos.getEntryX(), y, pos.getEntryZ());
                        // Only fill positions that don't already contain a converted cube.
                        if (!section3d.load(cPos).isPresent()) {
                            section3d.save(cPos, writeCompressed(emptyCube(cPos)));
                        }
                    }
                    copyChunks++;
                    String msg = "Filling vanilla height range with empty cubes" + (countingChunks ? " (counting chunks)" : "");
                    progress.setProgress(new ConvertProgress(msg, 4, 4, copyChunks, countingChunks ? -1 : (chunkCount == 0 ? 100 : chunkCount)));
                });
            } catch (WrappedException e) {
                throw (IOException) e.get();
            }
        }
    }

    /**
     * Builds the NBT for an empty CubicChunks cube at the given location. The cube
     * is marked populated and surface-tracked (it is empty, so there is nothing to
     * track) and carries no Sections tag.
     */
    private CompoundTag emptyCube(EntryLocation3D loc) {
        int x = loc.getEntryX();
        int y = loc.getEntryY();
        int z = loc.getEntryZ();
        CompoundMap root = new CompoundMap();
        {
            CompoundMap level = new CompoundMap();
            {
                level.put(new ByteTag("v", (byte) 1));
                level.put(new IntTag("x", x));
                level.put(new IntTag("y", y));
                level.put(new IntTag("z", z));
                level.put(new ByteTag("populated", true));
                level.put(new ByteTag("fullyPopulated", true)); // TODO: handle this properly
                level.put(new ByteTag("isSurfaceTracked", true)); // it's empty, no need to re-track
                // no need for Sections, CC has isEmpty check for that
                level.put(new ByteTag("initLightDone", false));
                level.put(new ListTag<>("Entities", CompoundTag.class, Collections.singletonList(new CompoundTag("", new CompoundMap()))));
                level.put(new ListTag<>("TileEntities", CompoundTag.class, Collections.singletonList(new CompoundTag("", new CompoundMap()))));
                level.put(makeEmptyLightingInfo());
            }
            root.put(new CompoundTag("Level", level));
        }
        return new CompoundTag("", root);
    }

    /** Builds a LightingInfo tag with an all-zero LastHeightMap for empty cubes. */
    private CompoundTag makeEmptyLightingInfo() {
        IntArrayTag heightmap = new IntArrayTag("LastHeightMap", new int[256]);
        CompoundMap lightingInfoMap = new CompoundMap();
        lightingInfoMap.put(heightmap);
        return new CompoundTag("LightingInfo", lightingInfoMap);
    }

    /** Opens a vanilla save section for every dimension that exists in the source save. */
    private void initDimensions(Path src) {
        for (Dimension d : Dimensions.getDimensions()) {
            Path srcLoc = LOCATION_FUNC_SRC.apply(d, src);
            if (!Files.exists(srcLoc)) {
                continue;
            }
            // Reuse the already-resolved path instead of applying the function twice.
            saves.put(d, MinecraftSaveSection.createAt(srcLoc, MCA));
        }
    }

    /**
     * Copies level.dat to the destination, switching the "default" generator to
     * "VanillaCubic" and tagging the world with isCubicWorld=1.
     */
    private void convertLevelInfo(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        CompoundTag root;
        // try-with-resources: the input stream was previously never closed.
        try (NBTInputStream nbtIn = new NBTInputStream(new FileInputStream(srcDir.resolve("level.dat").toFile()))) {
            root = (CompoundTag) nbtIn.readTag();
        }
        CompoundMap newRoot = new CompoundMap();
        for (Tag<?> tag : root.getValue()) {
            if (tag.getName().equals("Data")) {
                CompoundMap data = ((CompoundTag) root.getValue().get("Data")).getValue();
                CompoundMap newData = new CompoundMap();
                for (Tag<?> dataTag : data) {
                    if (dataTag.getName().equals("generatorName")) {
                        String value = (String) dataTag.getValue();
                        String newValue;
                        if (value.equalsIgnoreCase("default")) {
                            newValue = "VanillaCubic";
                        } else {
                            newValue = value;
                        }
                        newData.put(new StringTag(dataTag.getName(), newValue));
                    } else {
                        newData.put(dataTag);
                    }
                }
                // put isCubicWorld at the end to overwrite previously existing data, if any
                newData.put("isCubicWorld", new ByteTag("isCubicWorld", (byte) 1));
                newRoot.put(new CompoundTag(tag.getName(), newData));
            } else {
                newRoot.put(tag);
            }
        }
        Files.createDirectories(dstDir);
        // try-with-resources guarantees the output is closed even if writing fails.
        try (NBTOutputStream nbtOut = new NBTOutputStream(new FileOutputStream(dstDir.resolve("level.dat").toFile()))) {
            nbtOut.writeTag(new CompoundTag(root.getName(), newRoot));
        }
    }

    /** Converts the chunk data of every dimension present in the source save. */
    private void convertChunkData(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        int step = 0;
        int maxSteps = Dimensions.getDimensions().size();
        for (Dimension d : Dimensions.getDimensions()) {
            Path srcLoc = LOCATION_FUNC_SRC.apply(d, srcDir);
            if (!Files.exists(srcLoc)) {
                continue;
            }
            convertDimension(progress, d, LOCATION_FUNC_DST.apply(d, dstDir), step, maxSteps);
            step++;
        }
    }

    /**
     * Copies everything except level.dat (already converted) and the per-dimension
     * region directories (converted separately), reporting per-file progress.
     */
    private void copyAllOtherData(IProgressListener progress, Path srcDir, Path dstDir) throws IOException {
        Utils.copyEverythingExcept(srcDir, srcDir, dstDir, file ->
                file.toString().contains("level.dat") ||
                    Dimensions.getDimensions().stream().anyMatch(dim ->
                        srcDir.resolve(dim.getDirectory()).resolve("region").equals(file)
                    ),
            f -> {
                copiedFiles++;
                String msg = "Copying other files" + (countingFiles ? " (counting files)" : "");
                double p = copiedFiles;
                // Step 3 of 4 (previously reported as 3 of 3, inconsistent with convert()).
                progress.setProgress(new ConvertProgress(msg, 3, 4, p, countingFiles ? -1 : fileCount));
            }
        );
    }

    /**
     * Converts all chunks of a single dimension into the CubicChunks format.
     * IOExceptions thrown inside the per-key callback are tunneled out through
     * {@link WrappedException} and rethrown here.
     */
    private void convertDimension(IProgressListener progress, Dimension dim, Path dstParent, int step, int maxSteps) throws IOException {
        MinecraftSaveSection vanillaSave = saves.get(dim);
        try (SaveCubeColumns saveCubic = SaveCubeColumns.create(dstParent)) {
            vanillaSave.forAllKeys(mcPos -> {
                try {
                    this.convertRegion(progress, mcPos, vanillaSave, saveCubic);
                } catch (IOException e) {
                    throw new WrappedException(e);
                }
            });
        } catch (WrappedException e) {
            throw (IOException) e.get();
        }
    }

    /**
     * Converts a single vanilla chunk into up to 16 cubes plus one column entry
     * in the CubicChunks save, and advances the chunk progress counter.
     */
    private void convertRegion(IProgressListener progress, MinecraftChunkLocation entryLoc,
                               MinecraftSaveSection vanillaSave,
                               SaveCubeColumns saveCubic) throws IOException {
        ByteBuffer vanillaData = vanillaSave.load(entryLoc).get();
        ByteBuffer[] cubes = extractCubeData(vanillaData);
        ByteBuffer column = extractColumnData(vanillaData);
        for (int y = 0; y < cubes.length; y++) {
            if (cubes[y] == null) {
                continue;
            }
            EntryLocation3D l = new EntryLocation3D(entryLoc.getEntryX(), y, entryLoc.getEntryZ());
            saveCubic.save3d(l, cubes[y]);
        }
        if (column != null) {
            saveCubic.save2d(new EntryLocation2D(entryLoc.getEntryX(), entryLoc.getEntryZ()), column);
        }
        copyChunks++;
        String msg = "Converting chunk data" + (countingChunks ? " (counting chunks)" : "");
        // Step 2 of 4 (previously reported as 2 of 3, inconsistent with convert()).
        progress.setProgress(new ConvertProgress(msg, 2, 4, copyChunks, countingChunks ? -1 : chunkCount));
    }

    /** Extracts and re-compresses the column (2D) part of a vanilla chunk. */
    private ByteBuffer extractColumnData(ByteBuffer vanillaData) throws IOException {
        ByteArrayInputStream in = new ByteArrayInputStream(vanillaData.array());
        CompoundTag tag = readCompressed(in);
        CompoundTag columnTag = extractColumnData(tag);
        return writeCompressed(columnTag);
    }

    /**
     * Builds the CubicChunks column NBT from a vanilla chunk tag.
     */
    private CompoundTag extractColumnData(CompoundTag tag) throws IOException {
        /**
         *
         * Vanilla Chunk NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- xPos
         *  |- zPos
         *  |- LastUpdate
         *  |- TerrainPopulated
         *  |- LightPopulated
         *  |- InhabitedTime
         *  |- Biomes
         *  |- HeightMap
         *  |- Sections
         *  ||* Section list:
         *  | |- Y
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *
         * CubicChunks Column format:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- x
         *  |- z
         *  |- InhabitedTime
         *  |- Biomes
         *  |- OpacityIndex
         */
        CompoundMap levelMap = new CompoundMap();
        CompoundMap srcLevel = (CompoundMap) tag.getValue().get("Level").getValue();

        int[] srcHeightMap = fixHeightmap((int[]) srcLevel.get("HeightMap").getValue());

        levelMap.put(new IntTag("v", 1));
        levelMap.put(new IntTag("x", (Integer) srcLevel.get("xPos").getValue()));
        levelMap.put(new IntTag("z", (Integer) srcLevel.get("zPos").getValue()));
        levelMap.put(srcLevel.get("InhabitedTime"));
        levelMap.put(srcLevel.get("Biomes"));
        levelMap.put(new ByteArrayTag("OpacityIndex", makeDummyOpacityIndex(srcHeightMap)));

        CompoundMap rootMap = new CompoundMap();
        rootMap.put(new CompoundTag("Level", levelMap));
        if (tag.getValue().containsKey("DataVersion")) {
            rootMap.put(tag.getValue().get("DataVersion"));
        }

        CompoundTag root = new CompoundTag("", rootMap);
        return root;
    }

    /** Converts vanilla height values (1 above top block) to CC values (top block), in place. */
    private int[] fixHeightmap(int[] heights) {
        for (int i = 0; i < heights.length; i++) {
            heights[i]--; // vanilla = 1 above top, cc = top block
        }
        return heights;
    }

    /**
     * Serializes a minimal opacity index: for each of the 256 columns one segment
     * array with minY=0, maxY from the heightmap, and zero segments.
     */
    private byte[] makeDummyOpacityIndex(int[] heightMap) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        for (int i = 0; i < 256; i++) { // 256 segment arrays
            out.writeInt(0); // minY
            out.writeInt(heightMap[i]); // maxY
            out.writeShort(0); // no segments - write zero
        }
        out.close();
        return buf.toByteArray();
    }

    /**
     * Extracts and re-compresses the per-cube (3D) parts of a vanilla chunk.
     * The returned array is indexed by cube Y; entries without a vanilla section
     * stay null.
     */
    private ByteBuffer[] extractCubeData(ByteBuffer vanillaData) throws IOException {
        ByteArrayInputStream in = new ByteArrayInputStream(vanillaData.array());
        CompoundTag[] tags = extractCubeData(readCompressed(in));
        ByteBuffer[] buffers = new ByteBuffer[tags.length];
        for (int i = 0; i < tags.length; i++) {
            CompoundTag tag = tags[i];
            if (tag == null) {
                continue;
            }
            buffers[i] = writeCompressed(tag);
        }
        return buffers;
    }

    /**
     * Splits a vanilla chunk tag into up to 16 CubicChunks cube tags, one per
     * existing vanilla section.
     */
    private CompoundTag[] extractCubeData(CompoundTag srcRootTag) {
        /**
         *
         * Vanilla Chunk NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- xPos
         *  |- zPos
         *  |- LastUpdate
         *  |- TerrainPopulated
         *  |- LightPopulated
         *  |- InhabitedTime
         *  |- Biomes
         *  |- HeightMap
         *  |- Sections
         *  ||* Section list:
         *  | |- Y
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *
         * CubicChunks Cube NBT structure:
         *
         * ROOT
         * |- DataVersion
         * |- Level
         *  |- v
         *  |- x
         *  |- y
         *  |- z
         *  |- populated
         *  |- fullyPopulated
         *  |- initLightDone
         *  |- Sections
         *  ||* A single section
         *  | |- Blocks
         *  | |- Data
         *  | |- Add
         *  | |- BlockLight
         *  | |- SkyLight
         *  |- Entities
         *  |- TileEntities
         *  |- TileTicks
         *  |- LightingInfo
         *   |- LastHeightMap
         */
        CompoundMap srcRoot = srcRootTag.getValue();
        CompoundTag[] tags = new CompoundTag[16];
        for (int y = 0; y < 16; y++) {
            CompoundMap srcLevel = ((CompoundTag) srcRoot.get("Level")).getValue();
            CompoundTag srcSection = getSection(srcLevel, y);
            if (srcSection == null) {
                continue;
            }
            CompoundMap root = new CompoundMap();
            {
                if (srcRoot.containsKey("DataVersion")) {
                    root.put(srcRoot.get("DataVersion"));
                }
                CompoundMap level = new CompoundMap();
                {
                    level.put(new ByteTag("v", (byte) 1));
                    level.put(new IntTag("x", (Integer) srcLevel.get("xPos").getValue()));
                    level.put(new IntTag("y", y));
                    level.put(new IntTag("z", (Integer) srcLevel.get("zPos").getValue()));
                    level.put(new ByteTag("populated", (Byte) srcLevel.get("TerrainPopulated").getValue()));
                    level.put(new ByteTag("fullyPopulated", (Byte) srcLevel.get("TerrainPopulated").getValue())); // TODO: handle this properly
                    level.put(new ByteTag("isSurfaceTracked", (byte) 0)); // so that cubic chunks can re-make surface tracking data on it's own
                    level.put(new ByteTag("initLightDone", (Byte) srcLevel.get("LightPopulated").getValue()));
                    // the vanilla section has additional Y tag, it will be ignored by cubic chunks
                    level.put(new ListTag<>("Sections", CompoundTag.class, Arrays.asList(fixSection(srcSection))));
                    level.put(filterEntities((ListTag<CompoundTag>) srcLevel.get("Entities"), y));
                    level.put(filterTileEntities((ListTag<?>) srcLevel.get("TileEntities"), y));
                    if (srcLevel.containsKey("TileTicks")) {
                        level.put(filterTileTicks((ListTag<CompoundTag>) srcLevel.get("TileTicks"), y));
                    }
                    level.put(makeLightingInfo(srcLevel));
                }
                root.put(new CompoundTag("Level", level));
            }
            tags[y] = new CompoundTag("", root);
        }
        return tags;
    }

    /**
     * Replaces bedrock (id 7) with stone (id 1) in a section's block array,
     * mutating the tag in place.
     */
    private CompoundTag fixSection(CompoundTag srcSection) {
        ByteArrayTag data = (ByteArrayTag) srcSection.getValue().get("Blocks");
        byte[] ids = data.getValue();
        // TODO: handle it the forge way
        for (int i = 0; i < ids.length; i++) {
            if (ids[i] == 7) { // bedrock
                ids[i] = 1; // stone
            }
        }
        return srcSection;
    }

    /** Builds a LightingInfo tag that carries the vanilla heightmap as LastHeightMap. */
    private CompoundTag makeLightingInfo(CompoundMap srcLevel) {
        IntArrayTag heightmap = new IntArrayTag("LastHeightMap", (int[]) srcLevel.get("HeightMap").getValue());
        CompoundMap lightingInfoMap = new CompoundMap();
        lightingInfoMap.put(heightmap);
        CompoundTag lightingInfo = new CompoundTag("LightingInfo", lightingInfoMap);
        return lightingInfo;
    }

    /** Finds the vanilla section with the given Y value, or null if it doesn't exist. */
    private CompoundTag getSection(CompoundMap srcLevel, int y) {
        ListTag<CompoundTag> sections = (ListTag<CompoundTag>) srcLevel.get("Sections");
        for (CompoundTag tag : sections.getValue()) {
            if (((ByteTag) tag.getValue().get("Y")).getValue().equals((byte) (y))) {
                return tag;
            }
        }
        return null;
    }

    /** Keeps only the entities whose Y position falls inside the given cube. */
    private ListTag<CompoundTag> filterEntities(ListTag<CompoundTag> entities, int cubeY) {
        double yMin = cubeY * 16;
        double yMax = yMin + 16;
        List<CompoundTag> cubeEntities = new ArrayList<>();
        for (CompoundTag entityTag : entities.getValue()) {
            List<DoubleTag> pos = ((ListTag<DoubleTag>) entityTag.getValue().get("Pos")).getValue();
            double y = pos.get(1).getValue();
            if (y >= yMin && y < yMax) {
                cubeEntities.add(entityTag);
            }
        }
        return new ListTag<>(entities.getName(), CompoundTag.class, cubeEntities);
    }

    /** Keeps only the tile entities whose Y position falls inside the given cube. */
    private ListTag<?> filterTileEntities(ListTag<?> tileEntities, int cubeY) {
        // empty list is list of EndTags
        if (tileEntities.getValue().isEmpty()) {
            return tileEntities;
        }
        int yMin = cubeY * 16;
        int yMax = yMin + 16;
        List<CompoundTag> cubeTEs = new ArrayList<>();
        for (CompoundTag teTag : ((ListTag<CompoundTag>) tileEntities).getValue()) {
            int y = ((IntTag) teTag.getValue().get("y")).getValue();
            if (y >= yMin && y < yMax) {
                cubeTEs.add(teTag);
            }
        }
        return new ListTag<>(tileEntities.getName(), CompoundTag.class, cubeTEs);
    }

    /** Keeps only the tile ticks whose Y position falls inside the given cube. */
    private ListTag<CompoundTag> filterTileTicks(ListTag<CompoundTag> tileTicks, int cubeY) {
        int yMin = cubeY * 16;
        int yMax = yMin + 16;
        List<CompoundTag> cubeTicks = new ArrayList<>();
        for (CompoundTag tileTick : tileTicks.getValue()) {
            int y = ((IntTag) tileTick.getValue().get("y")).getValue();
            if (y >= yMin && y < yMax) {
                cubeTicks.add(tileTick);
            }
        }
        return new ListTag<>(tileTicks.getName(), CompoundTag.class, cubeTicks);
    }

    /**
     * Reads a chunk NBT tag from a stream whose first byte is the vanilla region
     * compression type (1 = gzip, 2 = zlib/deflate).
     *
     * @throws UnsupportedOperationException on any other compression type
     */
    private static CompoundTag readCompressed(InputStream is) throws IOException {
        int i = is.read();
        BufferedInputStream data;
        if (i == 1) {
            data = new BufferedInputStream(new GZIPInputStream(is));
        } else if (i == 2) {
            data = new BufferedInputStream(new InflaterInputStream(is));
        } else {
            throw new UnsupportedOperationException("Unsupported chunk compression type: " + i);
        }
        return (CompoundTag) new NBTInputStream(data, false).readTag();
    }

    /** Gzip-compresses a tag into a ByteBuffer, as expected by the CubicChunks save sections. */
    private static ByteBuffer writeCompressed(CompoundTag tag) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        NBTOutputStream nbtOut = new NBTOutputStream(new GZIPOutputStream(bytes), false);
        nbtOut.writeTag(tag);
        nbtOut.close();
        bytes.flush();
        return ByteBuffer.wrap(bytes.toByteArray());
    }

    /**
     * Starts the background thread that computes chunk and file totals used for
     * determinate progress reporting.
     */
    private void startCounting(Path src) {
        new Thread(() -> {
            for (MinecraftSaveSection save : saves.values()) {
                try {
                    // increment is non-atomic but it's safe here because we don't need it
                    // to be anywhere close to correct while counting
                    save.forAllKeys(loc -> chunkCount++);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            countingChunks = false;
            try {
                // Count files once for the whole save. Previously this ran inside the
                // per-dimension loop (re-counting the same tree for every dimension)
                // and countingFiles was never cleared, leaving the file-copy progress
                // indeterminate forever.
                fileCount = Utils.countFiles(src);
            } catch (IOException e) {
                e.printStackTrace();
            }
            countingFiles = false;
        }, "Chunk and File counting thread").start();
    }
}
| Add workaround for WorldPainter worlds
| src/main/java/cubicchunks/converter/lib/AnvilToCubicChunksConverter.java | Add workaround for WorldPainter worlds |
|
Java | mit | 37d2216cba252d0e52d97161dcad7f8602518579 | 0 | liuxinglanyue/mjc,estan/mjc,estan/mjc,liuxinglanyue/mjc,estan/mjc,liuxinglanyue/mjc | package mjc;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.FileReader;
import java.util.Comparator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import mjc.lexer.Lexer;
import mjc.lexer.LexerException;
import mjc.parser.Parser;
import mjc.parser.ParserException;
import mjc.symbol.SymbolTable;
import mjc.symbol.SymbolTableBuilder;
import mjc.node.Start;
import mjc.analysis.ASTGraphPrinter;
import mjc.analysis.ASTPrinter;
import mjc.analysis.TypeChecker;
import mjc.error.MiniJavaError;
public class ARMMain {
    private final ASTPrinter astPrinter = new ASTPrinter();
    private final ASTGraphPrinter graphPrinter = new ASTGraphPrinter();

    private final CommandLineParser commandLineParser = new GnuParser();
    private final HelpFormatter helpFormatter = new HelpFormatter();
    private final Options options = new Options();

    private final static int EXIT_SUCCESS = 0;
    private final static int EXIT_FAILURE = 1;

    /** Registers the supported command line options and their help order. */
    public ARMMain() {
        options.addOption("S", false, "output assembler code");
        options.addOption("o", true, "output file");
        options.addOption("p", false, "print abstract syntax tree");
        options.addOption("g", false, "print abstract syntax tree in GraphViz format");
        options.addOption("s", false, "print symbol table");
        options.addOption("h", false, "show help message");
        helpFormatter.setOptionComparator(new OptionComparator<Option>());
    }

    public static void main(String[] args) {
        ARMMain program = new ARMMain();
        try {
            System.exit(program.run(args));
        } catch (Exception e) {
            System.err.println(e.getMessage());
            System.exit(EXIT_FAILURE);
        }
    }

    /**
     * Run compiler with the given command line arguments.
     *
     * @param args Command line arguments.
     * @return EXIT_SUCCESS if compilation succeeded, otherwise EXIT_FAILURE.
     *
     * @throws ParseException if parsing of command line arguments failed.
     * @throws IOException if an I/O error occurred.
     * @throws LexerException if lexical analysis failed.
     * @throws ParserException if parsing failed.
     */
    private int run(String[] args) throws ParseException, ParserException, LexerException, IOException {
        final CommandLine commandLine = commandLineParser.parse(options, args);

        if (commandLine.hasOption("h")) {
            helpFormatter.printHelp("mjc <infile> [options]", options);
            System.exit(EXIT_SUCCESS);
        }

        if (commandLine.getArgs().length != 1) {
            helpFormatter.printHelp("mjc <infile> [options]", options);
            System.exit(EXIT_FAILURE);
        }

        /****************************************
         * Stage 1: Lexical Analysis / Parsing. *
         ***************************************/

        final String fileName = commandLine.getArgs()[0];
        final Start tree;
        // try-with-resources: the input file was previously opened but never closed.
        try (PushbackReader reader = new PushbackReader(new FileReader(fileName))) {
            tree = new Parser(new Lexer(reader)).parse();
        }

        if (commandLine.hasOption("p"))
            astPrinter.print(tree);

        if (commandLine.hasOption("g"))
            graphPrinter.print(tree);

        /*******************************
         * Stage 2: Semantic Analysis. *
         *******************************/

        // Build symbol table.
        final SymbolTableBuilder builder = new SymbolTableBuilder();
        final SymbolTable symbolTable = builder.build(tree);
        if (builder.hasErrors()) {
            for (MiniJavaError error : builder.getErrors()) {
                System.err.println(error);
            }
        }

        if (commandLine.hasOption("s"))
            System.out.println(symbolTable);

        // Run type-check.
        final TypeChecker typeChecker = new TypeChecker();
        if (!typeChecker.check(tree, symbolTable)) {
            for (MiniJavaError error : typeChecker.getErrors()) {
                System.err.println(error);
            }
        }

        if (builder.hasErrors() || typeChecker.hasErrors()) {
            // Errors in symbol table building or type checking, abort.
            return EXIT_FAILURE;
        }

        return EXIT_SUCCESS;
    }

    // Comparator for Options, to get them in the order we want in help output.
    class OptionComparator<T extends Option> implements Comparator<T> {
        private static final String ORDER = "Sopgsh";

        @Override
        public int compare(T option1, T option2) {
            return ORDER.indexOf(option1.getOpt()) - ORDER.indexOf(option2.getOpt());
        }
    }
}
| src/main/java/mjc/ARMMain.java | package mjc;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.FileReader;
import java.util.Comparator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import mjc.lexer.Lexer;
import mjc.lexer.LexerException;
import mjc.parser.Parser;
import mjc.parser.ParserException;
import mjc.symbol.SymbolTable;
import mjc.symbol.SymbolTableBuilder;
import mjc.node.Start;
import mjc.analysis.ASTGraphPrinter;
import mjc.analysis.ASTPrinter;
import mjc.analysis.TypeChecker;
import mjc.error.MiniJavaError;
public class ARMMain {
    private final ASTPrinter astPrinter = new ASTPrinter();
    private final ASTGraphPrinter graphPrinter = new ASTGraphPrinter();

    private final CommandLineParser commandLineParser = new GnuParser();
    private final HelpFormatter helpFormatter = new HelpFormatter();
    private final Options options = new Options();

    private final static int EXIT_SUCCESS = 0;
    private final static int EXIT_FAILURE = 1;

    /** Registers the supported command line options and their help order. */
    public ARMMain() {
        options.addOption("S", false, "output assembler code");
        options.addOption("o", true, "output file");
        options.addOption("p", false, "print abstract syntax tree");
        options.addOption("g", false, "print abstract syntax tree in GraphViz format");
        options.addOption("s", false, "print symbol table");
        options.addOption("h", false, "show help message");
        helpFormatter.setOptionComparator(new OptionComparator<Option>());
    }

    public static void main(String[] args) {
        ARMMain program = new ARMMain();
        try {
            System.exit(program.run(args));
        } catch (Exception e) {
            System.err.println(e.getMessage());
            System.exit(EXIT_FAILURE);
        }
    }

    /**
     * Run compiler with the given command line arguments.
     *
     * @param args Command line arguments.
     * @return EXIT_SUCCESS if compilation succeeded, otherwise EXIT_FAILURE.
     *
     * @throws ParseException if parsing of command line arguments failed.
     * @throws IOException if an I/O error occurred.
     * @throws LexerException if lexical analysis failed.
     * @throws ParserException if parsing failed.
     */
    private int run(String[] args) throws ParseException, ParserException, LexerException, IOException {
        final CommandLine commandLine = commandLineParser.parse(options, args);

        if (commandLine.hasOption("h")) {
            helpFormatter.printHelp("mjc <infile> [options]", options);
            System.exit(EXIT_SUCCESS);
        }

        if (commandLine.getArgs().length != 1) {
            helpFormatter.printHelp("mjc <infile> [options]", options);
            System.exit(EXIT_FAILURE);
        }

        /******************************
         * Stage 1: Lexing + Parsing. *
         *****************************/

        final String fileName = commandLine.getArgs()[0];
        final Start tree;
        // try-with-resources: the input file was previously opened but never closed.
        try (PushbackReader reader = new PushbackReader(new FileReader(fileName))) {
            tree = new Parser(new Lexer(reader)).parse();
        }

        if (commandLine.hasOption("p"))
            astPrinter.print(tree);

        if (commandLine.hasOption("g"))
            graphPrinter.print(tree);

        /*******************************
         * Stage 2: Semantic Analysis. *
         *******************************/

        // Build symbol table.
        final SymbolTableBuilder builder = new SymbolTableBuilder();
        final SymbolTable symbolTable = builder.build(tree);
        if (builder.hasErrors()) {
            for (MiniJavaError error : builder.getErrors()) {
                System.err.println(error);
            }
        }

        if (commandLine.hasOption("s"))
            System.out.println(symbolTable);

        // Run type-check.
        final TypeChecker typeChecker = new TypeChecker();
        if (!typeChecker.check(tree, symbolTable)) {
            for (MiniJavaError error : typeChecker.getErrors()) {
                System.err.println(error);
            }
        }

        if (builder.hasErrors() || typeChecker.hasErrors()) {
            // Errors in symbol table building or type checking, abort.
            return EXIT_FAILURE;
        }

        return EXIT_SUCCESS;
    }

    // Comparator for Options, to get them in the order we want in help output.
    class OptionComparator<T extends Option> implements Comparator<T> {
        private static final String ORDER = "Sopgsh";

        @Override
        public int compare(T option1, T option2) {
            return ORDER.indexOf(option1.getOpt()) - ORDER.indexOf(option2.getOpt());
        }
    }
}
| Style fix.
| src/main/java/mjc/ARMMain.java | Style fix. |
|
Java | mit | 965659774c737f884a98292e9ab7919bbd6ff201 | 0 | TechReborn/RebornCore | /*
* Copyright (c) 2018 modmuss50 and Gigabit101
*
*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package reborncore.common.crafting;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.mojang.datafixers.Dynamic;
import com.mojang.datafixers.types.JsonOps;
import net.minecraft.block.entity.BlockEntity;
import net.minecraft.datafixers.NbtOps;
import net.minecraft.inventory.Inventory;
import net.minecraft.item.ItemStack;
import net.minecraft.recipe.Ingredient;
import net.minecraft.recipe.Recipe;
import net.minecraft.recipe.RecipeSerializer;
import net.minecraft.util.DefaultedList;
import net.minecraft.util.Identifier;
import net.minecraft.util.JsonHelper;
import net.minecraft.util.registry.Registry;
import net.minecraft.world.World;
import org.apache.commons.lang3.Validate;
import reborncore.common.crafting.ingredient.IngredientManager;
import reborncore.common.crafting.ingredient.RebornIngredient;
import reborncore.common.util.NonNullListCollector;
import reborncore.common.util.serialization.SerializationUtil;
import java.util.Collections;
import java.util.List;
public class RebornRecipe implements Recipe {

    private final RebornRecipeType<?> type;
    private final Identifier name;

    // Populated by deserialize(); empty until then.
    private DefaultedList<RebornIngredient> ingredients = DefaultedList.of();
    private DefaultedList<ItemStack> outputs = DefaultedList.of();

    private int power;
    private int time;

    public RebornRecipe(RebornRecipeType<?> type, Identifier name) {
        this.type = type;
        this.name = name;
    }

    /**
     * Populates this recipe from its JSON definition (power, time, ingredients,
     * results). May only be called once per instance.
     */
    public void deserialize(JsonObject jsonObject){
        //Crash if the recipe has all ready been deserialized
        Validate.isTrue(ingredients.isEmpty());
        power = JsonHelper.getInt(jsonObject, "power");
        time = JsonHelper.getInt(jsonObject, "time");

        ingredients = SerializationUtil.stream(JsonHelper.getArray(jsonObject, "ingredients"))
            .map(IngredientManager::deserialize)
            .collect(NonNullListCollector.toList());

        JsonArray resultsJson = JsonHelper.getArray(jsonObject, "results");
        outputs = RecipeUtils.deserializeItems(resultsJson);
    }

    /**
     * Writes this recipe into the given JSON object, producing the same layout
     * that {@link #deserialize(JsonObject)} reads.
     */
    public void serialize(JsonObject jsonObject) {
        jsonObject.addProperty("power", power);
        jsonObject.addProperty("time", time);

        JsonArray ingredientsArray = new JsonArray();
        getRebornIngredients().stream().map(RebornIngredient::witeToJson).forEach(ingredientsArray::add);
        jsonObject.add("ingredients", ingredientsArray);

        JsonArray resultsArray = new JsonArray();
        for(ItemStack stack : outputs){
            JsonObject stackObject = new JsonObject();
            stackObject.addProperty("item", Registry.ITEM.getId(stack.getItem()).toString());
            if(stack.getCount() > 1){
                stackObject.addProperty("count", stack.getCount());
            }
            if(stack.hasTag()){
                // Bug fix: the NBT tag used to be attached to the recipe root
                // object instead of the per-stack object.
                stackObject.add("tag", Dynamic.convert(NbtOps.INSTANCE, JsonOps.INSTANCE, stack.getTag()));
            }
            // Bug fix: the serialized stack was never added to the results array,
            // so "results" was always written empty.
            resultsArray.add(stackObject);
        }
        jsonObject.add("results", resultsArray);
    }

    @Override
    public Identifier getId() {
        return name;
    }

    @Override
    public RecipeSerializer<?> getSerializer() {
        return type;
    }

    @Override
    public net.minecraft.recipe.RecipeType<?> getType() {
        return type;
    }

    public RebornRecipeType getRebornRecipeType(){
        return type;
    }

    // use the RebornIngredient version to ensure stack sizes are checked
    @Deprecated
    @Override
    public DefaultedList<Ingredient> getPreviewInputs() {
        return ingredients.stream().map(RebornIngredient::getPreview).collect(NonNullListCollector.toList());
    }

    public DefaultedList<RebornIngredient> getRebornIngredients() {
        return ingredients;
    }

    /** Returns an unmodifiable view of the recipe outputs. */
    public List<ItemStack> getOutputs() {
        return Collections.unmodifiableList(outputs);
    }

    public int getPower() {
        return power;
    }

    public int getTime() {
        return time;
    }

    /**
     * @param blockEntity the blockEntity that is doing the crafting
     * @return if true the recipe will craft, if false it will not
     */
    public boolean canCraft(BlockEntity blockEntity){
        return true;
    }

    /**
     * @param blockEntity the blockEntity that is doing the crafting
     * @return return true if fluid was taken and should craft
     */
    public boolean onCraft(BlockEntity blockEntity){
        return true; //TODO look into this being a boolean, seems a little odd, not sure what uses it for now
    }

    //Done as our recipes do not support these functions, hopefully nothing blindly calls them
    @Deprecated
    @Override
    public boolean matches(Inventory inv, World worldIn) {
        throw new UnsupportedOperationException();
    }

    @Deprecated
    @Override
    public ItemStack craft(Inventory inv) {
        throw new UnsupportedOperationException();
    }

    @Deprecated
    @Override
    public boolean fits(int width, int height) {
        throw new UnsupportedOperationException();
    }

    @Deprecated
    @Override
    public ItemStack getOutput() {
        throw new UnsupportedOperationException();
    }

    @Override
    public DefaultedList<ItemStack> getRemainingStacks(Inventory p_179532_1_) {
        throw new UnsupportedOperationException();
    }

    //Done to try and stop the table from loading it
    @Override
    public boolean isIgnoredInRecipeBook() {
        return true;
    }
}
| src/main/java/reborncore/common/crafting/RebornRecipe.java | /*
* Copyright (c) 2018 modmuss50 and Gigabit101
*
*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package reborncore.common.crafting;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.mojang.datafixers.Dynamic;
import com.mojang.datafixers.types.JsonOps;
import net.minecraft.block.entity.BlockEntity;
import net.minecraft.datafixers.NbtOps;
import net.minecraft.inventory.Inventory;
import net.minecraft.item.ItemStack;
import net.minecraft.recipe.Ingredient;
import net.minecraft.recipe.Recipe;
import net.minecraft.recipe.RecipeSerializer;
import net.minecraft.util.DefaultedList;
import net.minecraft.util.Identifier;
import net.minecraft.util.JsonHelper;
import net.minecraft.util.registry.Registry;
import net.minecraft.world.World;
import org.apache.commons.lang3.Validate;
import reborncore.common.crafting.ingredient.IngredientManager;
import reborncore.common.crafting.ingredient.RebornIngredient;
import reborncore.common.util.NonNullListCollector;
import reborncore.common.util.serialization.SerializationUtil;
import java.util.Collections;
import java.util.List;
public class RebornRecipe implements Recipe {
private final RebornRecipeType<?> type;
private final Identifier name;
private DefaultedList<RebornIngredient> ingredients = DefaultedList.of();
private DefaultedList<ItemStack> outputs = DefaultedList.of();
private int power;
private int time;
public RebornRecipe(RebornRecipeType<?> type, Identifier name) {
this.type = type;
this.name = name;
}
public void deserialize(JsonObject jsonObject){
//Crash if the recipe has all ready been deserialized
Validate.isTrue(ingredients == null);
power = JsonHelper.getInt(jsonObject, "power");
time = JsonHelper.getInt(jsonObject, "time");
ingredients = SerializationUtil.stream(JsonHelper.getArray(jsonObject, "ingredients"))
.map(IngredientManager::deserialize)
.collect(NonNullListCollector.toList());
JsonArray resultsJson = JsonHelper.getArray(jsonObject, "results");
outputs = RecipeUtils.deserializeItems(resultsJson);
}
public void serialize(JsonObject jsonObject) {
jsonObject.addProperty("power", power);
jsonObject.addProperty("time", time);
JsonArray ingredientsArray = new JsonArray();
getRebornIngredients().stream().map(RebornIngredient::witeToJson).forEach(ingredientsArray::add);
jsonObject.add("ingredients", ingredientsArray);
JsonArray resultsArray = new JsonArray();
for(ItemStack stack : outputs){
JsonObject stackObject = new JsonObject();
stackObject.addProperty("item", Registry.ITEM.getId(stack.getItem()).toString());
if(stack.getCount() > 1){
stackObject.addProperty("count", stack.getCount());
}
if(stack.hasTag()){
jsonObject.add("tag", Dynamic.convert(NbtOps.INSTANCE, JsonOps.INSTANCE, stack.getTag()));
}
}
jsonObject.add("results", resultsArray);
}
@Override
public Identifier getId() {
return name;
}
@Override
public RecipeSerializer<?> getSerializer() {
return type;
}
@Override
public net.minecraft.recipe.RecipeType<?> getType() {
return type;
}
public RebornRecipeType getRebornRecipeType(){
return type;
}
// use the RebornIngredient version to ensure stack sizes are checked
@Deprecated
@Override
public DefaultedList<Ingredient> getPreviewInputs() {
return ingredients.stream().map(RebornIngredient::getPreview).collect(NonNullListCollector.toList());
}
public DefaultedList<RebornIngredient> getRebornIngredients() {
return ingredients;
}
public List<ItemStack> getOutputs() {
return Collections.unmodifiableList(outputs);
}
public int getPower() {
return power;
}
public int getTime() {
return time;
}
/**
* @param blockEntity the blockEntity that is doing the crafting
* @return if true the recipe will craft, if false it will not
*/
public boolean canCraft(BlockEntity blockEntity){
return true;
}
/**
* @param blockEntity the blockEntity that is doing the crafting
* @return return true if fluid was taken and should craft
*/
public boolean onCraft(BlockEntity blockEntity){
return true; //TODO look into this being a boolean, seems a little odd, not sure what usees it for now
}
//Done as our recipes do not support these functions, hopefully nothing blidly calls them
@Deprecated
@Override
public boolean matches(Inventory inv, World worldIn) {
throw new UnsupportedOperationException();
}
@Deprecated
@Override
public ItemStack craft(Inventory inv) {
throw new UnsupportedOperationException();
}
@Deprecated
@Override
public boolean fits(int width, int height) {
throw new UnsupportedOperationException();
}
@Deprecated
@Override
public ItemStack getOutput() {
throw new UnsupportedOperationException();
}
@Override
public DefaultedList<ItemStack> getRemainingStacks(Inventory p_179532_1_) {
throw new UnsupportedOperationException();
}
//Done to try and stop the table from loading it
@Override
public boolean isIgnoredInRecipeBook() {
return true;
}
}
| Fix bad commit
| src/main/java/reborncore/common/crafting/RebornRecipe.java | Fix bad commit |
|
Java | mit | 553451c499a131f155d73968cd0a0455aa890f85 | 0 | sake/bouncycastle-java | package org.bouncycastle.asn1;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
/**
 * Reads the content octets of a BER indefinite-length encoding, treating the
 * two-byte terminator 0x00 0x00 as end-of-stream (when enabled via
 * {@link #setEofOn00(boolean)}).
 *
 * Implementation note: the stream keeps a two-byte lookahead (_b1, _b2) so it
 * can recognise the 00 00 terminator before handing bytes to the caller. A
 * truncated underlying stream is reported as an EOFException ("corrupted
 * stream") rather than a silent EOF.
 */
class IndefiniteLengthInputStream
    extends LimitedInputStream
{
    // Two-byte lookahead buffer; _b1 is the next byte to hand out.
    private int _b1;
    private int _b2;
    // Set once the terminator (or underlying EOF) has been seen.
    private boolean _eofReached = false;
    // When true, a 00 00 pair is interpreted as the end-of-contents marker.
    private boolean _eofOn00 = true;

    /**
     * Primes the lookahead buffer with the first two bytes.
     * A stream shorter than two bytes is immediately at EOF.
     */
    IndefiniteLengthInputStream(
        InputStream in)
        throws IOException
    {
        super(in);
        _b1 = in.read();
        _b2 = in.read();
        _eofReached = (_b2 < 0);
    }

    /** Enables/disables treating 00 00 as the end-of-contents terminator. */
    void setEofOn00(
        boolean eofOn00)
    {
        _eofOn00 = eofOn00;
    }

    /**
     * Checks the lookahead for the 00 00 terminator; if found, latches EOF
     * and notifies the parent stream.
     *
     * @return true once EOF (terminator or underlying end) has been reached
     */
    boolean checkForEof()
    {
        if (_eofOn00 && (_b1 == 0x00 && _b2 == 0x00))
        {
            _eofReached = true;
            setParentEofDetect(true);
        }
        return _eofReached;
    }

    /**
     * Bulk read. When not scanning for 00 00 we can read directly into the
     * caller's buffer (shifted by two to make room for the lookahead bytes).
     *
     * @throws EOFException if the underlying stream ends prematurely
     */
    public int read(byte[] b, int off, int len)
        throws IOException
    {
        // Only use this optimisation if we aren't checking for 00
        if (_eofOn00 || len < 3)
        {
            return super.read(b, off, len);
        }
        if (_eofReached)
        {
            return -1;
        }
        int numRead = _in.read(b, off + 2, len - 2);
        if (numRead < 0)
        {
            // Corrupted stream
            throw new EOFException();
        }
        // Prepend the buffered lookahead bytes to the data just read.
        b[off] = (byte)_b1;
        b[off + 1] = (byte)_b2;
        // Refill the lookahead from the underlying stream.
        _b1 = _in.read();
        _b2 = _in.read();
        if (_b2 < 0)
        {
            // Corrupted stream
            throw new EOFException();
        }
        return numRead + 2;
    }

    /**
     * Single-byte read: hands out _b1 and shifts the lookahead window.
     *
     * @throws EOFException if the underlying stream ends before the terminator
     */
    public int read()
        throws IOException
    {
        if (checkForEof())
        {
            return -1;
        }
        int b = _in.read();
        if (b < 0)
        {
            // Corrupted stream
            throw new EOFException();
        }
        int v = _b1;
        _b1 = _b2;
        _b2 = b;
        return v;
    }
}
| src/org/bouncycastle/asn1/IndefiniteLengthInputStream.java | package org.bouncycastle.asn1;
import java.io.IOException;
import java.io.InputStream;
class IndefiniteLengthInputStream
extends LimitedInputStream
{
private int _b1;
private int _b2;
private boolean _eofReached = false;
private boolean _eofOn00 = true;
IndefiniteLengthInputStream(
InputStream in)
throws IOException
{
super(in);
_b1 = in.read();
_b2 = in.read();
_eofReached = (_b2 < 0);
}
void setEofOn00(
boolean eofOn00)
{
_eofOn00 = eofOn00;
}
boolean checkForEof()
{
if (_eofOn00 && (_b1 == 0x00 && _b2 == 0x00))
{
_eofReached = true;
setParentEofDetect(true);
}
return _eofReached;
}
public int read(byte[] b, int off, int len)
throws IOException
{
// Only use this optimisation if we aren't checking for 00
if (_eofOn00 || len < 3)
{
return super.read(b, off, len);
}
if (_eofReached)
{
return -1;
}
int numRead = _in.read(b, off + 2, len - 2);
if (numRead < 0)
{
// throw new EOFException();
_eofReached = true;
return -1;
}
b[off] = (byte)_b1;
b[off + 1] = (byte)_b2;
_b1 = _in.read();
_b2 = _in.read();
if (_b2 < 0)
{
// Corrupted stream
// throw new EOFException();
_eofReached = true;
// Just fall thru...
}
return numRead + 2;
}
public int read()
throws IOException
{
if (checkForEof())
{
return -1;
}
int b = _in.read();
//
// strictly speaking we should return b1 and b2, but if this happens the stream
// is corrupted so we are already in trouble.
//
if (b < 0)
{
// Corrupted stream
// throw new EOFException();
_eofReached = true;
return -1;
}
int v = _b1;
_b1 = _b2;
_b2 = b;
return v;
}
}
| [#BJA-74]
Throw exception correctly for unexpected end of file
| src/org/bouncycastle/asn1/IndefiniteLengthInputStream.java | [#BJA-74] Throw exception correctly for unexpected end of file |
|
Java | mit | 706533c73ad5bb989629eacf33b39d5d38714717 | 0 | ushahidi/Crowdmap-Java,ushahidi/Crowdmap-Java | /*******************************************************************************
* Copyright (c) 2010 - 2013 Ushahidi Inc.
* All rights reserved
* Website: http://www.ushahidi.com
*
* GNU AFFERO GENERAL PUBLIC LICENSE Version 3 Usage
* This file may be used under the terms of the GNU AFFERO GENERAL
* PUBLIC LICENSE Version 3 as published by the Free Software
* Foundation and appearing in the file LICENSE included in the
* packaging of this file. Please review the following information to
* ensure the GNU AFFERO GENERAL PUBLIC LICENSE Version 3 requirements
* will be met: http://www.gnu.org/licenses/agpl.html.
******************************************************************************/
package com.crowdmap.java.sdk.service;
import com.crowdmap.java.sdk.json.Media;
import com.crowdmap.java.sdk.json.Response;
import com.crowdmap.java.sdk.model.form.MediaForm;
import com.crowdmap.java.sdk.util.Util;
import static com.crowdmap.java.sdk.net.CrowdmapHttpClient.METHOD_DELETE;
import static com.crowdmap.java.sdk.net.CrowdmapHttpClient.METHOD_POST;
import static com.crowdmap.java.sdk.net.CrowdmapHttpClient.METHOD_GET;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.LIMIT;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.OFFSET;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.SEGMENT_MEDIA;
/**
* Service for interacting with crowdmap's media API
*/
/**
 * Service for interacting with crowdmap's media API.
 *
 * Crowdmap requires a new API signature every 2 minutes, so each request
 * regenerates its key via setApiKey(...) before being issued.
 */
public class MediaService extends CrowdmapService {

    /**
     * Get media in crowdmap. GET /media
     *
     * @return the parsed {@link Media} response
     */
    public Media getMedia() {
        // set the apikey for the request (regenerated per request)
        setApiKey(METHOD_GET, SEGMENT_MEDIA);
        final String json = client.get(SEGMENT_MEDIA);
        return fromString(json, Media.class);
    }

    /**
     * Get a specific media item. GET /media/{id}
     *
     * @param mediaId The ID of the media
     * @return Media Object
     */
    public Media getMedia(long mediaId) {
        checkId(mediaId);
        StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
        url.append(mediaId);
        setApiKey(METHOD_GET, url.toString());
        final String response = client.get(url.toString());
        return fromString(response, Media.class);
    }

    /**
     * Upload a new media item. POST /media
     * Requires an authenticated session.
     *
     * @param form multipart form carrying the media payload
     * @return the parsed {@link Media} response
     */
    public Media createMedia(MediaForm form) {
        validateSession();
        StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
        setApiKey(METHOD_POST, url.toString());
        return fromString(client.multipartPost(url.toString(), form.getParameters()), Media.class);
    }

    /**
     * Delete a media item. DELETE /media/{id}
     * Requires an authenticated session.
     *
     * @param mediaId The ID of the media to delete
     * @return the generic {@link Response}
     */
    public Response deleteMedia(long mediaId) {
        checkId(mediaId);
        validateSession();
        StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
        url.append(mediaId);
        setApiKey(METHOD_DELETE, url.toString());
        return fromString(client.delete(url.toString()), Response.class);
    }

    /**
     * Limit the number of items returned by the next request.
     * Non-positive values are ignored.
     *
     * @param limit maximum number of items to return
     * @return this service, for chaining
     */
    public MediaService limit(int limit) {
        if (limit > 0) {
            getHttpClient().setRequestParameters(LIMIT, String.valueOf(limit));
        }
        return this;
    }

    /**
     * Set the offset for the next request; a limit must have been set first.
     *
     * BUGFIX: the original condition threw when a limit WAS present, which
     * contradicted its own error message ("Requires that a limit be set.")
     * and made limit/offset unusable together. The check is now negated so
     * the exception fires only when no limit has been set.
     *
     * @param offset number of items to skip
     * @return this service, for chaining
     * @throws IllegalArgumentException if no limit has been set
     */
    public MediaService offset(int offset) {
        if (!getHttpClient().getRequestParameters().containsKey(LIMIT)) {
            throw new IllegalArgumentException("Requires that a limit be set.");
        }
        getHttpClient().setRequestParameters(OFFSET, String.valueOf(offset));
        return this;
    }

    /**
     * Attach the session token used for authenticated requests.
     *
     * @param sessionToken non-null, non-empty session token
     * @return this service, for chaining
     * @throws IllegalArgumentException if the token is null or empty
     */
    @Override
    public MediaService setSessionToken(String sessionToken) {
        if ((sessionToken == null) || (sessionToken.length() == 0)) {
            throw new IllegalArgumentException("Session token cannot be null or empty");
        }
        getHttpClient().setSessionToken(sessionToken);
        return this;
    }
}
| src/main/java/com/crowdmap/java/sdk/service/MediaService.java | /*******************************************************************************
* Copyright (c) 2010 - 2013 Ushahidi Inc.
* All rights reserved
* Website: http://www.ushahidi.com
*
* GNU AFFERO GENERAL PUBLIC LICENSE Version 3 Usage
* This file may be used under the terms of the GNU AFFERO GENERAL
* PUBLIC LICENSE Version 3 as published by the Free Software
* Foundation and appearing in the file LICENSE included in the
* packaging of this file. Please review the following information to
* ensure the GNU AFFERO GENERAL PUBLIC LICENSE Version 3 requirements
* will be met: http://www.gnu.org/licenses/agpl.html.
******************************************************************************/
package com.crowdmap.java.sdk.service;
import com.crowdmap.java.sdk.json.Media;
import com.crowdmap.java.sdk.json.Response;
import com.crowdmap.java.sdk.model.form.MediaForm;
import com.crowdmap.java.sdk.util.Util;
import static com.crowdmap.java.sdk.net.CrowdmapHttpClient.METHOD_DELETE;
import static com.crowdmap.java.sdk.net.CrowdmapHttpClient.METHOD_POST;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.LIMIT;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.OFFSET;
import static com.crowdmap.java.sdk.net.ICrowdmapConstants.SEGMENT_MEDIA;
/**
* Service for interacting with crowdmap's media API
*/
public class MediaService extends CrowdmapService {
/**
* Get media in crowdmap. GET /media
*/
public Media getMedia() {
//Crowdmap requires a new api signature every 2 minutes
// so before a request is made, generate a new key
//generate the api key
final String apiKey = Util
.generateSignature("GET", SEGMENT_MEDIA, getPublicKey(), getPrivateKey());
// set the apikey for the request
client.setApiKey(apiKey);
final String json = client.get(SEGMENT_MEDIA);
Media mediaJson = fromString(json, Media.class);
return mediaJson;
}
/**
* Get a specific media
*
* @param mediaId The ID of the media
* @return Media Object
*/
public Media getMedia(long mediaId) {
checkId(mediaId);
StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
url.append(mediaId);
String response = client.get(url.toString());
Media mediaJson = fromString(response, Media.class);
return mediaJson;
}
public Media createMedia(MediaForm form) {
validateSession();
StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
setApiKey(METHOD_POST, url.toString());
return fromString(client.multipartPost(url.toString(), form.getParameters()), Media.class);
}
public Response deleteMedia(long mediaId) {
checkId(mediaId);
validateSession();
StringBuilder url = new StringBuilder(SEGMENT_MEDIA);
url.append(mediaId);
setApiKey(METHOD_DELETE, url.toString());
return fromString(client.delete(url.toString()), Response.class);
}
public MediaService limit(int limit) {
if (limit > 0) {
getHttpClient().setRequestParameters(LIMIT, String.valueOf(limit));
}
return this;
}
public MediaService offset(int offset) {
if (getHttpClient().getRequestParameters().containsKey(LIMIT)) {
throw new IllegalArgumentException("Requires that a limit be set.");
}
getHttpClient().setRequestParameters(OFFSET, String.valueOf(offset));
return this;
}
@Override
public MediaService setSessionToken(String sessionToken) {
if ((sessionToken == null) || (sessionToken.length() == 0)) {
throw new IllegalArgumentException("Session token cannot be null or empty");
}
getHttpClient().setSessionToken(sessionToken);
return this;
}
}
| Add api key signature when fetching individual media
| src/main/java/com/crowdmap/java/sdk/service/MediaService.java | Add api key signature when fetching individual media |
|
Java | mit | 12253ed898c9f69e2e5b509ea1002443485b6b69 | 0 | zalando/nakadi,zalando/nakadi | package org.zalando.nakadi.domain;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class BatchItemTest {

    /**
     * The event size must be counted in bytes, not characters: the two
     * CJK characters encode to multiple bytes each.
     */
    @Test
    public void testBatchItemSizeWithMultByteChar() {
        final BatchItem batchItem = new BatchItem("{ \"name\": \"香港\"} ");
        assertEquals(20, batchItem.getEventSize());
    }
}
| src/test/java/org/zalando/nakadi/domain/BatchItemTest.java | package org.zalando.nakadi.domain;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class BatchItemTest {
@Test
public void testBatchItemSizeWithMultByteChar() {
BatchItem item = new BatchItem("{ \"name\": \"香港\"} ");
assertEquals(20, item.getEventSize());
}
}
| ARUHA-473 Fix test style
| src/test/java/org/zalando/nakadi/domain/BatchItemTest.java | ARUHA-473 Fix test style |
|
Java | mit | 3dcc73feb22f6bf684782db24b6fb4ccd63e0147 | 0 | nls-oskari/oskari-server,uhef/Oskari-Routing,uhef/Oskari-Routing,uhef/Oskari-Routing,nls-oskari/oskari-server,nls-oskari/oskari-server,uhef/Oskari-Routing | package fi.nls.oskari.work;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.pojo.*;
import fi.nls.oskari.transport.TransportService;
import fi.nls.oskari.utils.HttpHelper;
import fi.nls.oskari.wfs.WFSCommunicator;
import fi.nls.oskari.wfs.WFSImage;
import fi.nls.oskari.wfs.WFSParser;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.opengis.feature.Property;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
import org.opengis.referencing.operation.MathTransform;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Job for WFS Map Layer
*/
public class WFSMapLayerJob extends Job {
private static final Logger log = LogFactory.getLogger(WFSMapLayerJob.class);
public static final String TYPE_NORMAL = "normal";
public static final String TYPE_HIGHLIGHT = "highlight";
public static final String TYPE_MAP_CLICK = "mapClick";
public static final String TYPE_FILTER = "filter";
public static final String OUTPUT_LAYER_ID = "layerId";
public static final String OUTPUT_ONCE = "once";
public static final String OUTPUT_MESSAGE = "message";
public static final String OUTPUT_FEATURES = "features";
public static final String OUTPUT_FEATURE = "feature";
public static final String OUTPUT_FIELDS = "fields";
public static final String OUTPUT_LOCALES = "locales";
public static final String OUTPUT_KEEP_PREVIOUS = "keepPrevious";
public static final String OUTPUT_STYLE = "style";
public static final String OUTPUT_IMAGE_SRS = "srs";
public static final String OUTPUT_IMAGE_BBOX = "bbox";
public static final String OUTPUT_IMAGE_ZOOM = "zoom";
public static final String OUTPUT_IMAGE_TYPE = "type";
public static final String OUTPUT_IMAGE_WIDTH = "width";
public static final String OUTPUT_IMAGE_HEIGHT= "height";
public static final String OUTPUT_IMAGE_URL = "url";
public static final String OUTPUT_IMAGE_DATA = "data";
public static final String BROWSER_MSIE = "msie";
public static final String PROCESS_STARTED = "Started";
public static final String PROCESS_ENDED = "Ended";
// process information
TransportService service;
private SessionStore session;
private Layer sessionLayer;
private WFSLayerStore layer;
private WFSLayerPermissionsStore permissions;
private String layerId;
private boolean layerPermission;
private boolean reqSendFeatures;
private boolean reqSendImage;
private boolean sendFeatures;
private boolean sendImage;
private MathTransform transformService;
private MathTransform transformClient;
private String type;
private FeatureCollection<SimpleFeatureType, SimpleFeature> features;
private List<List<Object>> featureValuesList;
private List<String> processedFIDs = new ArrayList<String>();
private Units units = new Units();
// API
private static final String PERMISSIONS_API = "GetLayerIds";
private static final String LAYER_CONFIGURATION_API = "GetWFSLayerConfiguration&id=";
// COOKIE
private static final String ROUTE_COOKIE_NAME = "ROUTEID=";
/**
 * Creates a new runnable job with own Jedis instance.
 *
 * Parameters define client's service (communication channel), session and layer's id.
 * Sends all resources (features and image) that the layer configuration allows.
 *
 * @param service communication channel to the client
 * @param type job type, one of TYPE_NORMAL / TYPE_HIGHLIGHT / TYPE_MAP_CLICK / TYPE_FILTER
 * @param store client session state (location, grid, layers)
 * @param layerId id of the WFS layer this job processes
 */
public WFSMapLayerJob(TransportService service, String type, SessionStore store, String layerId) {
    // delegate with both resource flags enabled
    this(service, type, store, layerId, true, true);
}
/**
 * Creates a new runnable job with own Jedis instance.
 *
 * Parameters define client's service (communication channel), session and layer's id.
 * Also sets which resources will be sent if the layer configuration allows.
 *
 * @param service communication channel to the client
 * @param type job type, one of TYPE_NORMAL / TYPE_HIGHLIGHT / TYPE_MAP_CLICK / TYPE_FILTER
 * @param store client session state (location, grid, layers)
 * @param layerId id of the WFS layer this job processes
 * @param reqSendFeatures whether feature data should be sent (if the layer allows it)
 * @param reqSendImage whether rendered images should be sent (if the layer allows it)
 */
public WFSMapLayerJob(TransportService service, String type, SessionStore store, String layerId,
        boolean reqSendFeatures, boolean reqSendImage) {
    this.service = service;
    this.type = type;
    this.session = store;
    this.layerId = layerId;
    // cache the session's view of this layer for tile checks later
    this.sessionLayer = this.session.getLayers().get(this.layerId);
    // layer configuration and permissions are fetched lazily in run()
    this.layer = null;
    this.permissions = null;
    this.layerPermission = false;
    this.reqSendFeatures = reqSendFeatures;
    this.reqSendImage = reqSendImage;
    // created in run() only when session SRS differs from the layer SRS
    this.transformService = null;
    this.transformClient = null;
}
/**
 * Releases all when removed.
 *
 * NOTE(review): this override only delegates to super.finalize() and holds
 * no cleanup logic of its own; finalizers are discouraged in Java — confirm
 * whether this override can be removed entirely.
 */
@Override
protected void finalize() throws Throwable {
    super.finalize();
}
/**
 * Unique key for this job: &lt;JobClass&gt;_&lt;client&gt;_&lt;layerId&gt;_&lt;type&gt;.
 * Used by the worker to deduplicate/replace queued jobs.
 */
@Override
public String getKey() {
    final StringBuilder key = new StringBuilder(this.getClass().getSimpleName());
    key.append("_").append(this.session.getClient());
    key.append("_").append(this.layerId);
    key.append("_").append(this.type);
    return key.toString();
}
/**
 * Gets service path for local API.
 *
 * Path for Layer configuration and permissions request.
 *
 * @return URL
 */
public String getAPIUrl() {
    // renamed from "session" to avoid shadowing the session field
    String sessionParam = "";
    if (TransportService.SERVICE_URL_SESSION_PARAM != null) {
        sessionParam = ";" + TransportService.SERVICE_URL_SESSION_PARAM + "=" + this.session.getSession();
    }
    return TransportService.SERVICE_URL + TransportService.SERVICE_URL_PATH + sessionParam + TransportService.SERVICE_URL_LIFERAY_PATH;
}
/**
 * Process of the job.
 *
 * Worker calls this when it starts the job. Pipeline:
 *  1. validate the job type and the client's layer permissions,
 *  2. fetch the layer configuration and validate map scale,
 *  3. set up SRS transforms if the session and layer disagree,
 *  4. branch per job type: NORMAL renders tile-by-tile over the session grid;
 *     HIGHLIGHT renders a single overlay image; MAP_CLICK and FILTER send
 *     feature lists over their dedicated channels.
 * goNext() is polled throughout so a superseded job can bail out early.
 */
@Override
public final void run() {
    log.debug(PROCESS_STARTED, getKey());
    // --- validation phase ---
    if(!validateType()) {
        log.warn("Not enough information to continue the task (" + this.type + ")");
        return;
    }
    if(!goNext()) return;
    this.getPermissions();
    if(!this.layerPermission) {
        return;
    }
    if(!goNext()) return;
    this.getLayerConfiguration();
    if(this.layer == null) {
        log.error("Getting layer configuration failed");
        return;
    }
    setResourceSending();
    if(!validateMapScales()) {
        log.debug("Map scale was not valid for layer", this.layerId);
        return;
    }
    // if different SRS, create transforms for geometries
    if(!this.session.getLocation().getSrs().equals(this.layer.getSRSName())) {
        this.transformService = this.session.getLocation().getTransformForService(this.layer.getCrs(), true);
        this.transformClient = this.session.getLocation().getTransformForClient(this.layer.getCrs(), true);
    }
    if(!goNext()) return;
    // --- per-type processing phase ---
    if(this.type.equals(TYPE_NORMAL)) { // tiles for grid
        if(!this.layer.isTileRequest()) { // make single request
            if(!this.normalHandlers(null, true)) {
                return;
            }
        }
        List<List<Double>> grid = this.session.getGrid().getBounds();
        boolean first = true;
        int index = 0;
        for(List<Double> bounds : grid) {
            if(this.layer.isTileRequest()) { // make a request per tile
                if(!this.normalHandlers(bounds, first)) {
                    return;
                }
            }
            if(!goNext()) return;
            if(this.sendImage && this.sessionLayer.isTile(bounds)) { // check if needed tile
                log.debug("tile image handler");
                Double[] bbox = new Double[4];
                for (int i = 0; i < bbox.length; i++) {
                    bbox[i] = bounds.get(i);
                }
                // get from cache
                BufferedImage bufferedImage = getImageCache(bbox);
                boolean fromCache = (bufferedImage != null);
                if(!fromCache) {
                    // cache miss: render the tile now
                    log.debug("tile image drawing");
                    WFSImage image = new WFSImage(this.layer,
                            this.session.getTileSize(),
                            this.session.getLocation(),
                            bounds,
                            this.session.getLayers().get(this.layerId).getStyleName(),
                            this.features);
                    log.debug("tile image drawing2");
                    bufferedImage = image.draw();
                    log.debug("tile image drawing3");
                    if(bufferedImage == null) {
                        log.debug("image parsing failed");
                        this.imageParsingFailed();
                        return;
                    }
                    // set to cache; boundary tiles are cached non-persistently
                    if(!this.session.getGrid().isBoundsOnBoundary(index)) {
                        setImageCache(bufferedImage, this.session.getLayers().get(this.layerId).getStyleName(), bbox, true);
                    } else { // non-persistent cache - for ie
                        setImageCache(bufferedImage, this.session.getLayers().get(this.layerId).getStyleName(), bbox, false);
                    }
                }
                log.debug("image sending");
                String url = createImageURL(this.session.getLayers().get(this.layerId).getStyleName(), bbox);
                this.sendWFSImage(url, bufferedImage, bbox, true);
            }
            if(first) {
                first = false;
                this.session.setKeepPrevious(true); // keep the next tiles
            }
            index++;
        }
    } else if(this.type.equals(TYPE_HIGHLIGHT)) {
        // highlight: one request over the whole map, rendered as one image
        if(!this.requestHandler(null)) {
            return;
        }
        this.featuresHandler();
        if(!goNext()) return;
        // IMAGE HANDLING
        if(this.sendImage) {
            Location location = this.session.getLocation();
            WFSImage image = new WFSImage(this.layer,
                    this.session.getMapSize(),
                    this.session.getLocation(),
                    TYPE_HIGHLIGHT,
                    this.features);
            BufferedImage bufferedImage = image.draw();
            if(bufferedImage == null) {
                this.imageParsingFailed();
                return;
            }
            Double[] bbox = location.getBboxArray();
            // cache (non-persistant)
            setImageCache(bufferedImage, TYPE_HIGHLIGHT, bbox, false);
            String url = createImageURL(TYPE_HIGHLIGHT, bbox);
            this.sendWFSImage(url, bufferedImage, bbox, false);
        }
    } else if(this.type.equals(TYPE_MAP_CLICK)) {
        // map click: collect matching features, reply on the map-click channel
        if(!this.requestHandler(null)) {
            return;
        }
        this.featuresHandler();
        if(!goNext()) return;
        if(this.sendFeatures) {
            this.sendWFSFeatures(this.featureValuesList, TransportService.CHANNEL_MAP_CLICK);
        }
    } else if(this.type.equals(TYPE_FILTER)) {
        // filter: collect matching features, reply on the filter channel
        if(!this.requestHandler(null)) {
            return;
        }
        this.featuresHandler();
        if(!goNext()) return;
        if(this.sendFeatures) {
            this.sendWFSFeatures(this.featureValuesList, TransportService.CHANNEL_FILTER);
        }
    }
    log.debug(PROCESS_ENDED, getKey());
}
/**
 * Wrapper for normal type job's handlers: request -> (properties on first
 * call) -> features. Polls goNext() between steps so a superseded job stops.
 *
 * @param bounds tile bounds, or null for a single whole-area request
 * @param first true on the first tile; properties are only sent once
 * @return false if the job was cancelled or a handler failed
 */
private boolean normalHandlers(List<Double> bounds, boolean first) {
    if(!this.requestHandler(bounds)) {
        log.debug("Cancelled by request handler");
        return false;
    }
    if(first) {
        // field names/locales only need to go out once per job
        propertiesHandler();
        if(!goNext()) return false;
    }
    if(!goNext()) return false;
    this.featuresHandler();
    if(!goNext()) return false;
    return true;
}
/**
 * Makes the WFS request and parses the response into this.features.
 * On failure (request, parse) or on empty/max results, notifies the client
 * on the appropriate channel and returns false to stop the job.
 *
 * @param bounds tile bounds, or null for a whole-area request
 * @return <code>true</code> if thread should continue; <code>false</code>
 *         otherwise.
 */
private boolean requestHandler(List<Double> bounds) {
    BufferedReader response = null;
    if(layer.getTemplateType() == null) { // default request type
        String payload = WFSCommunicator.createRequestPayload(this.layer, this.session, bounds, this.transformService);
        log.debug("Request data\n", this.layer.getURL(), "\n", payload);
        if(!goNext()) return false;
        response = HttpHelper.postRequestReader(this.layer.getURL(), "", payload, this.layer.getUsername(), this.layer.getPassword());
    } else {
        // only the default (null) template type is supported here
        log.warn("Failed to make a request because of undefined layer type", layer.getTemplateType());
    }
    Map<String, Object> output = new HashMap<String, Object>();
    // request failed -> tell client once on the error channel
    if(response == null) {
        log.warn("Request failed for layer", this.layerId);
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_ONCE, true);
        output.put(OUTPUT_MESSAGE, "wfs_request_failed");
        this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
        log.debug(PROCESS_ENDED, getKey());
        return false;
    }
    if(!goNext()) return false;
    // parse response
    if(this.layer.getFeatureType().size() > 0) { // custom type => custom parsing
        WFSParser parser = new WFSParser(response, this.layer);
        this.features = parser.parse();
    } else {
        this.features = WFSCommunicator.parseSimpleFeatures(response, this.layer);
    }
    // parsing failed -> tell client once on the error channel
    if(this.features == null) {
        log.warn("Parsing failed for layer", this.layerId);
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_ONCE, true);
        output.put(OUTPUT_MESSAGE, "features_parsing_failed");
        this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
        log.debug(PROCESS_ENDED, getKey());
        return false;
    }
    // 0 features found - send size on the type-specific channel
    if(this.type.equals(TYPE_MAP_CLICK) && this.features.size() == 0) {
        log.debug("Empty result for map click", this.layerId);
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_FEATURES, "empty");
        output.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
        this.service.send(session.getClient(), TransportService.CHANNEL_MAP_CLICK, output);
        log.debug(PROCESS_ENDED, getKey());
        return false;
    } else if(this.type.equals(TYPE_FILTER) && this.features.size() == 0) {
        log.debug("Empty result for filter", this.layerId);
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_FEATURES, "empty");
        this.service.send(session.getClient(), TransportService.CHANNEL_FILTER, output);
        log.debug(PROCESS_ENDED, getKey());
        return false;
    } else {
        if(this.features.size() == 0) {
            log.debug("Empty result", this.layerId);
            output.put(OUTPUT_LAYER_ID, this.layerId);
            output.put(OUTPUT_FEATURE, "empty");
            this.service.send(session.getClient(), TransportService.CHANNEL_FEATURE, output);
            log.debug(PROCESS_ENDED, getKey());
            return false;
        } else if(this.features.size() == layer.getMaxFeatures()) {
            // hit the layer's max-feature cap: warn the client but continue
            log.debug("Max feature result", this.layerId);
            output.put(OUTPUT_LAYER_ID, this.layerId);
            output.put(OUTPUT_FEATURE, "max");
            this.service.send(session.getClient(), TransportService.CHANNEL_FEATURE, output);
        }
    }
    return true;
}
/**
 * Sends the field names (and their locale labels) to the client: either the
 * layer-configured subset for the session language, or — when none are
 * configured — every non-geometry property of the first parsed feature.
 * No-op when feature sending is disabled for this job.
 */
private void propertiesHandler() {
    if(!this.sendFeatures) {
        return;
    }
    List<String> selectedProperties = new ArrayList<String>();
    List<String> layerSelectedProperties = layer.getSelectedFeatureParams(session.getLanguage());
    // selected props
    if(layerSelectedProperties != null && layerSelectedProperties.size() != 0) {
        selectedProperties.addAll(this.layer.getSelectedFeatureParams(this.session.getLanguage()));
    } else { // all properties, derived from the first feature's schema
        // NOTE(review): features() opens a new iterator that is never
        // closed — GeoTools recommends closing FeatureIterators; verify.
        for(Property prop : this.features.features().next().getProperties()) {
            String field = prop.getName().toString();
            if(!this.layer.getGMLGeometryProperty().equals(field)) { // don't add geometry
                selectedProperties.add(field);
            }
        }
    }
    this.sendWFSProperties(selectedProperties, this.layer.getFeatureParamsLocales(this.session.getLanguage()));
}
/**
 * Walks the parsed features, deduplicates by feature ID (across tiles),
 * and builds per-feature value rows: [fid, attr..., centerX, centerY].
 * For TYPE_NORMAL rows are streamed to the client immediately; for other
 * types they are collected into featureValuesList for a single send.
 */
private void featuresHandler() {
    // send feature info
    FeatureIterator<SimpleFeature> featuresIter = this.features.features();
    this.featureValuesList = new ArrayList<List<Object>>();
    while(goNext(featuresIter.hasNext())) {
        SimpleFeature feature = featuresIter.next();
        List<Object> values = new ArrayList<Object>();
        String fid = feature.getIdentifier().getID();
        // skip features already handled by an earlier tile's response
        if (!this.processedFIDs.contains(fid)) {
            // __fid value
            values.add(fid);
            this.processedFIDs.add(fid);
            // get feature geometry (transform if needed) and get geometry center
            Geometry geometry = WFSParser.getFeatureGeometry(feature, this.layer.getGMLGeometryProperty(), this.transformClient);
            // send values
            if(this.sendFeatures) {
                Point centerPoint = WFSParser.getGeometryCenter(geometry);
                // selected values
                List<String> selectedProperties = layer.getSelectedFeatureParams(session.getLanguage());
                if(selectedProperties != null && selectedProperties.size() != 0) {
                    for(String attr : selectedProperties) {
                        values.add(feature.getAttribute(attr));
                    }
                } else { // all values
                    // NOTE(review): iterates the FIRST feature's properties
                    // (via a fresh, never-closed iterator) to get the field
                    // list, then reads attributes from the current feature —
                    // presumably all features share one schema; confirm.
                    for(Property prop : this.features.features().next().getProperties()) {
                        String field = prop.getName().toString();
                        if(!this.layer.getGMLGeometryProperty().equals(field)) { // don't add geometry
                            values.add(feature.getAttribute(field));
                        }
                    }
                }
                // center position (must be in properties also)
                if(centerPoint != null) {
                    values.add(centerPoint.getX());
                    values.add(centerPoint.getY());
                } else {
                    values.add(null);
                    values.add(null);
                }
                WFSParser.parseValuesForJSON(values);
                if(this.type.equals(TYPE_NORMAL)) {
                    this.sendWFSFeature(values);
                } else {
                    this.featureValuesList.add(values);
                }
            }
        } else {
            log.warn("Found duplicate feature ID", fid);
        }
    }
}
/**
 * Gets image from cache
 *
 * Looks up a previously drawn tile keyed by layer, style, SRS, bbox and zoom.
 *
 * @param bbox bounding box of the requested image
 * @return cached image, or null when not cached
 */
private BufferedImage getImageCache(Double[] bbox) {
    Location location = this.session.getLocation();
    String style = this.session.getLayers().get(this.layerId).getStyleName();
    return WFSImage.getCache(this.layerId, style, location.getSrs(), bbox, location.getZoom());
}
/**
 * Sets image to cache
 *
 * Stores a drawn image keyed by layer, style, SRS, bbox and zoom.
 *
 * @param bufferedImage image to store
 * @param style style name the image was drawn with
 * @param bbox bounding box of the image
 * @param persistent true to keep the entry in the persistent cache
 */
private void setImageCache(BufferedImage bufferedImage, String style, Double[] bbox, boolean persistent) {
    Location location = this.session.getLocation();
    WFSImage.setCache(bufferedImage, this.layerId, style, location.getSrs(), bbox, location.getZoom(), persistent);
}
/**
 * Send image parsing error
 *
 * Logs the failure and notifies the client once on the error channel
 * with the "wfs_image_parsing_failed" message key.
 */
private void imageParsingFailed() {
    log.error("Image parsing failed");
    Map<String, Object> output = new HashMap<String, Object>();
    output.put(OUTPUT_LAYER_ID, this.layerId);
    output.put(OUTPUT_ONCE, true); // client should show this error only once
    output.put(OUTPUT_MESSAGE, "wfs_image_parsing_failed");
    this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
}
/**
 * Checks if enough information for running the task type
 *
 * Highlight jobs need highlighted feature IDs, map-click jobs a click
 * position, filter jobs a filter; normal jobs need nothing extra.
 *
 * @return <code>true</code> if enough information for type; <code>false</code>
 *         otherwise.
 */
private boolean validateType() {
    if(this.type.equals(TYPE_HIGHLIGHT)) {
        return this.sessionLayer.getHighlightedFeatureIds() != null
                && this.sessionLayer.getHighlightedFeatureIds().size() > 0;
    }
    if(this.type.equals(TYPE_MAP_CLICK)) {
        return session.getMapClick() != null;
    }
    if(this.type.equals(TYPE_FILTER)) {
        return session.getFilter() != null;
    }
    return this.type.equals(TYPE_NORMAL);
}
/**
 * Gets layer permissions (uses cache)
 *
 * Reads the permissions JSON from the cache or, on a cache miss, from the
 * local permissions API (routed with a session cookie when a route is set).
 * Parses the JSON into this.permissions and sets this.layerPermission for
 * the requested layer; when access is denied the client is notified once
 * on the error channel.
 */
private void getPermissions() {
    String json = WFSLayerPermissionsStore.getCache(this.session.getSession());
    boolean fromCache = (json != null);
    if(!fromCache) {
        // NOTE(review): warn-level log of a plain request URL — presumably leftover debugging; consider debug level
        log.warn(getAPIUrl() + PERMISSIONS_API);
        String cookies = null;
        if(this.session.getRoute() != null && !this.session.getRoute().equals("")) {
            cookies = ROUTE_COOKIE_NAME + this.session.getRoute();
        }
        json = HttpHelper.getRequest(getAPIUrl() + PERMISSIONS_API, cookies);
        if(json == null)
            return; // request failed; layerPermission stays false and no error is sent from here
    }
    try {
        this.permissions = WFSLayerPermissionsStore.setJSON(json);
        this.layerPermission = this.permissions.isPermission(this.layerId);
    } catch (IOException e) {
        // parse failure leaves layerPermission false, so the no-permission branch below fires
        log.error(e, "JSON parsing failed for WFSLayerPermissionsStore \n" + json);
    }
    // no permissions
    if(!this.layerPermission) {
        log.warn("Session (" + this.session.getSession() + ") has no permissions for getting the layer (" + this.layerId + ")");
        Map<String, Object> output = new HashMap<String, Object>();
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_ONCE, true);
        output.put(OUTPUT_MESSAGE, "wfs_no_permissions");
        this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
    }
}
/**
 * Gets layer configuration (uses cache)
 *
 * Reads the layer's WFS configuration JSON from the cache or, on a cache
 * miss, from the local configuration API (routed with a session cookie when
 * a route is set). On success this.layer is populated; on failure the client
 * is notified once on the error channel and this.layer stays null.
 */
private void getLayerConfiguration() {
    String json = WFSLayerStore.getCache(this.layerId);
    boolean fromCache = (json != null);
    if(!fromCache) {
        // NOTE(review): warn-level log of a plain request URL — presumably leftover debugging; consider debug level
        log.warn(getAPIUrl() + LAYER_CONFIGURATION_API + this.layerId);
        String cookies = null;
        if(this.session.getRoute() != null && !this.session.getRoute().equals("")) {
            cookies = ROUTE_COOKIE_NAME + this.session.getRoute();
        }
        json = HttpHelper.getRequest(getAPIUrl() + LAYER_CONFIGURATION_API + this.layerId, cookies);
        if(json == null)
            return; // request failed; this.layer stays null and the caller reports the failure
    }
    try {
        this.layer = WFSLayerStore.setJSON(json);
    } catch (Exception e) {
        // parse failure leaves this.layer null, so the error branch below fires
        log.error(e, "JSON parsing failed for WFSLayerStore \n" + json);
    }
    // no layer
    if(this.layer == null) {
        log.warn("Layer (" + this.layerId + ") configurations couldn't be fetched");
        Map<String, Object> output = new HashMap<String, Object>();
        output.put(OUTPUT_LAYER_ID, this.layerId);
        output.put(OUTPUT_ONCE, true);
        output.put(OUTPUT_MESSAGE, "wfs_configuring_layer_failed");
        this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
    }
}
/**
 * Sets which resources will be sent (features, image)
 *
 * A resource is sent only when both the layer configuration allows it and
 * the request asked for it — the request can narrow down the layer's
 * defaults but never widen them.
 */
private void setResourceSending() {
    boolean layerAllowsFeatures = layer.isGetFeatureInfo();
    boolean layerAllowsImage = layer.isGetMapTiles();
    this.sendFeatures = layerAllowsFeatures && this.reqSendFeatures;
    this.sendImage = layerAllowsImage && this.reqSendImage;
}
/**
 * Checks if the map scale is valid
 *
 * Converts the layer's scale limits into the map's SRS and checks that the
 * current map scale falls within them.
 *
 * @return <code>true</code> if map scale is valid; <code>false</code>
 *         otherwise.
 */
private boolean validateMapScales() {
    double scale = this.session.getMapScales().get((int)this.session.getLocation().getZoom());
    double minScaleInMapSrs = units.getScaleInSrs(layer.getMinScale(), layer.getSRSName(), session.getLocation().getSrs());
    double maxScaleInMapSrs = units.getScaleInSrs(layer.getMaxScale(), layer.getSRSName(), session.getLocation().getSrs());
    log.debug("Scale in:", layer.getSRSName(), scale, "[", layer.getMaxScale(), ",", layer.getMinScale(), "]");
    log.debug("Scale in:", session.getLocation().getSrs(), scale, "[", maxScaleInMapSrs, ",", minScaleInMapSrs, "]");
    // min scale is the numerically biggest value
    return minScaleInMapSrs >= scale && maxScaleInMapSrs <= scale;
}
/**
 * Creates image url
 *
 * Builds the query-string URL the client can use to (re)fetch this image.
 *
 * @param style style name used when drawing the image
 * @param bbox bounding box of the image (4 values)
 * @return relative image URL with layer, style, SRS, bbox and zoom parameters
 */
private String createImageURL(String style, Double[] bbox) {
    StringBuilder url = new StringBuilder("/image");
    url.append('?').append(OUTPUT_LAYER_ID).append('=').append(this.layerId);
    url.append('&').append(OUTPUT_STYLE).append('=').append(style);
    url.append('&').append(OUTPUT_IMAGE_SRS).append('=').append(this.session.getLocation().getSrs());
    url.append('&').append(OUTPUT_IMAGE_BBOX).append('=').append(bbox[0]);
    url.append(',').append(bbox[1]).append(',').append(bbox[2]).append(',').append(bbox[3]);
    url.append('&').append(OUTPUT_IMAGE_ZOOM).append('=').append(this.session.getLocation().getZoom());
    return url.toString();
}
/**
 * Sends properties (fields and locales)
 *
 * Prepends the synthetic "__fid" field and appends "__centerX"/"__centerY"
 * before sending the field names and locale labels to the properties
 * channel. Note that both argument lists are mutated in place.
 *
 * @param fields property names to send (mutated: synthetic fields added)
 * @param locales locale labels for the fields; may be null
 */
private void sendWFSProperties(List<String> fields, List<String> locales) {
    if(fields == null || fields.size() == 0) {
        log.warn("Failed to send properties");
        return;
    }
    fields.add(0, "__fid");
    fields.add("__centerX");
    fields.add("__centerY");
    if(locales != null) {
        locales.add(0, "ID");
        locales.add("x");
        locales.add("y");
    } else {
        // NOTE(review): with null locales an empty list is sent although fields
        // is non-empty — presumably the client tolerates the size mismatch; verify
        locales = new ArrayList<String>();
    }
    Map<String, Object> output = new HashMap<String, Object>();
    output.put(OUTPUT_LAYER_ID, this.layerId);
    output.put(OUTPUT_FIELDS, fields);
    output.put(OUTPUT_LOCALES, locales);
    this.service.send(this.session.getClient(), TransportService.CHANNEL_PROPERTIES, output);
}
/**
 * Sends one feature
 *
 * Sends a single feature's value list on the feature channel; skips
 * silently (with a warning) when there is nothing to send.
 *
 * @param values feature values, first entry being the feature id
 */
private void sendWFSFeature(List<Object> values) {
    if(values == null || values.isEmpty()) {
        log.warn("Failed to send feature");
        return;
    }
    Map<String, Object> output = new HashMap<String, Object>();
    output.put(OUTPUT_LAYER_ID, this.layerId);
    output.put(OUTPUT_FEATURE, values);
    this.service.send(this.session.getClient(), TransportService.CHANNEL_FEATURE, output);
}
/**
 * Sends list of features
 *
 * Sends a batch of feature value lists on the given channel; map-click
 * responses additionally tell the client whether to keep earlier results.
 *
 * @param features feature value lists to send
 * @param channel transport channel to send on
 */
private void sendWFSFeatures(List<List<Object>> features, String channel) {
    if(features == null || features.isEmpty()) {
        log.warn("Failed to send features");
        return;
    }
    Map<String, Object> output = new HashMap<String, Object>();
    output.put(OUTPUT_LAYER_ID, this.layerId);
    output.put(OUTPUT_FEATURES, features);
    if(channel.equals(TransportService.CHANNEL_MAP_CLICK)) {
        output.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
    }
    this.service.send(this.session.getClient(), channel, output);
}
/**
 * Sends image as an URL to IE 8 & 9, base64 data for others
 *
 * Sends the image metadata (SRS, bbox, zoom, type, size, URL) on the image
 * channel; the base64 image data is included only when the client browser
 * is known to handle it.
 *
 * @param url image URL the client can fall back to
 * @param bufferedImage drawn image
 * @param bbox bounding box of the image
 * @param isTiled true for a grid tile (tile size), false for a full map image (map size)
 */
private void sendWFSImage(String url, BufferedImage bufferedImage, Double[] bbox, boolean isTiled) {
    if(bufferedImage == null) {
        log.warn("Failed to send image");
        return;
    }
    Map<String, Object> output = new HashMap<String, Object>();
    output.put(OUTPUT_LAYER_ID, this.layerId);
    Location location = this.session.getLocation();
    // tile size for grid tiles, full map size otherwise
    Tile tileSize = null;
    if(isTiled) {
        tileSize = this.session.getTileSize();
    } else {
        tileSize = this.session.getMapSize();
    }
    output.put(OUTPUT_IMAGE_SRS, location.getSrs());
    output.put(OUTPUT_IMAGE_BBOX, bbox);
    output.put(OUTPUT_IMAGE_ZOOM, location.getZoom());
    output.put(OUTPUT_IMAGE_TYPE, this.type); // "normal" | "highlight"
    output.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
    output.put(OUTPUT_IMAGE_WIDTH, tileSize.getWidth());
    output.put(OUTPUT_IMAGE_HEIGHT, tileSize.getHeight());
    output.put(OUTPUT_IMAGE_URL, url);
    byte[] byteImage = WFSImage.imageToBytes(bufferedImage);
    String base64Image = WFSImage.bytesToBase64(byteImage);
    // approximate size in KB (assumes 2 bytes per char in a Java String)
    int base64Size = (base64Image.length()*2)/1024;
    // IE6 & IE7 doesn't support base64, max size in base64 for IE8 is 32KB
    if(!(this.session.getBrowser().equals(BROWSER_MSIE) && this.session.getBrowserVersion() < 8 ||
            this.session.getBrowser().equals(BROWSER_MSIE) && this.session.getBrowserVersion() == 8 &&
            base64Size >= 32)) {
        output.put(OUTPUT_IMAGE_DATA, base64Image);
    }
    this.service.send(this.session.getClient(), TransportService.CHANNEL_IMAGE, output);
}
} | transport/src/main/java/fi/nls/oskari/work/WFSMapLayerJob.java | package fi.nls.oskari.work;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.pojo.*;
import fi.nls.oskari.transport.TransportService;
import fi.nls.oskari.utils.HttpHelper;
import fi.nls.oskari.wfs.WFSCommunicator;
import fi.nls.oskari.wfs.WFSImage;
import fi.nls.oskari.wfs.WFSParser;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.opengis.feature.Property;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
import org.opengis.referencing.operation.MathTransform;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Job for WFS Map Layer
*/
public class WFSMapLayerJob extends Job {
private static final Logger log = LogFactory.getLogger(WFSMapLayerJob.class);
public static final String TYPE_NORMAL = "normal";
public static final String TYPE_HIGHLIGHT = "highlight";
public static final String TYPE_MAP_CLICK = "mapClick";
public static final String TYPE_FILTER = "filter";
public static final String OUTPUT_LAYER_ID = "layerId";
public static final String OUTPUT_ONCE = "once";
public static final String OUTPUT_MESSAGE = "message";
public static final String OUTPUT_FEATURES = "features";
public static final String OUTPUT_FEATURE = "feature";
public static final String OUTPUT_FIELDS = "fields";
public static final String OUTPUT_LOCALES = "locales";
public static final String OUTPUT_KEEP_PREVIOUS = "keepPrevious";
public static final String OUTPUT_STYLE = "style";
public static final String OUTPUT_IMAGE_SRS = "srs";
public static final String OUTPUT_IMAGE_BBOX = "bbox";
public static final String OUTPUT_IMAGE_ZOOM = "zoom";
public static final String OUTPUT_IMAGE_TYPE = "type";
public static final String OUTPUT_IMAGE_WIDTH = "width";
public static final String OUTPUT_IMAGE_HEIGHT= "height";
public static final String OUTPUT_IMAGE_URL = "url";
public static final String OUTPUT_IMAGE_DATA = "data";
public static final String BROWSER_MSIE = "msie";
public static final String PROCESS_STARTED = "Started";
public static final String PROCESS_ENDED = "Ended";
// process information
TransportService service;
private SessionStore session;
private Layer sessionLayer;
private WFSLayerStore layer;
private WFSLayerPermissionsStore permissions;
private String layerId;
private boolean layerPermission;
private boolean reqSendFeatures;
private boolean reqSendImage;
private boolean sendFeatures;
private boolean sendImage;
private MathTransform transformService;
private MathTransform transformClient;
private String type;
private FeatureCollection<SimpleFeatureType, SimpleFeature> features;
private List<List<Object>> featureValuesList;
private List<String> processedFIDs = new ArrayList<String>();
private Units units = new Units();
// API
private static final String PERMISSIONS_API = "GetLayerIds";
private static final String LAYER_CONFIGURATION_API = "GetWFSLayerConfiguration&id=";
// COOKIE
private static final String ROUTE_COOKIE_NAME = "ROUTEID=";
/**
* Creates a new runnable job with own Jedis instance
*
* Parameters define client's service (communication channel), session and layer's id.
* Sends all resources that the layer configuration allows.
*
* @param service
* @param store
* @param layerId
*/
public WFSMapLayerJob(TransportService service, String type, SessionStore store, String layerId) {
this(service, type, store, layerId, true, true);
}
/**
* Creates a new runnable job with own Jedis instance
*
* Parameters define client's service (communication channel), session and layer's id.
* Also sets resources that will be sent if the layer configuration allows.
*
* @param service
* @param store
* @param layerId
* @param reqSendFeatures
* @param reqSendImage
*/
public WFSMapLayerJob(TransportService service, String type, SessionStore store, String layerId,
boolean reqSendFeatures, boolean reqSendImage) {
this.service = service;
this.type = type;
this.session = store;
this.layerId = layerId;
this.sessionLayer = this.session.getLayers().get(this.layerId);
this.layer = null;
this.permissions = null;
this.layerPermission = false;
this.reqSendFeatures = reqSendFeatures;
this.reqSendImage = reqSendImage;
this.transformService = null;
this.transformClient = null;
}
/**
* Releases all when removed
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
}
/**
* Unique key definition
*/
@Override
public String getKey() {
return this.getClass().getSimpleName() + "_" + this.session.getClient() + "_" + this.layerId + "_" + this.type;
}
/**
* Gets service path for local API
*
* Path for Layer configuration and permissions request
*
* @return URL
*/
public String getAPIUrl() {
String session = "";
if(TransportService.SERVICE_URL_SESSION_PARAM != null) {
session = ";" + TransportService.SERVICE_URL_SESSION_PARAM + "=" + this.session.getSession();
}
return TransportService.SERVICE_URL + TransportService.SERVICE_URL_PATH + session + TransportService.SERVICE_URL_LIFERAY_PATH;
}
/**
* Process of the job
*
* Worker calls this when starts the job.
*
*/
@Override
public final void run() {
log.debug(PROCESS_STARTED, getKey());
if(!validateType()) {
log.warn("Not enough information to continue the task (" + this.type + ")");
return;
}
if(!goNext()) return;
this.getPermissions();
if(!this.layerPermission) {
return;
}
if(!goNext()) return;
this.getLayerConfiguration();
if(this.layer == null) {
log.error("Getting layer configuration failed");
return;
}
setResourceSending();
if(!validateMapScales()) {
log.debug("Map scale was not valid for layer", this.layerId);
return;
}
// if different SRS, create transforms for geometries
if(!this.session.getLocation().getSrs().equals(this.layer.getSRSName())) {
this.transformService = this.session.getLocation().getTransformForService(this.layer.getCrs(), true);
this.transformClient = this.session.getLocation().getTransformForClient(this.layer.getCrs(), true);
}
if(!goNext()) return;
if(this.type.equals(TYPE_NORMAL)) { // tiles for grid
if(!this.layer.isTileRequest()) { // make single request
if(!this.normalHandlers(null, true)) {
return;
}
}
List<List<Double>> grid = this.session.getGrid().getBounds();
boolean first = true;
int index = 0;
for(List<Double> bounds : grid) {
if(this.layer.isTileRequest()) { // make a request per tile
if(!this.normalHandlers(bounds, first)) {
return;
}
}
if(!goNext()) return;
if(this.sendImage && this.sessionLayer.isTile(bounds)) { // check if needed tile
Double[] bbox = new Double[4];
for (int i = 0; i < bbox.length; i++) {
bbox[i] = bounds.get(i);
}
// get from cache
BufferedImage bufferedImage = getImageCache(bbox);
boolean fromCache = (bufferedImage != null);
if(!fromCache) {
WFSImage image = new WFSImage(this.layer,
this.session.getTileSize(),
this.session.getLocation(),
bounds,
this.session.getLayers().get(this.layerId).getStyleName(),
this.features);
bufferedImage = image.draw();
if(bufferedImage == null) {
this.imageParsingFailed();
return;
}
// set to cache
if(!this.session.getGrid().isBoundsOnBoundary(index)) {
setImageCache(bufferedImage, this.session.getLayers().get(this.layerId).getStyleName(), bbox, true);
} else { // non-persistent cache - for ie
setImageCache(bufferedImage, this.session.getLayers().get(this.layerId).getStyleName(), bbox, false);
}
}
String url = createImageURL(this.session.getLayers().get(this.layerId).getStyleName(), bbox);
this.sendWFSImage(url, bufferedImage, bbox, true);
}
if(first) {
first = false;
this.session.setKeepPrevious(true); // keep the next tiles
}
index++;
}
} else if(this.type.equals(TYPE_HIGHLIGHT)) {
if(!this.requestHandler(null)) {
return;
}
this.featuresHandler();
if(!goNext()) return;
// IMAGE HANDLING
if(this.sendImage) {
Location location = this.session.getLocation();
WFSImage image = new WFSImage(this.layer,
this.session.getMapSize(),
this.session.getLocation(),
TYPE_HIGHLIGHT,
this.features);
BufferedImage bufferedImage = image.draw();
if(bufferedImage == null) {
this.imageParsingFailed();
return;
}
Double[] bbox = location.getBboxArray();
// cache (non-persistant)
setImageCache(bufferedImage, TYPE_HIGHLIGHT, bbox, false);
String url = createImageURL(TYPE_HIGHLIGHT, bbox);
this.sendWFSImage(url, bufferedImage, bbox, false);
}
} else if(this.type.equals(TYPE_MAP_CLICK)) {
if(!this.requestHandler(null)) {
return;
}
this.featuresHandler();
if(!goNext()) return;
if(this.sendFeatures) {
this.sendWFSFeatures(this.featureValuesList, TransportService.CHANNEL_MAP_CLICK);
}
} else if(this.type.equals(TYPE_FILTER)) {
if(!this.requestHandler(null)) {
return;
}
this.featuresHandler();
if(!goNext()) return;
if(this.sendFeatures) {
this.sendWFSFeatures(this.featureValuesList, TransportService.CHANNEL_FILTER);
}
}
log.debug(PROCESS_ENDED, getKey());
}
/**
* Wrapper for normal type job's handlers
*/
private boolean normalHandlers(List<Double> bounds, boolean first) {
if(!this.requestHandler(bounds)) {
log.debug("Cancelled by request handler");
return false;
}
if(first) {
propertiesHandler();
if(!goNext()) return false;
}
if(!goNext()) return false;
this.featuresHandler();
if(!goNext()) return false;
return true;
}
/**
* Makes request and parses response to features
*
* @param bounds
* @return <code>true</code> if thread should continue; <code>false</code>
* otherwise.
*/
private boolean requestHandler(List<Double> bounds) {
BufferedReader response = null;
if(layer.getTemplateType() == null) { // default
String payload = WFSCommunicator.createRequestPayload(this.layer, this.session, bounds, this.transformService);
log.debug("Request data\n", this.layer.getURL(), "\n", payload);
if(!goNext()) return false;
response = HttpHelper.postRequestReader(this.layer.getURL(), "", payload, this.layer.getUsername(), this.layer.getPassword());
} else {
log.warn("Failed to make a request because of undefined layer type", layer.getTemplateType());
}
Map<String, Object> output = new HashMap<String, Object>();
// request failed
if(response == null) {
log.warn("Request failed for layer", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_ONCE, true);
output.put(OUTPUT_MESSAGE, "wfs_request_failed");
this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
log.debug(PROCESS_ENDED, getKey());
return false;
}
if(!goNext()) return false;
// parse response
if(this.layer.getFeatureType().size() > 0) { // custom type => custom parsing
WFSParser parser = new WFSParser(response, this.layer);
this.features = parser.parse();
} else {
this.features = WFSCommunicator.parseSimpleFeatures(response, this.layer);
}
// parsing failed
if(this.features == null) {
log.warn("Parsing failed for layer", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_ONCE, true);
output.put(OUTPUT_MESSAGE, "features_parsing_failed");
this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
log.debug(PROCESS_ENDED, getKey());
return false;
}
// 0 features found - send size
if(this.type.equals(TYPE_MAP_CLICK) && this.features.size() == 0) {
log.debug("Empty result for map click", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FEATURES, "empty");
output.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
this.service.send(session.getClient(), TransportService.CHANNEL_MAP_CLICK, output);
log.debug(PROCESS_ENDED, getKey());
return false;
} else if(this.type.equals(TYPE_FILTER) && this.features.size() == 0) {
log.debug("Empty result for filter", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FEATURES, "empty");
this.service.send(session.getClient(), TransportService.CHANNEL_FILTER, output);
log.debug(PROCESS_ENDED, getKey());
return false;
} else {
if(this.features.size() == 0) {
log.debug("Empty result", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FEATURE, "empty");
this.service.send(session.getClient(), TransportService.CHANNEL_FEATURE, output);
log.debug(PROCESS_ENDED, getKey());
return false;
} else if(this.features.size() == layer.getMaxFeatures()) {
log.debug("Max feature result", this.layerId);
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FEATURE, "max");
this.service.send(session.getClient(), TransportService.CHANNEL_FEATURE, output);
}
}
return true;
}
/**
* Parses features properties and sends to appropriate channels
*/
private void propertiesHandler() {
if(!this.sendFeatures) {
return;
}
List<String> selectedProperties = new ArrayList<String>();
List<String> layerSelectedProperties = layer.getSelectedFeatureParams(session.getLanguage());
// selected props
if(layerSelectedProperties != null && layerSelectedProperties.size() != 0) {
selectedProperties.addAll(this.layer.getSelectedFeatureParams(this.session.getLanguage()));
} else { // all properties
for(Property prop : this.features.features().next().getProperties()) {
String field = prop.getName().toString();
if(!this.layer.getGMLGeometryProperty().equals(field)) { // don't add geometry
selectedProperties.add(field);
}
}
}
this.sendWFSProperties(selectedProperties, this.layer.getFeatureParamsLocales(this.session.getLanguage()));
}
/**
* Parses features values
*/
private void featuresHandler() {
// send feature info
FeatureIterator<SimpleFeature> featuresIter = this.features.features();
this.featureValuesList = new ArrayList<List<Object>>();
while(goNext(featuresIter.hasNext())) {
SimpleFeature feature = featuresIter.next();
List<Object> values = new ArrayList<Object>();
String fid = feature.getIdentifier().getID();
if (!this.processedFIDs.contains(fid)) {
// __fid value
values.add(fid);
this.processedFIDs.add(fid);
// get feature geometry (transform if needed) and get geometry center
Geometry geometry = WFSParser.getFeatureGeometry(feature, this.layer.getGMLGeometryProperty(), this.transformClient);
// send values
if(this.sendFeatures) {
Point centerPoint = WFSParser.getGeometryCenter(geometry);
// selected values
List<String> selectedProperties = layer.getSelectedFeatureParams(session.getLanguage());
if(selectedProperties != null && selectedProperties.size() != 0) {
for(String attr : selectedProperties) {
values.add(feature.getAttribute(attr));
}
} else { // all values
for(Property prop : this.features.features().next().getProperties()) {
String field = prop.getName().toString();
if(!this.layer.getGMLGeometryProperty().equals(field)) { // don't add geometry
values.add(feature.getAttribute(field));
}
}
}
// center position (must be in properties also)
if(centerPoint != null) {
values.add(centerPoint.getX());
values.add(centerPoint.getY());
} else {
values.add(null);
values.add(null);
}
WFSParser.parseValuesForJSON(values);
if(this.type.equals(TYPE_NORMAL)) {
this.sendWFSFeature(values);
} else {
this.featureValuesList.add(values);
}
}
} else {
log.warn("Found duplicate feature ID", fid);
}
}
}
/**
* Gets image from cache
*
* @param bbox
*/
private BufferedImage getImageCache(Double[] bbox) {
return WFSImage.getCache(
this.layerId,
this.session.getLayers().get(this.layerId).getStyleName(),
this.session.getLocation().getSrs(),
bbox,
this.session.getLocation().getZoom()
);
}
/**
* Sets image to cache
*
* @param bufferedImage
* @param bbox
* @param persistent
*/
private void setImageCache(BufferedImage bufferedImage, String style, Double[] bbox, boolean persistent) {
WFSImage.setCache(
bufferedImage,
this.layerId,
style,
this.session.getLocation().getSrs(),
bbox,
this.session.getLocation().getZoom(),
persistent
);
}
/**
* Send image parsing error
*/
private void imageParsingFailed() {
log.error("Image parsing failed");
Map<String, Object> output = new HashMap<String, Object>();
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_ONCE, true);
output.put(OUTPUT_MESSAGE, "wfs_image_parsing_failed");
this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
}
/**
* Checks if enough information for running the task type
*
* @return <code>true</code> if enough information for type; <code>false</code>
* otherwise.
*/
private boolean validateType() {
if(this.type.equals(TYPE_HIGHLIGHT)) {
if(this.sessionLayer.getHighlightedFeatureIds() != null &&
this.sessionLayer.getHighlightedFeatureIds().size() > 0) {
return true;
}
} else if(this.type.equals(TYPE_MAP_CLICK)) {
if(session.getMapClick() != null) {
return true;
}
} else if(this.type.equals(TYPE_FILTER)) {
if(session.getFilter() != null) {
return true;
}
} else if(this.type.equals(TYPE_NORMAL)) {
return true;
}
return false;
}
/**
* Gets layer permissions (uses cache)
*/
private void getPermissions() {
String json = WFSLayerPermissionsStore.getCache(this.session.getSession());
boolean fromCache = (json != null);
if(!fromCache) {
log.warn(getAPIUrl() + PERMISSIONS_API);
String cookies = null;
if(this.session.getRoute() != null && !this.session.getRoute().equals("")) {
cookies = ROUTE_COOKIE_NAME + this.session.getRoute();
}
json = HttpHelper.getRequest(getAPIUrl() + PERMISSIONS_API, cookies);
if(json == null)
return;
}
try {
this.permissions = WFSLayerPermissionsStore.setJSON(json);
this.layerPermission = this.permissions.isPermission(this.layerId);
} catch (IOException e) {
log.error(e, "JSON parsing failed for WFSLayerPermissionsStore \n" + json);
}
// no permissions
if(!this.layerPermission) {
log.warn("Session (" + this.session.getSession() + ") has no permissions for getting the layer (" + this.layerId + ")");
Map<String, Object> output = new HashMap<String, Object>();
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_ONCE, true);
output.put(OUTPUT_MESSAGE, "wfs_no_permissions");
this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
}
}
/**
* Gets layer configuration (uses cache)
*/
private void getLayerConfiguration() {
String json = WFSLayerStore.getCache(this.layerId);
boolean fromCache = (json != null);
if(!fromCache) {
log.warn(getAPIUrl() + LAYER_CONFIGURATION_API + this.layerId);
String cookies = null;
if(this.session.getRoute() != null && !this.session.getRoute().equals("")) {
cookies = ROUTE_COOKIE_NAME + this.session.getRoute();
}
json = HttpHelper.getRequest(getAPIUrl() + LAYER_CONFIGURATION_API + this.layerId, cookies);
if(json == null)
return;
}
try {
this.layer = WFSLayerStore.setJSON(json);
} catch (Exception e) {
log.error(e, "JSON parsing failed for WFSLayerStore \n" + json);
}
// no layer
if(this.layer == null) {
log.warn("Layer (" + this.layerId + ") configurations couldn't be fetched");
Map<String, Object> output = new HashMap<String, Object>();
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_ONCE, true);
output.put(OUTPUT_MESSAGE, "wfs_configuring_layer_failed");
this.service.send(session.getClient(), TransportService.CHANNEL_ERROR, output);
}
}
/**
* Sets which resources will be sent (features, image)
*/
private void setResourceSending() {
// layer configuration is the default
this.sendFeatures = layer.isGetFeatureInfo();
this.sendImage = layer.isGetMapTiles();
// if request defines false and layer configuration allows
if(!this.reqSendFeatures && layer.isGetFeatureInfo())
this.sendFeatures = false;
if(!this.reqSendImage && layer.isGetMapTiles())
this.sendImage = false;
}
/**
* Checks if the map scale is valid
*
* @return <code>true</code> if map scale is valid; <code>false</code>
* otherwise.
*/
private boolean validateMapScales() {
double scale = this.session.getMapScales().get((int)this.session.getLocation().getZoom());
double minScaleInMapSrs = units.getScaleInSrs(layer.getMinScale(), layer.getSRSName(), session.getLocation().getSrs());
double maxScaleInMapSrs = units.getScaleInSrs(layer.getMaxScale(), layer.getSRSName(), session.getLocation().getSrs());
log.debug("Scale in:", layer.getSRSName(), scale, "[", layer.getMaxScale(), ",", layer.getMinScale(), "]");
log.debug("Scale in:", session.getLocation().getSrs(), scale, "[", maxScaleInMapSrs, ",", minScaleInMapSrs, "]");
if(minScaleInMapSrs >= scale && maxScaleInMapSrs <= scale) // min == biggest value
return true;
return false;
}
/**
* Creates image url
*
* @param style
* @param bbox
*/
private String createImageURL(String style, Double[] bbox) {
return "/image" +
"?" + OUTPUT_LAYER_ID + "=" + this.layerId +
"&" + OUTPUT_STYLE + "=" + style +
"&" + OUTPUT_IMAGE_SRS + "=" + this.session.getLocation().getSrs() +
"&" + OUTPUT_IMAGE_BBOX + "=" + bbox[0] +
"," + bbox[1] +
"," + bbox[2] +
"," + bbox[3] +
"&" + OUTPUT_IMAGE_ZOOM + "=" + this.session.getLocation().getZoom();
}
/**
* Sends properties (fields and locales)
*
* @param fields
* @param locales
*/
private void sendWFSProperties(List<String> fields, List<String> locales) {
if(fields == null || fields.size() == 0) {
log.warn("Failed to send properties");
return;
}
fields.add(0, "__fid");
fields.add("__centerX");
fields.add("__centerY");
if(locales != null) {
locales.add(0, "ID");
locales.add("x");
locales.add("y");
} else {
locales = new ArrayList<String>();
}
Map<String, Object> output = new HashMap<String, Object>();
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FIELDS, fields);
output.put(OUTPUT_LOCALES, locales);
this.service.send(this.session.getClient(), TransportService.CHANNEL_PROPERTIES, output);
}
/**
* Sends one feature
*
* @param values
*/
private void sendWFSFeature(List<Object> values) {
if(values == null || values.size() == 0) {
log.warn("Failed to send feature");
return;
}
Map<String, Object> output = new HashMap<String, Object>();
output.put(OUTPUT_LAYER_ID, this.layerId);
output.put(OUTPUT_FEATURE, values);
this.service.send(this.session.getClient(), TransportService.CHANNEL_FEATURE, output);
}
/**
 * Sends a batch of features to the client on the given channel.
 *
 * @param features rows of feature values; nothing is sent when {@code null} or empty
 * @param channel the bus channel to publish on
 */
private void sendWFSFeatures(List<List<Object>> features, String channel) {
    if (features == null || features.isEmpty()) {
        log.warn("Failed to send features");
        return;
    }
    Map<String, Object> message = new HashMap<String, Object>();
    message.put(OUTPUT_LAYER_ID, this.layerId);
    message.put(OUTPUT_FEATURES, features);
    // map click results additionally tell the client whether to keep earlier selections
    if (channel.equals(TransportService.CHANNEL_MAP_CLICK)) {
        message.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
    }
    this.service.send(this.session.getClient(), channel, message);
}
/**
 * Sends a rendered map image to the client. The image is always referenced by URL;
 * base64-encoded data is embedded in addition, except for browsers that cannot handle
 * it (IE 6/7 never get data; IE 8 only below the 32KB data-URI limit).
 *
 * @param url the URL the client can load the image from
 * @param bufferedImage the rendered image; nothing is sent when {@code null}
 * @param bbox bounding box of the image
 * @param isTiled {@code true} for a tile-sized image, {@code false} for a full map image
 */
private void sendWFSImage(String url, BufferedImage bufferedImage, Double[] bbox, boolean isTiled) {
    if (bufferedImage == null) {
        log.warn("Failed to send image");
        return;
    }
    Location location = this.session.getLocation();
    // tiles use the tile size, full images use the whole map size
    Tile tileSize = isTiled ? this.session.getTileSize() : this.session.getMapSize();
    Map<String, Object> message = new HashMap<String, Object>();
    message.put(OUTPUT_LAYER_ID, this.layerId);
    message.put(OUTPUT_IMAGE_SRS, location.getSrs());
    message.put(OUTPUT_IMAGE_BBOX, bbox);
    message.put(OUTPUT_IMAGE_ZOOM, location.getZoom());
    message.put(OUTPUT_IMAGE_TYPE, this.type); // "normal" | "highlight"
    message.put(OUTPUT_KEEP_PREVIOUS, this.session.isKeepPrevious());
    message.put(OUTPUT_IMAGE_WIDTH, tileSize.getWidth());
    message.put(OUTPUT_IMAGE_HEIGHT, tileSize.getHeight());
    message.put(OUTPUT_IMAGE_URL, url);
    byte[] byteImage = WFSImage.imageToBytes(bufferedImage);
    String base64Image = WFSImage.bytesToBase64(byteImage);
    // rough payload size in KB (UTF-16 chars, 2 bytes each)
    int base64Size = (base64Image.length() * 2) / 1024;
    // IE6 & IE7 doesn't support base64, max size in base64 for IE8 is 32KB
    boolean msie = this.session.getBrowser().equals(BROWSER_MSIE);
    int browserVersion = this.session.getBrowserVersion();
    boolean base64Unsupported = msie
            && (browserVersion < 8 || (browserVersion == 8 && base64Size >= 32));
    if (!base64Unsupported) {
        message.put(OUTPUT_IMAGE_DATA, base64Image);
    }
    this.service.send(this.session.getClient(), TransportService.CHANNEL_IMAGE, message);
}
} | [AH-94] debug
| transport/src/main/java/fi/nls/oskari/work/WFSMapLayerJob.java | [AH-94] debug |
|
Java | epl-1.0 | 69021350effa6c0c17ea4a01a2fa3f458ff18b24 | 0 | dejanb/hono,kinbod/hono,dejanb/hono,dejanb/hono,kinbod/hono,kinbod/hono | /**
* Copyright (c) 2016, 2017 Bosch Software Innovations GmbH.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Bosch Software Innovations GmbH - initial creation
*/
package org.eclipse.hono.service.amqp;
import static java.net.HttpURLConnection.HTTP_OK;
import static org.eclipse.hono.util.MessageHelper.encodeIdToJson;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import io.vertx.core.json.JsonObject;
import org.apache.qpid.proton.amqp.transport.AmqpError;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.message.Message;
import org.eclipse.hono.config.ServiceConfigProperties;
import org.eclipse.hono.util.MessageHelper;
import org.eclipse.hono.util.ResourceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import io.vertx.proton.ProtonHelper;
import io.vertx.proton.ProtonLink;
import io.vertx.proton.ProtonReceiver;
import io.vertx.proton.ProtonSender;
/**
 * Base class for Hono endpoints.
 * <p>
 * Tracks the upstream client links attached to this endpoint and provides common
 * handling for link attach/detach and request/response correlation headers.
 *
 * @param <T> The type of configuration properties this endpoint understands.
 */
public abstract class BaseEndpoint<T extends ServiceConfigProperties> implements Endpoint {

    protected final Vertx vertx;
    protected final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * This endpoint's configuration properties.
     */
    // unchecked: the default instance is of the base type only; endpoints relying on
    // T-specific properties must have their actual configuration injected via setConfiguration
    @SuppressWarnings("unchecked")
    protected T config = (T) new ServiceConfigProperties();

    // NOTE(review): plain HashMap - assumes all access happens on a single (event loop)
    // thread; confirm before accessing an endpoint instance from multiple threads
    private final Map<String, UpstreamReceiverImpl> activeClients = new HashMap<>();

    /**
     * Creates an endpoint for a Vertx instance.
     *
     * @param vertx The Vertx instance to use.
     * @throws NullPointerException if vertx is {@code null};
     */
    protected BaseEndpoint(final Vertx vertx) {
        this.vertx = Objects.requireNonNull(vertx);
    }

    /**
     * Sets configuration properties.
     *
     * @param props The properties.
     * @throws NullPointerException if props is {@code null}.
     */
    @Autowired(required = false)
    public final void setConfiguration(final T props) {
        this.config = Objects.requireNonNull(props);
    }

    @Override
    public final void start(final Future<Void> startFuture) {
        if (vertx == null) {
            // defensive only - the constructor already rejects a null Vertx instance
            startFuture.fail("Vert.x instance must be set");
        } else {
            doStart(startFuture);
        }
    }

    /**
     * Subclasses should override this method to create required resources
     * during startup.
     * <p>
     * This implementation always completes the start future.
     *
     * @param startFuture Completes if startup succeeded.
     */
    protected void doStart(final Future<Void> startFuture) {
        startFuture.complete();
    }

    @Override
    public final void stop(final Future<Void> stopFuture) {
        doStop(stopFuture);
    }

    /**
     * Subclasses should override this method to release resources
     * during shutdown.
     * <p>
     * This implementation always completes the stop future.
     *
     * @param stopFuture Completes if shutdown succeeded.
     */
    protected void doStop(final Future<Void> stopFuture) {
        stopFuture.complete();
    }

    /**
     * Closes the link to an upstream client and removes all state kept for it.
     *
     * @param client The client to detach.
     */
    protected final void onLinkDetach(final UpstreamReceiver client) {
        onLinkDetach(client, null);
    }

    /**
     * Closes the link to an upstream client and removes all state kept for it.
     *
     * @param client The client to detach.
     * @param error The error condition to convey to the client when closing the link.
     */
    protected final void onLinkDetach(final UpstreamReceiver client, final ErrorCondition error) {
        if (error == null) {
            logger.debug("closing receiver for client [{}]", client.getLinkId());
        } else {
            logger.debug("closing receiver for client [{}]: {}", client.getLinkId(), error.getDescription());
        }
        client.close(error);
        removeClientLink(client.getLinkId());
    }

    /**
     * Closes the link to a proton based receiver client.
     *
     * @param client The client to detach.
     */
    protected void onLinkDetach(final ProtonReceiver client) {
        // the cast makes overload resolution explicit (ProtonReceiver variant, no error)
        onLinkDetach(client, (ErrorCondition) null);
    }

    /**
     * Closes a link to a proton based client.
     *
     * @param client The client to detach.
     * @param error The error condition to convey to the client when closing the link.
     */
    protected final void onLinkDetach(final ProtonLink<?> client, final ErrorCondition error) {
        if (error == null) {
            logger.debug("closing link [{}]", client.getName());
        } else {
            logger.debug("closing link [{}]: {}", client.getName(), error.getDescription());
            client.setCondition(error);
        }
        client.close();
    }

    /**
     * Closes the link to a proton based receiver client.
     *
     * @param client The client to detach.
     * @param error The error condition to convey to the client when closing the link.
     */
    protected void onLinkDetach(final ProtonReceiver client, final ErrorCondition error) {
        if (error == null) {
            logger.debug("closing proton receiver for client [{}]", MessageHelper.getLinkName(client));
        } else {
            logger.debug("closing proton receiver for client [{}]: {}", MessageHelper.getLinkName(client), error.getDescription());
        }
        // NOTE(review): unlike onLinkDetach(ProtonLink, ErrorCondition) above, the error is
        // logged but never set on the link - confirm whether setCondition(error) is intended here
        client.close();
    }

    /**
     * Registers a link with an upstream client.
     *
     * @param link The link to register.
     */
    protected final void registerClientLink(final UpstreamReceiverImpl link) {
        activeClients.put(link.getLinkId(), link);
    }

    /**
     * Looks up a link with an upstream client based on its identifier.
     *
     * @param linkId The identifier of the client.
     * @return The link object representing the client or {@code null} if no link with the given identifier exists.
     * @throws NullPointerException if the link id is {@code null}.
     */
    protected final UpstreamReceiver getClientLink(final String linkId) {
        return activeClients.get(Objects.requireNonNull(linkId));
    }

    /**
     * Deregisters a link with an upstream client.
     *
     * @param linkId The identifier of the link to deregister.
     */
    protected final void removeClientLink(final String linkId) {
        activeClients.remove(linkId);
    }

    @Override
    public void onLinkAttach(final ProtonReceiver receiver, final ResourceIdentifier targetResource) {
        // endpoints that accept uploads override this; by default reject the attach
        logger.info("Endpoint [{}] does not support data upload, closing link.", getName());
        receiver.setCondition(ProtonHelper.condition(AmqpError.NOT_IMPLEMENTED, "resource cannot be written to"));
        receiver.close();
    }

    @Override
    public void onLinkAttach(final ProtonSender sender, final ResourceIdentifier targetResource) {
        // endpoints that support retrieval override this; by default reject the attach
        logger.info("Endpoint [{}] does not support data retrieval, closing link.", getName());
        sender.setCondition(ProtonHelper.condition(AmqpError.NOT_IMPLEMENTED, "resource cannot be read from"));
        sender.close();
    }

    /**
     * Adds the headers required for correlating a response to its request: the
     * application-specific-correlation-id flag (when set on the request) and the
     * JSON-encoded correlation ID.
     *
     * @param request the request message the response corresponds to
     * @param message the JSON response message to add the headers to
     */
    protected final void addHeadersToResponse(final Message request, final JsonObject message) {
        final boolean isApplicationCorrelationId = MessageHelper.getXOptAppCorrelationId(request);
        logger.debug("registration request [{}] uses application specific correlation ID: {}", request.getMessageId(), isApplicationCorrelationId);
        if (isApplicationCorrelationId) {
            message.put(MessageHelper.ANNOTATION_X_OPT_APP_CORRELATION_ID, isApplicationCorrelationId);
        }
        final JsonObject correlationIdJson = encodeIdToJson(getCorrelationId(request));
        message.put(MessageHelper.SYS_PROPERTY_CORRELATION_ID, correlationIdJson);
    }

    /**
     * @param request the request message from which to extract the correlationId
     * @return The ID used to correlate the given request message. This can either be the provided correlationId
     * (Correlation ID Pattern) or the messageId of the request (Message ID Pattern, if no correlationId is provided).
     */
    protected final Object getCorrelationId(final Message request) {
        /* if a correlationId is provided, we use it to correlate the response -> Correlation ID Pattern */
        if (request.getCorrelationId() != null) {
            return request.getCorrelationId();
        } else {
            /* otherwise we use the message id -> Message ID Pattern */
            return request.getMessageId();
        }
    }

    /**
     * Verifies that a message passes <em>formal</em> checks regarding e.g.
     * required headers, content type and payload format.
     *
     * @param targetAddress The address the message has been received on.
     * @param message The message to check.
     * @return {@code true} if the message passes all checks and can be forwarded downstream.
     */
    protected abstract boolean passesFormalVerification(final ResourceIdentifier targetAddress, final Message message);
}
| service-base/src/main/java/org/eclipse/hono/service/amqp/BaseEndpoint.java | /**
* Copyright (c) 2016, 2017 Bosch Software Innovations GmbH.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Bosch Software Innovations GmbH - initial creation
*/
package org.eclipse.hono.service.amqp;
import static java.net.HttpURLConnection.HTTP_OK;
import static org.eclipse.hono.util.MessageHelper.encodeIdToJson;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import io.vertx.core.json.JsonObject;
import org.apache.qpid.proton.amqp.transport.AmqpError;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.message.Message;
import org.eclipse.hono.config.ServiceConfigProperties;
import org.eclipse.hono.util.MessageHelper;
import org.eclipse.hono.util.ResourceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import io.vertx.proton.ProtonHelper;
import io.vertx.proton.ProtonReceiver;
import io.vertx.proton.ProtonSender;
/**
 * Base class for Hono endpoints.
 *
 * @param <T> The type of configuration properties this endpoint understands.
 */
public abstract class BaseEndpoint<T extends ServiceConfigProperties> implements Endpoint {

    protected final Vertx vertx;
    protected final Logger logger = LoggerFactory.getLogger(getClass());
    // NOTE(review): unchecked cast - the default instance is of the base type only;
    // endpoints relying on T-specific properties must have their configuration
    // injected via setConfiguration
    protected T config = (T) new ServiceConfigProperties();
    // NOTE(review): not referenced anywhere within this class
    private static final String STATUS_OK = String.valueOf(HTTP_OK);
    // NOTE(review): plain HashMap - assumes all access happens on a single (event loop)
    // thread; confirm before accessing an endpoint instance from multiple threads
    private Map<String, UpstreamReceiverImpl> activeClients = new HashMap<>();

    /**
     * Creates an endpoint for a Vertx instance.
     *
     * @param vertx The Vertx instance to use.
     * @throws NullPointerException if vertx is {@code null};
     */
    protected BaseEndpoint(final Vertx vertx) {
        this.vertx = Objects.requireNonNull(vertx);
    }

    /**
     * Sets configuration properties.
     *
     * @param props The properties.
     * @throws NullPointerException if props is {@code null}.
     */
    @Autowired(required = false)
    public final void setConfiguration(final T props) {
        this.config = Objects.requireNonNull(props);
    }

    @Override
    public final void start(final Future<Void> startFuture) {
        // defensive only - the constructor already rejects a null Vertx instance
        if (vertx == null) {
            startFuture.fail("Vert.x instance must be set");
        } else {
            doStart(startFuture);
        }
    }

    /**
     * Subclasses should override this method to create required resources
     * during startup.
     * <p>
     * This implementation always completes the start future.
     *
     * @param startFuture Completes if startup succeeded.
     */
    protected void doStart(final Future<Void> startFuture) {
        startFuture.complete();
    }

    @Override
    public final void stop(final Future<Void> stopFuture) {
        doStop(stopFuture);
    }

    /**
     * Subclasses should override this method to release resources
     * during shutdown.
     * <p>
     * This implementation always completes the stop future.
     *
     * @param stopFuture Completes if shutdown succeeded.
     */
    protected void doStop(final Future<Void> stopFuture) {
        stopFuture.complete();
    }

    /**
     * Closes the link to an upstream client and removes all state kept for it.
     *
     * @param client The client to detach.
     */
    protected final void onLinkDetach(final UpstreamReceiver client) {
        onLinkDetach(client, null);
    }

    /**
     * Closes the link to an upstream client and removes all state kept for it.
     *
     * @param client The client to detach.
     * @param error The error condition to convey to the client when closing the link.
     */
    protected final void onLinkDetach(final UpstreamReceiver client, final ErrorCondition error) {
        if (error == null) {
            logger.debug("closing receiver for client [{}]", client.getLinkId());
        } else {
            logger.debug("closing receiver for client [{}]: {}", client.getLinkId(), error.getDescription());
        }
        client.close(error);
        removeClientLink(client.getLinkId());
    }

    /**
     * Closes the link to a proton based receiver client.
     *
     * @param client The client to detach.
     */
    protected void onLinkDetach(final ProtonReceiver client) {
        // no error condition - resolves to onLinkDetach(ProtonReceiver, ErrorCondition)
        onLinkDetach(client, null);
    }

    /**
     * Closes the link to a proton based receiver client.
     *
     * @param client The client to detach.
     * @param error The error condition to convey to the client when closing the link.
     */
    protected void onLinkDetach(final ProtonReceiver client, final ErrorCondition error) {
        if (error == null) {
            logger.debug("closing proton receiver for client [{}]", MessageHelper.getLinkName(client));
        } else {
            logger.debug("closing proton receiver for client [{}]: {}", MessageHelper.getLinkName(client), error.getDescription());
        }
        // NOTE(review): the error is logged but never set on the link before closing -
        // confirm whether client.setCondition(error) is intended here
        client.close();
    }

    /**
     * Registers a link with an upstream client.
     *
     * @param link The link to register.
     */
    protected final void registerClientLink(final UpstreamReceiverImpl link) {
        activeClients.put(link.getLinkId(), link);
    }

    /**
     * Looks up a link with an upstream client based on its identifier.
     *
     * @param linkId The identifier of the client.
     * @return The link object representing the client or {@code null} if no link with the given identifier exists.
     * @throws NullPointerException if the link id is {@code null}.
     */
    protected final UpstreamReceiver getClientLink(final String linkId) {
        return activeClients.get(Objects.requireNonNull(linkId));
    }

    /**
     * Deregisters a link with an upstream client.
     *
     * @param linkId The identifier of the link to deregister.
     */
    protected final void removeClientLink(final String linkId) {
        activeClients.remove(linkId);
    }

    @Override
    public void onLinkAttach(final ProtonReceiver receiver, final ResourceIdentifier targetResource) {
        // endpoints that accept uploads override this; by default reject the attach
        logger.info("Endpoint [{}] does not support data upload, closing link.", getName());
        receiver.setCondition(ProtonHelper.condition(AmqpError.NOT_IMPLEMENTED, "resource cannot be written to"));
        receiver.close();
    }

    @Override
    public void onLinkAttach(final ProtonSender sender, final ResourceIdentifier targetResource) {
        // endpoints that support retrieval override this; by default reject the attach
        logger.info("Endpoint [{}] does not support data retrieval, closing link.", getName());
        sender.setCondition(ProtonHelper.condition(AmqpError.NOT_IMPLEMENTED, "resource cannot be read from"));
        sender.close();
    }

    /**
     * Adds the headers required for correlating a response to its request: the
     * application-specific-correlation-id flag (when set on the request) and the
     * JSON-encoded correlation ID.
     *
     * @param request the request message the response corresponds to
     * @param message the JSON response message to add the headers to
     */
    protected final void addHeadersToResponse(final Message request, final JsonObject message) {
        final boolean isApplicationCorrelationId = MessageHelper.getXOptAppCorrelationId(request);
        logger.debug("registration request [{}] uses application specific correlation ID: {}", request.getMessageId(), isApplicationCorrelationId);
        if (isApplicationCorrelationId) {
            message.put(MessageHelper.ANNOTATION_X_OPT_APP_CORRELATION_ID, isApplicationCorrelationId);
        }
        final JsonObject correlationIdJson = encodeIdToJson(getCorrelationId(request));
        message.put(MessageHelper.SYS_PROPERTY_CORRELATION_ID, correlationIdJson);
    }

    /**
     * @param request the request message from which to extract the correlationId
     * @return The ID used to correlate the given request message. This can either be the provided correlationId
     * (Correlation ID Pattern) or the messageId of the request (Message ID Pattern, if no correlationId is provided).
     */
    protected final Object getCorrelationId(final Message request) {
        /* if a correlationId is provided, we use it to correlate the response -> Correlation ID Pattern */
        if (request.getCorrelationId() != null) {
            return request.getCorrelationId();
        } else {
            /* otherwise we use the message id -> Message ID Pattern */
            return request.getMessageId();
        }
    }

    /**
     * Verifies that a message passes <em>formal</em> checks regarding e.g.
     * required headers, content type and payload format.
     *
     * @param targetAddress The address the message has been received on.
     * @param message The message to check.
     * @return {@code true} if the message passes all checks and can be forwarded downstream.
     */
    protected abstract boolean passesFormalVerification(final ResourceIdentifier targetAddress, final Message message);
}
| Add convenience method for closing a link with an error condition.
Signed-off-by: Kai Hudalla <[email protected]> | service-base/src/main/java/org/eclipse/hono/service/amqp/BaseEndpoint.java | Add convenience method for closing a link with an error condition. |
|
Java | epl-1.0 | cc6cae2c2deee331ffed8ff58254c08e136e7c4e | 0 | andriusvelykis/isabelle-eclipse,andriusvelykis/isabelle-eclipse,andriusvelykis/isabelle-eclipse | package isabelle.eclipse.editors;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.filesystem.EFS;
import org.eclipse.core.filesystem.IFileStore;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextOperationTarget;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.TextSelection;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.WorkspaceModifyOperation;
import org.eclipse.ui.ide.FileStoreEditorInput;
import org.eclipse.ui.ide.IDE;
import org.eclipse.ui.ide.IGotoMarker;
import org.eclipse.ui.ide.ResourceUtil;
import org.eclipse.ui.part.FileEditorInput;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.eclipse.ui.texteditor.ITextEditor;
/**
 * Static utility methods for working with workbench editors: locating open editors,
 * resolving editor inputs/resources for file stores, and revealing text regions
 * in editor parts.
 */
public class EditorUtil {

    private EditorUtil() {
        // static utility class - not meant to be instantiated
    }

    /**
     * Retrieves all open editors in the workbench.
     *
     * @return all editor parts currently open across every workbench window and page
     */
    public static List<IEditorPart> getOpenEditors() {
        List<IEditorPart> editors = new ArrayList<IEditorPart>();
        for (IWorkbenchWindow window : PlatformUI.getWorkbench().getWorkbenchWindows()) {
            for (IWorkbenchPage page : window.getPages()) {
                for (IEditorReference editor : page.getEditorReferences()) {
                    // getEditor(false): do not force instantiation of the editor
                    IEditorPart editorPart = editor.getEditor(false);
                    if (editorPart != null) {
                        // editors can be null if there are problems instantiating them
                        editors.add(editorPart);
                    }
                }
            }
        }
        return editors;
    }

    /**
     * Create the Editor Input appropriate for the given <code>IFileStore</code>.
     * The result is a normal file editor input if the file exists in the
     * workspace and, if not, we create a wrapper capable of managing an
     * 'external' file using its <code>IFileStore</code>.
     *
     * @param fileStore
     *            The file store to provide the editor input for
     * @return The editor input associated with the given file store
     * @since 3.3
     *
     * Copied from {@link org.eclipse.ui.ide.IDE#getEditorInput(IFileStore)}
     */
    public static IEditorInput getEditorInput(IFileStore fileStore) {
        IFile workspaceFile = getWorkspaceFile(fileStore);
        if (workspaceFile != null) {
            return new FileEditorInput(workspaceFile);
        }
        return new FileStoreEditorInput(fileStore);
    }

    /**
     * Determine whether or not the <code>IFileStore</code> represents a file
     * currently in the workspace.
     *
     * @param fileStore
     *            The <code>IFileStore</code> to test
     * @return The workspace's <code>IFile</code> if it exists or
     *         <code>null</code> if not
     *
     * Copied from {@link org.eclipse.ui.ide.IDE#getWorkspaceFile(IFileStore)}
     */
    private static IFile getWorkspaceFile(IFileStore fileStore) {
        IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
        IFile[] files = root.findFilesForLocationURI(fileStore.toURI());
        files = filterNonExistentFiles(files);
        if (files == null || files.length == 0) {
            return null;
        }
        // for now only return the first file
        return files[0];
    }

    /**
     * Filter the incoming array of <code>IFile</code> elements by removing
     * any that do not currently exist in the workspace.
     *
     * @param files
     *            The array of <code>IFile</code> elements, may be {@code null}
     * @return The filtered array, or {@code null} when the input was {@code null}
     *
     * Copied from {@link org.eclipse.ui.ide.IDE#filterNonExistentFiles(IFile[])}
     */
    private static IFile[] filterNonExistentFiles(IFile[] files) {
        if (files == null) {
            return null;
        }
        ArrayList<IFile> existentFiles = new ArrayList<IFile>(files.length);
        for (IFile file : files) {
            if (file.exists()) {
                existentFiles.add(file);
            }
        }
        return existentFiles.toArray(new IFile[existentFiles.size()]);
    }

    /**
     * Finds a corresponding workspace resource for the given element (e.g. editor input).
     *
     * @param element
     *            The element for which to resolve resource (e.g. an editor input)
     * @return A resource corresponding to the given element, or {@code null}
     */
    public static IResource getResource(Object element) {
        // try resolving as file (a number of options there)
        IResource resource = ResourceUtil.getFile(element);
        if (resource == null) {
            // try at least resource
            resource = ResourceUtil.getResource(element);
        }
        return resource;
    }

    /**
     * Retrieves a {@link ITextViewer} for a given editor. Assumes that editor's
     * {@link ITextOperationTarget} is the text viewer and resolves it via the adapter.
     *
     * @param editor the editor part to query
     * @return the text viewer, {@code null} if editor's {@link ITextOperationTarget} is not a text
     *         viewer.
     *
     * @see <a href="http://stackoverflow.com/questions/923342/get-itextviewer-from-ieditorpart-eclipse">From StackOverflow</a>
     */
    public static ITextViewer getTextViewer(IEditorPart editor) {
        ITextOperationTarget target = (ITextOperationTarget) editor.getAdapter(ITextOperationTarget.class);
        if (target instanceof ITextViewer) {
            return (ITextViewer) target;
        }
        return null;
    }

    /**
     * Retrieves the text editor's document via its document provider.
     *
     * @param editor the text editor to query
     * @return the document, or {@code null} if none or provider is unavailable
     */
    public static IDocument getDocument(ITextEditor editor) {
        IDocumentProvider provider = editor.getDocumentProvider();
        if (provider != null) {
            return provider.getDocument(editor.getEditorInput());
        }
        return null;
    }

    /**
     * Selects and reveals the given regions in the given editor part.
     *
     * @param part the editor part
     * @param selectRegion the region to select
     * @param highlightRegion the region to highlight
     *
     * @see From org.eclipse.jdt.internal.ui.javaeditor.EditorUtility
     */
    public static void revealInEditor(IEditorPart part, IRegion selectRegion, IRegion highlightRegion) {
        if (part == null || selectRegion == null || highlightRegion == null) {
            return;
        }
        if (part instanceof TheoryEditor) {
            // theory editors have dedicated selection support
            ((TheoryEditor) part).selectInEditor(selectRegion, highlightRegion);
        } else {
            revealInEditor(part, selectRegion.getOffset(), selectRegion.getLength(),
                    highlightRegion.getOffset(), highlightRegion.getLength());
        }
    }

    /**
     * Selects and reveals the given offset and length in the given editor part.
     *
     * @param editor the editor part
     * @param offset the offset
     * @param length the length
     * @param highlightOffset the highlight offset
     * @param highlightLength the highlight length
     *
     * @see From org.eclipse.jdt.internal.ui.javaeditor.EditorUtility
     */
    public static void revealInEditor(IEditorPart editor, final int offset, final int length,
            int highlightOffset, int highlightLength) {
        if (editor instanceof ITextEditor) {
            ((ITextEditor) editor).setHighlightRange(highlightOffset, highlightLength, true);
            ((ITextEditor) editor).selectAndReveal(offset, length);
            return;
        }
        // Support for non-text editor - try IGotoMarker interface
        final IGotoMarker gotoMarkerTarget;
        if (editor instanceof IGotoMarker) {
            gotoMarkerTarget = (IGotoMarker) editor;
        } else {
            gotoMarkerTarget = editor != null ? (IGotoMarker) editor.getAdapter(IGotoMarker.class) : null;
        }
        if (gotoMarkerTarget != null) {
            final IEditorInput input = editor.getEditorInput();
            if (input instanceof IFileEditorInput) {
                // create a transient text marker, navigate to it, then delete it
                WorkspaceModifyOperation op = new WorkspaceModifyOperation() {
                    @Override
                    protected void execute(IProgressMonitor monitor) throws CoreException {
                        IMarker marker = null;
                        try {
                            marker = ((IFileEditorInput) input).getFile().createMarker(IMarker.TEXT);
                            marker.setAttribute(IMarker.CHAR_START, offset);
                            marker.setAttribute(IMarker.CHAR_END, offset + length);
                            gotoMarkerTarget.gotoMarker(marker);
                        } finally {
                            if (marker != null) {
                                marker.delete();
                            }
                        }
                    }
                };
                try {
                    op.run(null);
                } catch (InvocationTargetException ex) {
                    // reveal failed
                } catch (InterruptedException e) {
                    Assert.isTrue(false, "this operation can not be canceled"); //$NON-NLS-1$
                }
            }
            return;
        }
        /*
         * Workaround: send out a text selection
         * XXX: Needs to be improved, see https://bugs.eclipse.org/bugs/show_bug.cgi?id=32214
         */
        if (editor != null && editor.getEditorSite().getSelectionProvider() != null) {
            IEditorSite site = editor.getEditorSite();
            if (site == null) {
                return;
            }
            ISelectionProvider provider = editor.getEditorSite().getSelectionProvider();
            if (provider == null) {
                return;
            }
            provider.setSelection(new TextSelection(offset, length));
        }
    }

    /**
     * Opens an editor on the given IFileStore object.
     * <p>
     * This method can be used to open files that reside outside the workspace resource set.
     * </p>
     * <p>
     * If the page already has an editor open on the target object then that editor is brought to
     * front; otherwise, a new editor is opened.
     * </p>
     *
     * @param page
     *            the page in which the editor will be opened
     * @param uri
     *            the URI of the file store representing the file to open
     * @return an open editor or {@code null} if an external editor was opened
     * @exception PartInitException
     *                if the editor could not be initialized
     *
     * @see org.eclipse.ui.ide.IDE#openEditorOnFileStore(IWorkbenchPage, IFileStore)
     * @see EFS#getStore(URI)
     *
     * @since 3.3
     */
    public static IEditorPart openEditor(IWorkbenchPage page, URI uri) throws PartInitException {
        // sanity checks
        if (page == null) {
            throw new IllegalArgumentException();
        }
        IFileStore fileStore;
        try {
            fileStore = EFS.getStore(uri);
        } catch (CoreException e) {
            throw new PartInitException("CoreException opening the file store on the URI.", e);
        }
        // open the editor on the file
        return IDE.openEditorOnFileStore(page, fileStore);
    }

    /**
     * Retrieves active page from the workbench.
     *
     * @return the active page or {@code null} if not available or called from non-UI thread
     */
    public static IWorkbenchPage getActivePage() {
        IWorkbenchWindow activeWindow = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
        if (activeWindow != null) {
            return activeWindow.getActivePage();
        }
        return null;
    }
}
| isabelle.eclipse/src/isabelle/eclipse/editors/EditorUtil.java | package isabelle.eclipse.editors;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.filesystem.IFileStore;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextOperationTarget;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.TextSelection;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.WorkspaceModifyOperation;
import org.eclipse.ui.ide.FileStoreEditorInput;
import org.eclipse.ui.ide.IGotoMarker;
import org.eclipse.ui.ide.ResourceUtil;
import org.eclipse.ui.part.FileEditorInput;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.eclipse.ui.texteditor.ITextEditor;
public class EditorUtil {
/**
 * Collects every editor part currently open across all workbench windows and pages.
 *
 * @return list of open editor parts; editors that failed to instantiate are skipped
 */
public static List<IEditorPart> getOpenEditors() {
    List<IEditorPart> openEditors = new ArrayList<IEditorPart>();
    for (IWorkbenchWindow window : PlatformUI.getWorkbench().getWorkbenchWindows()) {
        for (IWorkbenchPage page : window.getPages()) {
            for (IEditorReference reference : page.getEditorReferences()) {
                // getEditor(false): do not force instantiation of the editor
                IEditorPart part = reference.getEditor(false);
                if (part == null) {
                    // editors can be null if there are problems instantiating them
                    continue;
                }
                openEditors.add(part);
            }
        }
    }
    return openEditors;
}
/**
 * Creates the editor input appropriate for the given {@code IFileStore}: a normal
 * file editor input when the file exists in the workspace, otherwise a wrapper
 * capable of managing an 'external' file via its {@code IFileStore}.
 *
 * @param fileStore the file store to provide the editor input for
 * @return the editor input associated with the given file store
 * @since 3.3
 *
 * Copied from {@link org.eclipse.ui.ide.IDE#getEditorInput(IFileStore)}
 */
public static IEditorInput getEditorInput(IFileStore fileStore) {
    IFile workspaceFile = getWorkspaceFile(fileStore);
    if (workspaceFile == null) {
        // not in the workspace - wrap the external file store
        return new FileStoreEditorInput(fileStore);
    }
    return new FileEditorInput(workspaceFile);
}
/**
 * Determines whether the given {@code IFileStore} represents a file currently
 * in the workspace.
 *
 * @param fileStore the {@code IFileStore} to test
 * @return the workspace's {@code IFile} if it exists, or {@code null} if not
 *
 * Copied from {@link org.eclipse.ui.ide.IDE#getWorkspaceFile(IFileStore)}
 */
private static IFile[] candidatesHolder = null; // placeholder removed below
/**
* Filter the incoming array of <code>IFile</code> elements by removing
* any that do not currently exist in the workspace.
*
* @param files
* The array of <code>IFile</code> elements
* @return The filtered array
*
* Copied from {@link org.eclipse.ui.ide.IDE#filterNonExistentFiles(IFile[])}
*/
private static IFile[] filterNonExistentFiles(IFile[] files) {
if (files == null)
return null;
int length = files.length;
ArrayList<IFile> existentFiles = new ArrayList<IFile>(length);
for (int i = 0; i < length; i++) {
if (files[i].exists())
existentFiles.add(files[i]);
}
return existentFiles.toArray(new IFile[existentFiles.size()]);
}
/**
* Finds a corresponding workspace resource for the given element (e.g. editor input).
*
* @param element
* The element for which to resolve resource (e.g. an editor input)
* @return A resource corresponding to the given element, or {@code null}
*/
public static IResource getResource(Object element) {
// try resolving as file (a number of options there)
IResource resource = ResourceUtil.getFile(element);
if (resource == null) {
// try at least resource
resource = ResourceUtil.getResource(element);
}
return resource;
}
/**
* Retrieves a {@link ITextViewer} for a given editor. Assumes that editor's
* {@link ITextOperationTarget} is the text viewer and resolves it via the adapter.
*
* @param editor
* @return the text viewer, {@code null} if editor's {@link ITextOperationTarget} is not a text
* viewer.
*
* @see <a href="http://stackoverflow.com/questions/923342/get-itextviewer-from-ieditorpart-eclipse">From StackOverflow</a>
*/
public static ITextViewer getTextViewer(IEditorPart editor) {
ITextOperationTarget target = (ITextOperationTarget) editor.getAdapter(ITextOperationTarget.class);
if (target instanceof ITextViewer) {
return (ITextViewer) target;
}
return null;
}
/**
* Retrieves the text editor's document via its document provider.
*
* @param editor
* @return the document, or {@code null} if none or provider is unavailable
*/
public static IDocument getDocument(ITextEditor editor) {
IDocumentProvider provider = editor.getDocumentProvider();
if (provider != null) {
return provider.getDocument(editor.getEditorInput());
}
return null;
}
/**
* Selects and reveals the given region in the given editor part.
*
* @param part the editor part
* @param element the Java element to reveal
*
* @see From org.eclipse.jdt.internal.ui.javaeditor.EditorUtility
*/
public static void revealInEditor(IEditorPart part, IRegion selectRegion, IRegion highlightRegion) {
if (part == null || selectRegion == null || highlightRegion == null) {
return;
}
if (part instanceof TheoryEditor) {
((TheoryEditor) part).selectInEditor(selectRegion, highlightRegion);
} else {
revealInEditor(part, selectRegion.getOffset(), selectRegion.getLength(),
highlightRegion.getOffset(), highlightRegion.getLength());
}
}
/**
* Selects and reveals the given offset and length in the given editor part.
* @param editor the editor part
* @param offset the offset
* @param length the length
* @param highlightOffset the highlight offset
* @param highlightLength the highlight length
*
* @see From org.eclipse.jdt.internal.ui.javaeditor.EditorUtility
*/
public static void revealInEditor(IEditorPart editor, final int offset, final int length,
int highlightOffset, int highlightLength) {
if (editor instanceof ITextEditor) {
((ITextEditor)editor).setHighlightRange(highlightOffset, highlightLength, true);
((ITextEditor)editor).selectAndReveal(offset, length);
return;
}
// Support for non-text editor - try IGotoMarker interface
final IGotoMarker gotoMarkerTarget;
if (editor instanceof IGotoMarker)
gotoMarkerTarget= (IGotoMarker)editor;
else
gotoMarkerTarget= editor != null ? (IGotoMarker)editor.getAdapter(IGotoMarker.class) : null;
if (gotoMarkerTarget != null) {
final IEditorInput input= editor.getEditorInput();
if (input instanceof IFileEditorInput) {
WorkspaceModifyOperation op = new WorkspaceModifyOperation() {
@Override
protected void execute(IProgressMonitor monitor) throws CoreException {
IMarker marker= null;
try {
marker= ((IFileEditorInput)input).getFile().createMarker(IMarker.TEXT);
marker.setAttribute(IMarker.CHAR_START, offset);
marker.setAttribute(IMarker.CHAR_END, offset + length);
gotoMarkerTarget.gotoMarker(marker);
} finally {
if (marker != null)
marker.delete();
}
}
};
try {
op.run(null);
} catch (InvocationTargetException ex) {
// reveal failed
} catch (InterruptedException e) {
Assert.isTrue(false, "this operation can not be canceled"); //$NON-NLS-1$
}
}
return;
}
/*
* Workaround: send out a text selection
* XXX: Needs to be improved, see https://bugs.eclipse.org/bugs/show_bug.cgi?id=32214
*/
if (editor != null && editor.getEditorSite().getSelectionProvider() != null) {
IEditorSite site= editor.getEditorSite();
if (site == null)
return;
ISelectionProvider provider= editor.getEditorSite().getSelectionProvider();
if (provider == null)
return;
provider.setSelection(new TextSelection(offset, length));
}
}
}
| Utility methods to open editor of URI (a bit different from IDE static methods, because it does not require editor ID to be given)
| isabelle.eclipse/src/isabelle/eclipse/editors/EditorUtil.java | Utility methods to open editor of URI (a bit different from IDE static methods, because it does not require editor ID to be given) |
|
Java | mpl-2.0 | 092c15c299ba524e134ce0d982469d26bb472577 | 0 | liuche/digital-clipboard,liuche/digital-clipboard | package me.mcomella.fathomtest;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.webkit.ConsoleMessage;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        final WebView webView = (WebView) findViewById(R.id.webview);
        final WebSettings settings = webView.getSettings();
        // JavaScript must be enabled for evaluateJavascript() to work.
        settings.setJavaScriptEnabled(true);
        webView.setWebViewClient(new InjectClient());
        // Chrome client surfaces JS console errors (see ChromeClient below).
        webView.setWebChromeClient(new ChromeClient());
        webView.loadUrl("http://apple.com");
    }

    /** Logs javascript console messages (including errors) and shows them as toasts. */
    public class ChromeClient extends WebChromeClient {
        @Override
        public boolean onConsoleMessage(ConsoleMessage consoleMessage) {
            Log.d("lol", "s: " + consoleMessage.message());
            Toast.makeText(MainActivity.this, "console: " + consoleMessage.message(), Toast.LENGTH_SHORT).show();
            return super.onConsoleMessage(consoleMessage);
        }
    }

    /** Injects a redirect script once a page has finished loading. */
    public class InjectClient extends WebViewClient {
        @Override
        public void onPageFinished(WebView view, String url) {
            super.onPageFinished(view, url);
            // BUG FIX: was "documenta.location", which raised a ReferenceError
            // in the page instead of performing the redirect.
            final String script = "document.location = 'http://kotaku.com'";
            view.evaluateJavascript(script, new ValueCallback<String>() {
                @Override
                public void onReceiveValue(String s) {
                    Log.d("lol", "s: " + s);
                    Toast.makeText(MainActivity.this, "Finished: " + s, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }
}
| app/src/main/java/me/mcomella/fathomtest/MainActivity.java | package me.mcomella.fathomtest;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.webkit.ValueCallback;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Configure the browser widget; JS is required for evaluateJavascript().
        final WebView browser = (WebView) findViewById(R.id.webview);
        final WebSettings prefs = browser.getSettings();
        prefs.setJavaScriptEnabled(true);
        browser.setWebViewClient(new InjectClient());
        browser.loadUrl("http://apple.com");
    }

    /** WebViewClient that runs a redirect script after each page load completes. */
    public class InjectClient extends WebViewClient {
        @Override
        public void onPageFinished(WebView view, String url) {
            super.onPageFinished(view, url);
            final String redirectScript = "document.location = 'http://kotaku.com'";
            // Evaluate asynchronously; the callback reports the script's result.
            view.evaluateJavascript(redirectScript, new ValueCallback<String>() {
                @Override
                public void onReceiveValue(String s) {
                    Log.d("lol", "s: " + s);
                    Toast.makeText(MainActivity.this, "Finished: " + s, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }
}
| log js errors.
| app/src/main/java/me/mcomella/fathomtest/MainActivity.java | log js errors. |
|
Java | agpl-3.0 | a6975ffe726fb05aa9787783fbbd4ade5dac3e8c | 0 | kno10/reversegeocode | package com.kno10.reversegeocode.indexer;
/*
* Copyright (C) 2015, Erich Schubert
* Ludwig-Maximilians-Universität München
* Lehr- und Forschungseinheit für Datenbanksysteme
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.image.PixelReader;
import javafx.scene.image.PixelWriter;
import javafx.scene.image.WritableImage;
import javafx.scene.paint.Color;
import javafx.scene.shape.FillRule;
import javafx.scene.shape.LineTo;
import javafx.scene.shape.MoveTo;
import javafx.scene.shape.Path;
import javafx.scene.shape.PathElement;
import javafx.stage.Stage;
import javax.imageio.ImageIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gs.collections.api.bag.primitive.MutableIntBag;
import com.gs.collections.api.iterator.IntIterator;
import com.gs.collections.api.map.primitive.IntObjectMap;
import com.gs.collections.api.map.primitive.MutableIntObjectMap;
import com.gs.collections.impl.bag.mutable.primitive.IntHashBag;
import com.gs.collections.impl.list.mutable.primitive.FloatArrayList;
import com.gs.collections.impl.map.mutable.primitive.IntObjectHashMap;
import com.gs.collections.impl.set.mutable.UnifiedSet;
/**
* Build and encode the lookup index.
*
* This is currently implemented using JavaFX to facilitate the polygon drawing.
* For this reason, it needs to extend a JavaFX Application - this part of the
* JavaFX API is just stupid...
*
* TODO: make parameters configurable.
*
* @author Erich Schubert
*/
public class BuildLayeredIndex extends Application {
	/** Class logger */
	private static final Logger LOG = LoggerFactory
			.getLogger(BuildLayeredIndex.class);

	/** Input and output file names */
	File infile, oufile, imfile;

	/** Pattern for matching coordinates (tab-separated "lon,lat" pairs) */
	Pattern coordPattern = Pattern
			.compile("(?<=\t)(-?\\d+(?:\\.\\d*)),(-?\\d+(?:\\.\\d*))(?=\t|$)");

	/** Pattern for recognizing the level */
	Pattern levelPattern = Pattern.compile("(?<=\t)(\\d+)(?=\t)");

	/** Minimum and maximum level */
	private int minLevel = 2, maxLevel = 10;

	/** Entities read from the file, indexed by level ({@code null} = unused level) */
	private ArrayList<UnifiedSet<Entity>> entities;

	/** Minimum size of objects to draw (in map units, not pixels) */
	double minsize;

	/** Viewport of the map */
	Viewport viewport;

	/**
	 * Constructor.
	 */
	public BuildLayeredIndex() {
		super();
	}

	/**
	 * Parse command line parameters (--input, --output, --vis, --minlevel,
	 * --maxlevel, --resolution, --minsize) and set up the level sets and
	 * viewport before the JavaFX stage is started.
	 */
	@Override
	public void init() throws Exception {
		super.init();
		Map<String, String> named = getParameters().getNamed();
		String v = named.get("input");
		if (v == null) {
			throw new RuntimeException("Missing parameter --input");
		}
		this.infile = new File(v);
		v = named.get("output");
		if (v == null) {
			throw new RuntimeException("Missing parameter --output");
		}
		this.oufile = new File(v);
		v = named.get("vis");
		this.imfile = v != null ? new File(v) : null;
		v = named.get("minlevel");
		minLevel = v != null ? Integer.valueOf(v) : minLevel;
		v = named.get("maxlevel");
		maxLevel = v != null ? Integer.valueOf(v) : maxLevel;
		// Initialize entity level sets:
		this.entities = new ArrayList<>(maxLevel + 1);
		for (int i = 0; i < minLevel; i++) {
			entities.add(null);
		}
		for (int i = minLevel; i <= maxLevel; i++) {
			entities.add(new UnifiedSet<>());
		}
		// Viewport on map
		v = named.get("resolution");
		double resolution = v != null ? Double.valueOf(v) : 0.01;
		// TODO: make clipping configurable.
		this.viewport = new Viewport(360., 140., 180., 60., resolution);
		// Minimum size of bounding box.
		v = named.get("minsize");
		double pixel_minsize = v != null ? Double.valueOf(v) : 4;
		this.minsize = pixel_minsize * resolution;
	}

	/**
	 * Read the gzipped input file (one entity per line: tab-separated metadata,
	 * level and lon,lat polygon coordinates), merge polygons of identical
	 * entities, then render the index.
	 */
	@Override
	public void start(Stage stage) throws Exception {
		// Preallocate objects (will be reset and reused!)
		Matcher m = coordPattern.matcher(""), lm = levelPattern.matcher("");
		FloatArrayList points = new FloatArrayList();
		BoundingBox bb = new BoundingBox();

		int polycount = 0, lines = 0, ecounter = 0;
		// Everybody just "loves" such Java constructs...
		try (BufferedReader b = new BufferedReader(new InputStreamReader(
				new GZIPInputStream(new FileInputStream(infile))))) {
			long start = System.currentTimeMillis();
			String line = null;
			while ((line = b.readLine()) != null) {
				++lines;
				points.clear();
				bb.reset();

				String meta = null;
				lm.reset(line);
				if (!lm.find()) {
					LOG.warn("Line was not matched: {}", line);
					continue;
				}
				// We keep metadata 0-terminated as seperator!
				meta = line.substring(0, lm.end()) + '\0';
				int level = Integer.parseInt(lm.group(1));
				assert (!lm.find());
				m.reset(line);
				while (m.find()) {
					assert (m.start() >= lm.end());
					float lon = Float.parseFloat(m.group(1));
					float lat = Float.parseFloat(m.group(2));
					points.add(lon);
					points.add(lat);
					bb.update(lon, lat);
				}
				if (points.size() == 0) {
					LOG.warn("Line was not matched: {}", line);
					continue;
				}
				// Skip entities with a bounding box below the drawing threshold.
				if (bb.size() < minsize) {
					continue;
				}
				Entity ent = new Entity(meta);
				if (level >= entities.size()) {
					// Level not used.
					continue;
				}
				UnifiedSet<Entity> levdata = entities.get(level);
				if (levdata == null) {
					// Level not used.
					continue;
				}
				Entity exist = levdata.get(ent);
				if (exist != null) {
					// Same entity seen before: merge bounding box and polygons.
					exist.bb.update(bb);
					exist.polys.add(points.toArray());
					++polycount;
				} else {
					levdata.add(ent);
					ent.bb = new BoundingBox(bb);
					ent.polys = new LinkedList<>();
					ent.polys.add(points.toArray());
					++polycount;
					++ecounter;
				}
			}

			long end = System.currentTimeMillis();
			LOG.info("Parsing time: {} ms", end - start);
			LOG.info("Read {} lines, kept {} entities, {} polygons", //
					lines, ecounter, polycount);

			render(stage);
		} catch (IOException e) {
			LOG.error("IO Error", e);
		}
		Platform.exit();
	}

	/**
	 * Render the polygons onto the "winner" map.
	 *
	 * Each level is rendered into {@code winner} (per-pixel: coverage in the
	 * top byte, entity number in the low 24 bits), then flattened into the
	 * accumulated {@code winners} map before the next level is processed.
	 *
	 * @param stage
	 *            Empty JavaFX stage used for rendering
	 */
	public void render(Stage stage) {
		// Tile size of the off-screen rendering scene.
		final int blocksize = 1024;
		Group rootGroup = new Group();
		Scene scene = new Scene(rootGroup, blocksize, blocksize, Color.BLACK);
		WritableImage writableImage = null; // Buffer

		MutableIntObjectMap<String> meta = new IntObjectHashMap<>();
		meta.put(0, ""); // Note: deliberately not \0 terminated.
		int entnum = 1;
		// winners: accumulated result of all levels; winner: current level only.
		int[][] winners = new int[viewport.height][viewport.width];
		int[][] winner = new int[viewport.height][viewport.width];

		long start = System.currentTimeMillis();
		for (int lev = minLevel; lev <= maxLevel; lev++) {
			if (entities.get(lev) == null) {
				continue;
			}
			LOG.info("Rendering level {}", lev);
			for (int y = 0; y < viewport.height; y++) {
				Arrays.fill(winner[y], 0);
			}

			// Sort by size.
			ArrayList<Entity> order = new ArrayList<>(entities.get(lev));
			Collections.sort(order);

			Path path = new Path();
			ObservableList<PathElement> elems = path.getElements();
			for (Entity e : order) {
				if (e.polys.size() <= 0) {
					continue;
				}
				// Area to inspect
				int xmin = Math.max(0,
						(int) Math.floor(viewport.projLon(e.bb.lonmin)) - 1);
				int xmax = Math.min(viewport.width,
						(int) Math.ceil(viewport.projLon(e.bb.lonmax)) + 1);
				// NOTE(review): ceil is used for ymin and floor for ymax -
				// presumably because projLat is a decreasing projection;
				// confirm against the Viewport implementation.
				int ymin = Math.max(0,
						(int) Math.ceil(viewport.projLat(e.bb.latmin)) - 1);
				int ymax = Math.min(viewport.height,
						(int) Math.floor(viewport.projLat(e.bb.latmax)) + 1);
				// System.out.format("%d-%d %d-%d; ", xmin, xmax, ymin, ymax);
				// Render the entity tile by tile, to keep the scene small.
				for (int x1 = xmin; x1 < xmax; x1 += blocksize) {
					int x2 = Math.min(x1 + blocksize, xmax);
					for (int y1 = ymin; y1 < ymax; y1 += blocksize) {
						int y2 = Math.min(y1 + blocksize, ymax);
						// Implementation note: we are drawing upside down.
						elems.clear();
						for (float[] f : e.polys) {
							assert (f.length > 1);
							elems.add(new MoveTo(viewport.projLon(f[0]) - x1,
									viewport.projLat(f[1]) - y1));
							for (int i = 2, l = f.length; i < l; i += 2) {
								elems.add(new LineTo(viewport.projLon(f[i])
										- x1, viewport.projLat(f[i + 1]) - y1));
							}
						}
						path.setStroke(Color.TRANSPARENT);
						path.setFill(Color.WHITE);
						path.setFillRule(FillRule.EVEN_ODD);

						rootGroup.getChildren().add(path);
						writableImage = scene.snapshot(writableImage);
						rootGroup.getChildren().remove(path);

						transferPixels(writableImage, x1, x2, y1, y2, //
								winner, entnum);
					}
				}
				// Note: we construct meta 0-terminated!
				meta.put(entnum, e.key);
				++entnum;
			}
			flatten(winners, winner, meta);
		}
		long end = System.currentTimeMillis();
		LOG.info("Rendering time: {} ms", end - start);

		buildIndex(meta, winners);

		if (imfile != null) {
			visualize(meta.size(), winners);
		}
	}

	/**
	 * Transfer pixels from the rendering buffer to the winner/alpha maps.
	 *
	 * A pixel wins if its coverage is at least as large as the previous
	 * winner's (or fully covered). The result stores the coverage in the top
	 * byte and the entity number in the remaining bits.
	 *
	 * @param img
	 *            Rendering buffer
	 * @param x1
	 *            Left
	 * @param x2
	 *            Right
	 * @param y1
	 *            Bottom
	 * @param y2
	 *            Top
	 * @param winner
	 *            Output array
	 * @param c
	 *            Entity number
	 */
	public void transferPixels(WritableImage img, int x1, int x2, int y1,
			int y2, int[][] winner, int c) {
		PixelReader reader = img.getPixelReader();
		for (int y = y1, py = 0; y < y2; y++, py++) {
			for (int x = x1, px = 0; x < x2; x++, px++) {
				int col = reader.getArgb(px, py);
				// Low byte is the blue channel; with a white fill on a black
				// scene background this equals the antialiased coverage.
				int alpha = (col & 0xFF);
				// Always ignore cover less than 10%
				if (alpha < 0x19) {
					continue;
				}
				// Clip value range to positive bytes,
				alpha = alpha > 0x7F ? 0x7F : alpha;
				byte oldalpha = (byte) (winner[y][x] >>> 24);
				if (alpha == 0x7F || (alpha > 0 && alpha >= oldalpha)) {
					winner[y][x] = (alpha << 24) | c;
				}
			}
		}
	}

	/**
	 * Flatten multiple layers of "winners".
	 *
	 * For each entity of the current level, the most frequent "parent" pixel
	 * value from the coarser levels is determined, and the entity's metadata
	 * is extended with the parent's metadata chain. Then the current level is
	 * merged into the accumulated winners map.
	 *
	 * @param winners
	 *            Accumulated winners of the coarser levels; updated in-place
	 * @param winner
	 *            Current level's winner map (coverage in the top byte)
	 * @param meta
	 *            Metadata map; entries are extended with parent metadata
	 */
	private void flatten(int[][] winners, int[][] winner,
			MutableIntObjectMap<String> meta) {
		MutableIntObjectMap<MutableIntBag> parents = new IntObjectHashMap<>();
		for (int y = 0; y < viewport.height; y++) {
			for (int x = 0; x < viewport.width; x++) {
				int id = winner[y][x] & 0xFFFFFF; // top byte is alpha!
				if (id > 0) {
					parents.getIfAbsentPut(id, IntHashBag::new)//
							.add(winners[y][x]);
				}
			}
		}
		// Find the most frequent parent:
		parents.forEachKeyValue((i, b) -> {
			int best = -1, bcount = -1;
			for (IntIterator it = b.intIterator(); it.hasNext();) {
				int p = it.next(), c = b.occurrencesOf(p);
				// Ties are broken by the smaller parent id.
				if (c > bcount || (c == bcount && p < best)) {
					bcount = c;
					best = p;
				}
			}
			if (best > 0) {
				meta.put(i, meta.get(i) /* 0 terminated! *///
						+ meta.get(best) /* 0 terminated */);
			}
		});
		for (int y = 0; y < viewport.height; y++) {
			for (int x = 0; x < viewport.width; x++) {
				int id = winner[y][x] & 0xFFFFFF; // top byte is alpha!
				if (id > 0) {
					winners[y][x] = id;
				}
			}
		}
	}

	/**
	 * Build the output index file.
	 *
	 * File layout: header (magic, dimensions, coverage, entity count),
	 * per-row and per-metadata offset tables, run-length encoded pixel rows,
	 * then the UTF-8 metadata entries.
	 *
	 * @param meta
	 *            Metadata
	 * @param winner
	 *            Winner array
	 */
	private void buildIndex(IntObjectMap<String> meta, int[][] winner) {
		int[] map = new int[meta.size()];
		// Scan pixels for used indexes.
		for (int y = 0; y < viewport.height; y++) {
			int[] row = winner[y];
			for (int x = 0; x < viewport.width; x++) {
				map[row[x]] = 1; // present
			}
		}
		// Enumerate used indexes (unused ones map to -1).
		int c = 0;
		for (int i = 0; i < map.length; i++) {
			map[i] = (map[i] == 0) ? -1 : c++;
		}
		LOG.info("Number of used entities: {}", c);
		byte[] buffer = new byte[viewport.width * 8]; // Output buffer.

		try (DataOutputStream os = new DataOutputStream(//
				new FileOutputStream(oufile))) {
			// First prepare all the data, so that we can put
			// the final positions into the header table immediately.
			// Encode the rows
			byte[][] rows = new byte[viewport.height][];
			for (int y = 0; y < viewport.height; y++) {
				int len = encodeLine16(winner[y], map, buffer);
				rows[y] = Arrays.copyOf(buffer, len);
			}
			// Encode the metadata
			byte[][] metadata = new byte[c][];
			int c2 = 0;
			for (int i = 0; i < map.length; i++) {
				if (map[i] <= -1) {
					continue;
				}
				byte[] bytes = meta.get(i).getBytes("UTF-8");
				metadata[c2++] = bytes;
			}
			assert (c2 == c);

			// Part 1: HEADER
			// Write a "magic" header first.
			os.writeInt(0x6e0_6e0_01);
			// Write dimensions
			os.writeInt(viewport.width);
			os.writeInt(viewport.height);
			// Write coverage
			os.writeFloat((float) viewport.xcover);
			os.writeFloat((float) viewport.ycover);
			os.writeFloat((float) viewport.xshift);
			os.writeFloat((float) viewport.yshift);
			// Write the number of indexes
			os.writeInt(c);

			final int headersize = os.size();
			LOG.warn("Position of pixmap index: {}", headersize);
			// Position of first row in the data:
			final int firstpos = headersize + //
					((viewport.height + metadata.length + 1) << 2);
			int pos = firstpos;
			// Part 2: PIXMAP header
			// Write the row header table
			for (int y = 0; y < viewport.height; y++) {
				os.writeInt(pos);
				pos += rows[y].length;
				assert (pos > 0);
			}
			// Part 3: METADATA header
			// Write the metadata header table
			for (byte[] row : metadata) {
				os.writeInt(pos);
				pos += row.length;
				assert (pos > 0);
			}
			os.writeInt(pos); // End of line extra value.
			if (os.size() != firstpos) {
				throw new RuntimeException(
						"File construction logic is inconsistent. Expected: "
								+ firstpos + " position: " + os.size());
			}
			// Part 2: PIXMAP rows
			for (byte[] row : rows) {
				os.write(row, 0, row.length);
			}
			// Part 3: METADATA entries
			for (byte[] row : metadata) {
				os.write(row, 0, row.length);
			}
			// Ensure we are at the predicted position.
			if (pos != os.size()) {
				// BUG FIX: the message previously reported firstpos instead of
				// pos, the value actually compared here.
				throw new RuntimeException(
						"File construction logic is inconsistent. Expected: "
								+ pos + " position: " + os.size());
			}
		} catch (IOException e) {
			LOG.error("IO error writing index.", e);
		}
	}

	/**
	 * Encode a line of the output image map.
	 *
	 * Uses a simple run-length encoding: pairs of varints (mapped entity id,
	 * repetition count minus one).
	 *
	 * @param winner
	 *            Image map
	 * @param map
	 *            Entity ID mapping
	 * @param buffer
	 *            Output buffer
	 * @return Length
	 */
	// TODO: develop even more compact RLEs for this use case.
	private int encodeLine16(int[] winner, int[] map, byte[] buffer) {
		int len = 0;
		// Perform a simple run-length encoding.
		for (int x = 0; x < winner.length;) {
			final int first = x;
			final int cur = winner[x++];
			while (x < winner.length && winner[x] == cur) {
				++x;
			}
			// Write value of map.
			len = writeUnsignedVarint(buffer, len, map[cur]);
			// Write repetition count - 1
			len = writeUnsignedVarint(buffer, len, x - first - 1);
		}
		return len;
	}

	/**
	 * Write a single varint.
	 *
	 * @param buffer
	 *            Buffer to write to
	 * @param pos
	 *            Current position
	 * @param val
	 *            Value to write
	 * @return New position
	 */
	private static int writeUnsignedVarint(byte[] buffer, int pos, int val) {
		// Extra bytes have the high bit set
		while ((val & 0x7F) != val) {
			buffer[pos++] = (byte) ((val & 0x7F) | 0x80);
			val >>>= 7;
		}
		// Last byte doesn't have high bit set
		buffer[pos++] = (byte) (val & 0x7F);
		return pos;
	}

	/**
	 * Visualize the map as a PNG image with random entity colors.
	 *
	 * @param max
	 *            Number of entities (colors are assigned for ids 1..max-1)
	 * @param winner
	 *            Winners array
	 */
	public void visualize(int max, int[][] winner) {
		// Randomly assign colors for visualization:
		Random r = new Random();
		int[] cols = new int[max + 1];
		for (int i = 1; i < cols.length; i++) {
			cols[i] = r.nextInt(0x1000000) | 0xFF000000;
		}
		try {
			WritableImage writableImage = new WritableImage(viewport.width,
					viewport.height);
			PixelWriter writer = writableImage.getPixelWriter();
			for (int y = 0; y < viewport.height; y++) {
				// Note: visualization is drawn upside down.
				int[] row = winner[viewport.height - 1 - y];
				for (int x = 0; x < viewport.width; x++) {
					writer.setArgb(x, y, cols[row[x]]);
				}
			}
			ImageIO.write(SwingFXUtils.fromFXImage(writableImage, null), "png",
					imfile);
		} catch (IOException e) {
			LOG.error("IO error writing visualization.", e);
		}
	}

	/**
	 * An entity on the map.
	 *
	 * Identity (hashCode/equals) is based on the metadata key only, so that
	 * polygons of the same entity can be merged.
	 *
	 * @author Erich Schubert
	 */
	public static class Entity implements Comparable<Entity> {
		/** Index key (description) */
		final String key;

		/** Bounding box */
		BoundingBox bb;

		/** Polygons: flat arrays of alternating (lon, lat) coordinates */
		List<float[]> polys;

		public Entity(String key) {
			this.key = key;
		}

		@Override
		public int hashCode() {
			return key.hashCode();
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj) {
				return true;
			}
			if (obj == null || getClass() != obj.getClass()) {
				return false;
			}
			return key.equals(((Entity) obj).key);
		}

		/**
		 * Order descending by size.
		 */
		@Override
		public int compareTo(Entity o) {
			return Double.compare(o.bb.size(), bb.size());
		}
	}

	/**
	 * Launch, as JavaFX application.
	 *
	 * @param args
	 *            Parameters
	 */
	public static void main(String[] args) {
		launch(args);
	}
}
| src/indexer/java/com/kno10/reversegeocode/indexer/BuildLayeredIndex.java | package com.kno10.reversegeocode.indexer;
/*
* Copyright (C) 2015, Erich Schubert
* Ludwig-Maximilians-Universität München
* Lehr- und Forschungseinheit für Datenbanksysteme
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.image.PixelReader;
import javafx.scene.image.PixelWriter;
import javafx.scene.image.WritableImage;
import javafx.scene.paint.Color;
import javafx.scene.shape.FillRule;
import javafx.scene.shape.LineTo;
import javafx.scene.shape.MoveTo;
import javafx.scene.shape.Path;
import javafx.scene.shape.PathElement;
import javafx.stage.Stage;
import javax.imageio.ImageIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gs.collections.api.bag.primitive.MutableIntBag;
import com.gs.collections.api.iterator.IntIterator;
import com.gs.collections.api.map.primitive.IntObjectMap;
import com.gs.collections.api.map.primitive.MutableIntObjectMap;
import com.gs.collections.impl.bag.mutable.primitive.IntHashBag;
import com.gs.collections.impl.list.mutable.primitive.FloatArrayList;
import com.gs.collections.impl.map.mutable.primitive.IntObjectHashMap;
import com.gs.collections.impl.set.mutable.UnifiedSet;
/**
* Build and encode the lookup index.
*
* This is currently implemented using JavaFX to facilitate the polygon drawing.
* For this reason, it needs to extend a JavaFX Application - this part of the
* JavaFX API is just stupid...
*
* TODO: make parameters configurable.
*
* @author Erich Schubert
*/
public class BuildLayeredIndex extends Application {
/** Class logger */
private static final Logger LOG = LoggerFactory
.getLogger(BuildLayeredIndex.class);
/** Input and output file names */
File infile, oufile, imfile;
/** Pattern for matching coordinates */
Pattern coordPattern = Pattern
.compile("(?<=\t)(-?\\d+(?:\\.\\d*)),(-?\\d+(?:\\.\\d*))(?=\t|$)");
/** Pattern for recognizing the level */
Pattern levelPattern = Pattern.compile("(?<=\t)(\\d+)(?=\t)");
/** Minimum and maximum level */
private int minLevel = 2, maxLevel = 10;
/** Entities read from the file */
private ArrayList<UnifiedSet<Entity>> entities;
/** Minimum size of objects to draw */
double minsize;
/** Viewport of the map */
Viewport viewport;
/**
* Constructor.
*/
public BuildLayeredIndex() {
super();
}
@Override
public void init() throws Exception {
super.init();
Map<String, String> named = getParameters().getNamed();
String v = named.get("input");
if (v == null) {
throw new RuntimeException("Missing parameter --input");
}
this.infile = new File(v);
v = named.get("output");
if (v == null) {
throw new RuntimeException("Missing parameter --output");
}
this.oufile = new File(v);
v = named.get("vis");
this.imfile = v != null ? new File(v) : null;
v = named.get("minlevel");
minLevel = v != null ? Integer.valueOf(v) : minLevel;
v = named.get("maxlevel");
maxLevel = v != null ? Integer.valueOf(v) : maxLevel;
// Initialize entity level sets:
this.entities = new ArrayList<>(maxLevel + 1);
for (int i = 0; i < minLevel; i++) {
entities.add(null);
}
for (int i = minLevel; i <= maxLevel; i++) {
entities.add(new UnifiedSet<>());
}
// Viewport on map
v = named.get("resolution");
double resolution = v != null ? Double.valueOf(v) : 0.01;
// TODO: make clipping configurable.
this.viewport = new Viewport(360., 140., 180., 60., resolution);
// Minimum size of bounding box.
v = named.get("minsize");
double pixel_minsize = v != null ? Double.valueOf(v) : 4;
this.minsize = pixel_minsize * resolution;
}
/**
 * JavaFX application entry point: reads the gzipped polygon dump line by
 * line, groups the polygons into {@link Entity} objects per administrative
 * level (deduplicated by their metadata key), and finally triggers
 * {@link #render(Stage)}. The JavaFX platform is shut down afterwards.
 *
 * @param stage
 *            Primary JavaFX stage, passed through to rendering
 * @throws Exception
 *             on unrecoverable errors
 */
@Override
public void start(Stage stage) throws Exception {
// Preallocate objects (will be reset and reused!)
Matcher m = coordPattern.matcher(""), lm = levelPattern.matcher("");
FloatArrayList points = new FloatArrayList();
BoundingBox bb = new BoundingBox();
int polycount = 0, lines = 0, ecounter = 0;
// Everybody just "loves" such Java constructs...
try (BufferedReader b = new BufferedReader(new InputStreamReader(
new GZIPInputStream(new FileInputStream(infile))))) {
long start = System.currentTimeMillis();
String line = null;
while ((line = b.readLine()) != null) {
++lines;
points.clear();
bb.reset();
String meta = null;
// The line must begin with the metadata (including the level).
lm.reset(line);
if (!lm.find()) {
LOG.warn("Line was not matched: {}", line);
continue;
}
// We keep metadata 0-terminated as separator!
meta = line.substring(0, lm.end()) + '\0';
int level = Integer.parseInt(lm.group(1));
assert (!lm.find());
// Collect all lon/lat coordinate pairs and their bounding box.
m.reset(line);
while (m.find()) {
assert (m.start() >= lm.end());
float lon = Float.parseFloat(m.group(1));
float lat = Float.parseFloat(m.group(2));
points.add(lon);
points.add(lat);
bb.update(lon, lat);
}
if (points.size() == 0) {
LOG.warn("Line was not matched: {}", line);
continue;
}
// Skip polygons whose bounding box is below the minimum size.
if (bb.size() < minsize) {
continue;
}
Entity ent = new Entity(meta);
if (level >= entities.size()) {
// Level not used.
continue;
}
UnifiedSet<Entity> levdata = entities.get(level);
if (levdata == null) {
// Level not used.
continue;
}
// Merge the polygon into an existing entity with the same key,
// or register it as a new entity on this level.
Entity exist = levdata.get(ent);
if (exist != null) {
exist.bb.update(bb);
exist.polys.add(points.toArray());
++polycount;
} else {
levdata.add(ent);
ent.bb = new BoundingBox(bb);
ent.polys = new LinkedList<>();
ent.polys.add(points.toArray());
++polycount;
++ecounter;
}
}
long end = System.currentTimeMillis();
LOG.info("Parsing time: {} ms", end - start);
LOG.info("Read {} lines, kept {} entities, {} polygons", //
lines, ecounter, polycount);
render(stage);
} catch (IOException e) {
LOG.error("IO Error", e);
}
// Terminate the JavaFX platform when done.
Platform.exit();
}
/**
 * Render the polygons onto the "winner" map, level by level.
 *
 * Each level is rasterized via JavaFX by filling the polygons in white on
 * a black scene and taking snapshots in blocks of 512x512 pixels; the
 * per-level result is then flattened onto the accumulated map.
 *
 * NOTE(review): the {@code stage} parameter is not referenced in this
 * method body - presumably only needed to ensure the FX toolkit is up;
 * confirm before removing.
 *
 * @param stage
 *            Empty JavaFX stage used for rendering
 */
public void render(Stage stage) {
final int blocksize = 512;
Group rootGroup = new Group();
Scene scene = new Scene(rootGroup, blocksize, blocksize, Color.BLACK);
WritableImage writableImage = null; // Buffer
MutableIntObjectMap<String> meta = new IntObjectHashMap<>();
meta.put(0, ""); // Note: deliberately not \0 terminated.
// Next entity number to assign (0 is reserved for "no entity").
int entnum = 1;
// Accumulated result over all levels, current level, and coverage.
int[][] winners = new int[viewport.height][viewport.width];
int[][] winner = new int[viewport.height][viewport.width];
byte[][] alphas = new byte[viewport.height][viewport.width];
long start = System.currentTimeMillis();
for (int lev = minLevel; lev <= maxLevel; lev++) {
if (entities.get(lev) == null) {
continue;
}
LOG.info("Rendering level {}", lev);
// Reset the per-level buffers.
for (int y = 0; y < viewport.height; y++) {
Arrays.fill(alphas[y], (byte) 0);
Arrays.fill(winner[y], 0);
}
// Sort by size.
ArrayList<Entity> order = new ArrayList<>(entities.get(lev));
Collections.sort(order);
Path path = new Path();
ObservableList<PathElement> elems = path.getElements();
for (Entity e : order) {
if (e.polys.size() <= 0) {
continue;
}
// Area to inspect
int xmin = Math.max(0,
(int) Math.floor(viewport.projLon(e.bb.lonmin)) - 1);
int xmax = Math.min(viewport.width,
(int) Math.ceil(viewport.projLon(e.bb.lonmax)) + 1);
int ymin = Math.max(0,
(int) Math.ceil(viewport.projLat(e.bb.latmin)) - 1);
int ymax = Math.min(viewport.height,
(int) Math.floor(viewport.projLat(e.bb.latmax)) + 1);
// System.out.format("%d-%d %d-%d; ", xmin, xmax, ymin, ymax);
// Render the entity blockwise, since the scene is only
// blocksize x blocksize pixels large.
for (int x1 = xmin; x1 < xmax; x1 += blocksize) {
int x2 = Math.min(x1 + blocksize, xmax);
for (int y1 = ymin; y1 < ymax; y1 += blocksize) {
int y2 = Math.min(y1 + blocksize, ymax);
// Implementation note: we are drawing upside down.
elems.clear();
for (float[] f : e.polys) {
assert (f.length > 1);
elems.add(new MoveTo(viewport.projLon(f[0]) - x1,
viewport.projLat(f[1]) - y1));
for (int i = 2, l = f.length; i < l; i += 2) {
elems.add(new LineTo(viewport.projLon(f[i])
- x1, viewport.projLat(f[i + 1]) - y1));
}
}
// Fill white on black, then snapshot and transfer the
// coverage into the winner/alpha maps.
path.setStroke(Color.TRANSPARENT);
path.setFill(Color.WHITE);
path.setFillRule(FillRule.EVEN_ODD);
rootGroup.getChildren().add(path);
writableImage = scene.snapshot(writableImage);
rootGroup.getChildren().remove(path);
transferPixels(writableImage, x1, x2, y1, y2, winner,
entnum, alphas);
}
}
// Note: we construct meta 0-terminated!
meta.put(entnum, e.key);
++entnum;
}
flatten(winners, winner, meta);
}
long end = System.currentTimeMillis();
LOG.info("Rendering time: {} ms", end - start);
buildIndex(meta, winners);
if (imfile != null) {
visualize(meta.size(), winners);
}
}
/**
 * Transfer pixels from the rendering buffer to the winner/alpha maps, for
 * the region [x1, x2) x [y1, y2).
 *
 * The scene is rendered white-on-black, so the low byte of the ARGB value
 * (the blue channel) is used here as the coverage estimate.
 * NOTE(review): this assumes a pure white fill - confirm when changing
 * the fill color in render().
 *
 * @param img
 *            Rendering buffer (snapshot of one block)
 * @param x1
 *            First column, inclusive
 * @param x2
 *            Last column, exclusive
 * @param y1
 *            First row, inclusive
 * @param y2
 *            Last row, exclusive
 * @param winner
 *            Output array, receives the entity number
 * @param c
 *            Entity number
 * @param alphas
 *            Best coverage seen so far, per pixel
 */
public void transferPixels(WritableImage img, int x1, int x2, int y1,
int y2, int[][] winner, int c, byte[][] alphas) {
final PixelReader reader = img.getPixelReader();
for (int y = y1; y < y2; y++) {
final int py = y - y1;
for (int x = x1; x < x2; x++) {
// Coverage estimate from the low byte of the pixel value.
int coverage = reader.getArgb(x - x1, py) & 0xFF;
// Always ignore cover less than 10%.
if (coverage < 0x19) {
continue;
}
// Clip the value range to positive bytes.
if (coverage > 0x7F) {
coverage = 0x7F;
}
// Take over the pixel on equal or better coverage; since
// alphas only ever holds clipped values <= 0x7F, full
// coverage always wins.
if (coverage >= alphas[y][x]) {
alphas[y][x] = (byte) coverage;
winner[y][x] = c;
}
}
}
}
/**
 * Flatten the current level of "winners" onto the accumulated map, and
 * chain each new entity's metadata to the metadata of its most frequent
 * "parent" (the entity it covers most often in the previous levels).
 *
 * @param winners
 *            Accumulated output array (all previously flattened levels);
 *            updated in place
 * @param winner
 *            Winners of the current level (input)
 * @param meta
 *            Metadata map (values are 0-terminated); updated in place
 */
private void flatten(int[][] winners, int[][] winner,
MutableIntObjectMap<String> meta) {
// For every current-level entity, collect a bag of the entity ids it
// overlaps in the already-flattened map.
MutableIntObjectMap<MutableIntBag> parents = new IntObjectHashMap<>();
for (int y = 0; y < viewport.height; y++) {
for (int x = 0; x < viewport.width; x++) {
int id = winner[y][x];
if (id > 0) {
parents.getIfAbsentPut(id, () -> {
return new IntHashBag();
}).add(winners[y][x]);
}
}
}
// Find the most frequent parent:
// (ties are broken towards the smaller parent id, so the result does
// not depend on iteration order)
// NOTE(review): this iterates every occurrence and recomputes
// occurrencesOf each time; IntBag.forEachWithOccurrences would avoid
// the repeated scans - confirm before changing.
parents.forEachKeyValue((i, b) -> {
int best = -1, bcount = -1;
for (IntIterator it = b.intIterator(); it.hasNext();) {
int p = it.next(), c = b.occurrencesOf(p);
if (c > bcount || (c == bcount && p < best)) {
bcount = c;
best = p;
}
}
if (best > 0) {
meta.put(i, meta.get(i) /* 0 terminated! *///
+ meta.get(best) /* 0 terminated */);
}
});
// Copy the current level over the accumulated map; pixels without a
// current-level entity keep their previous winner.
for (int y = 0; y < viewport.height; y++) {
for (int x = 0; x < viewport.width; x++) {
int id = winner[y][x];
if (id > 0) {
winners[y][x] = id;
}
}
}
}
/**
 * Build the output index file.
 *
 * File layout (integers big-endian, via DataOutputStream):
 * header (magic, dimensions, coverage, entity count), then a table of
 * absolute file positions for every RLE-encoded pixmap row and every
 * metadata entry (plus one extra end position), then the row data,
 * then the UTF-8 metadata strings.
 *
 * Fix: the final consistency check used to report {@code firstpos} as the
 * expected position instead of the actually expected {@code pos}, which
 * made the diagnostic misleading.
 *
 * @param meta
 *            Metadata (entity number to 0-terminated key chain)
 * @param winner
 *            Winner array (entity number per pixel)
 */
private void buildIndex(IntObjectMap<String> meta, int[][] winner) {
int[] map = new int[meta.size()];
// Scan pixels for used indexes.
for (int y = 0; y < viewport.height; y++) {
int[] row = winner[y];
for (int x = 0; x < viewport.width; x++) {
map[row[x]] = 1; // present
}
}
// Enumerate used indexes (unused entries map to -1).
int c = 0;
for (int i = 0; i < map.length; i++) {
map[i] = (map[i] == 0) ? -1 : c++;
}
LOG.info("Number of used entities: {}", c);
// Worst case RLE output: alternating pixels need at most 4 bytes per
// pixel (3-byte varint value + 1-byte count), so width * 8 is safe.
byte[] buffer = new byte[viewport.width * 8]; // Output buffer.
if (c > 0xFFFF) {
// In this case, you'll need to extend the file format below.
throw new RuntimeException(
"Current file version only allows 0xFFFF entities.");
}
try (DataOutputStream os = new DataOutputStream(//
new FileOutputStream(oufile))) {
// First prepare all the data, so that we can put
// the final positions into the header table immediately.
// Encode the rows
byte[][] rows = new byte[viewport.height][];
for (int y = 0; y < viewport.height; y++) {
int len = encodeLine16(winner[y], map, buffer);
rows[y] = Arrays.copyOf(buffer, len);
}
// Encode the metadata (only for the used entities, in order).
byte[][] metadata = new byte[c][];
int c2 = 0;
for (int i = 0; i < map.length; i++) {
if (map[i] <= -1) {
continue;
}
byte[] bytes = meta.get(i).getBytes("UTF-8");
assert (bytes.length <= 0xFFFF);
metadata[c2++] = bytes;
}
assert (c2 == c);
// Part 1: HEADER
// Write a "magic" header first.
os.writeInt(0x6e0_6e0_01);
// Write dimensions
os.writeInt(viewport.width);
os.writeInt(viewport.height);
// Write coverage
os.writeFloat((float) viewport.xcover);
os.writeFloat((float) viewport.ycover);
os.writeFloat((float) viewport.xshift);
os.writeFloat((float) viewport.yshift);
// Write the number of indexes
os.writeInt(c);
final int headersize = os.size();
LOG.warn("Position of pixmap index: {}", headersize);
// Position of first row in the data:
// one 4-byte offset per row, per metadata entry, plus the end marker.
final int firstpos = headersize + //
((viewport.height + metadata.length + 1) << 2);
int pos = firstpos;
// Part 2: PIXMAP header
// Write the row header table
for (int y = 0; y < viewport.height; y++) {
os.writeInt(pos);
pos += rows[y].length;
assert (pos > 0);
}
// Part 3: METADATA header
// Write the metadata header table
for (byte[] row : metadata) {
os.writeInt(pos);
pos += row.length;
assert (pos > 0);
}
os.writeInt(pos); // End of line extra value.
if (os.size() != firstpos) {
throw new RuntimeException(
"File construction logic is inconsistent. Expected: "
+ firstpos + " position: " + os.size());
}
// Part 2: PIXMAP rows
for (byte[] row : rows) {
os.write(row, 0, row.length);
}
// Part 3: METADATA entries
for (byte[] row : metadata) {
os.write(row, 0, row.length);
}
// Ensure we are at the predicted position.
if (pos != os.size()) {
throw new RuntimeException(
"File construction logic is inconsistent. Expected: "
+ pos + " position: " + os.size());
}
} catch (IOException e) {
LOG.error("IO error writing index.", e);
}
}
/**
 * Encode a line of the output image map with a simple run-length
 * encoding: for every run, the remapped entity id is written as an
 * unsigned varint, followed by the run length minus one.
 *
 * @param winner
 *            Image map (one row)
 * @param map
 *            Entity ID mapping (dense ids of the used entities)
 * @param buffer
 *            Output buffer
 * @return Number of bytes written
 */
// TODO: develop even more compact RLEs for this use case.
private int encodeLine16(int[] winner, int[] map, byte[] buffer) {
int len = 0;
int start = 0;
while (start < winner.length) {
final int value = winner[start];
// Find the end of the current run.
int end = start + 1;
while (end < winner.length && winner[end] == value) {
++end;
}
// Write the remapped value, then the repetition count - 1.
len = writeUnsignedVarint(buffer, len, map[value]);
len = writeUnsignedVarint(buffer, len, end - start - 1);
start = end;
}
return len;
}
/**
 * Write a single unsigned varint: seven payload bits per byte, least
 * significant group first; every byte except the last carries the high
 * (continuation) bit.
 *
 * @param buffer
 *            Buffer to write to
 * @param pos
 *            Current position
 * @param val
 *            Value to write (interpreted as unsigned)
 * @return New position
 */
private static int writeUnsignedVarint(byte[] buffer, int pos, int val) {
int v = val;
while ((v >>> 7) != 0) {
// More bits follow: emit the payload with the continuation flag.
buffer[pos++] = (byte) (0x80 | (v & 0x7F));
v >>>= 7;
}
// Final byte has the high bit clear (v is now at most 0x7F).
buffer[pos++] = (byte) v;
return pos;
}
/**
 * Visualize the map, by writing a PNG where every entity is drawn in a
 * random opaque color. Index 0 (no entity) keeps color value 0.
 * NOTE(review): palette[0] == 0 means a fully transparent background -
 * confirm this is intended rather than opaque black.
 *
 * @param max
 *            Number of entity values (palette is sized max + 1)
 * @param winner
 *            Winners array
 */
public void visualize(int max, int[][] winner) {
// Randomly assign an opaque color to every entity id.
Random rnd = new Random();
int[] palette = new int[max + 1];
for (int i = 1; i < palette.length; i++) {
palette[i] = 0xFF000000 | rnd.nextInt(0x1000000);
}
try {
WritableImage img = new WritableImage(viewport.width,
viewport.height);
PixelWriter writer = img.getPixelWriter();
for (int y = 0; y < viewport.height; y++) {
// Note: visualization is drawn upside down.
int[] row = winner[viewport.height - 1 - y];
for (int x = 0; x < viewport.width; x++) {
writer.setArgb(x, y, palette[row[x]]);
}
}
ImageIO.write(SwingFXUtils.fromFXImage(img, null), "png",
imfile);
} catch (IOException e) {
LOG.error("IO error writing visualization.", e);
}
}
/**
 * An entity on the map.
 *
 * Identity (equals and hashCode) is based on the metadata key alone, so
 * polygons sharing a key merge into one entity; the natural ordering
 * sorts by bounding box size in descending order and is deliberately not
 * consistent with equals.
 *
 * @author Erich Schubert
 */
public static class Entity implements Comparable<Entity> {
/** Index key (description) */
final String key;
/** Bounding box */
BoundingBox bb;
/** Polygons, as packed lon/lat float pairs */
List<float[]> polys;
/**
 * Constructor.
 *
 * @param key Index key
 */
public Entity(String key) {
this.key = key;
}
@Override
public int hashCode() {
return key.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false;
}
Entity other = (Entity) obj;
return key.equals(other.key);
}
/**
 * Order descending by bounding box size.
 */
@Override
public int compareTo(Entity o) {
return Double.compare(o.bb.size(), bb.size());
}
}
/**
 * Launch, as JavaFX application.
 *
 * Delegates to {@code Application.launch}, which constructs an instance
 * and invokes {@link #start(Stage)} on the FX application thread.
 *
 * @param args
 *            Parameters
 */
public static void main(String[] args) {
launch(args);
}
}
| Reduce memory usage by 20%, by storing alpha in top byte of winner array | src/indexer/java/com/kno10/reversegeocode/indexer/BuildLayeredIndex.java | Reduce memory usage by 20%, by storing alpha in top byte of winner array |
|
Java | agpl-3.0 | 28c329dc01c0d855fd262afcf320fd3e018d0cfc | 0 | Heiner1/AndroidAPS,RoumenGeorgiev/AndroidAPS,samihusseingit/AndroidAPS,MilosKozak/AndroidAPS,MilosKozak/AndroidAPS,PoweRGbg/AndroidAPS,PoweRGbg/AndroidAPS,jotomo/AndroidAPS,AdrianLxM/AndroidAPS,jotomo/AndroidAPS,MilosKozak/AndroidAPS,winni67/AndroidAPS,samihusseingit/AndroidAPS,Heiner1/AndroidAPS,jotomo/AndroidAPS,Heiner1/AndroidAPS,winni67/AndroidAPS,LadyViktoria/AndroidAPS,RoumenGeorgiev/AndroidAPS,LadyViktoria/AndroidAPS,Heiner1/AndroidAPS,AdrianLxM/AndroidAPS,PoweRGbg/AndroidAPS | package info.nightscout.androidaps.plugins.OpenAPSAMA;
import com.eclipsesource.v8.JavaVoidCallback;
import com.eclipsesource.v8.V8;
import com.eclipsesource.v8.V8Array;
import com.eclipsesource.v8.V8Object;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.data.GlucoseStatus;
import info.nightscout.androidaps.data.MealData;
import info.nightscout.androidaps.interfaces.PumpInterface;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderFragment;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderPlugin;
import info.nightscout.androidaps.plugins.Loop.ScriptReader;
import info.nightscout.androidaps.data.IobTotal;
import info.nightscout.client.data.NSProfile;
/**
 * Adapter between AndroidAPS and the OpenAPS AMA "determine-basal"
 * javascript, executed inside an embedded J2V8 runtime.
 *
 * Lifecycle: construct (loads the scripts), {@link #setData}, then
 * {@link #invoke()}, and finally {@link #release()} to free the native
 * V8 handles.
 *
 * Fixes: release() no longer NPEs when setData() was never called
 * (mIobData may still be null), setData() releases the previous IOB
 * array handle instead of leaking it, and the parameter dump in
 * invoke() is now guarded by Config.logAPSResult like all other script
 * logging in this class.
 */
public class DetermineBasalAdapterAMAJS {
private static Logger log = LoggerFactory.getLogger(DetermineBasalAdapterAMAJS.class);
/** Reads the bundled javascript assets. */
private ScriptReader mScriptReader = null;
/** Embedded V8 runtime; owner of all V8Object/V8Array handles below. */
V8 mV8rt;
private V8Object mProfile;
private V8Object mGlucoseStatus;
// Only created in setData(), so this may be null until then.
private V8Array mIobData;
private V8Object mMealData;
private V8Object mCurrentTemp;
private V8Object mAutosensData;
// Names of the global script variables holding the parameters.
private final String PARAM_currentTemp = "currentTemp";
private final String PARAM_iobData = "iobData";
private final String PARAM_glucoseStatus = "glucose_status";
private final String PARAM_profile = "profile";
private final String PARAM_meal_data = "meal_data";
private final String PARAM_autosens_data = "autosens_data";
// JSON snapshots of the inputs of the last invoke(), for inspection.
private String storedCurrentTemp = null;
private String storedIobData = null;
private String storedGlucoseStatus = null;
private String storedProfile = null;
private String storedMeal_data = null;
private String storedAutosens_data = null;
/**
 * Main code
 *
 * @param scriptReader reader for the javascript assets
 * @throws IOException if a script asset cannot be read
 */
public DetermineBasalAdapterAMAJS(ScriptReader scriptReader) throws IOException {
mV8rt = V8.createV8Runtime();
mScriptReader = scriptReader;
init();
initLogCallback();
initProcessExitCallback();
initModuleParent();
loadScript();
}
/**
 * Create the parameter objects with placeholder values and register them
 * as global script variables; setData() fills in the real values.
 */
public void init() {
// Profile
mProfile = new V8Object(mV8rt);
mProfile.add("max_iob", 0);
mProfile.add("carbs_hr", 0);
mProfile.add("dia", 0);
mProfile.add("type", "current");
mProfile.add("max_daily_basal", 0);
mProfile.add("max_basal", 0);
mProfile.add("max_bg", 0);
mProfile.add("min_bg", 0);
mProfile.add("carb_ratio", 0);
mProfile.add("sens", 0);
mProfile.add("max_daily_safety_multiplier", Constants.MAX_DAILY_SAFETY_MULTIPLIER);
mProfile.add("current_basal_safety_multiplier", Constants.CURRENT_BASAL_SAFETY_MULTIPLIER);
mProfile.add("skip_neutral_temps", true);
mProfile.add("temptargetSet", false);
mProfile.add("autosens_adjust_targets", false);
mProfile.add("min_5m_carbimpact", 0);
mProfile.add("current_basal", 0);
mV8rt.add(PARAM_profile, mProfile);
// Current temp
mCurrentTemp = new V8Object(mV8rt);
mCurrentTemp.add("temp", "absolute");
mCurrentTemp.add("duration", 0);
mCurrentTemp.add("rate", 0);
mV8rt.add(PARAM_currentTemp, mCurrentTemp);
// IOB data
// mIobData = new V8Array(mV8rt);
// mV8rt.add(PARAM_iobData, mIobData);
// Glucose status
mGlucoseStatus = new V8Object(mV8rt);
mGlucoseStatus.add("glucose", 0);
mGlucoseStatus.add("delta", 0);
mGlucoseStatus.add("avgdelta", 0);
mV8rt.add(PARAM_glucoseStatus, mGlucoseStatus);
// Meal data
mMealData = new V8Object(mV8rt);
mMealData.add("carbs", 0);
mMealData.add("boluses", 0);
mMealData.add("mealCOB", 0.0d);
mMealData.add("ratio", 0.0d);
mV8rt.add(PARAM_meal_data, mMealData);
// Autosens data
mAutosensData = new V8Object(mV8rt);
mV8rt.add(PARAM_autosens_data, mAutosensData);
}
/**
 * Execute determine_basal() on the previously set parameters.
 *
 * @return the parsed result, or null when the script output was not
 *         valid JSON
 */
public DetermineBasalResultAMA invoke() {
// Dump all script inputs, guarded consistently with the result
// logging below (and the script console logging callback).
if (Config.logAPSResult) {
log.debug(">>> Invoking detemine_basal <<<");
log.debug("Glucose status: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_glucoseStatus + ");"));
log.debug("IOB data: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_iobData + ");"));
log.debug("Current temp: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_currentTemp + ");"));
log.debug("Profile: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_profile + ");"));
log.debug("Meal data: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_meal_data + ");"));
log.debug("Autosens data: " + mV8rt.executeStringScript("JSON.stringify(" + PARAM_autosens_data + ");"));
}
mV8rt.executeVoidScript(
"var rT = determine_basal(" +
PARAM_glucoseStatus + ", " +
PARAM_currentTemp + ", " +
PARAM_iobData + ", " +
PARAM_profile + ", " +
PARAM_autosens_data + ", " +
PARAM_meal_data + ", " +
"tempBasalFunctions" +
");");
String ret = mV8rt.executeStringScript("JSON.stringify(rT);");
if (Config.logAPSResult)
log.debug("Result: " + ret);
V8Object v8ObjectReuslt = mV8rt.getObject("rT");
DetermineBasalResultAMA result = null;
try {
result = new DetermineBasalResultAMA(v8ObjectReuslt, new JSONObject(ret));
} catch (JSONException e) {
e.printStackTrace();
}
// Keep JSON snapshots of the inputs of this run for later display.
storedGlucoseStatus = mV8rt.executeStringScript("JSON.stringify(" + PARAM_glucoseStatus + ");");
storedIobData = mV8rt.executeStringScript("JSON.stringify(" + PARAM_iobData + ");");
storedCurrentTemp = mV8rt.executeStringScript("JSON.stringify(" + PARAM_currentTemp + ");");
storedProfile = mV8rt.executeStringScript("JSON.stringify(" + PARAM_profile + ");");
storedMeal_data = mV8rt.executeStringScript("JSON.stringify(" + PARAM_meal_data + ");");
storedAutosens_data = mV8rt.executeStringScript("JSON.stringify(" + PARAM_autosens_data + ");");
return result;
}
/** @return glucose status JSON of the last invoke(), or null */
String getGlucoseStatusParam() {
return storedGlucoseStatus;
}
/** @return current temp JSON of the last invoke(), or null */
String getCurrentTempParam() {
return storedCurrentTemp;
}
/** @return IOB data JSON of the last invoke(), or null */
String getIobDataParam() {
return storedIobData;
}
/** @return profile JSON of the last invoke(), or null */
String getProfileParam() {
return storedProfile;
}
/** @return meal data JSON of the last invoke(), or null */
String getMealDataParam() {
return storedMeal_data;
}
/** @return autosens data JSON of the last invoke(), or null */
String getAutosensDataParam() {
return storedAutosens_data;
}
/**
 * Load the OpenAPS scripts and wire up their module exports as global
 * script functions.
 *
 * @throws IOException if a script asset cannot be read
 */
private void loadScript() throws IOException {
mV8rt.executeVoidScript(readFile("OpenAPSAMA/round-basal.js"), "OpenAPSAMA/round-basal.js", 0);
mV8rt.executeVoidScript("var round_basal = module.exports;");
// The scripts require() only round_basal, so stub it accordingly.
mV8rt.executeVoidScript("require = function() {return round_basal;};");
mV8rt.executeVoidScript(readFile("OpenAPSAMA/basal-set-temp.js"), "OpenAPSAMA/basal-set-temp.js ", 0);
mV8rt.executeVoidScript("var tempBasalFunctions = module.exports;");
mV8rt.executeVoidScript(
readFile("OpenAPSAMA/determine-basal.js"),
"OpenAPSAMA/determine-basal.js",
0);
mV8rt.executeVoidScript("var determine_basal = module.exports;");
mV8rt.executeVoidScript(
"var setTempBasal = function (rate, duration, profile, rT, offline) {" +
"rT.duration = duration;\n" +
" rT.rate = rate;" +
"return rT;" +
"};",
"setTempBasal.js",
0
);
}
/** Fake a node.js "module" object, so the scripts export correctly. */
private void initModuleParent() {
mV8rt.executeVoidScript("var module = {\"parent\":Boolean(1)};");
}
/** Provide a process.exit() stub that only logs the exit request. */
private void initProcessExitCallback() {
JavaVoidCallback callbackProccessExit = new JavaVoidCallback() {
@Override
public void invoke(V8Object arg0, V8Array parameters) {
if (parameters.length() > 0) {
Object arg1 = parameters.get(0);
log.error("ProccessExit " + arg1);
}
}
};
mV8rt.registerJavaMethod(callbackProccessExit, "proccessExit");
mV8rt.executeVoidScript("var process = {\"exit\": function () { proccessExit(); } };");
}
/** Route console.log/console.error of the scripts into our logger. */
private void initLogCallback() {
JavaVoidCallback callbackLog = new JavaVoidCallback() {
@Override
public void invoke(V8Object arg0, V8Array parameters) {
int i = 0;
String s = "";
while (i < parameters.length()) {
Object arg = parameters.get(i);
s += arg + " ";
i++;
}
if (!s.equals("") && Config.logAPSResult)
log.debug("Script debug: " + s);
}
};
mV8rt.registerJavaMethod(callbackLog, "log");
mV8rt.executeVoidScript("var console = {\"log\":log, \"error\":log};");
}
/**
 * Fill the script parameter objects from the current treatment data.
 * May be called repeatedly before successive invoke() calls.
 */
public void setData(NSProfile profile,
double maxIob,
double maxBasal,
double minBg,
double maxBg,
double targetBg,
PumpInterface pump,
IobTotal[] iobArray,
GlucoseStatus glucoseStatus,
MealData mealData,
double autosensDataRatio,
boolean tempTargetSet,
double min_5m_carbimpact) {
String units = profile.getUnits();
mProfile.add("max_iob", maxIob);
mProfile.add("carbs_hr", profile.getCarbAbsorbtionRate());
mProfile.add("dia", profile.getDia());
mProfile.add("type", "current");
mProfile.add("max_daily_basal", profile.getMaxDailyBasal());
mProfile.add("max_basal", maxBasal);
mProfile.add("min_bg", minBg);
mProfile.add("max_bg", maxBg);
mProfile.add("target_bg", targetBg);
mProfile.add("carb_ratio", profile.getIc(profile.secondsFromMidnight()));
mProfile.add("sens", NSProfile.toMgdl(profile.getIsf(NSProfile.secondsFromMidnight()).doubleValue(), units));
mProfile.add("current_basal", pump.getBaseBasalRate());
mProfile.add("temptargetSet", tempTargetSet);
mProfile.add("autosens_adjust_targets", MainApp.getConfigBuilder().isAMAModeEnabled());
mProfile.add("min_5m_carbimpact", min_5m_carbimpact);
mCurrentTemp.add("duration", pump.getTempBasalRemainingMinutes());
mCurrentTemp.add("rate", pump.getTempBasalAbsoluteRate());
// Replace the IOB array; release the previous handle first to avoid
// leaking V8 objects when setData() is called more than once.
if (mIobData != null) {
mIobData.release();
}
mIobData = mV8rt.executeArrayScript(IobTotal.convertToJSONArray(iobArray).toString());
mV8rt.add(PARAM_iobData, mIobData);
mGlucoseStatus.add("glucose", glucoseStatus.glucose);
mGlucoseStatus.add("delta", glucoseStatus.delta);
mGlucoseStatus.add("short_avgdelta", glucoseStatus.short_avgdelta);
mGlucoseStatus.add("long_avgdelta", glucoseStatus.long_avgdelta);
mMealData.add("carbs", mealData.carbs);
mMealData.add("boluses", mealData.boluses);
mMealData.add("mealCOB", mealData.mealCOB);
mAutosensData.add("ratio", autosensDataRatio);
}
/**
 * Release all native V8 handles. Call exactly once when done.
 */
public void release() {
mProfile.release();
mCurrentTemp.release();
// mIobData is only created in setData(), so it may still be null.
if (mIobData != null) {
mIobData.release();
}
mMealData.release();
mGlucoseStatus.release();
if (mAutosensData != null) {
mAutosensData.release();
}
mV8rt.release();
}
/**
 * Read a javascript asset as UTF-8, stripping a leading node shebang
 * ("#!/usr/bin/env node" plus newline, 20 characters) if present.
 *
 * @param filename asset name
 * @return the file content
 * @throws IOException on read errors
 */
public String readFile(String filename) throws IOException {
byte[] bytes = mScriptReader.readFile(filename);
String string = new String(bytes, "UTF-8");
if (string.startsWith("#!/usr/bin/env node")) {
string = string.substring(20);
}
return string;
}
}
| app/src/main/java/info/nightscout/androidaps/plugins/OpenAPSAMA/DetermineBasalAdapterAMAJS.java | package info.nightscout.androidaps.plugins.OpenAPSAMA;
import com.eclipsesource.v8.JavaVoidCallback;
import com.eclipsesource.v8.V8;
import com.eclipsesource.v8.V8Array;
import com.eclipsesource.v8.V8Object;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.data.GlucoseStatus;
import info.nightscout.androidaps.data.MealData;
import info.nightscout.androidaps.interfaces.PumpInterface;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderFragment;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderPlugin;
import info.nightscout.androidaps.plugins.Loop.ScriptReader;
import info.nightscout.androidaps.data.IobTotal;
import info.nightscout.client.data.NSProfile;
/**
 * Adapter between AndroidAPS and the OpenAPS AMA "determine-basal"
 * javascript, executed inside an embedded J2V8 runtime.
 *
 * Lifecycle: construct (loads the scripts), {@link #setData}, then
 * {@link #invoke()}, and finally {@link #release()} to free the native
 * V8 handles.
 *
 * Fixes: release() no longer NPEs when setData() was never called
 * (mIobData may still be null), and setData() releases the previous IOB
 * array handle instead of leaking it on repeated calls.
 */
public class DetermineBasalAdapterAMAJS {
private static Logger log = LoggerFactory.getLogger(DetermineBasalAdapterAMAJS.class);
/** Reads the bundled javascript assets. */
private ScriptReader mScriptReader = null;
/** Embedded V8 runtime; owner of all V8Object/V8Array handles below. */
V8 mV8rt;
private V8Object mProfile;
private V8Object mGlucoseStatus;
// Only created in setData(), so this may be null until then.
private V8Array mIobData;
private V8Object mMealData;
private V8Object mCurrentTemp;
private V8Object mAutosensData;
// Names of the global script variables holding the parameters.
private final String PARAM_currentTemp = "currentTemp";
private final String PARAM_iobData = "iobData";
private final String PARAM_glucoseStatus = "glucose_status";
private final String PARAM_profile = "profile";
private final String PARAM_meal_data = "meal_data";
private final String PARAM_autosens_data = "autosens_data";
// JSON snapshots of the inputs of the last invoke(), for inspection.
private String storedCurrentTemp = null;
private String storedIobData = null;
private String storedGlucoseStatus = null;
private String storedProfile = null;
private String storedMeal_data = null;
private String storedAutosens_data = null;
/**
 * Main code
 *
 * @param scriptReader reader for the javascript assets
 * @throws IOException if a script asset cannot be read
 */
public DetermineBasalAdapterAMAJS(ScriptReader scriptReader) throws IOException {
mV8rt = V8.createV8Runtime();
mScriptReader = scriptReader;
init();
initLogCallback();
initProcessExitCallback();
initModuleParent();
loadScript();
}
/**
 * Create the parameter objects with placeholder values and register them
 * as global script variables; setData() fills in the real values.
 */
public void init() {
// Profile
mProfile = new V8Object(mV8rt);
mProfile.add("max_iob", 0);
mProfile.add("carbs_hr", 0);
mProfile.add("dia", 0);
mProfile.add("type", "current");
mProfile.add("max_daily_basal", 0);
mProfile.add("max_basal", 0);
mProfile.add("max_bg", 0);
mProfile.add("min_bg", 0);
mProfile.add("carb_ratio", 0);
mProfile.add("sens", 0);
mProfile.add("max_daily_safety_multiplier", Constants.MAX_DAILY_SAFETY_MULTIPLIER);
mProfile.add("current_basal_safety_multiplier", Constants.CURRENT_BASAL_SAFETY_MULTIPLIER);
mProfile.add("skip_neutral_temps", true);
mProfile.add("temptargetSet", false);
mProfile.add("autosens_adjust_targets", false);
mProfile.add("min_5m_carbimpact", 0);
mProfile.add("current_basal", 0);
mV8rt.add(PARAM_profile, mProfile);
// Current temp
mCurrentTemp = new V8Object(mV8rt);
mCurrentTemp.add("temp", "absolute");
mCurrentTemp.add("duration", 0);
mCurrentTemp.add("rate", 0);
mV8rt.add(PARAM_currentTemp, mCurrentTemp);
// IOB data
// mIobData = new V8Array(mV8rt);
// mV8rt.add(PARAM_iobData, mIobData);
// Glucose status
mGlucoseStatus = new V8Object(mV8rt);
mGlucoseStatus.add("glucose", 0);
mGlucoseStatus.add("delta", 0);
mGlucoseStatus.add("avgdelta", 0);
mV8rt.add(PARAM_glucoseStatus, mGlucoseStatus);
// Meal data
mMealData = new V8Object(mV8rt);
mMealData.add("carbs", 0);
mMealData.add("boluses", 0);
mMealData.add("mealCOB", 0.0d);
mMealData.add("ratio", 0.0d);
mV8rt.add(PARAM_meal_data, mMealData);
// Autosens data
mAutosensData = new V8Object(mV8rt);
mV8rt.add(PARAM_autosens_data, mAutosensData);
}
/**
 * Execute determine_basal() on the previously set parameters.
 *
 * @return the parsed result, or null when the script output was not
 *         valid JSON
 */
public DetermineBasalResultAMA invoke() {
// Echo all script inputs through the script console; the console is
// routed to our logger, which honors Config.logAPSResult.
mV8rt.executeVoidScript(
"console.error(\"determine_basal(\"+\n" +
"JSON.stringify(" + PARAM_glucoseStatus + ")+ \", \" +\n" +
"JSON.stringify(" + PARAM_currentTemp + ")+ \", \" +\n" +
"JSON.stringify(" + PARAM_iobData + ")+ \", \" +\n" +
"JSON.stringify(" + PARAM_profile + ")+ \", \" +\n" +
"JSON.stringify(" + PARAM_autosens_data + ")+ \", \" +\n" +
"JSON.stringify(" + PARAM_meal_data + ")+ \") \");"
);
mV8rt.executeVoidScript(
"var rT = determine_basal(" +
PARAM_glucoseStatus + ", " +
PARAM_currentTemp + ", " +
PARAM_iobData + ", " +
PARAM_profile + ", " +
PARAM_autosens_data + ", " +
PARAM_meal_data + ", " +
"tempBasalFunctions" +
");");
String ret = mV8rt.executeStringScript("JSON.stringify(rT);");
if (Config.logAPSResult)
log.debug("Result: " + ret);
V8Object v8ObjectReuslt = mV8rt.getObject("rT");
DetermineBasalResultAMA result = null;
try {
result = new DetermineBasalResultAMA(v8ObjectReuslt, new JSONObject(ret));
} catch (JSONException e) {
e.printStackTrace();
}
// Keep JSON snapshots of the inputs of this run for later display.
storedGlucoseStatus = mV8rt.executeStringScript("JSON.stringify(" + PARAM_glucoseStatus + ");");
storedIobData = mV8rt.executeStringScript("JSON.stringify(" + PARAM_iobData + ");");
storedCurrentTemp = mV8rt.executeStringScript("JSON.stringify(" + PARAM_currentTemp + ");");
storedProfile = mV8rt.executeStringScript("JSON.stringify(" + PARAM_profile + ");");
storedMeal_data = mV8rt.executeStringScript("JSON.stringify(" + PARAM_meal_data + ");");
storedAutosens_data = mV8rt.executeStringScript("JSON.stringify(" + PARAM_autosens_data + ");");
return result;
}
/** @return glucose status JSON of the last invoke(), or null */
String getGlucoseStatusParam() {
return storedGlucoseStatus;
}
/** @return current temp JSON of the last invoke(), or null */
String getCurrentTempParam() {
return storedCurrentTemp;
}
/** @return IOB data JSON of the last invoke(), or null */
String getIobDataParam() {
return storedIobData;
}
/** @return profile JSON of the last invoke(), or null */
String getProfileParam() {
return storedProfile;
}
/** @return meal data JSON of the last invoke(), or null */
String getMealDataParam() {
return storedMeal_data;
}
/** @return autosens data JSON of the last invoke(), or null */
String getAutosensDataParam() {
return storedAutosens_data;
}
/**
 * Load the OpenAPS scripts and wire up their module exports as global
 * script functions.
 *
 * @throws IOException if a script asset cannot be read
 */
private void loadScript() throws IOException {
mV8rt.executeVoidScript(readFile("OpenAPSAMA/round-basal.js"), "OpenAPSAMA/round-basal.js", 0);
mV8rt.executeVoidScript("var round_basal = module.exports;");
// The scripts require() only round_basal, so stub it accordingly.
mV8rt.executeVoidScript("require = function() {return round_basal;};");
mV8rt.executeVoidScript(readFile("OpenAPSAMA/basal-set-temp.js"), "OpenAPSAMA/basal-set-temp.js ", 0);
mV8rt.executeVoidScript("var tempBasalFunctions = module.exports;");
mV8rt.executeVoidScript(
readFile("OpenAPSAMA/determine-basal.js"),
"OpenAPSAMA/determine-basal.js",
0);
mV8rt.executeVoidScript("var determine_basal = module.exports;");
mV8rt.executeVoidScript(
"var setTempBasal = function (rate, duration, profile, rT, offline) {" +
"rT.duration = duration;\n" +
" rT.rate = rate;" +
"return rT;" +
"};",
"setTempBasal.js",
0
);
}
/** Fake a node.js "module" object, so the scripts export correctly. */
private void initModuleParent() {
mV8rt.executeVoidScript("var module = {\"parent\":Boolean(1)};");
}
/** Provide a process.exit() stub that only logs the exit request. */
private void initProcessExitCallback() {
JavaVoidCallback callbackProccessExit = new JavaVoidCallback() {
@Override
public void invoke(V8Object arg0, V8Array parameters) {
if (parameters.length() > 0) {
Object arg1 = parameters.get(0);
log.error("ProccessExit " + arg1);
}
}
};
mV8rt.registerJavaMethod(callbackProccessExit, "proccessExit");
mV8rt.executeVoidScript("var process = {\"exit\": function () { proccessExit(); } };");
}
/** Route console.log/console.error of the scripts into our logger. */
private void initLogCallback() {
JavaVoidCallback callbackLog = new JavaVoidCallback() {
@Override
public void invoke(V8Object arg0, V8Array parameters) {
int i = 0;
String s = "";
while (i < parameters.length()) {
Object arg = parameters.get(i);
s += arg + " ";
i++;
}
if (!s.equals("") && Config.logAPSResult)
log.debug("Script debug: " + s);
}
};
mV8rt.registerJavaMethod(callbackLog, "log");
mV8rt.executeVoidScript("var console = {\"log\":log, \"error\":log};");
}
/**
 * Fill the script parameter objects from the current treatment data.
 * May be called repeatedly before successive invoke() calls.
 */
public void setData(NSProfile profile,
double maxIob,
double maxBasal,
double minBg,
double maxBg,
double targetBg,
PumpInterface pump,
IobTotal[] iobArray,
GlucoseStatus glucoseStatus,
MealData mealData,
double autosensDataRatio,
boolean tempTargetSet,
double min_5m_carbimpact) {
String units = profile.getUnits();
mProfile.add("max_iob", maxIob);
mProfile.add("carbs_hr", profile.getCarbAbsorbtionRate());
mProfile.add("dia", profile.getDia());
mProfile.add("type", "current");
mProfile.add("max_daily_basal", profile.getMaxDailyBasal());
mProfile.add("max_basal", maxBasal);
mProfile.add("min_bg", minBg);
mProfile.add("max_bg", maxBg);
mProfile.add("target_bg", targetBg);
mProfile.add("carb_ratio", profile.getIc(profile.secondsFromMidnight()));
mProfile.add("sens", NSProfile.toMgdl(profile.getIsf(NSProfile.secondsFromMidnight()).doubleValue(), units));
mProfile.add("current_basal", pump.getBaseBasalRate());
mProfile.add("temptargetSet", tempTargetSet);
mProfile.add("autosens_adjust_targets", MainApp.getConfigBuilder().isAMAModeEnabled());
mProfile.add("min_5m_carbimpact", min_5m_carbimpact);
mCurrentTemp.add("duration", pump.getTempBasalRemainingMinutes());
mCurrentTemp.add("rate", pump.getTempBasalAbsoluteRate());
// Replace the IOB array; release the previous handle first to avoid
// leaking V8 objects when setData() is called more than once.
if (mIobData != null) {
mIobData.release();
}
mIobData = mV8rt.executeArrayScript(IobTotal.convertToJSONArray(iobArray).toString());
mV8rt.add(PARAM_iobData, mIobData);
mGlucoseStatus.add("glucose", glucoseStatus.glucose);
mGlucoseStatus.add("delta", glucoseStatus.delta);
mGlucoseStatus.add("short_avgdelta", glucoseStatus.short_avgdelta);
mGlucoseStatus.add("long_avgdelta", glucoseStatus.long_avgdelta);
mMealData.add("carbs", mealData.carbs);
mMealData.add("boluses", mealData.boluses);
mMealData.add("mealCOB", mealData.mealCOB);
mAutosensData.add("ratio", autosensDataRatio);
}
/**
 * Release all native V8 handles. Call exactly once when done.
 */
public void release() {
mProfile.release();
mCurrentTemp.release();
// mIobData is only created in setData(), so it may still be null.
if (mIobData != null) {
mIobData.release();
}
mMealData.release();
mGlucoseStatus.release();
if (mAutosensData != null) {
mAutosensData.release();
}
mV8rt.release();
}
/**
 * Read a javascript asset as UTF-8, stripping a leading node shebang
 * ("#!/usr/bin/env node" plus newline, 20 characters) if present.
 *
 * @param filename asset name
 * @return the file content
 * @throws IOException on read errors
 */
public String readFile(String filename) throws IOException {
byte[] bytes = mScriptReader.readFile(filename);
String string = new String(bytes, "UTF-8");
if (string.startsWith("#!/usr/bin/env node")) {
string = string.substring(20);
}
return string;
}
}
| log script params separately
| app/src/main/java/info/nightscout/androidaps/plugins/OpenAPSAMA/DetermineBasalAdapterAMAJS.java | log script params separately |
|
Java | agpl-3.0 | 7a8f596be66883f61f637bb93990a7f961ecbb54 | 0 | neo4j-attic/shell | package org.neo4j.util.shell;
import java.rmi.RemoteException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class BashVariableInterpreter
{
private static final Map<String, Replacer> REPLACERS =
new HashMap<String, Replacer>();
static
{
REPLACERS.put( "d", new DateReplacer( "EEE MMM dd" ) );
REPLACERS.put( "h", new HostReplacer() );
REPLACERS.put( "H", new HostReplacer() );
REPLACERS.put( "s", new HostReplacer() );
REPLACERS.put( "t", new DateReplacer( "HH:mm:ss" ) );
REPLACERS.put( "T", new DateReplacer( "KK:mm:ss" ) );
REPLACERS.put( "@", new DateReplacer( "KK:mm aa" ) );
REPLACERS.put( "A", new DateReplacer( "HH:mm" ) );
}
public void addReplacer( String key, Replacer replacer )
{
REPLACERS.put( key, replacer );
}
public String interpret( String string, ShellServer server,
Session session )
{
for ( String key : REPLACERS.keySet() )
{
Replacer replacer = REPLACERS.get( key );
String value = replacer.getReplacement( server, session );
string = string.replaceAll( "\\\\" + key, value );
}
return string;
}
public static interface Replacer
{
String getReplacement( ShellServer server, Session session );
}
public static class StaticReplacer implements Replacer
{
private String value;
public StaticReplacer( String value )
{
this.value = value;
}
public String getReplacement( ShellServer server, Session session )
{
return this.value;
}
}
public static class DateReplacer implements Replacer
{
private DateFormat format;
public DateReplacer( String format )
{
this.format = new SimpleDateFormat( format );
}
public String getReplacement( ShellServer server, Session session )
{
return format.format( new Date() );
}
}
public static class HostReplacer implements Replacer
{
public String getReplacement( ShellServer server, Session session )
{
try
{
return server.getName();
}
catch ( RemoteException e )
{
return "";
}
}
}
}
| src/java/org/neo4j/util/shell/BashVariableInterpreter.java | package org.neo4j.util.shell;
import java.rmi.RemoteException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Interprets bash-style backslash escapes (\d, \h, \t, ...) in a prompt
 * string, substituting each recognised escape with a dynamically produced
 * value such as the current date/time or the shell server's host name.
 */
public class BashVariableInterpreter
{
    // Shared registry of escape key -> value provider.
    // NOTE(review): static, but mutated via the instance method
    // addReplacer(), so registrations are visible across all instances.
    private static final Map<String, Replacer> REPLACERS =
        new HashMap<String, Replacer>();
    static
    {
        REPLACERS.put( "d", new DateReplacer( "EEE MMM dd" ) );
        REPLACERS.put( "h", new HostReplacer() );
        REPLACERS.put( "H", new HostReplacer() );
        REPLACERS.put( "s", new HostReplacer() );
        REPLACERS.put( "t", new DateReplacer( "HH:mm:ss" ) );
        REPLACERS.put( "T", new DateReplacer( "KK:mm:ss" ) );
        REPLACERS.put( "@", new DateReplacer( "KK:mm aa" ) );
        REPLACERS.put( "A", new DateReplacer( "HH:mm" ) );
        // Hard-coded placeholders: \u is always "user"; \v/\V carry a
        // frozen version string that must be bumped by hand each release.
        REPLACERS.put( "u", new StaticReplacer( "user" ) );
        REPLACERS.put( "v", new StaticReplacer( "1.0-b6" ) );
        REPLACERS.put( "V", new StaticReplacer( "1.0-b6" ) );
    }

    /** Registers (or overrides) the replacer used for the given escape key. */
    public void addReplacer( String key, Replacer replacer )
    {
        REPLACERS.put( key, replacer );
    }

    /**
     * Replaces every registered "\&lt;key&gt;" escape in the given string with
     * the value produced by its Replacer.
     *
     * NOTE(review): replaceAll() treats the key as a regular expression and
     * the value as a regex replacement string -- values containing '$' or
     * '\' will be mis-expanded or throw, and regex-special keys added via
     * addReplacer() will not match literally.
     */
    public String interpret( String string, ShellServer server,
        Session session )
    {
        for ( String key : REPLACERS.keySet() )
        {
            Replacer replacer = REPLACERS.get( key );
            String value = replacer.getReplacement( server, session );
            string = string.replaceAll( "\\\\" + key, value );
        }
        return string;
    }

    /** Supplies the replacement value for one escape key. */
    public static interface Replacer
    {
        String getReplacement( ShellServer server, Session session );
    }

    /** Replacer which always yields the same fixed value. */
    public static class StaticReplacer implements Replacer
    {
        private String value;

        public StaticReplacer( String value )
        {
            this.value = value;
        }

        public String getReplacement( ShellServer server, Session session )
        {
            return this.value;
        }
    }

    /**
     * Replacer which formats the current time using a fixed pattern.
     * NOTE(review): SimpleDateFormat is not thread-safe; this instance is
     * shared by all interpret() calls for its key.
     */
    public static class DateReplacer implements Replacer
    {
        private DateFormat format;

        public DateReplacer( String format )
        {
            this.format = new SimpleDateFormat( format );
        }

        public String getReplacement( ShellServer server, Session session )
        {
            return format.format( new Date() );
        }
    }

    /**
     * Replacer which resolves to the shell server's name, or the empty
     * string when the remote call fails.
     */
    public static class HostReplacer implements Replacer
    {
        public String getReplacement( ShellServer server, Session session )
        {
            try
            {
                return server.getName();
            }
            catch ( RemoteException e )
            {
                return "";
            }
        }
    }
}
| Removed \v and \V since those had a hardcoded version "1.0-b6". Also it would have to be
changed each time the version bumps up, unnecessary says I.
Also removed \u since it only displayed hardcoded "user".
git-svn-id: 0207cb1dc1436c9eacee4992d202b61611a1c031@623 0b971d98-bb2f-0410-8247-b05b2b5feb2a
| src/java/org/neo4j/util/shell/BashVariableInterpreter.java | Removed \v and \V since those had a hardcoded version "1.0-b6". Also it would have to be changed each time the version bump up, unnessecary says I. Also removed \u since it only displayed hardcoded "user". |
|
Java | agpl-3.0 | 85144bdea39aaaa849b4f91f59de37a4dc7f4251 | 0 | aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,aihua/opennms,roskens/opennms-pre-github,tdefilip/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,aihua/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github | //
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2003 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2003 Jan 31: Cleaned up some unused imports.
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <[email protected]>
// http://www.opennms.org/
// http://www.opennms.com/
//
// Tab Size = 8
//
// ReparentViaSmb.java,v 1.1.1.1 2001/11/11 17:34:35 ben Exp
//
package org.opennms.netmgt.capsd;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.opennms.core.utils.DBUtils;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.eventd.EventIpcManagerFactory;
import org.opennms.netmgt.model.events.EventBuilder;
/**
* This class is designed to reparent interfaces in the database based on the
* SMB protocol. Specifically, if two nodes in the 'node' table have identical
* NetBIOS names it is assumed that those two nodes actually represent different
* interfaces (physical or alias'd) on the same box. The node with the lowest
* nodeID becomes the "reparent node" and the other nodes are considered
* duplicates. All interfaces under each duplicate node are then reparented
* under the "reparent node" and the duplicate node(s) are flagged in the
* database as deleted (nodeType='D').
*
* @author <A HREF="[email protected]">Mike </A>
* @author <A HREF="http://www.opennms.org/">OpenNMS </A>
* @author <A HREF="[email protected]">Mike </A>
* @author <A HREF="http://www.opennms.org/">OpenNMS </A>
* @version 1.1.1.1
*/
public final class ReparentViaSmb {
/**
* SQL Statements
*/
final static String SQL_DB_RETRIEVE_NODES = "SELECT nodeid,nodenetbiosname FROM node WHERE nodeType!='D' AND nodenetbiosname is not null ORDER BY nodeid";
final static String SQL_DB_RETRIEVE_NODE = "SELECT nodesysname,nodesysdescription,nodelabel,nodelabelsource FROM node WHERE nodeid=? AND nodeType!='D'";
final static String SQL_DB_RETRIEVE_INTERFACES = "SELECT ipaddr,iphostname FROM ipinterface WHERE nodeid=? AND isManaged!='D'";
final static String SQL_DB_REPARENT_IP_INTERFACE = "UPDATE ipinterface SET nodeID=? WHERE nodeID=? AND isManaged!='D'";
final static String SQL_DB_REPARENT_SNMP_INTERFACE = "UPDATE snmpinterface SET nodeID=? WHERE nodeID=?";
final static String SQL_DB_REPARENT_IF_SERVICES = "UPDATE ifservices SET nodeID=? WHERE nodeID=? AND status!='D'";
final static String SQL_DB_DELETE_NODE = "UPDATE node SET nodeType='D' WHERE nodeID=?";
/**
* Database connection
*/
private java.sql.Connection m_connection;
/**
* List of LightWeightNodeEntry objects intialized from the content of the
* 'node' table.
*/
private List<LightWeightNodeEntry> m_existingNodeList;
/**
* Contains a mapping of reparent nodes and the list of interfaces which
* were reparented under them.
*/
private Map<LightWeightNodeEntry, List<LightWeightIfEntry>> m_reparentedIfMap;
/**
* Contains of mapping of reparent nodes and the list of duplicate nodes
* associated with them.
*/
private Map<LightWeightNodeEntry, List<LightWeightNodeEntry>> m_reparentNodeMap;
/**
* Contains hard-coded list of NetBIOS names which are not subject to
* reparenting via SMB.
*/
private static List<String> m_netbiosNamesToSkip;
//
// Static initialization block to initialize list of NetBIOS names
// which should not be considered for reparenting
//
// Populate the skip list once at class-load time. These names are
// presumably OS-default NetBIOS names shared by many unrelated machines,
// so matching on them would wrongly merge distinct nodes -- hence they are
// never used as reparenting evidence.
static {
    m_netbiosNamesToSkip = new ArrayList<String>(4);
    m_netbiosNamesToSkip.add("WORKSTATION");
    m_netbiosNamesToSkip.add("DEFAULT");
    m_netbiosNamesToSkip.add("OEMCOMPUTER");
    m_netbiosNamesToSkip.add("COMPUTER");
}
/**
* <P>
* LightWeightIfEntry is designed to hold specific information about an IP
* interface in the database such as its IP address, its parent node id, and
* its managed status and represents a lighter weight version of the
* DbIpInterfaceEntry class.
* </P>
*/
/**
 * Minimal, immutable snapshot of one 'ipinterface' row involved in a
 * reparenting operation: the interface address, its host name, the node
 * it now belongs to, and the node it belonged to before. A lighter-weight
 * stand-in for DbIpInterfaceEntry.
 */
private static final class LightWeightIfEntry {
    // All state is fixed at construction time; instances are created while
    // reparenting and read back when events are generated.
    private final String m_ifAddress;
    private final String m_ifHostName;
    private final int m_newParentNodeId;
    private final int m_originalParentNodeId;

    /**
     * Creates a snapshot of a reparented interface.
     *
     * @param address   the interface's IP address
     * @param hostname  the interface's IP host name
     * @param nodeId    the node id the interface now belongs to
     * @param oldNodeId the node id the interface belonged to before
     */
    public LightWeightIfEntry(String address, String hostname, int nodeId, int oldNodeId) {
        m_ifAddress = address;
        m_ifHostName = hostname;
        m_newParentNodeId = nodeId;
        m_originalParentNodeId = oldNodeId;
    }

    /** Returns the interface's IP address. */
    public String getAddress() {
        return m_ifAddress;
    }

    /** Returns the interface's IP host name. */
    public String getHostName() {
        return m_ifHostName;
    }

    /** Returns the node id the interface was reparented under. */
    public int getParentNodeId() {
        return m_newParentNodeId;
    }

    /** Returns the node id the interface originally belonged to. */
    public int getOldParentNodeId() {
        return m_originalParentNodeId;
    }
}
/**
* This class is a lighter weight version of the DbNodeEntry class for use
* in SMB reparenting.
*/
/**
 * Lighter-weight stand-in for DbNodeEntry used during SMB reparenting.
 *
 * Instances serve as HashMap keys (m_reparentNodeMap, m_reparentedIfMap),
 * so equals()/hashCode() are based solely on the NetBIOS name, which is
 * upper-cased in the constructor and never reassigned. The previous
 * hashCode() (HashCodeBuilder) also mixed in the node id plus the MUTABLE
 * m_duplicate and m_hwNodeEntry fields, which (a) violated the
 * equals/hashCode contract -- entries equal by name hashed differently --
 * and (b) changed a key's hash after map insertion once
 * setHeavyWeightNodeEntry()/setDuplicate() were called, corrupting lookups.
 */
private static final class LightWeightNodeEntry {
    private int m_nodeId;
    private String m_netbiosName;  // upper-cased in ctor; never reassigned
    private boolean m_duplicate;
    private DbNodeEntry m_hwNodeEntry;

    /**
     * Constructs a new LightWeightNodeEntry object.
     *
     * @param nodeID      node's identifier
     * @param netbiosName node's NetBIOS name; may be null
     */
    LightWeightNodeEntry(int nodeID, String netbiosName) {
        m_nodeId = nodeID;
        if (netbiosName != null)
            m_netbiosName = netbiosName.toUpperCase();
        else
            m_netbiosName = null;
        m_duplicate = false;
        m_hwNodeEntry = null;
    }

    /** Returns the node identifier. */
    int getNodeId() {
        return m_nodeId;
    }

    /** Returns the node's upper-cased NetBIOS name, or null. */
    String getNetbiosName() {
        return m_netbiosName;
    }

    /**
     * Marks or clears this node's "duplicate of another node" flag.
     *
     * @param dupFlag the state for the duplicate flag
     */
    void setDuplicate(boolean dupFlag) {
        m_duplicate = dupFlag;
    }

    /** Returns true if this node has been flagged as a duplicate. */
    boolean isDuplicate() {
        return m_duplicate;
    }

    /** Attaches the full DbNodeEntry (used later for event parameters). */
    void setHeavyWeightNodeEntry(DbNodeEntry hwNodeEntry) {
        m_hwNodeEntry = hwNodeEntry;
    }

    /** Returns the attached DbNodeEntry, or null if none was set. */
    DbNodeEntry getHeavyWeightNodeEntry() {
        return m_hwNodeEntry;
    }

    /** Returns true if a DbNodeEntry has been attached. */
    boolean hasHeavyWeightNodeEntry() {
        return m_hwNodeEntry != null;
    }

    /**
     * Two entries are equal when they are the same object or both carry
     * the same non-null NetBIOS name. An entry with a null NetBIOS name is
     * equal only to itself -- previously even a.equals(a) was false for
     * null names, violating reflexivity. (In practice names are non-null:
     * the node query filters on "nodenetbiosname is not null".)
     */
    @Override
    public boolean equals(final Object o) {
        if (o == this) return true;
        if (!(o instanceof LightWeightNodeEntry)) return false;
        LightWeightNodeEntry node = (LightWeightNodeEntry) o;
        return m_netbiosName != null && m_netbiosName.equals(node.m_netbiosName);
    }

    /**
     * Consistent with equals(): derived only from the immutable NetBIOS
     * name, so a key's hash can never change after map insertion.
     */
    @Override
    public int hashCode() {
        return m_netbiosName == null ? 0 : m_netbiosName.hashCode();
    }
}
/**
* Class constructor.
*
* @param connection
* Database connection
*/
/**
 * Class constructor.
 *
 * @param connection open database connection used for all queries and
 *                   updates; this class never closes it, the caller
 *                   retains ownership.
 */
public ReparentViaSmb(java.sql.Connection connection) {
    m_connection = connection;
    // The node list and both maps are built lazily by sync(); they stay
    // null until then, and callers of the private helpers rely on that.
    m_existingNodeList = null;
    m_reparentedIfMap = null;
    m_reparentNodeMap = null;
}
/**
* This method is responsible for building a list of existing nodes from the
* 'node' table and then processing that list of nodes in order to determine
* if there are any nodes which must be reparented because they share the
* same NetBIOS name with another node. During this processing the reparent
* node map is built which contains a mapping of reparent nodes to their
* duplicate node lists.
*
* @throws SQLException
* if an error occurs querying the database.
*/
private void buildNodeLists() throws SQLException {
    ThreadCategory log = ThreadCategory.getInstance(getClass());
    m_existingNodeList = new ArrayList<LightWeightNodeEntry>();
    final DBUtils d = new DBUtils(getClass());
    try {
        // Pull every non-deleted node that has a NetBIOS name, ordered by
        // nodeid ascending -- the ordering is what guarantees the lowest
        // nodeid becomes the reparent node below.
        PreparedStatement stmt = m_connection.prepareStatement(SQL_DB_RETRIEVE_NODES);
        d.watch(stmt);
        ResultSet rs = stmt.executeQuery();
        d.watch(rs);
        // Build a LightWeightNodeEntry for each row of the 'node' table.
        while (rs.next()) {
            m_existingNodeList.add(new LightWeightNodeEntry(rs.getInt(1), rs.getString(2)));
        }
    } finally {
        d.cleanUp();
    }
    //
    // Pairwise scan (O(n^2) in the number of named nodes) for nodes that
    // share a NetBIOS name. The first (lowest-id) node of each group
    // becomes the reparent node and serves as the map key; every later
    // node with the same name is flagged as a duplicate and collected in
    // that key's duplicate list.
    //
    Iterator<LightWeightNodeEntry> outer = m_existingNodeList.iterator();
    while (outer.hasNext()) {
        LightWeightNodeEntry outerEntry = outer.next();
        String outerNetbiosName = outerEntry.getNetbiosName();
        // Skip this node if NetBIOS name is null or is in the skip list
        if (outerNetbiosName == null || m_netbiosNamesToSkip.contains(outerNetbiosName))
            continue;
        // Already claimed by an earlier group -- move on
        if (outerEntry.isDuplicate())
            continue;
        List<LightWeightNodeEntry> duplicateNodeList = null;
        Iterator<LightWeightNodeEntry> inner = m_existingNodeList.iterator();
        while (inner.hasNext()) {
            LightWeightNodeEntry innerEntry = inner.next();
            String innerNetbiosName = innerEntry.getNetbiosName();
            // Only look at nodes with a higher id; lower/equal ids have
            // already been considered as an outer node.
            if (innerEntry.getNodeId() <= outerEntry.getNodeId())
                continue;
            // Skip this node if NetBIOS name is null or is in the skip list
            if (innerNetbiosName == null || m_netbiosNamesToSkip.contains(innerNetbiosName))
                continue;
            // Skip if current node is already claimed as a duplicate
            if (innerEntry.isDuplicate())
                continue;
            if (innerNetbiosName.equals(outerNetbiosName)) {
                // Two nodes share a NetBIOS name: record innerEntry as a
                // duplicate of outerEntry (the reparent node).
                if (duplicateNodeList == null)
                    duplicateNodeList = new ArrayList<LightWeightNodeEntry>();
                innerEntry.setDuplicate(true); // mark node as duplicate
                duplicateNodeList.add(innerEntry); // add to current dup
                // list
                // NOTE(review): the "retrieveNodeData" log tag below is a
                // stale label for this method.
                if (log.isDebugEnabled())
                    log.debug("ReparentViaSmb.retrieveNodeData: found that nodeid " + innerEntry.getNodeId() + " is a duplicate of nodeid " + outerEntry.getNodeId());
            }
        } // end inner while()
        // Anything need reparenting?
        if (duplicateNodeList != null) {
            // We found duplicates...add to reparent map (created lazily)
            if (m_reparentNodeMap == null)
                m_reparentNodeMap = new HashMap<LightWeightNodeEntry, List<LightWeightNodeEntry>>();
            if (log.isDebugEnabled())
                log.debug("ReparentViaSmb.retrieveNodeData: adding dup list w/ " + duplicateNodeList.size() + " to reparent Map for reparent nodeid " + outerEntry.getNodeId());
            m_reparentNodeMap.put(outerEntry, duplicateNodeList);
        }
    }// end outer while()
}
/**
* Performs reparenting if necessary and generates appropriate events to
* inform other OpenNMS processes of any database changes..
*
* @throws java.sql.SQLException
* if error occurs updating the database
*/
/**
 * Scans the node table for NetBIOS duplicates, reparents their
 * interfaces under the surviving node, and publishes
 * 'interfaceReparented' events for every interface that was moved.
 *
 * @throws java.sql.SQLException if a database query or update fails
 */
public void sync() throws SQLException {
    // Discover duplicate nodes; populates m_reparentNodeMap.
    buildNodeLists();
    if (m_reparentNodeMap == null || m_reparentNodeMap.isEmpty()) {
        return; // no two nodes share a NetBIOS name -- nothing to do
    }
    reparentInterfaces();
    // Announce changes only when interfaces were actually moved.
    if (m_reparentedIfMap != null && !m_reparentedIfMap.isEmpty()) {
        generateEvents();
    }
}
/**
* This method is responsible for reparenting interfaces belonging to
* duplicate nodes under the appropriate reparent node id. During this
* processing the reparented interface map is generated. This map contains a
* list of reparented interfaces associated with each reparent node. This
* list will make it possible to generate 'interfaceReparented' events for
* each reparented interface.
*
* During reparenting the 'ipInterface', 'snmpInterface', and 'ifServices'
* tables are all updated to reflect the new parent node id for the
* reparented interface.
*
* @throws SQLException
* if error occurs updating the database
*/
/**
 * Reparents every interface of each duplicate node under its reparent
 * node, marks the duplicate nodes deleted (nodeType='D'), and records
 * every moved interface in m_reparentedIfMap so generateEvents() can
 * publish 'interfaceReparented' events afterwards.
 *
 * The 'ipInterface', 'snmpInterface' and 'ifServices' tables are all
 * updated to carry the new parent node id.
 *
 * @throws SQLException if a database query or update fails
 */
private void reparentInterfaces() throws SQLException {
    ThreadCategory log = ThreadCategory.getInstance(getClass());
    m_reparentedIfMap = null;
    final DBUtils d = new DBUtils(getClass());
    try {
        PreparedStatement ipInterfaceStmt = m_connection.prepareStatement(SQL_DB_REPARENT_IP_INTERFACE);
        d.watch(ipInterfaceStmt);
        PreparedStatement snmpInterfaceStmt = m_connection.prepareStatement(SQL_DB_REPARENT_SNMP_INTERFACE);
        d.watch(snmpInterfaceStmt);
        PreparedStatement ifServicesStmt = m_connection.prepareStatement(SQL_DB_REPARENT_IF_SERVICES);
        d.watch(ifServicesStmt);
        Set<LightWeightNodeEntry> keys = m_reparentNodeMap.keySet();
        Iterator<LightWeightNodeEntry> iter = keys.iterator();
        while (iter.hasNext()) {
            LightWeightNodeEntry reparentNode = iter.next();
            int reparentNodeID = reparentNode.getNodeId();
            // BUGFIX: this list used to be declared once outside the loop
            // and was never reset, so interfaces collected for earlier
            // reparent nodes leaked into later nodes' lists, and every map
            // entry ended up sharing the same list reference. A fresh list
            // per reparent node fixes both problems.
            List<LightWeightIfEntry> reparentedIfList = null;
            // Construct a "heavier weight" DbNodeEntry for this node --
            // sysName, sysDescription etc. from the node table are needed
            // later when the interfaceReparented event is generated.
            reparentNode.setHeavyWeightNodeEntry(DbNodeEntry.get(reparentNodeID));
            // Retrieve duplicate node list for this reparent node key
            List<LightWeightNodeEntry> dupList = m_reparentNodeMap.get(reparentNode);
            log.debug("reparentInterfaces: duplicate node list retrieved, list size=" + dupList.size());
            Iterator<LightWeightNodeEntry> dupIter = dupList.iterator();
            while (dupIter.hasNext()) {
                LightWeightNodeEntry dupNode = dupIter.next();
                int dupNodeID = dupNode.getNodeId();
                try {
                    if (log.isDebugEnabled())
                        log.debug("reparentInterfaces: reparenting all interfaces/services for nodeID " + dupNodeID + " under reparent nodeID " + reparentNodeID);
                    // Before reparenting, capture the duplicate node's
                    // interface addresses so an 'interfaceReparented' event
                    // can be generated for each one.
                    PreparedStatement stmt = m_connection.prepareStatement(SQL_DB_RETRIEVE_INTERFACES);
                    d.watch(stmt);
                    stmt.setInt(1, dupNodeID);
                    if (log.isDebugEnabled())
                        log.debug("reparentInterfaces: issuing db query...");
                    ResultSet rs = stmt.executeQuery();
                    d.watch(rs);
                    // One LightWeightIfEntry per 'ipInterface' row.
                    while (rs.next()) {
                        String ifAddress = rs.getString(1);
                        String hostName = rs.getString(2);
                        LightWeightIfEntry lwIfEntry = new LightWeightIfEntry(ifAddress, hostName, reparentNodeID, dupNodeID);
                        if (reparentedIfList == null) {
                            reparentedIfList = new ArrayList<LightWeightIfEntry>();
                        }
                        reparentedIfList.add(lwIfEntry);
                        if (log.isDebugEnabled())
                            log.debug("reparentInterfaces: will reparent " + lwIfEntry.getAddress() + " : oldNodeId: " + lwIfEntry.getOldParentNodeId() + " newNodeId: " + lwIfEntry.getParentNodeId());
                    }
                    // Reparent the duplicate node's rows in 'ipInterface'.
                    ipInterfaceStmt.setInt(1, reparentNodeID);
                    ipInterfaceStmt.setInt(2, dupNodeID);
                    ipInterfaceStmt.executeUpdate();
                    // ...and in 'snmpinterface'.
                    snmpInterfaceStmt.setInt(1, reparentNodeID);
                    snmpInterfaceStmt.setInt(2, dupNodeID);
                    snmpInterfaceStmt.executeUpdate();
                    // ...and in 'ifservices'.
                    ifServicesStmt.setInt(1, reparentNodeID);
                    ifServicesStmt.setInt(2, dupNodeID);
                    ifServicesStmt.executeUpdate();
                } catch (SQLException sqlE) {
                    log.error("SQLException while reparenting duplicate node w/ nodeID " + dupNodeID);
                    throw sqlE;
                }
                // All interfaces moved -- flag the duplicate node deleted.
                if (log.isDebugEnabled())
                    log.debug("reparentInterfaces: deleting duplicate node id: " + dupNodeID);
                PreparedStatement deleteNodeStmt = m_connection.prepareStatement(SQL_DB_DELETE_NODE);
                d.watch(deleteNodeStmt);
                deleteNodeStmt.setInt(1, dupNodeID);
                deleteNodeStmt.executeUpdate();
            } // end while(dupIter.hasNext())
            // Record this node's reparented interfaces (if any) so events
            // can be generated for them; the map is created lazily.
            if (reparentedIfList != null && !reparentedIfList.isEmpty()) {
                if (m_reparentedIfMap == null) {
                    m_reparentedIfMap = new HashMap<LightWeightNodeEntry, List<LightWeightIfEntry>>();
                }
                m_reparentedIfMap.put(reparentNode, reparentedIfList);
            }
        } // end while(iter.hasNext())
    } finally {
        d.cleanUp();
    }
}
/**
* Generates appropriate events to inform other OpenNMS processes of the
* database changes. Loops through the keys of the reparent interface and
* generates 'interfaceReparented' events for each reparented interface.
*/
/**
 * Publishes an 'interfaceReparented' event for every interface moved by
 * reparentInterfaces(), grouped by the surviving reparent node. Reparent
 * nodes without an attached DbNodeEntry are skipped with a warning,
 * because the node's label/sysName are required as event parameters.
 */
private void generateEvents() {
    ThreadCategory log = ThreadCategory.getInstance(getClass());
    if (log.isDebugEnabled())
        log.debug("generateEvents: Generating reparent events...reparentedIfMap size: " + m_reparentedIfMap.size());
    for (LightWeightNodeEntry reparentNode : m_reparentedIfMap.keySet()) {
        // The DbNodeEntry supplies the event parameters; without it no
        // well-formed event can be built for this node's interfaces.
        if (!reparentNode.hasHeavyWeightNodeEntry()) {
            log.warn("generateEvents: No valid reparent node entry for node " + reparentNode.getNodeId() + ". Unable to generate reparenting events.");
            continue;
        }
        if (log.isDebugEnabled())
            log.debug("generateEvents: generating events for reparent node w/ id/netbiosName: " + reparentNode.getNodeId() + "/" + reparentNode.getNetbiosName());
        List<LightWeightIfEntry> ifList = m_reparentedIfMap.get(reparentNode);
        if (ifList == null || ifList.isEmpty()) {
            continue;
        }
        // One event per reparented interface under this node.
        for (LightWeightIfEntry lwIfEntry : ifList) {
            sendInterfaceReparentedEvent(lwIfEntry.getAddress(), lwIfEntry.getHostName(), lwIfEntry.getParentNodeId(), lwIfEntry.getOldParentNodeId(), reparentNode.getHeavyWeightNodeEntry());
            if (log.isDebugEnabled())
                log.debug("generateEvents: sent interfaceReparented event for interface " + lwIfEntry.getAddress());
        }
    }
    if (log.isDebugEnabled())
        log.debug("generateEvents: completed all event generation...");
}
/**
* This method is responsible for generating a interfaceReparented event and
* sending it to Eventd.
*
* @param ipAddr
* IP address of interface which was reparented
* @param ipHostName
* IP Host Name for the interface
* @param newNodeId
* Interface's new nodeID
* @param oldNodeId
* Interface's old nodeID
* @param reparentNodeEntry
* DbNodeEntry object with all info associated with the reparent
* node
*/
private synchronized void sendInterfaceReparentedEvent(String ipAddr, String ipHostName, int newNodeId, int oldNodeId, DbNodeEntry reparentNodeEntry) {
    ThreadCategory log = ThreadCategory.getInstance(getClass());
    if (log.isDebugEnabled())
        log.debug("sendInterfaceReparentedEvent: ipAddr: " + ipAddr + " ipHostName: " + ipHostName + " newNodeId: " + newNodeId + " oldNodeId: " + oldNodeId);
    // Normalize: the event parameter must not be null.
    if (ipHostName == null)
        ipHostName = "";
    // Build the interfaceReparented event, carrying both the old and new
    // node ids plus the reparent node's label/sysName/sysDescription so
    // consumers can update their view without another lookup.
    EventBuilder bldr = new EventBuilder(EventConstants.INTERFACE_REPARENTED_EVENT_UEI, "OpenNMS.Capsd");
    bldr.setNodeid(newNodeId);
    bldr.setHost(Capsd.getLocalHostAddress());
    bldr.setInterface(ipAddr);
    bldr.addParam(EventConstants.PARM_IP_HOSTNAME, ipHostName);
    bldr.addParam(EventConstants.PARM_OLD_NODEID, oldNodeId);
    bldr.addParam(EventConstants.PARM_NEW_NODEID, newNodeId);
    bldr.addParam(EventConstants.PARM_NODE_LABEL, reparentNodeEntry.getLabel());
    bldr.addParam(EventConstants.PARM_NODE_LABEL_SOURCE, reparentNodeEntry.getLabelSource());
    // sysName/sysDescription are optional node attributes -- only attach
    // them when present.
    if (reparentNodeEntry.getSystemName() != null) {
        bldr.addParam(EventConstants.PARM_NODE_SYSNAME, reparentNodeEntry.getSystemName());
    }
    if (reparentNodeEntry.getSystemDescription() != null) {
        bldr.addParam(EventConstants.PARM_NODE_SYSDESCRIPTION, reparentNodeEntry.getSystemDescription());
    }
    // Send event to Eventd.
    // NOTE(review): catch(Throwable) swallows even Errors; presumably
    // deliberate so a middleware failure never aborts reparenting --
    // confirm this is the intended boundary behavior.
    try {
        EventIpcManagerFactory.getIpcManager().sendNow(bldr.getEvent());
    } catch (Throwable t) {
        log.warn("run: unexpected throwable exception caught during send to middleware", t);
    }
}
}
| opennms-services/src/main/java/org/opennms/netmgt/capsd/ReparentViaSmb.java | //
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2003 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2003 Jan 31: Cleaned up some unused imports.
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <[email protected]>
// http://www.opennms.org/
// http://www.opennms.com/
//
// Tab Size = 8
//
// ReparentViaSmb.java,v 1.1.1.1 2001/11/11 17:34:35 ben Exp
//
package org.opennms.netmgt.capsd;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.opennms.core.utils.DBUtils;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.eventd.EventIpcManagerFactory;
import org.opennms.netmgt.xml.event.Event;
import org.opennms.netmgt.xml.event.Parm;
import org.opennms.netmgt.xml.event.Parms;
import org.opennms.netmgt.xml.event.Value;
/**
* This class is designed to reparent interfaces in the database based on the
* SMB protocol. Specifically, if two nodes in the 'node' table have identical
* NetBIOS names it is assumed that those two nodes actually represent different
* interfaces (physical or alias'd) on the same box. The node with the lowest
* nodeID becomes the "reparent node" and the other nodes are considered
* duplicates. All interfaces under each duplicate node are then reparented
* under the "reparent node" and the duplicate node(s) are flagged in the
* database as deleted (nodeType='D').
*
* @author <A HREF="[email protected]">Mike </A>
* @author <A HREF="http://www.opennms.org/">OpenNMS </A>
* @author <A HREF="[email protected]">Mike </A>
* @author <A HREF="http://www.opennms.org/">OpenNMS </A>
* @version 1.1.1.1
*/
public final class ReparentViaSmb {
    /**
     * SQL statements used to query and update the 'node', 'ipinterface',
     * 'snmpinterface' and 'ifservices' tables during SMB-based reparenting.
     */
    final static String SQL_DB_RETRIEVE_NODES = "SELECT nodeid,nodenetbiosname FROM node WHERE nodeType!='D' AND nodenetbiosname is not null ORDER BY nodeid";

    // NOTE(review): not referenced inside this class; kept because it is
    // package-visible and may be used elsewhere — confirm before removing.
    final static String SQL_DB_RETRIEVE_NODE = "SELECT nodesysname,nodesysdescription,nodelabel,nodelabelsource FROM node WHERE nodeid=? AND nodeType!='D'";

    final static String SQL_DB_RETRIEVE_INTERFACES = "SELECT ipaddr,iphostname FROM ipinterface WHERE nodeid=? AND isManaged!='D'";

    final static String SQL_DB_REPARENT_IP_INTERFACE = "UPDATE ipinterface SET nodeID=? WHERE nodeID=? AND isManaged!='D'";

    final static String SQL_DB_REPARENT_SNMP_INTERFACE = "UPDATE snmpinterface SET nodeID=? WHERE nodeID=?";

    final static String SQL_DB_REPARENT_IF_SERVICES = "UPDATE ifservices SET nodeID=? WHERE nodeID=? AND status!='D'";

    final static String SQL_DB_DELETE_NODE = "UPDATE node SET nodeType='D' WHERE nodeID=?";

    /**
     * Database connection supplied by the caller; this class never closes it.
     */
    private final java.sql.Connection m_connection;

    /**
     * List of LightWeightNodeEntry objects initialized from the content of the
     * 'node' table.
     */
    private List<LightWeightNodeEntry> m_existingNodeList;

    /**
     * Maps each reparent node to the list of interfaces which were reparented
     * under it. Built by {@link #reparentInterfaces()}.
     */
    private Map<LightWeightNodeEntry, List<LightWeightIfEntry>> m_reparentedIfMap;

    /**
     * Maps each reparent node to the list of duplicate nodes associated with
     * it. Built by {@link #buildNodeLists()}.
     */
    private Map<LightWeightNodeEntry, List<LightWeightNodeEntry>> m_reparentNodeMap;

    /**
     * Hard-coded list of generic NetBIOS names which are not subject to
     * reparenting via SMB (they are too common to imply node identity).
     */
    private static final List<String> m_netbiosNamesToSkip;

    // Static initialization block to initialize the list of NetBIOS names
    // which should never be considered for reparenting.
    static {
        m_netbiosNamesToSkip = new ArrayList<String>(4);
        m_netbiosNamesToSkip.add("WORKSTATION");
        m_netbiosNamesToSkip.add("DEFAULT");
        m_netbiosNamesToSkip.add("OEMCOMPUTER");
        m_netbiosNamesToSkip.add("COMPUTER");
    }

    /**
     * <P>
     * LightWeightIfEntry holds specific information about an IP interface in
     * the database: its IP address, host name, new (reparent) node id and the
     * original node id. It is a lighter weight stand-in for
     * DbIpInterfaceEntry.
     * </P>
     */
    private static final class LightWeightIfEntry {
        private final String m_address;

        private final String m_hostname;

        private final int m_nodeId;

        private final int m_oldNodeId;

        /**
         * <P>
         * Constructs a new LightWeightIfEntry object.
         * </P>
         *
         * @param address
         *            Interface's ip address
         * @param hostname
         *            Interface's ip host name
         * @param nodeId
         *            Interface's (new) parent node id
         * @param oldNodeId
         *            Interface's original parent node id
         */
        public LightWeightIfEntry(String address, String hostname, int nodeId, int oldNodeId) {
            m_address = address;
            m_hostname = hostname;
            m_nodeId = nodeId;
            m_oldNodeId = oldNodeId;
        }

        /** Returns the IP address of the interface. */
        public String getAddress() {
            return m_address;
        }

        /** Returns the IP hostname of the interface (may be null). */
        public String getHostName() {
            return m_hostname;
        }

        /** Returns the (new) parent node id of the interface. */
        public int getParentNodeId() {
            return m_nodeId;
        }

        /** Returns the original parent node id of the interface. */
        public int getOldParentNodeId() {
            return m_oldNodeId;
        }
    }

    /**
     * A lighter weight version of the DbNodeEntry class for use in SMB
     * reparenting. Instances are used as HashMap keys, so equals() and
     * hashCode() must stay consistent: both are based solely on the
     * (upper-cased) NetBIOS name.
     */
    private static final class LightWeightNodeEntry {
        private final int m_nodeId;

        // Stored upper-cased so name comparisons are case-insensitive.
        private final String m_netbiosName;

        private boolean m_duplicate;

        private DbNodeEntry m_hwNodeEntry;

        /**
         * <P>
         * Constructs a new LightWeightNodeEntry object.
         * </P>
         *
         * @param nodeID
         *            Node's identifier
         * @param netbiosName
         *            Node's NetBIOS name (may be null)
         */
        LightWeightNodeEntry(int nodeID, String netbiosName) {
            m_nodeId = nodeID;
            m_netbiosName = (netbiosName != null) ? netbiosName.toUpperCase() : null;
            m_duplicate = false;
            m_hwNodeEntry = null;
        }

        /** Returns the node identifier. */
        int getNodeId() {
            return m_nodeId;
        }

        /** Returns the upper-cased NetBIOS name of the node (may be null). */
        String getNetbiosName() {
            return m_netbiosName;
        }

        /**
         * Sets the duplicate flag for the node.
         *
         * @param dupFlag
         *            the state for the duplicate flag
         */
        void setDuplicate(boolean dupFlag) {
            m_duplicate = dupFlag;
        }

        /**
         * Returns true if this LightWeightNodeEntry object has been marked as
         * a duplicate, false otherwise.
         */
        boolean isDuplicate() {
            return m_duplicate;
        }

        /** Attaches the full DbNodeEntry needed later for event generation. */
        void setHeavyWeightNodeEntry(DbNodeEntry hwNodeEntry) {
            m_hwNodeEntry = hwNodeEntry;
        }

        /** Returns the attached DbNodeEntry, or null if none was set. */
        DbNodeEntry getHeavyWeightNodeEntry() {
            return m_hwNodeEntry;
        }

        /** Returns true when a heavy-weight DbNodeEntry has been attached. */
        boolean hasHeavyWeightNodeEntry() {
            return m_hwNodeEntry != null;
        }

        /**
         * <P>
         * Node equality test: two LightWeightNodeEntry objects are equal when
         * they have the same non-null NetBIOS name. Entries with a null
         * NetBIOS name are intentionally never equal to anything.
         * </P>
         *
         * @return true if this and the passed object are equivalent.
         */
        @Override
        public boolean equals(final Object o) {
            if (!(o instanceof LightWeightNodeEntry)) {
                return false;
            }
            LightWeightNodeEntry node = (LightWeightNodeEntry) o;
            if (m_netbiosName == null) {
                return false;
            }
            // String.equals(null) is false, which covers node's null name.
            return m_netbiosName.equals(node.getNetbiosName());
        }

        @Override
        public int hashCode() {
            // BUG FIX: previously included m_nodeId, m_duplicate and
            // m_hwNodeEntry, breaking the equals/hashCode contract (equal
            // objects could hash differently) and letting HashMap keys mutate
            // when setHeavyWeightNodeEntry() was called. Only the NetBIOS
            // name — the sole field used by equals() — participates now.
            return new HashCodeBuilder(7, 23)
                .append(m_netbiosName)
                .toHashCode();
        }
    }

    /**
     * Class constructor.
     *
     * @param connection
     *            Database connection
     */
    public ReparentViaSmb(java.sql.Connection connection) {
        m_connection = connection;
        m_existingNodeList = null;
        m_reparentedIfMap = null;
        m_reparentNodeMap = null;
    }

    /**
     * Builds a list of existing nodes from the 'node' table and then processes
     * that list to determine whether any nodes must be reparented because they
     * share the same NetBIOS name with another node. During this processing
     * the reparent node map is built, which maps each reparent node (the one
     * with the lowest nodeid among the duplicates) to its duplicate node list.
     *
     * @throws SQLException
     *             if an error occurs querying the database.
     */
    private void buildNodeLists() throws SQLException {
        ThreadCategory log = ThreadCategory.getInstance(getClass());

        m_existingNodeList = new ArrayList<LightWeightNodeEntry>();

        final DBUtils d = new DBUtils(getClass());
        try {
            PreparedStatement stmt = m_connection.prepareStatement(SQL_DB_RETRIEVE_NODES);
            d.watch(stmt);
            ResultSet rs = stmt.executeQuery();
            d.watch(rs);

            // Build a LightWeightNodeEntry for every non-deleted node with a
            // NetBIOS name, ordered by nodeid (the SQL guarantees ordering).
            while (rs.next()) {
                m_existingNodeList.add(new LightWeightNodeEntry(rs.getInt(1), rs.getString(2)));
            }
        } finally {
            d.cleanUp();
        }

        //
        // Loop through the node list and verify that all of the nodes have
        // unique NetBIOS names. Whenever two nodes share a NetBIOS name the
        // node with the lowest nodeid becomes the "reparent" node (and map
        // key); every other node with that name is marked duplicate and added
        // to the reparent list stored under that key.
        //
        for (LightWeightNodeEntry outerEntry : m_existingNodeList) {
            String outerNetbiosName = outerEntry.getNetbiosName();

            // Skip this node if the NetBIOS name is null or in the skip list
            if (outerNetbiosName == null || m_netbiosNamesToSkip.contains(outerNetbiosName))
                continue;

            // If the node is already marked as a duplicate just move on
            if (outerEntry.isDuplicate())
                continue;

            List<LightWeightNodeEntry> duplicateNodeList = null;

            for (LightWeightNodeEntry innerEntry : m_existingNodeList) {
                String innerNetbiosName = innerEntry.getNetbiosName();

                // Skip if the inner node id is less than or equal to the
                // current outer node id (those pairs were already processed
                // when the inner node was the outer node).
                if (innerEntry.getNodeId() <= outerEntry.getNodeId())
                    continue;

                // Skip this node if NetBIOS name is null or in the skip list
                if (innerNetbiosName == null || m_netbiosNamesToSkip.contains(innerNetbiosName))
                    continue;

                // Skip if current node is already marked as a duplicate
                if (innerEntry.isDuplicate())
                    continue;

                if (innerNetbiosName.equals(outerNetbiosName)) {
                    // Two nodes share a NetBIOS name: record the duplicate.
                    if (duplicateNodeList == null)
                        duplicateNodeList = new ArrayList<LightWeightNodeEntry>();

                    innerEntry.setDuplicate(true);
                    duplicateNodeList.add(innerEntry);

                    if (log.isDebugEnabled())
                        log.debug("ReparentViaSmb.retrieveNodeData: found that nodeid " + innerEntry.getNodeId() + " is a duplicate of nodeid " + outerEntry.getNodeId());
                }
            }

            // Anything need reparenting?
            if (duplicateNodeList != null) {
                // We found duplicates...add to reparent map
                if (m_reparentNodeMap == null)
                    m_reparentNodeMap = new HashMap<LightWeightNodeEntry, List<LightWeightNodeEntry>>();

                if (log.isDebugEnabled())
                    log.debug("ReparentViaSmb.retrieveNodeData: adding dup list w/ " + duplicateNodeList.size() + " to reparent Map for reparent nodeid " + outerEntry.getNodeId());

                m_reparentNodeMap.put(outerEntry, duplicateNodeList);
            }
        }
    }

    /**
     * Performs reparenting if necessary and generates appropriate events to
     * inform other OpenNMS processes of any database changes.
     *
     * @throws java.sql.SQLException
     *             if an error occurs updating the database
     */
    public void sync() throws SQLException {
        // Build node lists
        buildNodeLists();

        // Reparent interfaces if necessary
        if (m_reparentNodeMap != null && !m_reparentNodeMap.isEmpty()) {
            reparentInterfaces();

            // Generate 'interfaceReparented' events if necessary
            if (m_reparentedIfMap != null && !m_reparentedIfMap.isEmpty())
                generateEvents();
        }
    }

    /**
     * Reparents the interfaces belonging to duplicate nodes under the
     * appropriate reparent node id and marks each duplicate node as deleted.
     * During this processing the reparented interface map is generated,
     * associating each reparent node with the list of interfaces reparented
     * under it, so that 'interfaceReparented' events can be generated later.
     *
     * During reparenting the 'ipInterface', 'snmpInterface', and 'ifServices'
     * tables are all updated to reflect the new parent node id for the
     * reparented interface.
     *
     * @throws SQLException
     *             if an error occurs updating the database
     */
    private void reparentInterfaces() throws SQLException {
        ThreadCategory log = ThreadCategory.getInstance(getClass());

        m_reparentedIfMap = null;

        final DBUtils d = new DBUtils(getClass());
        try {
            PreparedStatement ipInterfaceStmt = m_connection.prepareStatement(SQL_DB_REPARENT_IP_INTERFACE);
            d.watch(ipInterfaceStmt);
            PreparedStatement snmpInterfaceStmt = m_connection.prepareStatement(SQL_DB_REPARENT_SNMP_INTERFACE);
            d.watch(snmpInterfaceStmt);
            PreparedStatement ifServicesStmt = m_connection.prepareStatement(SQL_DB_REPARENT_IF_SERVICES);
            d.watch(ifServicesStmt);

            for (LightWeightNodeEntry reparentNode : m_reparentNodeMap.keySet()) {
                int reparentNodeID = reparentNode.getNodeId();

                // BUG FIX: the interface list must be reset for each reparent
                // node. Previously it was declared once outside this loop and
                // never cleared, so every later reparent node's map entry also
                // contained all interfaces from earlier nodes, producing
                // spurious 'interfaceReparented' events.
                List<LightWeightIfEntry> reparentedIfList = null;

                // Construct a "heavier weight" DbNodeEntry for this node;
                // sysName, sysDescription and other node-table fields are
                // needed later when the interfaceReparented event is built.
                reparentNode.setHeavyWeightNodeEntry(DbNodeEntry.get(reparentNodeID));

                // Retrieve the duplicate node list for this reparent node key
                List<LightWeightNodeEntry> dupList = m_reparentNodeMap.get(reparentNode);
                log.debug("ReparentViaSmb.retrieveNodeData: duplicate node list retrieved, list size=" + dupList.size());

                for (LightWeightNodeEntry dupNode : dupList) {
                    int dupNodeID = dupNode.getNodeId();

                    try {
                        if (log.isDebugEnabled())
                            log.debug("reparentInterfaces: reparenting all interfaces/services for nodeID " + dupNodeID + " under reparent nodeID " + reparentNodeID);

                        //
                        // Before reparenting, collect the duplicate node's
                        // interface addresses so that 'interfaceReparented'
                        // events can be generated for each one.
                        //
                        PreparedStatement stmt = m_connection.prepareStatement(SQL_DB_RETRIEVE_INTERFACES);
                        d.watch(stmt);
                        stmt.setInt(1, dupNodeID);

                        if (log.isDebugEnabled())
                            log.debug("reparentInterfaces: issuing db query...");

                        ResultSet rs = stmt.executeQuery();
                        d.watch(rs);

                        // Build a LightWeightIfEntry for each interface pulled
                        // from the 'ipInterface' table.
                        while (rs.next()) {
                            String ifAddress = rs.getString(1);
                            String hostName = rs.getString(2);

                            LightWeightIfEntry lwIfEntry = new LightWeightIfEntry(ifAddress, hostName, reparentNodeID, dupNodeID);
                            if (reparentedIfList == null) {
                                reparentedIfList = new ArrayList<LightWeightIfEntry>();
                            }
                            reparentedIfList.add(lwIfEntry);
                            if (log.isDebugEnabled())
                                log.debug("reparentInterfaces: will reparent " + lwIfEntry.getAddress() + " : oldNodeId: " + lwIfEntry.getOldParentNodeId() + " newNodeId: " + lwIfEntry.getParentNodeId());
                        }

                        // Reparent all 'ipInterface' rows of the duplicate node
                        ipInterfaceStmt.setInt(1, reparentNodeID);
                        ipInterfaceStmt.setInt(2, dupNodeID);
                        ipInterfaceStmt.executeUpdate();

                        // Reparent all 'snmpinterface' rows of the duplicate node
                        snmpInterfaceStmt.setInt(1, reparentNodeID);
                        snmpInterfaceStmt.setInt(2, dupNodeID);
                        snmpInterfaceStmt.executeUpdate();

                        // Reparent all 'ifservices' rows of the duplicate node
                        ifServicesStmt.setInt(1, reparentNodeID);
                        ifServicesStmt.setInt(2, dupNodeID);
                        ifServicesStmt.executeUpdate();
                    } catch (SQLException sqlE) {
                        log.error("SQLException while reparenting duplicate node w/ nodeID " + dupNodeID);
                        throw sqlE;
                    }

                    //
                    // All interfaces are reparented; flag this duplicate node
                    // as deleted in the 'node' table.
                    //
                    if (log.isDebugEnabled())
                        log.debug("reparentInterfaces: deleting duplicate node id: " + dupNodeID);

                    PreparedStatement deleteNodeStmt = m_connection.prepareStatement(SQL_DB_DELETE_NODE);
                    d.watch(deleteNodeStmt);
                    deleteNodeStmt.setInt(1, dupNodeID);
                    deleteNodeStmt.executeUpdate();
                }

                // Record this reparent node's reparented interfaces so events
                // can be generated for them.
                if (reparentedIfList != null && !reparentedIfList.isEmpty()) {
                    if (m_reparentedIfMap == null) {
                        m_reparentedIfMap = new HashMap<LightWeightNodeEntry, List<LightWeightIfEntry>>();
                    }
                    m_reparentedIfMap.put(reparentNode, reparentedIfList);
                }
            }
        } finally {
            d.cleanUp();
        }
    }

    /**
     * Generates appropriate events to inform other OpenNMS processes of the
     * database changes: loops through the keys of the reparented interface map
     * and generates an 'interfaceReparented' event for each reparented
     * interface.
     */
    private void generateEvents() {
        ThreadCategory log = ThreadCategory.getInstance(getClass());

        if (log.isDebugEnabled())
            log.debug("generateEvents: Generating reparent events...reparentedIfMap size: " + m_reparentedIfMap.size());

        for (LightWeightNodeEntry reparentNode : m_reparentedIfMap.keySet()) {
            // Without the heavy-weight DbNodeEntry we cannot fill in the
            // event parameters, so skip this reparent node with a warning.
            if (!reparentNode.hasHeavyWeightNodeEntry()) {
                log.warn("generateEvents: No valid reparent node entry for node " + reparentNode.getNodeId() + ". Unable to generate reparenting events.");
                continue;
            }

            if (log.isDebugEnabled())
                log.debug("generateEvents: generating events for reparent node w/ id/netbiosName: " + reparentNode.getNodeId() + "/" + reparentNode.getNetbiosName());

            // Get the interfaces reparented under this node
            List<LightWeightIfEntry> ifList = m_reparentedIfMap.get(reparentNode);
            if (ifList != null && !ifList.isEmpty()) {
                for (LightWeightIfEntry lwIfEntry : ifList) {
                    // Generate interfaceReparented event
                    sendInterfaceReparentedEvent(lwIfEntry.getAddress(), lwIfEntry.getHostName(), lwIfEntry.getParentNodeId(), lwIfEntry.getOldParentNodeId(), reparentNode.getHeavyWeightNodeEntry());
                    if (log.isDebugEnabled())
                        log.debug("generateEvents: sent interfaceReparented event for interface " + lwIfEntry.getAddress());
                }
            }
        }

        if (log.isDebugEnabled())
            log.debug("generateEvents: completed all event generation...");
    }

    /**
     * Generates an interfaceReparented event and sends it to Eventd.
     *
     * @param ipAddr
     *            IP address of interface which was reparented
     * @param ipHostName
     *            IP Host Name for the interface (null is treated as empty)
     * @param newNodeId
     *            Interface's new nodeID
     * @param oldNodeId
     *            Interface's old nodeID
     * @param reparentNodeEntry
     *            DbNodeEntry object with all info associated with the reparent
     *            node
     */
    private synchronized void sendInterfaceReparentedEvent(String ipAddr, String ipHostName, int newNodeId, int oldNodeId, DbNodeEntry reparentNodeEntry) {
        ThreadCategory log = ThreadCategory.getInstance(getClass());
        if (log.isDebugEnabled())
            log.debug("sendInterfaceReparentedEvent: ipAddr: " + ipAddr + " ipHostName: " + ipHostName + " newNodeId: " + newNodeId + " oldNodeId: " + oldNodeId);

        // Make sure host name not null
        if (ipHostName == null)
            ipHostName = "";

        // create the event to be sent
        Event newEvent = new Event();
        newEvent.setUei(EventConstants.INTERFACE_REPARENTED_EVENT_UEI);
        newEvent.setSource("OpenNMS.Capsd");
        newEvent.setNodeid(newNodeId);
        newEvent.setHost(Capsd.getLocalHostAddress());
        newEvent.setInterface(ipAddr);
        newEvent.setTime(EventConstants.formatToString(new java.util.Date()));

        // Add appropriate parms
        Parms eventParms = new Parms();
        Parm eventParm = null;
        Value parmValue = null;

        // Add IP host name
        eventParm = new Parm();
        eventParm.setParmName(EventConstants.PARM_IP_HOSTNAME);
        parmValue = new Value();
        parmValue.setContent(ipHostName);
        eventParm.setValue(parmValue);
        eventParms.addParm(eventParm);

        // Add old nodeid
        eventParm = new Parm();
        eventParm.setParmName(EventConstants.PARM_OLD_NODEID);
        parmValue = new Value();
        parmValue.setContent(String.valueOf(oldNodeId));
        eventParm.setValue(parmValue);
        eventParms.addParm(eventParm);

        // Add new nodeid
        eventParm = new Parm();
        eventParm.setParmName(EventConstants.PARM_NEW_NODEID);
        parmValue = new Value();
        parmValue.setContent(String.valueOf(newNodeId));
        eventParm.setValue(parmValue);
        eventParms.addParm(eventParm);

        // Add nodeLabel and nodeLabelSource
        eventParm = new Parm();
        eventParm.setParmName(EventConstants.PARM_NODE_LABEL);
        parmValue = new Value();
        parmValue.setContent(reparentNodeEntry.getLabel());
        eventParm.setValue(parmValue);
        eventParms.addParm(eventParm);

        eventParm = new Parm();
        eventParm.setParmName(EventConstants.PARM_NODE_LABEL_SOURCE);
        parmValue = new Value();
        // label source is a single char; wrap it in a one-character String
        parmValue.setContent(new String(new char[] { reparentNodeEntry.getLabelSource() }));
        eventParm.setValue(parmValue);
        eventParms.addParm(eventParm);

        if (reparentNodeEntry.getSystemName() != null) {
            eventParm = new Parm();
            eventParm.setParmName(EventConstants.PARM_NODE_SYSNAME);
            parmValue = new Value();
            parmValue.setContent(reparentNodeEntry.getSystemName());
            eventParm.setValue(parmValue);
            eventParms.addParm(eventParm);
        }

        if (reparentNodeEntry.getSystemDescription() != null) {
            eventParm = new Parm();
            eventParm.setParmName(EventConstants.PARM_NODE_SYSDESCRIPTION);
            parmValue = new Value();
            parmValue.setContent(reparentNodeEntry.getSystemDescription());
            eventParm.setValue(parmValue);
            eventParms.addParm(eventParm);
        }

        // Add Parms to the event
        newEvent.setParms(eventParms);

        // Send event to Eventd
        try {
            EventIpcManagerFactory.getIpcManager().sendNow(newEvent);
        } catch (Throwable t) {
            log.warn("run: unexpected throwable exception caught during send to middleware", t);
        }
    }
}
| switch to use EventBuilder
| opennms-services/src/main/java/org/opennms/netmgt/capsd/ReparentViaSmb.java | switch to use EventBuilder |
|
Java | lgpl-2.1 | f27ac85d1b516a56f40e8001e11a873dc4bb5562 | 0 | spotbugs/spotbugs,spotbugs/spotbugs,sewe/spotbugs,johnscancella/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs | /*
* FindBugs - Find Bugs in Java programs
* Copyright (C) 2003-2007 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import edu.umd.cs.findbugs.config.UserPreferences;
import edu.umd.cs.findbugs.plan.ExecutionPlan;
import junit.framework.TestCase;
/**
* Abstract base class for TestCase classes that need to
* run in the context of a FindBugs2 object doing a full
* execution. Ensures that things like AnalysisCache,
* AnalysisContext, etc. are fully initialized.
*
* <p> Is this mock objects? Or is this just a hack?
* Probably the latter :-)
*
* @author David Hovemeyer
*/
public abstract class FindBugsTestCase extends TestCase {
    /**
     * Data of an empty class in the default package called "Empty".
     */
    public static final byte[] EMPTY_CLASS_DATA = {
        (byte) 0xca, (byte) 0xfe, (byte) 0xba, (byte) 0xbe,
        (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x32,
        (byte) 0x00, (byte) 0x0d, (byte) 0x0a, (byte) 0x00,
        (byte) 0x03, (byte) 0x00, (byte) 0x0a, (byte) 0x07,
        (byte) 0x00, (byte) 0x0b, (byte) 0x07, (byte) 0x00,
        (byte) 0x0c, (byte) 0x01, (byte) 0x00, (byte) 0x06,
        (byte) 0x3c, (byte) 0x69, (byte) 0x6e, (byte) 0x69,
        (byte) 0x74, (byte) 0x3e, (byte) 0x01, (byte) 0x00,
        (byte) 0x03, (byte) 0x28, (byte) 0x29, (byte) 0x56,
        (byte) 0x01, (byte) 0x00, (byte) 0x04, (byte) 0x43,
        (byte) 0x6f, (byte) 0x64, (byte) 0x65, (byte) 0x01,
        (byte) 0x00, (byte) 0x0f, (byte) 0x4c, (byte) 0x69,
        (byte) 0x6e, (byte) 0x65, (byte) 0x4e, (byte) 0x75,
        (byte) 0x6d, (byte) 0x62, (byte) 0x65, (byte) 0x72,
        (byte) 0x54, (byte) 0x61, (byte) 0x62, (byte) 0x6c,
        (byte) 0x65, (byte) 0x01, (byte) 0x00, (byte) 0x0a,
        (byte) 0x53, (byte) 0x6f, (byte) 0x75, (byte) 0x72,
        (byte) 0x63, (byte) 0x65, (byte) 0x46, (byte) 0x69,
        (byte) 0x6c, (byte) 0x65, (byte) 0x01, (byte) 0x00,
        (byte) 0x0a, (byte) 0x45, (byte) 0x6d, (byte) 0x70,
        (byte) 0x74, (byte) 0x79, (byte) 0x2e, (byte) 0x6a,
        (byte) 0x61, (byte) 0x76, (byte) 0x61, (byte) 0x0c,
        (byte) 0x00, (byte) 0x04, (byte) 0x00, (byte) 0x05,
        (byte) 0x01, (byte) 0x00, (byte) 0x05, (byte) 0x45,
        (byte) 0x6d, (byte) 0x70, (byte) 0x74, (byte) 0x79,
        (byte) 0x01, (byte) 0x00, (byte) 0x10, (byte) 0x6a,
        (byte) 0x61, (byte) 0x76, (byte) 0x61, (byte) 0x2f,
        (byte) 0x6c, (byte) 0x61, (byte) 0x6e, (byte) 0x67,
        (byte) 0x2f, (byte) 0x4f, (byte) 0x62, (byte) 0x6a,
        (byte) 0x65, (byte) 0x63, (byte) 0x74, (byte) 0x00,
        (byte) 0x21, (byte) 0x00, (byte) 0x02, (byte) 0x00,
        (byte) 0x03, (byte) 0x00, (byte) 0x00, (byte) 0x00,
        (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00,
        (byte) 0x01, (byte) 0x00, (byte) 0x04, (byte) 0x00,
        (byte) 0x05, (byte) 0x00, (byte) 0x01, (byte) 0x00,
        (byte) 0x06, (byte) 0x00, (byte) 0x00, (byte) 0x00,
        (byte) 0x1d, (byte) 0x00, (byte) 0x01, (byte) 0x00,
        (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00,
        (byte) 0x05, (byte) 0x2a, (byte) 0xb7, (byte) 0x00,
        (byte) 0x01, (byte) 0xb1, (byte) 0x00, (byte) 0x00,
        (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x07,
        (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x06,
        (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00,
        (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x01,
        (byte) 0x00, (byte) 0x08, (byte) 0x00, (byte) 0x00,
        (byte) 0x00, (byte) 0x02, (byte) 0x00, (byte) 0x09,
    };

    /**
     * Worker thread that runs the supplied test code inside a full FindBugs2
     * analysis pass over a freshly-created temporary class file.
     */
    private final class TestRunnerThread extends Thread {
        private final RunnableWithExceptions runnable;

        // Set at the end of runTest(); read by the parent thread after
        // join(), which establishes the necessary happens-before ordering.
        private JUnitDetectorAdapter detectorAdapter;

        private TestRunnerThread(RunnableWithExceptions runnable) {
            this.runnable = runnable;
        }

        /**
         * @return Returns the detectorAdapter, or null if the engine run did
         *         not complete.
         */
        public JUnitDetectorAdapter getDetectorAdapter() {
            return detectorAdapter;
        }

        /* (non-Javadoc)
         * @see java.lang.Thread#run()
         */
        @Override
        public void run() {
            try {
                runTest(runnable);
            } catch (Exception e) {
                // Hmm...
                System.err.println("Exception running test:");
                e.printStackTrace();
            }
        }

        /**
         * Sets up a minimal FindBugs2 engine (temp dir, empty class file,
         * fake plugin containing only JUnitDetectorAdapter) and executes it,
         * which in turn runs the caller-supplied test code.
         */
        private void runTest(RunnableWithExceptions runnable) throws IOException, InterruptedException {
            // Create temporary directory in filesystem
            File tmpdir = File.createTempFile("fbtest", null);
            if (!tmpdir.delete() || !tmpdir.mkdir()) {
                throw new IOException("Could not create temp dir");
            }

            File tmpfile = null;

            try {
                // Create a class file to analyze
                tmpfile = createEmptyClassFile(tmpdir);

                // Unfortunately there's quite a bit of gobbledygook required
                // to set up a FindBugs2.
                FindBugs2 engine = new FindBugs2();

                engine.setBugReporter(new PrintingBugReporter());

                // Analyze the temporary directory we just created
                Project project = new Project();
                project.addFile(tmpdir.getAbsolutePath());
                engine.setProject(project);

                DetectorFactoryCollection dfc = new DetectorFactoryCollection();
                DetectorFactoryCollection.resetInstance(dfc);
                Plugin fakePlugin = new Plugin("edu.umd.cs.findbugs.fakeplugin", null);
                fakePlugin.setEnabled(true);
                dfc.setPlugins(new Plugin[]{fakePlugin});
                DetectorFactory detectorFactory =
                    new DetectorFactory(fakePlugin, JUnitDetectorAdapter.class, true, "fast", "", "");
                fakePlugin.addDetectorFactory(detectorFactory);
                dfc.registerDetector(detectorFactory);
                // Sanity check: the fake plugin must actually be registered
                if (!dfc.factoryIterator().hasNext() || !fakePlugin.detectorFactoryIterator().hasNext()) {
                    throw new IllegalStateException();
                }

                engine.setDetectorFactoryCollection(dfc);

                engine.setUserPreferences(UserPreferences.createDefaultUserPreferences());

                JUnitDetectorAdapter.setRunnable(runnable);

                engine.execute();

                // Get a handle to the JUnitDetectorAdapter, since it is the
                // object that knows whether or not the test code actually passed
                // or failed.
                detectorAdapter = JUnitDetectorAdapter.instance();
            } finally {
                // Best-effort cleanup; delete() failures are intentionally
                // ignored (the files live under the system temp directory).
                if (tmpfile != null) {
                    tmpfile.delete();
                }
                tmpdir.delete();
            }
        }

        /**
         * Writes EMPTY_CLASS_DATA to "Empty.class" inside the given directory.
         *
         * @param tmpdir directory in which to create the class file
         * @return the created class file
         * @throws IOException if the file cannot be written
         */
        private File createEmptyClassFile(File tmpdir) throws IOException {
            File outFile = new File(tmpdir, "Empty.class");
            OutputStream out = new FileOutputStream(outFile);
            try {
                out.write(EMPTY_CLASS_DATA);
            } finally {
                out.close();
            }
            return outFile;
        }
    }

    /**
     * Execute some JUnit test code inside a Detector2 class
     * running inside a FindBugs2 analysis run.
     * In theory, any code legal in a FindBugs detector should
     * work.
     *
     * @param runnable a RunnableWithExceptions object whose run() method has some JUnit test code
     * @throws Throwable
     */
    protected void executeFindBugsTest(final RunnableWithExceptions runnable) throws Throwable {
        TestRunnerThread thread = new TestRunnerThread(runnable);

        thread.start();
        try {
            thread.join();
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status and preserve the cause
            // instead of swallowing both.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while waiting for test runner thread", e);
        }

        if (thread.getDetectorAdapter() == null) {
            throw new IllegalStateException("Test code did not complete");
        }
        thread.getDetectorAdapter().finishTest();
    }
}
| findbugs/src/junit/edu/umd/cs/findbugs/FindBugsTestCase.java | /*
* FindBugs - Find Bugs in Java programs
* Copyright (C) 2003-2007 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import edu.umd.cs.findbugs.config.UserPreferences;
import edu.umd.cs.findbugs.plan.ExecutionPlan;
import junit.framework.TestCase;
/**
* Abstract base class for TestCase classes that need to
* run in the context of a FindBugs2 object doing a full
* execution. Ensures that things like AnalysisCache,
* AnalysisContext, etc. are fully initialized.
*
* <p> Is this mock objects? Or is this just a hack?
* Probably the latter :-)
*
* @author David Hovemeyer
*/
public abstract class FindBugsTestCase extends TestCase {
/**
* Data of an empty class in the default package called "Empty".
*/
public static final byte[] EMPTY_CLASS_DATA = {
(byte) 0xca, (byte) 0xfe, (byte) 0xba, (byte) 0xbe,
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x32,
(byte) 0x00, (byte) 0x0d, (byte) 0x0a, (byte) 0x00,
(byte) 0x03, (byte) 0x00, (byte) 0x0a, (byte) 0x07,
(byte) 0x00, (byte) 0x0b, (byte) 0x07, (byte) 0x00,
(byte) 0x0c, (byte) 0x01, (byte) 0x00, (byte) 0x06,
(byte) 0x3c, (byte) 0x69, (byte) 0x6e, (byte) 0x69,
(byte) 0x74, (byte) 0x3e, (byte) 0x01, (byte) 0x00,
(byte) 0x03, (byte) 0x28, (byte) 0x29, (byte) 0x56,
(byte) 0x01, (byte) 0x00, (byte) 0x04, (byte) 0x43,
(byte) 0x6f, (byte) 0x64, (byte) 0x65, (byte) 0x01,
(byte) 0x00, (byte) 0x0f, (byte) 0x4c, (byte) 0x69,
(byte) 0x6e, (byte) 0x65, (byte) 0x4e, (byte) 0x75,
(byte) 0x6d, (byte) 0x62, (byte) 0x65, (byte) 0x72,
(byte) 0x54, (byte) 0x61, (byte) 0x62, (byte) 0x6c,
(byte) 0x65, (byte) 0x01, (byte) 0x00, (byte) 0x0a,
(byte) 0x53, (byte) 0x6f, (byte) 0x75, (byte) 0x72,
(byte) 0x63, (byte) 0x65, (byte) 0x46, (byte) 0x69,
(byte) 0x6c, (byte) 0x65, (byte) 0x01, (byte) 0x00,
(byte) 0x0a, (byte) 0x45, (byte) 0x6d, (byte) 0x70,
(byte) 0x74, (byte) 0x79, (byte) 0x2e, (byte) 0x6a,
(byte) 0x61, (byte) 0x76, (byte) 0x61, (byte) 0x0c,
(byte) 0x00, (byte) 0x04, (byte) 0x00, (byte) 0x05,
(byte) 0x01, (byte) 0x00, (byte) 0x05, (byte) 0x45,
(byte) 0x6d, (byte) 0x70, (byte) 0x74, (byte) 0x79,
(byte) 0x01, (byte) 0x00, (byte) 0x10, (byte) 0x6a,
(byte) 0x61, (byte) 0x76, (byte) 0x61, (byte) 0x2f,
(byte) 0x6c, (byte) 0x61, (byte) 0x6e, (byte) 0x67,
(byte) 0x2f, (byte) 0x4f, (byte) 0x62, (byte) 0x6a,
(byte) 0x65, (byte) 0x63, (byte) 0x74, (byte) 0x00,
(byte) 0x21, (byte) 0x00, (byte) 0x02, (byte) 0x00,
(byte) 0x03, (byte) 0x00, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00,
(byte) 0x01, (byte) 0x00, (byte) 0x04, (byte) 0x00,
(byte) 0x05, (byte) 0x00, (byte) 0x01, (byte) 0x00,
(byte) 0x06, (byte) 0x00, (byte) 0x00, (byte) 0x00,
(byte) 0x1d, (byte) 0x00, (byte) 0x01, (byte) 0x00,
(byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00,
(byte) 0x05, (byte) 0x2a, (byte) 0xb7, (byte) 0x00,
(byte) 0x01, (byte) 0xb1, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x07,
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x06,
(byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x01,
(byte) 0x00, (byte) 0x08, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x02, (byte) 0x00, (byte) 0x09,
};
/**
 * Worker thread that runs one complete FindBugs analysis so that test code
 * (wrapped in a RunnableWithExceptions) executes inside a detector.  The
 * outcome is exposed through the JUnitDetectorAdapter captured after the
 * analysis finishes.
 */
private final class TestRunnerThread extends Thread {
    // The test code to execute from inside the fake detector.
    private RunnableWithExceptions runnable;
    // Set once the analysis completes; knows whether the test code passed.
    private JUnitDetectorAdapter detectorAdapter;

    private TestRunnerThread(RunnableWithExceptions runnable) {
        this.runnable = runnable;
    }

    /**
     * @return Returns the detectorAdapter, or null if the analysis never
     *         completed.
     */
    public JUnitDetectorAdapter getDetectorAdapter() {
        return detectorAdapter;
    }

    /* (non-Javadoc)
     * @see java.lang.Thread#run()
     */
    @Override
    public void run() {
        try {
            runTest(runnable);
        } catch (Exception e) {
            // Hmm...
            System.err.println("Exception running test:");
            e.printStackTrace();
        }
    }

    /**
     * Runs the given test code inside a freshly configured FindBugs2 engine.
     * A temporary directory containing a single class file is created and
     * analyzed so that the JUnitDetectorAdapter detector is invoked.
     *
     * @param runnable the test code to execute
     * @throws IOException if the temporary analysis target cannot be created
     * @throws InterruptedException if the analysis is interrupted
     */
    private void runTest(RunnableWithExceptions runnable) throws IOException, InterruptedException {
        // Create temporary directory in filesystem
        File tmpdir = File.createTempFile("fbtest", null);
        if (!tmpdir.delete() || !tmpdir.mkdir()) {
            throw new IOException("Could not create temp dir");
        }
        File tmpfile = null;
        try {
            // Create a class file to analyze
            tmpfile = createEmptyClassFile(tmpdir);
            // Unfortunately there's quite a bit of gobbledygook required
            // to set up a FindBugs2.
            FindBugs2 engine = new FindBugs2();
            engine.setBugReporter(new PrintingBugReporter());
            // Analyze the temporary directory we just created
            Project project = new Project();
            project.addFile(tmpdir.getAbsolutePath());
            engine.setProject(project);
            // Register a fake plugin whose only detector is the adapter
            // that will run our test code.
            DetectorFactoryCollection dfc = new DetectorFactoryCollection();
            DetectorFactoryCollection.resetInstance(dfc);
            Plugin fakePlugin = new Plugin("edu.umd.cs.findbugs.fakeplugin", null);
            fakePlugin.setEnabled(true);
            dfc.setPlugins(new Plugin[]{fakePlugin});
            DetectorFactory detectorFactory =
                new DetectorFactory(fakePlugin, JUnitDetectorAdapter.class, true, "fast", "", "");
            fakePlugin.addDetectorFactory(detectorFactory);
            dfc.registerDetector(detectorFactory);
            // Sanity check: the detector must actually be visible to the engine.
            if (!dfc.factoryIterator().hasNext() || !fakePlugin.detectorFactoryIterator().hasNext()) {
                throw new IllegalStateException();
            }
            engine.setDetectorFactoryCollection(dfc);
            engine.setUserPreferences(UserPreferences.createDefaultUserPreferences());
            JUnitDetectorAdapter.setRunnable(runnable);
            engine.execute();
            // Get a handle to the JUnitDetectorAdapter, since it is the
            // object that knows whether or not the test code actually passed
            // or failed.
            detectorAdapter = JUnitDetectorAdapter.instance();
        } finally {
            // Best-effort cleanup of the temporary analysis target.
            if (tmpfile != null) {
                tmpfile.delete();
            }
            tmpdir.delete();
        }
    }

    /**
     * Writes a minimal class file (the EMPTY_CLASS_DATA bytes) into the given
     * directory so the engine has something to analyze.
     *
     * @param tmpdir the directory to create the class file in
     * @return the created class file
     * @throws IOException if the file cannot be written
     */
    private File createEmptyClassFile(File tmpdir) throws IOException {
        File outFile = new File(tmpdir, "Empty.class");
        OutputStream out = new FileOutputStream(outFile);
        try {
            out.write(EMPTY_CLASS_DATA);
        } finally {
            out.close();
        }
        return outFile;
    }
}
/**
 * Executes the given test code inside a FindBugs analysis that runs on a
 * background thread, then reports the outcome in the calling (JUnit)
 * thread via the detector adapter's finishTest().
 *
 * @param runnable the test code to run inside the analysis
 * @throws Throwable any failure raised by the test code
 * @throws IllegalStateException if the analysis thread was interrupted or
 *         never produced a detector adapter (test code did not complete)
 */
protected void executeFindBugsTest(final RunnableWithExceptions runnable) throws Throwable {
    TestRunnerThread thread = new TestRunnerThread(runnable);
    thread.start();
    try {
        thread.join();
    } catch (InterruptedException e) {
        // Restore the interrupt status and preserve the cause instead of
        // silently discarding both.
        Thread.currentThread().interrupt();
        throw new IllegalStateException("Interrupted while waiting for test thread", e);
    }
    if (thread.getDetectorAdapter() == null) {
        throw new IllegalStateException("Test code did not complete");
    }
    // Rethrows any assertion failure captured during the analysis.
    thread.getDetectorAdapter().finishTest();
}
}
| Added some javadoc.
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@8345 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
| findbugs/src/junit/edu/umd/cs/findbugs/FindBugsTestCase.java | Added some javadoc. |
|
Java | lgpl-2.1 | 1df6e0093d9df802137a862fa3df0b6baf0a92e8 | 0 | certusoft/swingx,certusoft/swingx | /*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx;
import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.Frame;
import java.util.Locale;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.JToolBar;
import javax.swing.plaf.basic.BasicOptionPaneUI;
import org.jdesktop.swingx.action.BoundAction;
import org.jdesktop.swingx.plaf.LookAndFeelAddons;
import org.jdesktop.swingx.plaf.UIManagerExt;
/**
* First cut for enhanced Dialog. The idea is to have a pluggable content
* from which the dialog auto-configures all its "dialogueness".
*
* <ul>
* <li> accepts a content and configures itself from content's properties -
* replaces the execute action from the appropriate action in content's action map (if any)
* and set's its title from the content's name.
* <li> registers stand-in actions for close/execute with the dialog's RootPane
* <li> registers keyStrokes for esc/enter to trigger the close/execute actions
* <li> takes care of building the button panel using the close/execute actions.
* </ul>
*
* <ul>
* <li>TODO: add link to forum discussion, wiki summary?
* <li>PENDING: add support for vetoing the close.
* <li>PENDING: add complete set of constructors
* <li>PENDING: add windowListener to delegate to close action
* </ul>
*
* @author Jeanette Winzenburg
* @author Karl Schaefer
*/
public class JXDialog extends JDialog {

    static {
        // Hack to enforce loading of SwingX framework ResourceBundle
        LookAndFeelAddons.getAddon();
    }

    /** Action-map key of the action performing the dialog's main function. */
    public static final String EXECUTE_ACTION_COMMAND = "execute";

    /** Action-map key of the action that closes the dialog. */
    public static final String CLOSE_ACTION_COMMAND = "close";

    /** Prefix prepended to keys when looking up localized values in the UIManager. */
    public static final String UIPREFIX = "XDialog.";

    /** The content component this dialog is auto-configured from. */
    protected JComponent content;

    /**
     * Creates a non-modal dialog with the given component as
     * content and without specified owner. A shared, hidden frame will be
     * set as the owner of the dialog.
     * <p>
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(JComponent content) {
        super();
        setContent(content);
    }

    /**
     * Creates a non-modal dialog with the given component as content and the
     * specified <code>Frame</code> as owner.
     * <p>
     * @param frame the owner
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(Frame frame, JComponent content) {
        super(frame);
        setContent(content);
    }

    /**
     * Creates a non-modal dialog with the given component as content and the
     * specified <code>Dialog</code> as owner.
     * <p>
     * @param dialog the owner
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(Dialog dialog, JComponent content) {
        super(dialog);
        setContent(content);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected JXRootPane createRootPane() {
        return new JXRootPane();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public JXRootPane getRootPane() {
        // Covariant override: the root pane is always the JXRootPane
        // produced by createRootPane().
        return (JXRootPane) super.getRootPane();
    }

    /**
     * Sets the status bar property on the underlying {@code JXRootPane}.
     *
     * @param statusBar
     *            the {@code JXStatusBar} which is to be the status bar
     * @see #getStatusBar()
     * @see JXRootPane#setStatusBar(JXStatusBar)
     */
    public void setStatusBar(JXStatusBar statusBar) {
        getRootPane().setStatusBar(statusBar);
    }

    /**
     * Returns the value of the status bar property from the underlying
     * {@code JXRootPane}.
     *
     * @return the {@code JXStatusBar} which is the current status bar
     * @see #setStatusBar(JXStatusBar)
     * @see JXRootPane#getStatusBar()
     */
    public JXStatusBar getStatusBar() {
        return getRootPane().getStatusBar();
    }

    /**
     * Sets the tool bar property on the underlying {@code JXRootPane}.
     *
     * @param toolBar
     *            the {@code JToolBar} which is to be the tool bar
     * @see #getToolBar()
     * @see JXRootPane#setToolBar(JToolBar)
     */
    public void setToolBar(JToolBar toolBar) {
        getRootPane().setToolBar(toolBar);
    }

    /**
     * Returns the value of the tool bar property from the underlying
     * {@code JXRootPane}.
     *
     * @return the {@code JToolBar} which is the current tool bar
     * @see #setToolBar(JToolBar)
     * @see JXRootPane#getToolBar()
     */
    public JToolBar getToolBar() {
        return getRootPane().getToolBar();
    }

    /**
     * Configures this dialog from the given content: registers default
     * close/execute actions (replaced by matching actions from the content's
     * action map, if present), builds the component hierarchy and infers the
     * title.
     * <p>
     * PENDING: widen access - this could be public to make the content really
     * pluggable?
     *
     * @param content the component to show and to auto-configure from
     * @throws IllegalStateException if the content was already set
     */
    private void setContent(JComponent content) {
        if (this.content != null) {
            throw new IllegalStateException("content must not be set more than once");
        }
        initActions();
        Action contentCloseAction = content.getActionMap().get(CLOSE_ACTION_COMMAND);
        if (contentCloseAction != null) {
            putAction(CLOSE_ACTION_COMMAND, contentCloseAction);
        }
        Action contentExecuteAction = content.getActionMap().get(EXECUTE_ACTION_COMMAND);
        if (contentExecuteAction != null) {
            putAction(EXECUTE_ACTION_COMMAND, contentExecuteAction);
        }
        this.content = content;
        build();
        setTitleFromContent();
    }

    /**
     * Infers and sets this dialog's title from the content.
     * Does nothing if content is null.
     *
     * Here: uses the content's name as title.
     */
    protected void setTitleFromContent() {
        if (content == null) return;
        setTitle(content.getName());
    }

    /**
     * Builds the dialog's component hierarchy from the content and the
     * button panel.
     *
     * pre: content != null.
     */
    private void build() {
        JComponent contentBox = new Box(BoxLayout.PAGE_AXIS);
        contentBox.add(content);
        JComponent buttonPanel = createButtonPanel();
        contentBox.add(buttonPanel);
        contentBox.setBorder(BorderFactory.createEmptyBorder(14, 14, 14, 14));
//        content.applyComponentOrientation(ComponentOrientation.RIGHT_TO_LEFT);
//        fieldPanel.setAlignmentX();
//      buttonPanel.setAlignmentX(Component.RIGHT_ALIGNMENT);
        add(contentBox);
    }

    /**
     * {@inheritDoc}
     *
     * Overridden to check if content is available. <p>
     * PENDING: doesn't make sense - the content is immutable and guaranteed
     * to be not null.
     */
    @Override
    public void setVisible(boolean visible) {
        if (content == null) throw
            new IllegalStateException("content must be built before showing the dialog");
        super.setVisible(visible);
    }

//------------------------ dynamic locale support

    /**
     * {@inheritDoc} <p>
     *
     * Overridden to set the content's Locale and then update
     * this dialog's internal state.
     */
    @Override
    public void setLocale(Locale l) {
        /*
         * NOTE: this is called from super's constructor as one of the
         * first methods (prior to setting the rootPane!). So back out
         * if the content has not been set yet.
         */
        if (content != null) {
            content.setLocale(l);
            updateLocaleState(l);
        }
        super.setLocale(l);
    }

    /**
     * Updates this dialog's locale-dependent state.
     *
     * Here: updates title and actions.
     *
     * @see #setLocale(Locale)
     */
    protected void updateLocaleState(Locale locale) {
        setTitleFromContent();
        Object[] keys = getRootPane().getActionMap().allKeys();
        if (keys == null) {
            // ActionMap.allKeys() may return null when neither the map nor
            // its parents contain any bindings - guard against an NPE.
            return;
        }
        for (Object key : keys) {
            if (key instanceof String) {
                Action contentAction = content.getActionMap().get(key);
                Action rootPaneAction = getAction(key);
                if ((!rootPaneAction.equals(contentAction))) {
                    String keyString = getUIString((String) key, locale);
                    if (!key.equals(keyString)) {
                        rootPaneAction.putValue(Action.NAME, keyString);
                    }
                }
            }
        }
    }

    /**
     * The callback method executed when closing the dialog. <p>
     * Here: calls dispose.
     */
    public void doClose() {
        dispose();
    }

    /**
     * Registers the default close action under both the close and execute
     * keys; content-provided actions replace these in setContent.
     */
    private void initActions() {
        Action defaultAction = createCloseAction();
        putAction(CLOSE_ACTION_COMMAND, defaultAction);
        putAction(EXECUTE_ACTION_COMMAND, defaultAction);
    }

    private Action createCloseAction() {
        String actionName = getUIString(CLOSE_ACTION_COMMAND);
        BoundAction action = new BoundAction(actionName,
                CLOSE_ACTION_COMMAND);
        action.registerCallback(this, "doClose");
        return action;
    }

    /**
     * create the dialog button controls.
     *
     * @return panel containing button controls
     */
    protected JComponent createButtonPanel() {
        // PENDING: this is a hack until we have a dedicated ButtonPanel!
        JPanel panel = new JPanel(new BasicOptionPaneUI.ButtonAreaLayout(true, 6))
        {
            @Override
            public Dimension getMaximumSize() {
                return getPreferredSize();
            }
        };
        panel.setBorder(BorderFactory.createEmptyBorder(9, 0, 0, 0));
        Action executeAction = getAction(EXECUTE_ACTION_COMMAND);
        Action closeAction = getAction(CLOSE_ACTION_COMMAND);
        JButton defaultButton = new JButton(executeAction);
        panel.add(defaultButton);
        getRootPane().setDefaultButton(defaultButton);
        // only add a separate close button if execute and close differ
        if (executeAction != closeAction) {
            JButton b = new JButton(closeAction);
            panel.add(b);
            getRootPane().setCancelButton(b);
        }
        return panel;
    }

    /**
     * convenience wrapper to access rootPane's actionMap.
     * @param key
     * @param action
     */
    private void putAction(Object key, Action action) {
        getRootPane().getActionMap().put(key, action);
    }

    /**
     * convenience wrapper to access rootPane's actionMap.
     *
     * @param key
     * @return root pane's <code>ActionMap</code>
     */
    private Action getAction(Object key) {
        return getRootPane().getActionMap().get(key);
    }

    /**
     * Returns a potentially localized value from the UIManager. The given key
     * is prefixed by this component's <code>UIPREFIX</code> before doing the
     * lookup. The lookup respects this table's current <code>locale</code>
     * property. Returns the key, if no value is found.
     *
     * @param key the bare key to look up in the UIManager.
     * @return the value mapped to UIPREFIX + key or key if no value is found.
     */
    protected String getUIString(String key) {
        return getUIString(key, getLocale());
    }

    /**
     * Returns a potentially localized value from the UIManager for the
     * given locale. The given key
     * is prefixed by this component's <code>UIPREFIX</code> before doing the
     * lookup. Returns the key, if no value is found.
     *
     * @param key the bare key to look up in the UIManager.
     * @param locale the locale use for lookup
     * @return the value mapped to UIPREFIX + key in the given locale,
     *    or key if no value is found.
     */
    protected String getUIString(String key, Locale locale) {
        String text = UIManagerExt.getString(UIPREFIX + key, locale);
        return text != null ? text : key;
    }
}
| src/java/org/jdesktop/swingx/JXDialog.java | /*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx;
import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.event.KeyEvent;
import java.util.Locale;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import javax.swing.plaf.basic.BasicOptionPaneUI;
import org.jdesktop.swingx.action.BoundAction;
import org.jdesktop.swingx.plaf.LookAndFeelAddons;
import org.jdesktop.swingx.plaf.UIManagerExt;
/**
* First cut for enhanced Dialog. The idea is to have a pluggable content
* from which the dialog auto-configures all its "dialogueness".
*
* <ul>
* <li> accepts a content and configures itself from content's properties -
* replaces the execute action from the appropriate action in content's action map (if any)
* and set's its title from the content's name.
* <li> registers stand-in actions for close/execute with the dialog's RootPane
* <li> registers keyStrokes for esc/enter to trigger the close/execute actions
* <li> takes care of building the button panel using the close/execute actions.
* </ul>
*
* <ul>
* <li>TODO: add link to forum discussion, wiki summary?
* <li>PENDING: add support for vetoing the close.
* <li>PENDING: add complete set of constructors
* <li>PENDING: add windowListener to delegate to close action
* </ul>
*
* @author Jeanette Winzenburg
*/
public class JXDialog extends JDialog {

    static {
        // Hack to enforce loading of SwingX framework ResourceBundle
        LookAndFeelAddons.getAddon();
    }

    /** Action-map key of the action performing the dialog's main function. */
    public static final String EXECUTE_ACTION_COMMAND = "execute";
    /** Action-map key of the action that closes the dialog. */
    public static final String CLOSE_ACTION_COMMAND = "close";
    /** Prefix prepended to keys when looking up localized values in the UIManager. */
    public static final String UIPREFIX = "XDialog.";

    /** The content component this dialog is auto-configured from. */
    protected JComponent content;

    /**
     * Creates a non-modal dialog with the given component as
     * content and without specified owner. A shared, hidden frame will be
     * set as the owner of the dialog.
     * <p>
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(JComponent content) {
        super();
        setContent(content);
    }

    /**
     * Creates a non-modal dialog with the given component as content and the
     * specified <code>Frame</code> as owner.
     * <p>
     * @param frame the owner
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(Frame frame, JComponent content) {
        super(frame);
        setContent(content);
    }

    /**
     * Creates a non-modal dialog with the given component as content and the
     * specified <code>Dialog</code> as owner.
     * <p>
     * @param dialog the owner
     * @param content the component to show and to auto-configure from.
     */
    public JXDialog(Dialog dialog, JComponent content) {
        super(dialog);
        setContent(content);
    }

    /**
     * Configures this dialog from the given content: registers default
     * close/execute actions (replaced by matching actions from the content's
     * action map, if present), builds the component hierarchy and infers the
     * title.
     * <p>
     * PENDING: widen access - this could be public to make the content really
     * pluggable?
     *
     * @param content the component to show and to auto-configure from
     */
    private void setContent(JComponent content) {
        if (this.content != null) {
            throw new IllegalStateException("content must not be set more than once");
        }
        initActions();
        Action contentCloseAction = content.getActionMap().get(CLOSE_ACTION_COMMAND);
        if (contentCloseAction != null) {
            putAction(CLOSE_ACTION_COMMAND, contentCloseAction);
        }
        Action contentExecuteAction = content.getActionMap().get(EXECUTE_ACTION_COMMAND);
        if (contentExecuteAction != null) {
            putAction(EXECUTE_ACTION_COMMAND, contentExecuteAction);
        }
        this.content = content;
        build();
        setTitleFromContent();
    }

    /**
     * Infers and sets this dialog's title from the content.
     * Does nothing if content is null.
     *
     * Here: uses the content's name as title.
     */
    protected void setTitleFromContent() {
        if (content == null) return;
        setTitle(content.getName());
    }

    /**
     * Builds the dialog's component hierarchy from the content and the
     * button panel.
     *
     * pre: content != null.
     */
    private void build() {
        JComponent contentBox = new Box(BoxLayout.PAGE_AXIS);
        contentBox.add(content);
        JComponent buttonPanel = createButtonPanel();
        contentBox.add(buttonPanel);
        contentBox.setBorder(BorderFactory.createEmptyBorder(14, 14, 14, 14));
//        content.applyComponentOrientation(ComponentOrientation.RIGHT_TO_LEFT);
//        fieldPanel.setAlignmentX();
//      buttonPanel.setAlignmentX(Component.RIGHT_ALIGNMENT);
        add(contentBox);
    }

    /**
     * {@inheritDoc}
     *
     * Overridden to check if content is available. <p>
     * PENDING: doesn't make sense - the content is immutable and guaranteed
     * to be not null.
     */
    @Override
    public void setVisible(boolean visible) {
        if (content == null) throw
            new IllegalStateException("content must be built before showing the dialog");
        super.setVisible(visible);
    }

//------------------------ dynamic locale support

    /**
     * {@inheritDoc} <p>
     *
     * Overridden to set the content's Locale and then update
     * this dialog's internal state.
     */
    @Override
    public void setLocale(Locale l) {
        /*
         * NOTE: this is called from super's constructor as one of the
         * first methods (prior to setting the rootPane!). So back out
         * if the content has not been set yet.
         */
        if (content != null) {
            content.setLocale(l);
            updateLocaleState(l);
        }
        super.setLocale(l);
    }

    /**
     * Updates this dialog's locale-dependent state.
     *
     * Here: updates title and actions.
     *
     * @see #setLocale(Locale)
     */
    protected void updateLocaleState(Locale locale) {
        setTitleFromContent();
        for (Object key : getRootPane().getActionMap().allKeys()) {
            if (key instanceof String) {
                Action contentAction = content.getActionMap().get(key);
                Action rootPaneAction = getAction(key);
                if ((!rootPaneAction.equals(contentAction))) {
                    String keyString = getUIString((String) key, locale);
                    if (!key.equals(keyString)) {
                        rootPaneAction.putValue(Action.NAME, keyString);
                    }
                }
            }
        }
    }

    /**
     * The callback method executed when closing the dialog. <p>
     * Here: calls dispose.
     */
    public void doClose() {
        dispose();
    }

    // registers the default close action under both close and execute keys;
    // content-provided actions replace these in setContent
    private void initActions() {
        Action defaultAction = createCloseAction();
        putAction(CLOSE_ACTION_COMMAND, defaultAction);
        putAction(EXECUTE_ACTION_COMMAND, defaultAction);
    }

    private Action createCloseAction() {
        String actionName = getUIString(CLOSE_ACTION_COMMAND);
        BoundAction action = new BoundAction(actionName,
                CLOSE_ACTION_COMMAND);
        action.registerCallback(this, "doClose");
        return action;
    }

    /**
     * create the dialog button controls.
     *
     * Also binds ENTER/ESCAPE to the execute/close actions and makes the
     * execute button the root pane's default button.
     *
     * @return panel containing button controls
     */
    protected JComponent createButtonPanel() {
        // PENDING: this is a hack until we have a dedicated ButtonPanel!
        JPanel panel = new JPanel(new BasicOptionPaneUI.ButtonAreaLayout(true, 6))
        {
            @Override
            public Dimension getMaximumSize() {
                return getPreferredSize();
            }
        };
        panel.setBorder(BorderFactory.createEmptyBorder(9, 0, 0, 0));
        Action executeAction = getAction(EXECUTE_ACTION_COMMAND);
        Action closeAction = getAction(CLOSE_ACTION_COMMAND);
        JButton findButton = new JButton(executeAction);
        panel.add(findButton);
        // only add a separate close button if execute and close differ
        if (executeAction != closeAction) {
            panel.add(new JButton(closeAction));
        }
        KeyStroke enterKey = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0, false);
        KeyStroke escapeKey = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false);
        InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
        inputMap.put(enterKey, EXECUTE_ACTION_COMMAND);
        inputMap.put(escapeKey, CLOSE_ACTION_COMMAND);
        getRootPane().setDefaultButton(findButton);
        return panel;
    }

    /**
     * convenience wrapper to access rootPane's actionMap.
     * @param key
     * @param action
     */
    private void putAction(Object key, Action action) {
        getRootPane().getActionMap().put(key, action);
    }

    /**
     * convenience wrapper to access rootPane's actionMap.
     *
     * @param key
     * @return root pane's <code>ActionMap</code>
     */
    private Action getAction(Object key) {
        return getRootPane().getActionMap().get(key);
    }

    /**
     * Returns a potentially localized value from the UIManager. The given key
     * is prefixed by this component's <code>UIPREFIX</code> before doing the
     * lookup. The lookup respects this table's current <code>locale</code>
     * property. Returns the key, if no value is found.
     *
     * @param key the bare key to look up in the UIManager.
     * @return the value mapped to UIPREFIX + key or key if no value is found.
     */
    protected String getUIString(String key) {
        return getUIString(key, getLocale());
    }

    /**
     * Returns a potentially localized value from the UIManager for the
     * given locale. The given key
     * is prefixed by this component's <code>UIPREFIX</code> before doing the
     * lookup. Returns the key, if no value is found.
     *
     * @param key the bare key to look up in the UIManager.
     * @param locale the locale use for lookup
     * @return the value mapped to UIPREFIX + key in the given locale,
     *    or key if no value is found.
     */
    protected String getUIString(String key, Locale locale) {
        String text = UIManagerExt.getString(UIPREFIX + key, locale);
        return text != null ? text : key;
    }
}
| Bugs 1016, 1017: JXDialog now uses the JXRootPane at its root pane. StatusBar delegate methods have been added. | src/java/org/jdesktop/swingx/JXDialog.java | Bugs 1016, 1017: JXDialog now uses the JXRootPane at its root pane. StatusBar delegate methods have been added. |
|
Java | lgpl-2.1 | 32ce5c7048a0b6f7951242d8b6c1fbc5ac3b3d47 | 0 | samskivert/samskivert,samskivert/samskivert | //
// $Id: AttachableURLFactory.java,v 1.1 2003/07/09 18:44:11 ray Exp $
package com.samskivert.net;
import java.net.URL;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;
import java.util.HashMap;
import com.samskivert.Log;
/**
* Allows other entities in an application to register URLStreamHandler
* classes for protocols of their own making.
*/
public class AttachableURLFactory implements URLStreamHandlerFactory
{
    /**
     * Register a URL handler.
     * <p>
     * The first call installs this factory as the JVM-wide
     * {@link URLStreamHandlerFactory}; that installation can only happen once
     * per JVM, and if another factory is already registered the resulting
     * Error is allowed to propagate to the caller. The method is synchronized
     * so that concurrent first calls cannot race on the factory installation
     * or the handler table.
     *
     * @param protocol the protocol to register.
     * @param handlerClass a Class of type java.net.URLStreamHandler
     * @throws IllegalArgumentException if the supplied class is not a
     * java.net.URLStreamHandler.
     */
    public static synchronized void attachHandler (String protocol, Class handlerClass)
    {
        if (!URLStreamHandler.class.isAssignableFrom(handlerClass)) {
            throw new IllegalArgumentException(
                "Specified class is not a java.net.URLStreamHandler.");
        }

        // set up the factory on first use
        if (_handlers == null) {
            _handlers = new HashMap();

            // There are two ways to do this.
            // Method 1, which is the only one that seems to work under
            // Java Web Start, is to register a factory. This can throw an
            // Error if another factory is already registered. We let that
            // error bubble on back.
            URL.setURLStreamHandlerFactory(new AttachableURLFactory());

            // Method 2 seems like a better idea but doesn't work under
            // Java Web Start. We add on a property that registers this
            // very class as the handler for the resource property. It
            // would be instantiated with Class.forName().
            // (And I did check, it's not dasho that is preventing this
            // from working under JWS, it's something else.)
            /*
            // dug up from java.net.URL
            String HANDLER_PROP = "java.protocol.handler.pkgs";
            String prop = System.getProperty(HANDLER_PROP, "");
            if (!"".equals(prop)) {
                prop += "|";
            }
            prop += "com.threerings";
            System.setProperty(HANDLER_PROP, prop);
            */
        }

        _handlers.put(protocol.toLowerCase(), handlerClass);
    }

    /**
     * Do not let others instantiate us.
     */
    private AttachableURLFactory ()
    {
    }

    // documentation inherited from interface URLStreamHandlerFactory
    public URLStreamHandler createURLStreamHandler (String protocol)
    {
        Class handler;
        // the URL class may call us from an arbitrary thread; lock on the
        // same monitor as the static synchronized attachHandler() so the
        // handler table is safely published
        synchronized (AttachableURLFactory.class) {
            handler = (Class) _handlers.get(protocol.toLowerCase());
        }
        if (handler != null) {
            try {
                return (URLStreamHandler) handler.newInstance();
            } catch (Exception e) {
                Log.warning("Unable to instantiate URLStreamHandler" +
                    " [protocol=" + protocol + ", cause=" + e + "].");
            }
        }
        return null;
    }

    /** A mapping of protocol name to handler classes. Guarded by the class monitor. */
    protected static HashMap _handlers;
}
| projects/samskivert/src/java/com/samskivert/net/AttachableURLFactory.java | //
// $Id: AttachableURLFactory.java,v 1.1 2003/07/09 18:44:11 ray Exp $
package com.samskivert.net;
import java.net.URL;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;
import java.util.HashMap;
import com.samskivert.Log;
/**
* Allows other entities in an application to register URLStreamHandler
* classes for protocols of their own making.
*/
public class AttachableURLFactory implements URLStreamHandlerFactory
{
    /**
     * Register a URL handler.
     *
     * The first call installs this factory as the JVM-wide
     * URLStreamHandlerFactory; that installation can only happen once per JVM.
     *
     * @param protocol the protocol to register.
     * @param handlerClass a Class of type java.net.URLStreamHandler
     */
    public static void attachHandler (String protocol, Class handlerClass)
    {
        if (!URLStreamHandler.class.isAssignableFrom(handlerClass)) {
            throw new IllegalArgumentException(
                "Specified class is not a java.net.URLStreamHandler.");
        }

        // set up the factory.
        if (_handlers == null) {
            _handlers = new HashMap();

            // this could throw an Error if another factory is already
            // registered. We let that error bubble on back.
            URL.setURLStreamHandlerFactory(new AttachableURLFactory());
        }

        _handlers.put(protocol.toLowerCase(), handlerClass);
    }

    /**
     * Do not let others instantiate us.
     */
    private AttachableURLFactory ()
    {
    }

    // documentation inherited from interface URLStreamHandlerFactory
    public URLStreamHandler createURLStreamHandler (String protocol)
    {
        Class handler = (Class) _handlers.get(protocol.toLowerCase());
        if (handler != null) {
            try {
                // reflectively create a fresh handler instance per request
                return (URLStreamHandler) handler.newInstance();
            } catch (Exception e) {
                Log.warning("Unable to instantiate URLStreamHandler" +
                    " [protocol=" + protocol + ", cause=" + e + "].");
            }
        }
        return null;
    }

    /** A mapping of protocol name to handler classes. */
    protected static HashMap _handlers;
}
| Migrated a comment on registering protocol handlers into the place where
we actually register protocol handlers.
git-svn-id: 64ebf368729f38804935acb7146e017e0f909c6b@1488 6335cc39-0255-0410-8fd6-9bcaacd3b74c
| projects/samskivert/src/java/com/samskivert/net/AttachableURLFactory.java | Migrated a comment on registering protocol handlers into the place where we actually register protocol handlers. |
|
Java | lgpl-2.1 | b0d5b05c28b2d98ead2b9aaaacc0db50ae1d2cf2 | 0 | deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3 | //$HeadURL$
/*----------------------------------------------------------------------------
This file is part of deegree, http://deegree.org/
Copyright (C) 2001-2012 by:
- Department of Geography, University of Bonn -
and
- lat/lon GmbH -
and
- Occam Labs UG (haftungsbeschränkt) -
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 2.1 of the License, or (at your option)
any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact information:
lat/lon GmbH
Aennchenstr. 19, 53177 Bonn
Germany
http://lat-lon.de/
Department of Geography, University of Bonn
Prof. Dr. Klaus Greve
Postfach 1147, 53001 Bonn
Germany
http://www.geographie.uni-bonn.de/deegree/
Occam Labs UG (haftungsbeschränkt)
Godesberger Allee 139, 53175 Bonn
Germany
e-mail: [email protected]
----------------------------------------------------------------------------*/
package org.deegree.workspace.standard;
import static org.deegree.workspace.ResourceStates.ResourceState.Built;
import static org.deegree.workspace.ResourceStates.ResourceState.Deactivated;
import static org.deegree.workspace.ResourceStates.ResourceState.Error;
import static org.deegree.workspace.ResourceStates.ResourceState.Initialized;
import static org.deegree.workspace.ResourceStates.ResourceState.Prepared;
import static org.deegree.workspace.ResourceStates.ResourceState.Scanned;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import org.deegree.workspace.Destroyable;
import org.deegree.workspace.ErrorHandler;
import org.deegree.workspace.Initializable;
import org.deegree.workspace.LocationHandler;
import org.deegree.workspace.PreparedResources;
import org.deegree.workspace.Resource;
import org.deegree.workspace.ResourceBuilder;
import org.deegree.workspace.ResourceIdentifier;
import org.deegree.workspace.ResourceInitException;
import org.deegree.workspace.ResourceLocation;
import org.deegree.workspace.ResourceManager;
import org.deegree.workspace.ResourceMetadata;
import org.deegree.workspace.ResourceProvider;
import org.deegree.workspace.ResourceStates;
import org.deegree.workspace.ResourceStates.ResourceState;
import org.deegree.workspace.Workspace;
import org.deegree.workspace.WorkspaceUtils;
import org.deegree.workspace.graph.ResourceGraph;
import org.deegree.workspace.graph.ResourceNode;
import org.slf4j.Logger;
/**
* Directory based workspace implementation.
*
* @author <a href="mailto:[email protected]">Andreas Schmitz</a>
*
* @since 3.4
*/
public class DefaultWorkspace implements Workspace {
private static final Logger LOG = getLogger( DefaultWorkspace.class );
private File directory;
private ClassLoader moduleClassLoader;
private List<ModuleInfo> wsModules;
private Map<Class<? extends ResourceProvider<? extends Resource>>, ResourceManager<? extends Resource>> resourceManagers;
private Map<ResourceIdentifier<? extends Resource>, ResourceMetadata<? extends Resource>> resourceMetadata;
private Map<ResourceIdentifier<? extends Resource>, Resource> resources;
private Map<Class<? extends Initializable>, Initializable> initializables = new HashMap<Class<? extends Initializable>, Initializable>();
private ResourceGraph graph;
private ErrorHandler errors = new ErrorHandler();
private LocationHandler locationHandler;
private ResourceStates states;
private boolean startedUp = false;
public DefaultWorkspace( File directory ) {
this.directory = directory;
}
/**
 * Starts the workspace, scans and prepares all resource configurations,
 * then builds and initializes them in dependency order. Failures are
 * recorded per resource (state Error + error handler) rather than aborting
 * the whole startup.
 */
@Override
public void initAll() {
    startup();
    errors.clear();
    scan();
    PreparedResources prepared = prepare();

    LOG.info( "--------------------------------------------------------------------------------" );
    LOG.info( "Building and initializing resources." );
    LOG.info( "--------------------------------------------------------------------------------" );

    // probably better to implement an insert bulk operation on the graph
    for ( ResourceMetadata<? extends Resource> md : prepared.getMetadata() ) {
        graph.insertNode( md );
    }

    // iterate in topological order: a resource is only built once all of
    // its dependencies reached the Initialized state
    outer: for ( ResourceMetadata<? extends Resource> md : graph.toSortedList() ) {
        if ( states.getState( md.getIdentifier() ) == Deactivated ) {
            continue;
        }
        for ( ResourceIdentifier<? extends Resource> dep : md.getDependencies() ) {
            if ( states.getState( dep ) != Initialized ) {
                states.setState( md.getIdentifier(), Error );
                LOG.error( "Dependency {} for resource {} failed to initialize.", dep, md );
                continue outer;
            }
        }
        LOG.info( "Building resource {}.", md.getIdentifier() );
        try {
            Resource res = prepared.getBuilder( md.getIdentifier() ).build();
            if ( res == null ) {
                errors.registerError( md.getIdentifier(), "Unable to prepare." );
                states.setState( md.getIdentifier(), Error );
                LOG.error( "Unable to build resource {}.", md.getIdentifier() );
                continue;
            }
            states.setState( md.getIdentifier(), Built );
            LOG.info( "Initializing resource {}.", md.getIdentifier() );
            res.init();
            states.setState( md.getIdentifier(), Initialized );
            resources.put( res.getMetadata().getIdentifier(), res );
        } catch ( Exception ex ) {
            // record the failure but keep going so that independent
            // resources still get a chance to initialize
            states.setState( md.getIdentifier(), Error );
            String msg = "Unable to build resource " + md.getIdentifier() + ": " + ex.getLocalizedMessage();
            errors.registerError( md.getIdentifier(), msg );
            LOG.error( msg );
            LOG.trace( "Stack trace:", ex );
        }
    }
}
@Override
public void destroy() {
List<ResourceMetadata<? extends Resource>> list = graph.toSortedList();
Collections.reverse( list );
for ( ResourceMetadata<? extends Resource> md : list ) {
Resource res = resources.get( md.getIdentifier() );
try {
if ( res != null ) {
LOG.info( "Shutting down {}.", md.getIdentifier() );
res.destroy();
}
} catch ( Exception e ) {
LOG.warn( "Unable to destroy resource {}: {}", md.getIdentifier(), e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
for ( ResourceManager<? extends Resource> mgr : resourceManagers.values() ) {
mgr.shutdown();
}
Iterator<Destroyable> it = ServiceLoader.load( Destroyable.class, moduleClassLoader ).iterator();
while ( it.hasNext() ) {
Destroyable init = it.next();
try {
init.destroy( this );
} catch ( Exception e ) {
LOG.error( "Could not destroy {}: {}", init.getClass().getSimpleName(), e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
moduleClassLoader = null;
resourceMetadata = null;
resources = null;
resourceManagers = null;
wsModules = null;
initializables.clear();
states = null;
locationHandler = null;
startedUp = false;
errors.clear();
}
private void initClassloader() {
// setup classloader
File modules = new File( directory, "modules" );
File classes = new File( modules, "classes/" );
moduleClassLoader = Thread.currentThread().getContextClassLoader();
if ( modules.exists() ) {
File[] fs = modules.listFiles();
if ( fs != null && fs.length > 0 ) {
LOG.info( "--------------------------------------------------------------------------------" );
LOG.info( "deegree modules (additional)" );
LOG.info( "--------------------------------------------------------------------------------" );
List<URL> urls = new ArrayList<URL>( fs.length );
if ( classes.isDirectory() ) {
LOG.info( "Added modules/classes/." );
try {
urls.add( classes.toURI().toURL() );
} catch ( MalformedURLException e ) {
LOG.warn( "Could not add modules/classes/ to classpath." );
}
}
for ( int i = 0; i < fs.length; ++i ) {
if ( fs[i].isFile() ) {
try {
URL url = fs[i].toURI().toURL();
if ( url.getFile().endsWith( ".jar" ) ) {
urls.add( url );
ModuleInfo moduleInfo = ModuleInfo.extractModuleInfo( url );
if ( moduleInfo != null ) {
LOG.info( " - " + moduleInfo );
wsModules.add( moduleInfo );
} else {
LOG.info( " - " + fs[i] + " (non-deegree)" );
}
}
} catch ( Exception e ) {
LOG.warn( "Module {} could not be loaded: {}", fs[i].getName(), e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
}
moduleClassLoader = new URLClassLoader( urls.toArray( new URL[urls.size()] ), moduleClassLoader );
} else {
LOG.info( "Not loading additional modules." );
}
} else {
LOG.info( "Not loading additional modules." );
}
}
@Override
public ClassLoader getModuleClassLoader() {
return moduleClassLoader;
}
@Override
public <T extends Resource> ResourceMetadata<T> getResourceMetadata( Class<? extends ResourceProvider<T>> providerClass,
String id ) {
return (ResourceMetadata<T>) resourceMetadata.get( new DefaultResourceIdentifier<T>( providerClass, id ) );
}
@Override
public <T extends Resource> T getResource( Class<? extends ResourceProvider<T>> providerClass, String id ) {
return (T) resources.get( new DefaultResourceIdentifier( providerClass, id ) );
}
/**
* @return the directory this workspace is based on, never <code>null</code>
*/
public File getLocation() {
return directory;
}
@Override
public <T extends ResourceManager<? extends Resource>> T getResourceManager( Class<T> managerClass ) {
for ( ResourceManager<?> mgr : resourceManagers.values() ) {
if ( mgr.getClass().equals( managerClass ) ) {
return (T) mgr;
}
}
return null;
}
@Override
public List<ResourceManager<? extends Resource>> getResourceManagers() {
return new ArrayList<ResourceManager<?>>( resourceManagers.values() );
}
@Override
public void startup() {
if ( startedUp ) {
return;
}
wsModules = new ArrayList<ModuleInfo>();
resourceManagers = new HashMap<Class<? extends ResourceProvider<? extends Resource>>, ResourceManager<? extends Resource>>();
resourceMetadata = new HashMap<ResourceIdentifier<? extends Resource>, ResourceMetadata<? extends Resource>>();
resources = new HashMap<ResourceIdentifier<? extends Resource>, Resource>();
initializables.clear();
graph = new ResourceGraph();
states = new ResourceStates();
locationHandler = new DefaultLocationHandler( directory, resourceManagers, states );
errors.clear();
initClassloader();
Iterator<Initializable> it = ServiceLoader.load( Initializable.class, moduleClassLoader ).iterator();
while ( it.hasNext() ) {
Initializable init = it.next();
try {
init.init( this );
initializables.put( init.getClass(), init );
} catch ( Exception e ) {
LOG.error( "Could not initialize {}: {}", init.getClass().getSimpleName(), e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
// setup managers
Iterator<ResourceManager> iter = ServiceLoader.load( ResourceManager.class, moduleClassLoader ).iterator();
while ( iter.hasNext() ) {
ResourceManager<?> mgr = iter.next();
LOG.info( "Found resource manager {}.", mgr.getClass().getSimpleName() );
resourceManagers.put( mgr.getMetadata().getProviderClass(), mgr );
LOG.info( "Starting up resource manager {}.", mgr.getClass().getSimpleName() );
// try/catch?
mgr.startup( this );
}
startedUp = true;
}
@Override
public <T extends Resource> T init( ResourceIdentifier<T> id, PreparedResources prepared ) {
if ( states.getState( id ) == Deactivated ) {
return null;
}
if ( prepared == null ) {
prepared = new PreparedResources( this );
}
LOG.info( "Collecting, building and initializing dependencies for {}.", id );
List<ResourceMetadata<? extends Resource>> mdList = new ArrayList<ResourceMetadata<? extends Resource>>();
ResourceMetadata<? extends Resource> md = resourceMetadata.get( id );
mdList.add( md );
graph.insertNode( md );
List<ResourceMetadata<? extends Resource>> dependencies = new ArrayList<ResourceMetadata<?>>();
WorkspaceUtils.collectDependencies( dependencies, graph.getNode( id ) );
mdList.addAll( dependencies );
ResourceGraph g = new ResourceGraph( mdList );
mdList = g.toSortedList();
for ( ResourceMetadata<? extends Resource> metadata : mdList ) {
if ( resources.get( metadata.getIdentifier() ) != null ) {
LOG.info( "Resource {} already available.", metadata.getIdentifier() );
continue;
}
ResourceBuilder<? extends Resource> builder = prepared.getBuilder( metadata.getIdentifier() );
LOG.info( "Building resource {}.", metadata.getIdentifier() );
try {
Resource res = builder.build();
if ( res == null ) {
states.setState( metadata.getIdentifier(), Error );
errors.registerError( metadata.getIdentifier(), "Unable to build resource." );
LOG.error( "Unable to build resource {}.", metadata.getIdentifier() );
throw new ResourceInitException( "Unable to build resource " + metadata.getIdentifier() + "." );
}
states.setState( metadata.getIdentifier(), Built );
LOG.info( "Initializing resource {}.", metadata.getIdentifier() );
res.init();
states.setState( metadata.getIdentifier(), Initialized );
resources.put( res.getMetadata().getIdentifier(), res );
} catch ( Exception ex ) {
states.setState( metadata.getIdentifier(), Error );
String msg = "Unable to build resource " + metadata.getIdentifier() + ": " + ex.getLocalizedMessage();
errors.registerError( metadata.getIdentifier(), msg );
LOG.error( msg );
LOG.trace( "Stack trace:", ex );
throw new ResourceInitException( "Unable to build resource " + metadata.getIdentifier() + ": "
+ ex.getLocalizedMessage(), ex );
}
}
return getResource( id.getProvider(), id.getId() );
}
@Override
public PreparedResources prepare() {
scan();
PreparedResources prepared = new PreparedResources( this );
LOG.info( "--------------------------------------------------------------------------------" );
LOG.info( "Preparing resources." );
LOG.info( "--------------------------------------------------------------------------------" );
outer: for ( ResourceMetadata<? extends Resource> md : resourceMetadata.values() ) {
ResourceState state = states.getState( md.getIdentifier() );
if ( state == null ) {
continue outer;
}
for ( ResourceIdentifier<? extends Resource> id : md.getDependencies() ) {
state = states.getState( id );
if ( state == null || state == Scanned ) {
continue outer;
}
}
LOG.info( "Preparing resource {}.", md.getIdentifier() );
try {
ResourceBuilder<? extends Resource> builder = md.prepare();
if ( builder == null ) {
LOG.error( "Could not prepare resource {}.", md.getIdentifier() );
if ( states.getState( md.getIdentifier() ) != Deactivated ) {
states.setState( md.getIdentifier(), Error );
}
continue;
}
graph.insertNode( md );
if ( states.getState( md.getIdentifier() ) != Deactivated ) {
states.setState( md.getIdentifier(), Prepared );
}
prepared.addBuilder( (ResourceIdentifier) md.getIdentifier(), builder );
} catch ( Exception e ) {
String msg = "Error preparing resource " + md.getIdentifier() + ": " + e.getLocalizedMessage();
errors.registerError( md.getIdentifier(), msg );
LOG.error( msg );
LOG.trace( "Stack trace:", e );
}
}
return prepared;
}
private void scan() {
LOG.info( "--------------------------------------------------------------------------------" );
LOG.info( "Scanning resources." );
LOG.info( "--------------------------------------------------------------------------------" );
for ( ResourceManager<? extends Resource> mgr : resourceManagers.values() ) {
mgr.find();
Collection<? extends ResourceMetadata<? extends Resource>> mds = mgr.getResourceMetadata();
for ( ResourceMetadata<? extends Resource> md : mds ) {
resourceMetadata.put( md.getIdentifier(), md );
if ( states.getState( md.getIdentifier() ) != Deactivated ) {
states.setState( md.getIdentifier(), Scanned );
}
}
}
}
@Override
public <T extends Resource> ResourceBuilder<T> prepare( ResourceIdentifier<T> id ) {
if ( states.getState( id ) == Deactivated ) {
return null;
}
LOG.info( "Preparing {}", id );
ResourceMetadata<T> md = (ResourceMetadata) resourceMetadata.get( id );
ResourceBuilder<T> builder = md.prepare();
if ( builder == null ) {
states.setState( id, Error );
} else {
graph.insertNode( md );
states.setState( id, Prepared );
}
return builder;
}
@Override
public <T extends Resource> List<ResourceIdentifier<T>> getResourcesOfType( Class<? extends ResourceProvider<T>> providerClass ) {
List<ResourceIdentifier<T>> list = new ArrayList<ResourceIdentifier<T>>();
for ( ResourceIdentifier<?> id : resources.keySet() ) {
if ( id.getProvider().equals( providerClass ) ) {
list.add( (ResourceIdentifier) id );
}
}
return list;
}
@Override
public ResourceGraph getDependencyGraph() {
return graph;
}
@Override
public <T extends Resource> void add( ResourceLocation<T> location ) {
LOG.info( "Scanning {}", location.getIdentifier() );
ResourceManager<T> mgr = (ResourceManager) resourceManagers.get( location.getIdentifier().getProvider() );
ResourceMetadata<T> md = mgr.add( location );
resourceMetadata.put( md.getIdentifier(), md );
}
@Override
public <T extends Resource> void destroy( ResourceIdentifier<T> id ) {
ResourceNode<T> node = graph.getNode( id );
if ( node == null ) {
return;
}
for ( ResourceNode<? extends Resource> n : node.getDependents() ) {
destroy( n.getMetadata().getIdentifier() );
}
T res = (T) resources.get( id );
if ( res != null ) {
LOG.info( "Shutting down {}.", id );
res.destroy();
}
states.setState( id, Scanned );
resources.remove( id );
errors.clear( id );
}
@Override
public ErrorHandler getErrorHandler() {
return errors;
}
@Override
public ResourceStates getStates() {
return states;
}
@Override
public <T extends Initializable> T getInitializable( Class<T> className ) {
return (T) initializables.get( className );
}
@Override
public LocationHandler getLocationHandler() {
return locationHandler;
}
}
| deegree-core/deegree-core-workspace/src/main/java/org/deegree/workspace/standard/DefaultWorkspace.java | //$HeadURL$
/*----------------------------------------------------------------------------
This file is part of deegree, http://deegree.org/
Copyright (C) 2001-2012 by:
- Department of Geography, University of Bonn -
and
- lat/lon GmbH -
and
- Occam Labs UG (haftungsbeschränkt) -
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 2.1 of the License, or (at your option)
any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact information:
lat/lon GmbH
Aennchenstr. 19, 53177 Bonn
Germany
http://lat-lon.de/
Department of Geography, University of Bonn
Prof. Dr. Klaus Greve
Postfach 1147, 53001 Bonn
Germany
http://www.geographie.uni-bonn.de/deegree/
Occam Labs UG (haftungsbeschränkt)
Godesberger Allee 139, 53175 Bonn
Germany
e-mail: [email protected]
----------------------------------------------------------------------------*/
package org.deegree.workspace.standard;
import static org.deegree.workspace.ResourceStates.ResourceState.Built;
import static org.deegree.workspace.ResourceStates.ResourceState.Deactivated;
import static org.deegree.workspace.ResourceStates.ResourceState.Error;
import static org.deegree.workspace.ResourceStates.ResourceState.Initialized;
import static org.deegree.workspace.ResourceStates.ResourceState.Prepared;
import static org.deegree.workspace.ResourceStates.ResourceState.Scanned;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import org.deegree.workspace.Destroyable;
import org.deegree.workspace.ErrorHandler;
import org.deegree.workspace.Initializable;
import org.deegree.workspace.LocationHandler;
import org.deegree.workspace.PreparedResources;
import org.deegree.workspace.Resource;
import org.deegree.workspace.ResourceBuilder;
import org.deegree.workspace.ResourceIdentifier;
import org.deegree.workspace.ResourceInitException;
import org.deegree.workspace.ResourceLocation;
import org.deegree.workspace.ResourceManager;
import org.deegree.workspace.ResourceMetadata;
import org.deegree.workspace.ResourceProvider;
import org.deegree.workspace.ResourceStates;
import org.deegree.workspace.ResourceStates.ResourceState;
import org.deegree.workspace.Workspace;
import org.deegree.workspace.WorkspaceUtils;
import org.deegree.workspace.graph.ResourceGraph;
import org.deegree.workspace.graph.ResourceNode;
import org.slf4j.Logger;
/**
* Directory based workspace implementation.
*
* @author <a href="mailto:[email protected]">Andreas Schmitz</a>
*
* @since 3.4
*/
public class DefaultWorkspace implements Workspace {

    private static final Logger LOG = getLogger( DefaultWorkspace.class );

    // workspace root directory on disk
    private File directory;

    // class loader that additionally sees jars/classes below <directory>/modules; reset to null on destroy()
    private ClassLoader moduleClassLoader;

    // module descriptors extracted from the loaded deegree module jars
    private List<ModuleInfo> wsModules;

    // resource managers keyed by the provider class they are responsible for, discovered via ServiceLoader
    private Map<Class<? extends ResourceProvider<? extends Resource>>, ResourceManager<? extends Resource>> resourceManagers;

    // metadata of all scanned resources, keyed by identifier
    private Map<ResourceIdentifier<? extends Resource>, ResourceMetadata<? extends Resource>> resourceMetadata;

    // successfully built and initialized resource instances, keyed by identifier
    private Map<ResourceIdentifier<? extends Resource>, Resource> resources;

    // Initializable hooks discovered via ServiceLoader, keyed by their concrete class
    private Map<Class<? extends Initializable>, Initializable> initializables = new HashMap<Class<? extends Initializable>, Initializable>();

    // dependency graph between resources, used to determine (de)initialization order
    private ResourceGraph graph;

    // collects per-resource error messages produced during prepare/build/init
    private ErrorHandler errors = new ErrorHandler();

    private LocationHandler locationHandler;

    // lifecycle state (Scanned/Prepared/Built/Initialized/Error/Deactivated) per resource
    private ResourceStates states;

    // guards against running startup() more than once
    private boolean startedUp = false;

    /**
     * Creates a workspace based on the given directory.
     *
     * @param directory
     *            workspace root directory, never <code>null</code>
     */
    public DefaultWorkspace( File directory ) {
        this.directory = directory;
    }

    /**
     * Starts up the workspace (if needed), scans and prepares all resources, then builds and initializes them in
     * dependency order. Individual failures are recorded via the {@link ErrorHandler} and the <code>Error</code>
     * state instead of aborting the whole run.
     */
    @Override
    public void initAll() {
        startup();
        errors.clear();
        scan();
        PreparedResources prepared = prepare();
        LOG.info( "--------------------------------------------------------------------------------" );
        LOG.info( "Building and initializing resources." );
        LOG.info( "--------------------------------------------------------------------------------" );
        // probably better to implement an insert bulk operation on the graph
        for ( ResourceMetadata<? extends Resource> md : prepared.getMetadata() ) {
            graph.insertNode( md );
        }
        // toSortedList() yields dependencies before their dependents
        outer: for ( ResourceMetadata<? extends Resource> md : graph.toSortedList() ) {
            if ( states.getState( md.getIdentifier() ) == Deactivated ) {
                continue;
            }
            // skip (and mark as Error) resources whose dependencies failed to initialize
            for ( ResourceIdentifier<? extends Resource> dep : md.getDependencies() ) {
                if ( states.getState( dep ) != Initialized ) {
                    states.setState( md.getIdentifier(), Error );
                    LOG.error( "Dependency {} for resource {} failed to initialize.", dep, md );
                    continue outer;
                }
            }
            LOG.info( "Building resource {}.", md.getIdentifier() );
            try {
                Resource res = prepared.getBuilder( md.getIdentifier() ).build();
                if ( res == null ) {
                    errors.registerError( md.getIdentifier(), "Unable to prepare." );
                    states.setState( md.getIdentifier(), Error );
                    LOG.error( "Unable to build resource {}.", md.getIdentifier() );
                    continue;
                }
                states.setState( md.getIdentifier(), Built );
                LOG.info( "Initializing resource {}.", md.getIdentifier() );
                res.init();
                states.setState( md.getIdentifier(), Initialized );
                resources.put( res.getMetadata().getIdentifier(), res );
            } catch ( Exception ex ) {
                states.setState( md.getIdentifier(), Error );
                String msg = "Unable to build resource " + md.getIdentifier() + ": " + ex.getLocalizedMessage();
                errors.registerError( md.getIdentifier(), msg );
                LOG.error( msg );
                LOG.trace( "Stack trace:", ex );
            }
        }
    }

    /**
     * Destroys all resources in reverse dependency order, shuts down the resource managers and all
     * {@link Destroyable} hooks, and clears the workspace state so that {@link #startup()} can run again.
     */
    @Override
    public void destroy() {
        // reverse of the sorted list destroys dependents before their dependencies
        List<ResourceMetadata<? extends Resource>> list = graph.toSortedList();
        Collections.reverse( list );
        for ( ResourceMetadata<? extends Resource> md : list ) {
            Resource res = resources.get( md.getIdentifier() );
            try {
                if ( res != null ) {
                    LOG.info( "Shutting down {}.", md.getIdentifier() );
                    res.destroy();
                }
            } catch ( Exception e ) {
                LOG.warn( "Unable to destroy resource {}: {}", md.getIdentifier(), e.getLocalizedMessage() );
                LOG.trace( "Stack trace:", e );
            }
        }
        for ( ResourceManager<? extends Resource> mgr : resourceManagers.values() ) {
            mgr.shutdown();
        }
        Iterator<Destroyable> it = ServiceLoader.load( Destroyable.class, moduleClassLoader ).iterator();
        while ( it.hasNext() ) {
            Destroyable init = it.next();
            try {
                init.destroy( this );
            } catch ( Exception e ) {
                LOG.error( "Could not destroy {}: {}", init.getClass().getSimpleName(), e.getLocalizedMessage() );
                LOG.trace( "Stack trace:", e );
            }
        }
        // release all state; startedUp = false allows a subsequent startup()
        moduleClassLoader = null;
        resourceMetadata = null;
        resources = null;
        resourceManagers = null;
        wsModules = null;
        initializables.clear();
        states = null;
        locationHandler = null;
        startedUp = false;
        errors.clear();
    }

    /**
     * Sets up {@link #moduleClassLoader}: starts from the context class loader and, if
     * <code>&lt;directory&gt;/modules</code> exists, adds <code>modules/classes/</code> plus all
     * <code>.jar</code> files found directly in <code>modules/</code>. deegree module jars are additionally
     * recorded in {@link #wsModules}.
     */
    private void initClassloader() {
        // setup classloader
        File modules = new File( directory, "modules" );
        File classes = new File( modules, "classes/" );
        moduleClassLoader = Thread.currentThread().getContextClassLoader();
        if ( modules.exists() ) {
            File[] fs = modules.listFiles();
            if ( fs != null && fs.length > 0 ) {
                LOG.info( "--------------------------------------------------------------------------------" );
                LOG.info( "deegree modules (additional)" );
                LOG.info( "--------------------------------------------------------------------------------" );
                List<URL> urls = new ArrayList<URL>( fs.length );
                if ( classes.isDirectory() ) {
                    LOG.info( "Added modules/classes/." );
                    try {
                        urls.add( classes.toURI().toURL() );
                    } catch ( MalformedURLException e ) {
                        LOG.warn( "Could not add modules/classes/ to classpath." );
                    }
                }
                for ( int i = 0; i < fs.length; ++i ) {
                    if ( fs[i].isFile() ) {
                        try {
                            URL url = fs[i].toURI().toURL();
                            // fix: only files ending with '.jar' are loaded as workspace modules;
                            // previously every regular file was added to the module classpath
                            if ( url.getFile().endsWith( ".jar" ) ) {
                                urls.add( url );
                                ModuleInfo moduleInfo = ModuleInfo.extractModuleInfo( url );
                                if ( moduleInfo != null ) {
                                    LOG.info( " - " + moduleInfo );
                                    wsModules.add( moduleInfo );
                                } else {
                                    LOG.info( " - " + fs[i] + " (non-deegree)" );
                                }
                            }
                        } catch ( Exception e ) {
                            LOG.warn( "Module {} could not be loaded: {}", fs[i].getName(), e.getLocalizedMessage() );
                            LOG.trace( "Stack trace:", e );
                        }
                    }
                }
                moduleClassLoader = new URLClassLoader( urls.toArray( new URL[urls.size()] ), moduleClassLoader );
            } else {
                LOG.info( "Not loading additional modules." );
            }
        } else {
            LOG.info( "Not loading additional modules." );
        }
    }

    @Override
    public ClassLoader getModuleClassLoader() {
        return moduleClassLoader;
    }

    @Override
    public <T extends Resource> ResourceMetadata<T> getResourceMetadata( Class<? extends ResourceProvider<T>> providerClass,
                                                                         String id ) {
        return (ResourceMetadata<T>) resourceMetadata.get( new DefaultResourceIdentifier<T>( providerClass, id ) );
    }

    @Override
    public <T extends Resource> T getResource( Class<? extends ResourceProvider<T>> providerClass, String id ) {
        return (T) resources.get( new DefaultResourceIdentifier( providerClass, id ) );
    }

    /**
     * @return the directory this workspace is based on, never <code>null</code>
     */
    public File getLocation() {
        return directory;
    }

    @Override
    public <T extends ResourceManager<? extends Resource>> T getResourceManager( Class<T> managerClass ) {
        // linear scan over the registered managers; returns null if no manager of that class exists
        for ( ResourceManager<?> mgr : resourceManagers.values() ) {
            if ( mgr.getClass().equals( managerClass ) ) {
                return (T) mgr;
            }
        }
        return null;
    }

    @Override
    public List<ResourceManager<? extends Resource>> getResourceManagers() {
        return new ArrayList<ResourceManager<?>>( resourceManagers.values() );
    }

    /**
     * Initializes the workspace infrastructure exactly once: resets all internal maps, builds the module class
     * loader, runs all {@link Initializable} hooks and starts up all {@link ResourceManager}s found via
     * {@link ServiceLoader}. Subsequent calls are no-ops until {@link #destroy()} is called.
     */
    @Override
    public void startup() {
        if ( startedUp ) {
            return;
        }
        wsModules = new ArrayList<ModuleInfo>();
        resourceManagers = new HashMap<Class<? extends ResourceProvider<? extends Resource>>, ResourceManager<? extends Resource>>();
        resourceMetadata = new HashMap<ResourceIdentifier<? extends Resource>, ResourceMetadata<? extends Resource>>();
        resources = new HashMap<ResourceIdentifier<? extends Resource>, Resource>();
        initializables.clear();
        graph = new ResourceGraph();
        states = new ResourceStates();
        locationHandler = new DefaultLocationHandler( directory, resourceManagers, states );
        errors.clear();
        initClassloader();
        Iterator<Initializable> it = ServiceLoader.load( Initializable.class, moduleClassLoader ).iterator();
        while ( it.hasNext() ) {
            Initializable init = it.next();
            try {
                init.init( this );
                initializables.put( init.getClass(), init );
            } catch ( Exception e ) {
                LOG.error( "Could not initialize {}: {}", init.getClass().getSimpleName(), e.getLocalizedMessage() );
                LOG.trace( "Stack trace:", e );
            }
        }
        // setup managers
        Iterator<ResourceManager> iter = ServiceLoader.load( ResourceManager.class, moduleClassLoader ).iterator();
        while ( iter.hasNext() ) {
            ResourceManager<?> mgr = iter.next();
            LOG.info( "Found resource manager {}.", mgr.getClass().getSimpleName() );
            resourceManagers.put( mgr.getMetadata().getProviderClass(), mgr );
            LOG.info( "Starting up resource manager {}.", mgr.getClass().getSimpleName() );
            // try/catch? a failing manager startup currently aborts workspace startup
            mgr.startup( this );
        }
        startedUp = true;
    }

    /**
     * Builds and initializes a single resource together with all of its (transitive) dependencies, in dependency
     * order. Already-initialized resources are skipped.
     *
     * @param id
     *            identifier of the resource to initialize
     * @param prepared
     *            prepared builders to use, may be <code>null</code> (a new {@link PreparedResources} is created)
     * @return the initialized resource, or <code>null</code> if the resource is deactivated
     * @throws ResourceInitException
     *             if the resource or one of its dependencies cannot be built or initialized
     */
    @Override
    public <T extends Resource> T init( ResourceIdentifier<T> id, PreparedResources prepared ) {
        if ( states.getState( id ) == Deactivated ) {
            return null;
        }
        if ( prepared == null ) {
            prepared = new PreparedResources( this );
        }
        LOG.info( "Collecting, building and initializing dependencies for {}.", id );
        List<ResourceMetadata<? extends Resource>> mdList = new ArrayList<ResourceMetadata<? extends Resource>>();
        ResourceMetadata<? extends Resource> md = resourceMetadata.get( id );
        mdList.add( md );
        graph.insertNode( md );
        // collect transitive dependencies and sort them into initialization order via a temporary graph
        List<ResourceMetadata<? extends Resource>> dependencies = new ArrayList<ResourceMetadata<?>>();
        WorkspaceUtils.collectDependencies( dependencies, graph.getNode( id ) );
        mdList.addAll( dependencies );
        ResourceGraph g = new ResourceGraph( mdList );
        mdList = g.toSortedList();
        for ( ResourceMetadata<? extends Resource> metadata : mdList ) {
            if ( resources.get( metadata.getIdentifier() ) != null ) {
                LOG.info( "Resource {} already available.", metadata.getIdentifier() );
                continue;
            }
            ResourceBuilder<? extends Resource> builder = prepared.getBuilder( metadata.getIdentifier() );
            LOG.info( "Building resource {}.", metadata.getIdentifier() );
            try {
                Resource res = builder.build();
                if ( res == null ) {
                    states.setState( metadata.getIdentifier(), Error );
                    errors.registerError( metadata.getIdentifier(), "Unable to build resource." );
                    LOG.error( "Unable to build resource {}.", metadata.getIdentifier() );
                    throw new ResourceInitException( "Unable to build resource " + metadata.getIdentifier() + "." );
                }
                states.setState( metadata.getIdentifier(), Built );
                LOG.info( "Initializing resource {}.", metadata.getIdentifier() );
                res.init();
                states.setState( metadata.getIdentifier(), Initialized );
                resources.put( res.getMetadata().getIdentifier(), res );
            } catch ( Exception ex ) {
                states.setState( metadata.getIdentifier(), Error );
                String msg = "Unable to build resource " + metadata.getIdentifier() + ": " + ex.getLocalizedMessage();
                errors.registerError( metadata.getIdentifier(), msg );
                LOG.error( msg );
                LOG.trace( "Stack trace:", ex );
                throw new ResourceInitException( "Unable to build resource " + metadata.getIdentifier() + ": "
                                                 + ex.getLocalizedMessage(), ex );
            }
        }
        return getResource( id.getProvider(), id.getId() );
    }

    /**
     * Scans all resources and prepares a builder for each resource whose dependencies have at least been prepared.
     * Preparation failures are recorded in the error handler and via the <code>Error</code> state.
     *
     * @return the prepared builders, never <code>null</code>
     */
    @Override
    public PreparedResources prepare() {
        scan();
        PreparedResources prepared = new PreparedResources( this );
        LOG.info( "--------------------------------------------------------------------------------" );
        LOG.info( "Preparing resources." );
        LOG.info( "--------------------------------------------------------------------------------" );
        outer: for ( ResourceMetadata<? extends Resource> md : resourceMetadata.values() ) {
            ResourceState state = states.getState( md.getIdentifier() );
            if ( state == null ) {
                continue outer;
            }
            // skip resources whose dependencies are unknown or not yet past the Scanned state
            for ( ResourceIdentifier<? extends Resource> id : md.getDependencies() ) {
                state = states.getState( id );
                if ( state == null || state == Scanned ) {
                    continue outer;
                }
            }
            LOG.info( "Preparing resource {}.", md.getIdentifier() );
            try {
                ResourceBuilder<? extends Resource> builder = md.prepare();
                if ( builder == null ) {
                    LOG.error( "Could not prepare resource {}.", md.getIdentifier() );
                    if ( states.getState( md.getIdentifier() ) != Deactivated ) {
                        states.setState( md.getIdentifier(), Error );
                    }
                    continue;
                }
                graph.insertNode( md );
                if ( states.getState( md.getIdentifier() ) != Deactivated ) {
                    states.setState( md.getIdentifier(), Prepared );
                }
                prepared.addBuilder( (ResourceIdentifier) md.getIdentifier(), builder );
            } catch ( Exception e ) {
                String msg = "Error preparing resource " + md.getIdentifier() + ": " + e.getLocalizedMessage();
                errors.registerError( md.getIdentifier(), msg );
                LOG.error( msg );
                LOG.trace( "Stack trace:", e );
            }
        }
        return prepared;
    }

    /**
     * Lets every resource manager discover its resources and records the resulting metadata. Resources that are not
     * deactivated are moved to the <code>Scanned</code> state.
     */
    private void scan() {
        LOG.info( "--------------------------------------------------------------------------------" );
        LOG.info( "Scanning resources." );
        LOG.info( "--------------------------------------------------------------------------------" );
        for ( ResourceManager<? extends Resource> mgr : resourceManagers.values() ) {
            mgr.find();
            Collection<? extends ResourceMetadata<? extends Resource>> mds = mgr.getResourceMetadata();
            for ( ResourceMetadata<? extends Resource> md : mds ) {
                resourceMetadata.put( md.getIdentifier(), md );
                if ( states.getState( md.getIdentifier() ) != Deactivated ) {
                    states.setState( md.getIdentifier(), Scanned );
                }
            }
        }
    }

    /**
     * Prepares a single resource and inserts it into the dependency graph.
     *
     * @param id
     *            identifier of the resource to prepare
     * @return the builder, or <code>null</code> if the resource is deactivated or could not be prepared
     */
    @Override
    public <T extends Resource> ResourceBuilder<T> prepare( ResourceIdentifier<T> id ) {
        if ( states.getState( id ) == Deactivated ) {
            return null;
        }
        LOG.info( "Preparing {}", id );
        ResourceMetadata<T> md = (ResourceMetadata) resourceMetadata.get( id );
        ResourceBuilder<T> builder = md.prepare();
        if ( builder == null ) {
            states.setState( id, Error );
        } else {
            graph.insertNode( md );
            states.setState( id, Prepared );
        }
        return builder;
    }

    @Override
    public <T extends Resource> List<ResourceIdentifier<T>> getResourcesOfType( Class<? extends ResourceProvider<T>> providerClass ) {
        // note: only identifiers of successfully initialized resources are considered
        List<ResourceIdentifier<T>> list = new ArrayList<ResourceIdentifier<T>>();
        for ( ResourceIdentifier<?> id : resources.keySet() ) {
            if ( id.getProvider().equals( providerClass ) ) {
                list.add( (ResourceIdentifier) id );
            }
        }
        return list;
    }

    @Override
    public ResourceGraph getDependencyGraph() {
        return graph;
    }

    /**
     * Registers a new resource location with the responsible resource manager and records its metadata.
     *
     * @param location
     *            resource location to add
     */
    @Override
    public <T extends Resource> void add( ResourceLocation<T> location ) {
        LOG.info( "Scanning {}", location.getIdentifier() );
        ResourceManager<T> mgr = (ResourceManager) resourceManagers.get( location.getIdentifier().getProvider() );
        ResourceMetadata<T> md = mgr.add( location );
        resourceMetadata.put( md.getIdentifier(), md );
    }

    /**
     * Destroys a single resource after recursively destroying all resources that depend on it. The resource's
     * state is reset to <code>Scanned</code> and its errors are cleared.
     *
     * @param id
     *            identifier of the resource to destroy
     */
    @Override
    public <T extends Resource> void destroy( ResourceIdentifier<T> id ) {
        ResourceNode<T> node = graph.getNode( id );
        if ( node == null ) {
            return;
        }
        // dependents must go down before the resource they depend on
        for ( ResourceNode<? extends Resource> n : node.getDependents() ) {
            destroy( n.getMetadata().getIdentifier() );
        }
        T res = (T) resources.get( id );
        if ( res != null ) {
            LOG.info( "Shutting down {}.", id );
            res.destroy();
        }
        states.setState( id, Scanned );
        resources.remove( id );
        errors.clear( id );
    }

    @Override
    public ErrorHandler getErrorHandler() {
        return errors;
    }

    @Override
    public ResourceStates getStates() {
        return states;
    }

    @Override
    public <T extends Initializable> T getInitializable( Class<T> className ) {
        return (T) initializables.get( className );
    }

    @Override
    public LocationHandler getLocationHandler() {
        return locationHandler;
    }
}
| only load files ending with '.jar' as workspace module | deegree-core/deegree-core-workspace/src/main/java/org/deegree/workspace/standard/DefaultWorkspace.java | only load files ending with '.jar' as workspace module |
|
Java | lgpl-2.1 | aba06bc6fa1bfaf5678a7c963b7f4327ef7e8e6e | 0 | CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.component;
import java.util.List;
import org.apache.log4j.Logger;
import org.jetel.component.TransformLanguageDetector.TransformLanguage;
import org.jetel.ctl.CTLAbstractTransform;
import org.jetel.ctl.ErrorMessage;
import org.jetel.ctl.ITLCompiler;
import org.jetel.ctl.ITLCompilerFactory;
import org.jetel.ctl.MetadataErrorDetail;
import org.jetel.ctl.TLCompilerFactory;
import org.jetel.ctl.TransformLangExecutor;
import org.jetel.data.Defaults;
import org.jetel.exception.ConfigurationProblem;
import org.jetel.exception.ConfigurationStatus;
import org.jetel.exception.ConfigurationStatus.Priority;
import org.jetel.exception.ConfigurationStatus.Severity;
import org.jetel.exception.JetelRuntimeException;
import org.jetel.exception.LoadClassException;
import org.jetel.exception.MissingFieldException;
import org.jetel.graph.Node;
import org.jetel.graph.TransformationGraph;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.CodeParser;
import org.jetel.util.compile.ClassLoaderUtils;
import org.jetel.util.compile.DynamicJavaClass;
import org.jetel.util.file.FileUtils;
import org.jetel.util.property.PropertyRefResolver;
import org.jetel.util.property.RefResFlag;
import org.jetel.util.string.StringUtils;
/**
* This class is used to instantiate a class based on source code.
* Supported languages are CTL1, CTL2, java and pre-processed java.
* Instance of this factory can be created only by two factory methods<br>
* <li>
* {@link #createTransformFactory(TransformDescriptor)} - creates factory based on given TransformDescriptor
* (provides necessary information about instantiated interface)
* </li>
* <li>
* {@link #createTransformFactory(Class)} - creates factory for the given class - class needs to have
* non-parametric constructor
* </li>
* Other necessary information about instantiated class (source code, inMetadata and outMetadata for CTL transformation, ...)
* are provided by setters. Instance is created by {@link #createTransform()} method.
*
* @author Kokon ([email protected])
* (c) Javlin, a.s. (www.cloveretl.com)
*
* @created 20.8.2012
*/
public class TransformFactory<T> {
/** Descriptor of instantiated transformation/class */
private TransformDescriptor<T> transformDescriptor;
/** Source code of transformation */
private String transform;
/** URL to source code of transformation */
private String transformUrl;
/** Class name of instantiated transformation */
private String transformClass;
/** Charset of source code file defined in transformUrl */
private String charset;
/** Component for which the transformation is instantiated */
private Node component;
/** Optional: Attribute of the component for which the transformation is instantiated */
private String attributeName;
/** Input metadata of transformation, used for CTL compilation */
private DataRecordMetadata[] inMetadata;
/** Output metadata of transformation, used for CTL compilation */
private DataRecordMetadata[] outMetadata;
/** Customizable compiler factory */
private ITLCompilerFactory compilerFactory = new DefaultCompilerFactory();
private TransformFactory(TransformDescriptor<T> transformDescriptor) {
this.transformDescriptor = transformDescriptor;
}
/**
* @param transformDescriptor
* @return {@link TransformFactory} for the given {@link TransformDescriptor}
*/
public static <T> TransformFactory<T> createTransformFactory(TransformDescriptor<T> transformDescriptor) {
return new TransformFactory<T>(transformDescriptor);
}
/**
* {@link TransformFactory} returned by this method is limited and only java code implementation is supported.
* @param transformClass
* @return {@link TransformFactory} for the given class
*/
public static <T> TransformFactory<T> createTransformFactory(final Class<T> transformClass) {
return new TransformFactory<T>(new TransformDescriptor<T>() {
@Override
public Class<T> getTransformClass() {
return transformClass;
}
@Override
public T createCTL1Transform(String transformCode, Logger logger) {
throw new UnsupportedOperationException("CTL1 is not supported in '" + transformClass.getName() + "'.");
}
@Override
public Class<? extends CTLAbstractTransform> getCompiledCTL2TransformClass() {
throw new UnsupportedOperationException("CTL2 is not supported in '" + transformClass.getName() + "'.");
}
@Override
public T createInterpretedCTL2Transform(TransformLangExecutor executor, Logger logger) {
throw new UnsupportedOperationException("CTL2 is not supported in '" + transformClass.getName() + "'.");
}
});
}
private void validateSettings() {
//without these parameters we cannot create transformation
if (StringUtils.isEmpty(transform)
&& StringUtils.isEmpty(transformClass)
&& StringUtils.isEmpty(transformUrl)) {
throw new JetelRuntimeException("Transformation is not defined.");
}
if (component == null) {
throw new JetelRuntimeException("Component is not defined.");
}
}
/**
* Configuration check, mainly invoked from {@link Node#checkConfig(ConfigurationStatus)}.
* Only CTL1 and CTL2 code is compiled to ensure correctness of all settings.
* Java code is not validated.
* @param status
* @return
*/
public ConfigurationStatus checkConfig(ConfigurationStatus status) {
try {
validateSettings();
} catch (Exception e) {
status.add(new ConfigurationProblem(e, Severity.ERROR, component, Priority.NORMAL, null));
}
String checkTransform = null;
if (StringUtils.isEmpty(transformClass)) {
if (!StringUtils.isEmpty(transform)) {
checkTransform = transform;
} else if (!StringUtils.isEmpty(transformUrl)) {
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
checkTransform = FileUtils.getStringFromURL(component.getGraph().getRuntimeContext().getContextURL(), transformUrl, charset);
}
// only the transform and transformURL parameters are checked, transformClass is ignored
if (checkTransform != null) {
TransformLanguage transformLanguage = TransformLanguageDetector.guessLanguage(checkTransform);
if (transformLanguage == TransformLanguage.CTL1
|| transformLanguage == TransformLanguage.CTL2) {
// only CTL is checked
try {
createTransform();
} catch (JetelRuntimeException e) {
// report CTL error as a warning
status.add(new ConfigurationProblem(e, Severity.WARNING, component, Priority.NORMAL, null));
}
} else if (transformLanguage == null) {
String messagePrefix = attributeName != null ? attributeName + ": can't" : "Can't";
status.add(new ConfigurationProblem(messagePrefix + " determine transformation language",
Severity.WARNING, component, Priority.NORMAL, attributeName));
}
}
}
return status;
}
/**
* Core method of the factory.
* @return instance of transformation class
* @throws MissingFieldException if the CTL transformation tried to access non-existing field
* @throws LoadClassException transformation cannot be instantiated
*/
public T createTransform() {
validateSettings();
T transformation = null;
if (!StringUtils.isEmpty(transform)) {
//transform has highest priority
transformation = createTransformFromCode(transform);
} else if (!StringUtils.isEmpty(transformUrl)) {
//load transformation code from an URL
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
String transformCode = FileUtils.getStringFromURL(component.getGraph().getRuntimeContext().getContextURL(), transformUrl, charset);
PropertyRefResolver refResolver = component.getPropertyRefResolver();
transformCode = refResolver.resolveRef(transformCode, RefResFlag.SPEC_CHARACTERS_OFF);
transformation = createTransformFromCode(transformCode);
} else if (!StringUtils.isEmpty(transformClass)) {
transformation = ClassLoaderUtils.loadClassInstance(transformDescriptor.getTransformClass(), transformClass, component);
} else {
throw new JetelRuntimeException("Transformation is not defined.");
}
if (transformation instanceof Transform) {
((Transform) transformation).setNode(component);
}
return transformation;
}
/**
* Creates transform based on the given source code.
*/
private T createTransformFromCode(String transformCode) {
T transformation = null;
TransformLanguage language = TransformLanguageDetector.guessLanguage(transformCode);
if (language == null) {
throw new LoadClassException("Can't determine transformation language.");
}
switch (language) {
case JAVA:
transformCode = preprocessJavaCode(transformCode, inMetadata, outMetadata, component, false);
transformation = DynamicJavaClass.instantiate(transformCode, transformDescriptor.getTransformClass(), component);
break;
case JAVA_PREPROCESS:
transformCode = preprocessJavaCode(transformCode, inMetadata, outMetadata, component, true);
transformation = DynamicJavaClass.instantiate(transformCode, transformDescriptor.getTransformClass(), component);
break;
case CTL1:
transformation = transformDescriptor.createCTL1Transform(transformCode, component.getLog());
break;
case CTL2:
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
final ITLCompiler compiler =
compilerFactory.createCompiler(component.getGraph(), inMetadata, outMetadata, charset);
String id = component.getId();
if (!StringUtils.isEmpty(attributeName)) {
id += "_" + attributeName;
}
List<ErrorMessage> msgs = compiler.compile(transformCode, transformDescriptor.getCompiledCTL2TransformClass(), id);
if (compiler.errorCount() > 0) {
String report = ErrorMessage.listToString(msgs, null); // message does not need to be logged here, will be thrown up as part of an exception
String message = "CTL code compilation finished with " + compiler.errorCount() + " errors." + report;
for (ErrorMessage msg: msgs) {
if (msg.getDetail() instanceof MetadataErrorDetail) {
MetadataErrorDetail detail = (MetadataErrorDetail) msg.getDetail();
throw new MissingFieldException(message, detail.isOutput(), detail.getRecordId(), detail.getFieldName());
}
}
throw new LoadClassException(message);
}
Object ret = compiler.getCompiledCode();
if (ret instanceof TransformLangExecutor) {
// setup interpreted runtime
transformation = transformDescriptor.createInterpretedCTL2Transform((TransformLangExecutor) ret, component.getLog());
} else if (transformDescriptor.getTransformClass().isInstance(ret)) {
transformation = transformDescriptor.getTransformClass().cast(ret);
} else {
// this should never happen as compiler always generates correct interface
throw new LoadClassException("Invalid type of record transformation");
}
break;
default:
throw new LoadClassException("Can't determine transformation code.");
}
return transformation;
}
/**
* Java code is pre-processed by {@link CodeParser} before compilation.
*/
private static String preprocessJavaCode(String transformCode, DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata, Node node, boolean addTransformCodeStub) {
// creating dynamicTransformCode from internal transformation format
CodeParser codeParser = new CodeParser(inMetadata, outMetadata);
if (!addTransformCodeStub)
// we must turn this off, because we don't have control about the rest of Java source
// and thus we cannot create the declaration of symbolic constants
codeParser.setUseSymbolicNames(false);
codeParser.setSourceCode(transformCode);
codeParser.parse();
if (addTransformCodeStub) {
codeParser.addTransformCodeStub("Transform" + node.getId());
}
return codeParser.getSourceCode();
}
/**
* Sets transformation code.
*/
public void setTransform(String transform) {
this.transform = transform;
}
/**
* Sets transformation class name.
*/
public void setTransformClass(String transformClass) {
this.transformClass = transformClass;
}
/**
* Sets URL where the transformation code can be loaded.
*/
public void setTransformUrl(String transformUrl) {
this.transformUrl = transformUrl;
}
/**
* Sets charset of external definition of transformation code defined in transformUrl
* or charset that should be used for import
*/
public void setCharset(String charset) {
this.charset = charset;
}
/**
* Sets component which requests the transformation instantiation.
*/
public void setComponent(Node component) {
this.component = component;
}
/**
* Sets the name of the component attribute which requests the transformation instantiation.
*/
public void setAttributeName(String attributeName) {
this.attributeName = attributeName;
}
/**
* Sets input metadata of transformation necessary for CTL compilation.
*/
public void setInMetadata(DataRecordMetadata... inMetadata) {
this.inMetadata = inMetadata;
}
/**
* Sets input metadata of transformation necessary for CTL compilation.
*/
public void setInMetadata(List<DataRecordMetadata> inMetadata) {
this.inMetadata = inMetadata.toArray(new DataRecordMetadata[inMetadata.size()]);
}
/**
* Sets output metadata of transformation necessary for CTL compilation.
*/
public void setOutMetadata(DataRecordMetadata... outMetadata) {
this.outMetadata = outMetadata;
}
/**
* Sets output metadata of transformation necessary for CTL compilation.
*/
public void setOutMetadata(List<DataRecordMetadata> outMetadata) {
this.outMetadata = outMetadata.toArray(new DataRecordMetadata[outMetadata.size()]);
}
/**
* @return true if the transformation code or class name is specified
*/
public boolean isTransformSpecified() {
return !StringUtils.isEmpty(transform) || !StringUtils.isEmpty(transformClass) || !StringUtils.isEmpty(transformUrl);
}
public void setCompilerFactory(ITLCompilerFactory compilerFactory) {
this.compilerFactory = compilerFactory;
}
/**
* Default {@link ITLCompilerFactory} implementation,
* selects the compiler with maximum priority from registered compilers.
*
* @author krivanekm ([email protected])
* (c) Javlin, a.s. (www.cloveretl.com)
*
* @created 14. 1. 2015
*/
public static class DefaultCompilerFactory implements ITLCompilerFactory {
@Override
public ITLCompiler createCompiler(TransformationGraph graph, DataRecordMetadata[] inMetadata,
DataRecordMetadata[] outMetadata, String encoding) {
return TLCompilerFactory.createCompiler(graph, inMetadata, outMetadata, encoding);
}
}
}
| cloveretl.engine/src/org/jetel/component/TransformFactory.java | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.component;
import java.util.List;
import org.apache.log4j.Logger;
import org.jetel.component.TransformLanguageDetector.TransformLanguage;
import org.jetel.ctl.CTLAbstractTransform;
import org.jetel.ctl.ErrorMessage;
import org.jetel.ctl.ITLCompiler;
import org.jetel.ctl.ITLCompilerFactory;
import org.jetel.ctl.MetadataErrorDetail;
import org.jetel.ctl.TLCompilerFactory;
import org.jetel.ctl.TransformLangExecutor;
import org.jetel.data.Defaults;
import org.jetel.exception.ConfigurationProblem;
import org.jetel.exception.ConfigurationStatus;
import org.jetel.exception.ConfigurationStatus.Priority;
import org.jetel.exception.ConfigurationStatus.Severity;
import org.jetel.exception.JetelRuntimeException;
import org.jetel.exception.LoadClassException;
import org.jetel.exception.MissingFieldException;
import org.jetel.graph.Node;
import org.jetel.graph.TransformationGraph;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.CodeParser;
import org.jetel.util.compile.ClassLoaderUtils;
import org.jetel.util.compile.DynamicJavaClass;
import org.jetel.util.file.FileUtils;
import org.jetel.util.property.PropertyRefResolver;
import org.jetel.util.property.RefResFlag;
import org.jetel.util.string.StringUtils;
/**
* This class is used to instantiate a class based on source code.
* Supported languages are CTL1, CTL2, java and pre-processed java.
* Instance of this factory can be created only by two factory methods<br>
* <li>
* {@link #createTransformFactory(TransformDescriptor)} - creates factory based on given TransformDescriptor
* (provides necessary information about instantiated interface)
* </li>
* <li>
* {@link #createTransformFactory(Class)} - creates factory for the given class - class needs to have
* non-parametric constructor
* </li>
* Other necessary information about instantiated class (source code, inMetadata and outMetadata for CTL transformation, ...)
* are provided by setters. Instance is created by {@link #createTransform()} method.
*
* @author Kokon ([email protected])
* (c) Javlin, a.s. (www.cloveretl.com)
*
* @created 20.8.2012
*/
public class TransformFactory<T> {
/** Descriptor of instantiated transformation/class */
private TransformDescriptor<T> transformDescriptor;
/** Source code of transformation */
private String transform;
/** URL to source code of transformation */
private String transformUrl;
/** Class name of instantiated transformation */
private String transformClass;
/** Charset of source code file defined in transformUrl */
private String charset;
/** Component for which the transformation is instantiated */
private Node component;
/** Optional: Attribute of the component for which the transformation is instantiated */
private String attributeName;
/** Input metadata of transformation, used for CTL compilation */
private DataRecordMetadata[] inMetadata;
/** Output metadata of transformation, used for CTL compilation */
private DataRecordMetadata[] outMetadata;
/** Customizable compiler factory */
private ITLCompilerFactory compilerFactory = new DefaultCompilerFactory();
private TransformFactory(TransformDescriptor<T> transformDescriptor) {
this.transformDescriptor = transformDescriptor;
}
/**
* @param transformDescriptor
* @return {@link TransformFactory} for the given {@link TransformDescriptor}
*/
public static <T> TransformFactory<T> createTransformFactory(TransformDescriptor<T> transformDescriptor) {
return new TransformFactory<T>(transformDescriptor);
}
/**
* {@link TransformFactory} returned by this method is limited and only java code implementation is supported.
* @param transformClass
* @return {@link TransformFactory} for the given class
*/
public static <T> TransformFactory<T> createTransformFactory(final Class<T> transformClass) {
return new TransformFactory<T>(new TransformDescriptor<T>() {
@Override
public Class<T> getTransformClass() {
return transformClass;
}
@Override
public T createCTL1Transform(String transformCode, Logger logger) {
throw new UnsupportedOperationException("CTL1 is not supported in '" + transformClass.getName() + "'.");
}
@Override
public Class<? extends CTLAbstractTransform> getCompiledCTL2TransformClass() {
throw new UnsupportedOperationException("CTL2 is not supported in '" + transformClass.getName() + "'.");
}
@Override
public T createInterpretedCTL2Transform(TransformLangExecutor executor, Logger logger) {
throw new UnsupportedOperationException("CTL2 is not supported in '" + transformClass.getName() + "'.");
}
});
}
private void validateSettings() {
//without these parameters we cannot create transformation
if (StringUtils.isEmpty(transform)
&& StringUtils.isEmpty(transformClass)
&& StringUtils.isEmpty(transformUrl)) {
throw new JetelRuntimeException("Transformation is not defined.");
}
if (component == null) {
throw new JetelRuntimeException("Component is not defined.");
}
}
/**
* Configuration check, mainly invoked from {@link Node#checkConfig(ConfigurationStatus)}.
* Only CTL1 and CTL2 code is compiled to ensure correctness of all settings.
* Java code is not validated.
* @param status
* @return
*/
public ConfigurationStatus checkConfig(ConfigurationStatus status) {
try {
validateSettings();
} catch (Exception e) {
status.add(new ConfigurationProblem(e, Severity.ERROR, component, Priority.NORMAL, null));
}
String checkTransform = null;
if (StringUtils.isEmpty(transformClass)) {
if (!StringUtils.isEmpty(transform)) {
checkTransform = transform;
} else if (!StringUtils.isEmpty(transformUrl)) {
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
checkTransform = FileUtils.getStringFromURL(component.getGraph().getRuntimeContext().getContextURL(), transformUrl, charset);
}
// only the transform and transformURL parameters are checked, transformClass is ignored
if (checkTransform != null) {
TransformLanguage transformLanguage = TransformLanguageDetector.guessLanguage(checkTransform);
if (transformLanguage == TransformLanguage.CTL1
|| transformLanguage == TransformLanguage.CTL2) {
// only CTL is checked
try {
createTransform();
} catch (JetelRuntimeException e) {
// report CTL error as a warning
status.add(new ConfigurationProblem(e, Severity.WARNING, component, Priority.NORMAL, null));
}
} else if (transformLanguage == null) {
String messagePrefix = attributeName != null ? attributeName + ": can't" : "Can't";
status.add(new ConfigurationProblem(messagePrefix + " determine transformation language",
Severity.WARNING, component, Priority.NORMAL, attributeName));
}
}
}
return status;
}
/**
* Core method of the factory.
* @return instance of transformation class
* @throws MissingFieldException if the CTL transformation tried to access non-existing field
* @throws LoadClassException transformation cannot be instantiated
*/
public T createTransform() {
validateSettings();
T transformation = null;
if (!StringUtils.isEmpty(transform)) {
//transform has highest priority
transformation = createTransformFromCode(transform);
} else if (!StringUtils.isEmpty(transformUrl)) {
//load transformation code from an URL
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
String transformCode = FileUtils.getStringFromURL(component.getGraph().getRuntimeContext().getContextURL(), transformUrl, charset);
PropertyRefResolver refResolver = component.getPropertyRefResolver();
transformCode = refResolver.resolveRef(transformCode, RefResFlag.SPEC_CHARACTERS_OFF);
transformation = createTransformFromCode(transformCode);
} else if (!StringUtils.isEmpty(transformClass)) {
transformation = ClassLoaderUtils.loadClassInstance(transformDescriptor.getTransformClass(), transformClass, component);
} else {
throw new JetelRuntimeException("Transformation is not defined.");
}
if (transformation instanceof Transform) {
((Transform) transformation).setNode(component);
}
return transformation;
}
/**
* Creates transform based on the given source code.
*/
private T createTransformFromCode(String transformCode) {
T transformation = null;
TransformLanguage language = TransformLanguageDetector.guessLanguage(transformCode);
if (language == null) {
throw new LoadClassException("Can't determine transformation code.");
}
switch (language) {
case JAVA:
transformCode = preprocessJavaCode(transformCode, inMetadata, outMetadata, component, false);
transformation = DynamicJavaClass.instantiate(transformCode, transformDescriptor.getTransformClass(), component);
break;
case JAVA_PREPROCESS:
transformCode = preprocessJavaCode(transformCode, inMetadata, outMetadata, component, true);
transformation = DynamicJavaClass.instantiate(transformCode, transformDescriptor.getTransformClass(), component);
break;
case CTL1:
transformation = transformDescriptor.createCTL1Transform(transformCode, component.getLog());
break;
case CTL2:
if (charset == null) {
charset = Defaults.DEFAULT_SOURCE_CODE_CHARSET;
}
final ITLCompiler compiler =
compilerFactory.createCompiler(component.getGraph(), inMetadata, outMetadata, charset);
String id = component.getId();
if (!StringUtils.isEmpty(attributeName)) {
id += "_" + attributeName;
}
List<ErrorMessage> msgs = compiler.compile(transformCode, transformDescriptor.getCompiledCTL2TransformClass(), id);
if (compiler.errorCount() > 0) {
String report = ErrorMessage.listToString(msgs, null); // message does not need to be logged here, will be thrown up as part of an exception
String message = "CTL code compilation finished with " + compiler.errorCount() + " errors." + report;
for (ErrorMessage msg: msgs) {
if (msg.getDetail() instanceof MetadataErrorDetail) {
MetadataErrorDetail detail = (MetadataErrorDetail) msg.getDetail();
throw new MissingFieldException(message, detail.isOutput(), detail.getRecordId(), detail.getFieldName());
}
}
throw new LoadClassException(message);
}
Object ret = compiler.getCompiledCode();
if (ret instanceof TransformLangExecutor) {
// setup interpreted runtime
transformation = transformDescriptor.createInterpretedCTL2Transform((TransformLangExecutor) ret, component.getLog());
} else if (transformDescriptor.getTransformClass().isInstance(ret)) {
transformation = transformDescriptor.getTransformClass().cast(ret);
} else {
// this should never happen as compiler always generates correct interface
throw new LoadClassException("Invalid type of record transformation");
}
break;
default:
throw new LoadClassException("Can't determine transformation code.");
}
return transformation;
}
/**
* Java code is pre-processed by {@link CodeParser} before compilation.
*/
private static String preprocessJavaCode(String transformCode, DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata, Node node, boolean addTransformCodeStub) {
// creating dynamicTransformCode from internal transformation format
CodeParser codeParser = new CodeParser(inMetadata, outMetadata);
if (!addTransformCodeStub)
// we must turn this off, because we don't have control about the rest of Java source
// and thus we cannot create the declaration of symbolic constants
codeParser.setUseSymbolicNames(false);
codeParser.setSourceCode(transformCode);
codeParser.parse();
if (addTransformCodeStub) {
codeParser.addTransformCodeStub("Transform" + node.getId());
}
return codeParser.getSourceCode();
}
/**
* Sets transformation code.
*/
public void setTransform(String transform) {
this.transform = transform;
}
/**
* Sets transformation class name.
*/
public void setTransformClass(String transformClass) {
this.transformClass = transformClass;
}
/**
* Sets URL where the transformation code can be loaded.
*/
public void setTransformUrl(String transformUrl) {
this.transformUrl = transformUrl;
}
/**
* Sets charset of external definition of transformation code defined in transformUrl
* or charset that should be used for import
*/
public void setCharset(String charset) {
this.charset = charset;
}
/**
* Sets component which requests the transformation instantiation.
*/
public void setComponent(Node component) {
this.component = component;
}
/**
* Sets the name of the component attribute which requests the transformation instantiation.
*/
public void setAttributeName(String attributeName) {
this.attributeName = attributeName;
}
/**
* Sets input metadata of transformation necessary for CTL compilation.
*/
public void setInMetadata(DataRecordMetadata... inMetadata) {
this.inMetadata = inMetadata;
}
/**
* Sets input metadata of transformation necessary for CTL compilation.
*/
public void setInMetadata(List<DataRecordMetadata> inMetadata) {
this.inMetadata = inMetadata.toArray(new DataRecordMetadata[inMetadata.size()]);
}
/**
* Sets output metadata of transformation necessary for CTL compilation.
*/
public void setOutMetadata(DataRecordMetadata... outMetadata) {
this.outMetadata = outMetadata;
}
/**
* Sets output metadata of transformation necessary for CTL compilation.
*/
public void setOutMetadata(List<DataRecordMetadata> outMetadata) {
this.outMetadata = outMetadata.toArray(new DataRecordMetadata[outMetadata.size()]);
}
/**
* @return true if the transformation code or class name is specified
*/
public boolean isTransformSpecified() {
return !StringUtils.isEmpty(transform) || !StringUtils.isEmpty(transformClass) || !StringUtils.isEmpty(transformUrl);
}
public void setCompilerFactory(ITLCompilerFactory compilerFactory) {
this.compilerFactory = compilerFactory;
}
/**
 * Default {@link ITLCompilerFactory} implementation,
 * selects the compiler with maximum priority from registered compilers.
 *
 * @author krivanekm ([email protected])
 * (c) Javlin, a.s. (www.cloveretl.com)
 *
 * @created 14. 1. 2015
 */
public static class DefaultCompilerFactory implements ITLCompilerFactory {

    @Override
    public ITLCompiler createCompiler(TransformationGraph graph, DataRecordMetadata[] inMetadata,
            DataRecordMetadata[] outMetadata, String encoding) {
        // Delegates to the engine-wide registry, which picks the registered
        // compiler with the highest priority.
        return TLCompilerFactory.createCompiler(graph, inMetadata, outMetadata, encoding);
    }
}
}
| UPDATE: CLO-4739 - Error message for unsuccessful transformation language detection modified.
git-svn-id: 7dbdde2f5643bf55eb500cbc70a792d560074c67@17580 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
| cloveretl.engine/src/org/jetel/component/TransformFactory.java | UPDATE: CLO-4739 - Error message for unsuccessful transformation language detection modified. |
|
Java | apache-2.0 | 730bfd4fdb185ba6ef426d1e00beb70f0778a341 | 0 | drowning/netty,mosoft521/netty,huuthang1993/netty,lugt/netty,firebase/netty,eincs/netty,shenguoquan/netty,chrisprobst/netty,yawkat/netty,olupotd/netty,nadeeshaan/netty,bigheary/netty,huuthang1993/netty,shism/netty,mosoft521/netty,MediumOne/netty,niuxinghua/netty,wangyikai/netty,silvaran/netty,danbev/netty,xiongzheng/netty,wuxiaowei907/netty,eincs/netty,IBYoung/netty,timboudreau/netty,wangyikai/netty,woshilaiceshide/netty,Alwayswithme/netty,junjiemars/netty,bob329/netty,gigold/netty,clebertsuconic/netty,MediumOne/netty,codevelop/netty,Alwayswithme/netty,woshilaiceshide/netty,caoyanwei/netty,CodingFabian/netty,Kalvar/netty,smayoorans/netty,niuxinghua/netty,satishsaley/netty,djchen/netty,nadeeshaan/netty,sammychen105/netty,lukehutch/netty,LuminateWireless/netty,rovarga/netty,shuangqiuan/netty,huanyi0723/netty,kvr000/netty,zhujingling/netty,mcanthony/netty,youprofit/netty,x1957/netty,BrunoColin/netty,chinayin/netty,sammychen105/netty,Alwayswithme/netty,lugt/netty,balaprasanna/netty,unei66/netty,jdivy/netty,bob329/netty,timboudreau/netty,lightsocks/netty,niuxinghua/netty,codevelop/netty,zhoffice/netty,carlbai/netty,xiongzheng/netty,ajaysarda/netty,Kalvar/netty,niuxinghua/netty,lightsocks/netty,wuxiaowei907/netty,orika/netty,drowning/netty,CodingFabian/netty,mubarak/netty,nat2013/netty,bigheary/netty,afds/netty,youprofit/netty,ijuma/netty,mway08/netty,clebertsuconic/netty,wangyikai/netty,sameira/netty,jovezhougang/netty,louiscryan/netty,sunbeansoft/netty,louiscryan/netty,lukehutch/netty,ijuma/netty,liuciuse/netty,altihou/netty,sja/netty,djchen/netty,shuangqiuan/netty,chrisprobst/netty,olupotd/netty,xiongzheng/netty,codevelop/netty,sverkera/netty,dongjiaqiang/netty,nkhuyu/netty,AnselQiao/netty,wuyinxian124/netty,hyangtack/netty,IBYoung/netty,lznhust/netty,bob329/netty,satishsaley/netty,junjiemars/netty,chinayin/netty,moyiguket/netty,WangJunTYTL/netty,kjniemi/netty,fantayeneh/netty,unei66/netty,zhujingling/n
etty,normanmaurer/netty,ninja-/netty,orika/netty,liuciuse/netty,ninja-/netty,niuxinghua/netty,lukehutch/netty,CodingFabian/netty,serioussam/netty,wuxiaowei907/netty,alkemist/netty,brennangaunce/netty,yonglehou/netty-1,huanyi0723/netty,blademainer/netty,afds/netty,firebase/netty,shelsonjava/netty,zzcclp/netty,youprofit/netty,caoyanwei/netty,unei66/netty,brennangaunce/netty,wangyikai/netty,youprofit/netty,ioanbsu/netty,sunbeansoft/netty,jdivy/netty,BrunoColin/netty,xiexingguang/netty,LuminateWireless/netty,nat2013/netty,rovarga/netty,timboudreau/netty,danbev/netty,castomer/netty,zxhfirefox/netty,seetharamireddy540/netty,zhoffice/netty,phlizik/netty,kjniemi/netty,exinguu/netty,balaprasanna/netty,caoyanwei/netty,ninja-/netty,zzcclp/netty,yawkat/netty,tempbottle/netty,danny200309/netty,junjiemars/netty,qingsong-xu/netty,mway08/netty,seetharamireddy540/netty,moyiguket/netty,liuciuse/netty,tempbottle/netty,altihou/netty,normanmaurer/netty,dongjiaqiang/netty,rovarga/netty,JungMinu/netty,MediumOne/netty,mosoft521/netty,hgl888/netty,zxhfirefox/netty,jenskordowski/netty,danbev/netty,DolphinZhao/netty,louiscryan/netty,lukehutch/netty,buchgr/netty,nadeeshaan/netty,hyangtack/netty,satishsaley/netty,AchinthaReemal/netty,kjniemi/netty,seetharamireddy540/netty,buchgr/netty,zxhfirefox/netty,f7753/netty,DolphinZhao/netty,lukehutch/netty,ninja-/netty,balaprasanna/netty,duqiao/netty,altihou/netty,liuciuse/netty,jovezhougang/netty,altihou/netty,sja/netty,exinguu/netty,xiongzheng/netty,qingsong-xu/netty,tempbottle/netty,huanyi0723/netty,slandelle/netty,duqiao/netty,castomer/netty,carlbai/netty,MediumOne/netty,nkhuyu/netty,Mounika-Chirukuri/netty,zhujingling/netty,kvr000/netty,nkhuyu/netty,mcanthony/netty,exinguu/netty,orika/netty,youprofit/netty,silvaran/netty,shelsonjava/netty,f7753/netty,eincs/netty,hyangtack/netty,unei66/netty,hgl888/netty,lznhust/netty,BrunoColin/netty,castomer/netty,lukw00/netty,JungMinu/netty,f7753/netty,liyang1025/netty,chrisprobst/netty,bob329/netty,orika/netty,bl
ademainer/netty,chrisprobst/netty,sameira/netty,f7753/netty,AchinthaReemal/netty,nkhuyu/netty,WangJunTYTL/netty,afds/netty,mcanthony/netty,nkhuyu/netty,smayoorans/netty,chinayin/netty,f7753/netty,clebertsuconic/netty,BrunoColin/netty,yawkat/netty,zzcclp/netty,hepin1989/netty,Kalvar/netty,zhujingling/netty,ioanbsu/netty,Alwayswithme/netty,Mounika-Chirukuri/netty,serioussam/netty,zxhfirefox/netty,shism/netty,kvr000/netty,serioussam/netty,zzcclp/netty,chinayin/netty,yonglehou/netty-1,xingguang2013/netty,drowning/netty,timboudreau/netty,mcanthony/netty,ajaysarda/netty,xiexingguang/netty,slandelle/netty,LuminateWireless/netty,nmittler/netty,dongjiaqiang/netty,clebertsuconic/netty,mubarak/netty,nadeeshaan/netty,kjniemi/netty,silvaran/netty,phlizik/netty,ioanbsu/netty,xiexingguang/netty,nadeeshaan/netty,lznhust/netty,satishsaley/netty,shism/netty,tempbottle/netty,wuxiaowei907/netty,MediumOne/netty,liyang1025/netty,normanmaurer/netty,Alwayswithme/netty,ijuma/netty,JungMinu/netty,qingsong-xu/netty,liuciuse/netty,mubarak/netty,mosoft521/netty,tempbottle/netty,moyiguket/netty,duqiao/netty,firebase/netty,huuthang1993/netty,mway08/netty,jovezhougang/netty,shenguoquan/netty,zhoffice/netty,yawkat/netty,sunbeansoft/netty,huuthang1993/netty,olupotd/netty,fantayeneh/netty,shenguoquan/netty,unei66/netty,sammychen105/netty,hyangtack/netty,jenskordowski/netty,hgl888/netty,wangyikai/netty,brennangaunce/netty,castomer/netty,ajaysarda/netty,olupotd/netty,lznhust/netty,kvr000/netty,danny200309/netty,afds/netty,blademainer/netty,ijuma/netty,mway08/netty,AchinthaReemal/netty,woshilaiceshide/netty,eonezhang/netty,ninja-/netty,brennangaunce/netty,shism/netty,slandelle/netty,bigheary/netty,xiexingguang/netty,danny200309/netty,WangJunTYTL/netty,moyiguket/netty,x1957/netty,huanyi0723/netty,duqiao/netty,drowning/netty,Mounika-Chirukuri/netty,liyang1025/netty,ijuma/netty,ajaysarda/netty,eincs/netty,chinayin/netty,qingsong-xu/netty,huanyi0723/netty,Kingson4Wu/netty,xingguang2013/netty,firebase/netty,
olupotd/netty,timboudreau/netty,eonezhang/netty,nmittler/netty,sverkera/netty,nayato/netty,JungMinu/netty,alkemist/netty,ioanbsu/netty,pengzj/netty,huuthang1993/netty,xingguang2013/netty,shenguoquan/netty,carlbai/netty,pengzj/netty,nayato/netty,woshilaiceshide/netty,ajaysarda/netty,sja/netty,LuminateWireless/netty,yonglehou/netty-1,jovezhougang/netty,AnselQiao/netty,xingguang2013/netty,hepin1989/netty,qingsong-xu/netty,seetharamireddy540/netty,smayoorans/netty,sunbeansoft/netty,lukw00/netty,shuangqiuan/netty,mosoft521/netty,jenskordowski/netty,x1957/netty,zhoffice/netty,djchen/netty,dongjiaqiang/netty,pengzj/netty,hepin1989/netty,eonezhang/netty,louiscryan/netty,junjiemars/netty,sverkera/netty,pengzj/netty,kjniemi/netty,silvaran/netty,smayoorans/netty,Mounika-Chirukuri/netty,Kingson4Wu/netty,shelsonjava/netty,hgl888/netty,sverkera/netty,WangJunTYTL/netty,Mounika-Chirukuri/netty,serioussam/netty,liyang1025/netty,hepin1989/netty,exinguu/netty,x1957/netty,gigold/netty,bob329/netty,WangJunTYTL/netty,lightsocks/netty,louiscryan/netty,rovarga/netty,woshilaiceshide/netty,brennangaunce/netty,gigold/netty,smayoorans/netty,normanmaurer/netty,lugt/netty,jdivy/netty,wuyinxian124/netty,AchinthaReemal/netty,ioanbsu/netty,AnselQiao/netty,mcanthony/netty,phlizik/netty,chrisprobst/netty,jenskordowski/netty,carlbai/netty,gigold/netty,wuxiaowei907/netty,xiexingguang/netty,AnselQiao/netty,clebertsuconic/netty,yonglehou/netty-1,wuyinxian124/netty,gigold/netty,djchen/netty,lukw00/netty,danny200309/netty,shuangqiuan/netty,CodingFabian/netty,IBYoung/netty,alkemist/netty,AnselQiao/netty,serioussam/netty,IBYoung/netty,blademainer/netty,zhoffice/netty,kvr000/netty,nayato/netty,codevelop/netty,lukw00/netty,bigheary/netty,dongjiaqiang/netty,bigheary/netty,shuangqiuan/netty,yawkat/netty,altihou/netty,danbev/netty,zxhfirefox/netty,sunbeansoft/netty,orika/netty,lightsocks/netty,caoyanwei/netty,eincs/netty,x1957/netty,wuyinxian124/netty,buchgr/netty,silvaran/netty,nayato/netty,sja/netty,DolphinZhao
/netty,satishsaley/netty,buchgr/netty,exinguu/netty,alkemist/netty,mubarak/netty,mubarak/netty,balaprasanna/netty,lugt/netty,xiongzheng/netty,sameira/netty,sja/netty,duqiao/netty,nmittler/netty,phlizik/netty,lznhust/netty,normanmaurer/netty,jovezhougang/netty,BrunoColin/netty,danbev/netty,zhujingling/netty,DolphinZhao/netty,shelsonjava/netty,Kalvar/netty,Kalvar/netty,fantayeneh/netty,caoyanwei/netty,IBYoung/netty,lugt/netty,jenskordowski/netty,shenguoquan/netty,DolphinZhao/netty,zzcclp/netty,afds/netty,balaprasanna/netty,eonezhang/netty,Kingson4Wu/netty,jdivy/netty,danny200309/netty,xingguang2013/netty,liyang1025/netty,AchinthaReemal/netty,moyiguket/netty,slandelle/netty,djchen/netty,blademainer/netty,jdivy/netty,shelsonjava/netty,mway08/netty,lukw00/netty,LuminateWireless/netty,alkemist/netty,fantayeneh/netty,CodingFabian/netty,yonglehou/netty-1,nat2013/netty,sverkera/netty,nayato/netty,castomer/netty,Kingson4Wu/netty,junjiemars/netty,hgl888/netty,fantayeneh/netty,sameira/netty,lightsocks/netty,seetharamireddy540/netty,sameira/netty,carlbai/netty,eonezhang/netty,shism/netty,Kingson4Wu/netty | /*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.epoll;
/**
 * Tells if <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a> is supported.
 */
public final class Epoll {

    /** Why the native transport cannot be used; {@code null} when it is available. */
    private static final Throwable UNAVAILABILITY_CAUSE;

    static {
        Throwable cause = null;
        int epollFd = -1;
        int eventFd = -1;
        try {
            // Probe availability: creating an epoll fd and an event fd proves the
            // JNI library loads and the kernel supports the required syscalls.
            epollFd = Native.epollCreate();
            eventFd = Native.eventFd();
        } catch (Throwable t) {
            cause = t;
        } finally {
            // Release the probe descriptors; -1 means the call above never succeeded.
            if (epollFd != -1) {
                try {
                    Native.close(epollFd);
                } catch (Exception ignore) {
                    // ignore
                }
            }
            if (eventFd != -1) {
                try {
                    Native.close(eventFd);
                } catch (Exception ignore) {
                    // ignore
                }
            }
        }
        // 'cause' is already null on success, so assign it directly; the original
        // if (cause != null) {...} else {...} branch was redundant.
        UNAVAILABILITY_CAUSE = cause;
    }

    /**
     * Returns {@code true} if and only if the
     * <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a> is available.
     */
    public static boolean isAvailable() {
        return UNAVAILABILITY_CAUSE == null;
    }

    /**
     * Ensure that <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a> is
     * available.
     *
     * @throws UnsatisfiedLinkError if unavailable
     */
    public static void ensureAvailability() {
        if (UNAVAILABILITY_CAUSE != null) {
            // initCause returns Throwable, hence the cast back to Error.
            throw (Error) new UnsatisfiedLinkError(
                    "failed to load the required native library").initCause(UNAVAILABILITY_CAUSE);
        }
    }

    /**
     * Returns the cause of unavailability of
     * <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a>.
     *
     * @return the cause if unavailable. {@code null} if available.
     */
    public static Throwable unavailabilityCause() {
        return UNAVAILABILITY_CAUSE;
    }

    // Static utility holder; never instantiated.
    private Epoll() { }
}
| transport-native-epoll/src/main/java/io/netty/channel/epoll/Epoll.java | /*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.epoll;
/**
 * Tells if <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a> is supported.
 */
public final class Epoll {

    /** Result of the one-time native-transport probe performed at class load. */
    private static final boolean IS_AVAILABLE;

    static {
        boolean probeSucceeded = false;
        int epollFd = -1;
        int eventFd = -1;
        try {
            // If both native calls succeed, the JNI library and kernel support exist.
            epollFd = Native.epollCreate();
            eventFd = Native.eventFd();
            probeSucceeded = true;
        } catch (Throwable cause) {
            // ignore
        } finally {
            closeQuietly(epollFd);
            closeQuietly(eventFd);
        }
        IS_AVAILABLE = probeSucceeded;
    }

    /** Closes a probe descriptor, swallowing any failure; -1 means "never opened". */
    private static void closeQuietly(int fd) {
        if (fd == -1) {
            return;
        }
        try {
            Native.close(fd);
        } catch (Exception ignore) {
            // ignore
        }
    }

    /**
     * Returns {@code true} if and only if the
     * <a href="http://netty.io/wiki/native-transports.html">{@code netty-transport-native-epoll}</a> can be used.
     */
    public static boolean isAvailable() {
        return IS_AVAILABLE;
    }

    // Static utility holder; never instantiated.
    private Epoll() { }
}
| Add more utility methods to check the availability of the epoll transport
Related issue: #2733
Motivation:
Unlike OpenSsl, Epoll lacks a couple useful availability checker
methods:
- ensureAvailability()
- unavailabilityCause()
Modifications:
Add missing methods
Result:
More ways to check the availability and to get the cause of
unavailability programmatically.
| transport-native-epoll/src/main/java/io/netty/channel/epoll/Epoll.java | Add more utility methods to check the availability of the epoll transport |
|
Java | apache-2.0 | 3243e98c207249cfc80eeef424aa0291cf8b7e99 | 0 | Juanjojara/cordova-plugin-background-geolocation,Juanjojara/cordova-plugin-background-geolocation,Juanjojara/cordova-plugin-background-geolocation | package com.tenforwardconsulting.cordova.bgloc.data.sqlite;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.preference.PreferenceManager;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.util.Log;
import com.tenforwardconsulting.cordova.bgloc.data.Card;
import com.tenforwardconsulting.cordova.bgloc.data.CardDAO;
public class SQLiteCardDAO implements CardDAO {

    public static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    private static final String TAG = "LocationUpdateService";

    private Context context;

    public SQLiteCardDAO(Context context) {
        this.context = context;
    }

    /**
     * Recomputes the "pendingInternet" shared-preference flag: true when the
     * current user still has rows queued in pending_geo, false otherwise.
     */
    public void internetPendingCards() {
        SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
        String user_id = pref.getString("user_id", "");
        String countQuery = "SELECT count(id) countPendings FROM pending_geo WHERE user_id = ?";
        SQLiteDatabase db = new CardOpenHelper(context).getReadableDatabase();
        Cursor cursor = db.rawQuery(countQuery, new String[]{user_id});
        // BUG FIX: a fresh Cursor is positioned before the first row; reading it
        // without moveToFirst() throws CursorIndexOutOfBoundsException.
        int internetCards = 0;
        if (cursor.moveToFirst()) {
            internetCards = cursor.getInt(cursor.getColumnIndex("countPendings"));
        }
        cursor.close();
        db.close();
        SharedPreferences.Editor edit = pref.edit();
        edit.putBoolean("pendingInternet", internetCards > 0);
        Log.d(TAG, "RC = " + internetCards);
        edit.commit();
    }

    /**
     * Persists a card.
     * <p>
     * NOTE(review): the actual INSERT is commented out (leftover debugging
     * state); the method currently only dumps the table list to logcat and
     * always returns false. Re-enable the commented block to make it functional.
     *
     * @param tableName target table for the insert (unused while disabled)
     * @param card      card to persist (unused while disabled)
     * @return false — insertion is currently disabled
     */
    public boolean persistCard(String tableName, Card card) {
        SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
        String user_id = pref.getString("user_id", "");
        Log.i(TAG, "---- Persist Location");
        Log.i(TAG, "USER ID: " + user_id);
        SQLiteDatabase db = new CardOpenHelper(context).getReadableDatabase();
        // Debug aid: list all tables so schema problems are visible in logcat.
        Cursor cursor = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null);
        if (cursor.moveToFirst()) {
            while (!cursor.isAfterLast()) {
                Log.i(TAG, "Table Name=> " + cursor.getString(0));
                cursor.moveToNext();
            }
        }
        cursor.close();
        db.close();
        /* TODO re-enable once the schema issue is resolved:
        SQLiteDatabase db = new CardOpenHelper(context).getWritableDatabase();
        db.beginTransaction();
        ContentValues values = getContentValues(card);
        long rowId = db.insert(tableName, null, values);
        db.setTransactionSuccessful();
        db.endTransaction();
        db.close();
        return rowId > -1;
        */
        // Was: long rowId = -1; if (rowId > -1) return true; else return false;
        // — a tautology that always yielded false.
        return false;
    }

    /** Maps a Card onto ContentValues for db.insert(). */
    private ContentValues getContentValues(Card card) {
        ContentValues values = new ContentValues();
        values.put("id", card.getId());
        values.put("created", card.getCreated());
        values.put("info", card.getInfo());
        values.put("location", card.getLocation());
        values.put("latitude", card.getLatitude());
        values.put("longitude", card.getLongitude());
        values.put("sharing_level", card.getSharing_level());
        values.put("location_level", card.getLocation_level());
        values.put("user_id", card.getUser_id());
        values.put("confirm", card.getConfirm());
        return values;
    }
}
| src/android/data/sqlite/SQLiteCardDAO.java | package com.tenforwardconsulting.cordova.bgloc.data.sqlite;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.preference.PreferenceManager;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.util.Log;
import com.tenforwardconsulting.cordova.bgloc.data.Card;
import com.tenforwardconsulting.cordova.bgloc.data.CardDAO;
public class SQLiteCardDAO implements CardDAO {

    public static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    private static final String TAG = "LocationUpdateService";

    private Context context;

    public SQLiteCardDAO(Context context) {
        this.context = context;
    }

    /**
     * Recomputes the "pendingInternet" shared-preference flag: true when the
     * current user still has rows queued in pending_geo, false otherwise.
     */
    public void internetPendingCards() {
        SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
        String user_id = pref.getString("user_id", "");
        String countQuery = "SELECT count(id) countPendings FROM pending_geo WHERE user_id = ?";
        SQLiteDatabase db = new CardOpenHelper(context).getReadableDatabase();
        Cursor cursor = db.rawQuery(countQuery, new String[]{user_id});
        // BUG FIX: a fresh Cursor is positioned before the first row; reading it
        // without moveToFirst() throws CursorIndexOutOfBoundsException.
        int internetCards = 0;
        if (cursor.moveToFirst()) {
            internetCards = cursor.getInt(cursor.getColumnIndex("countPendings"));
        }
        cursor.close();
        db.close();
        SharedPreferences.Editor edit = pref.edit();
        edit.putBoolean("pendingInternet", internetCards > 0);
        Log.d(TAG, "RC = " + internetCards);
        edit.commit();
    }

    /**
     * Persists a card.
     * <p>
     * NOTE(review): the actual INSERT is commented out (leftover debugging
     * state) and the original body did not compile — it logged a variable
     * ({@code internetCards}) whose declaration was commented out. That log
     * line is removed here. The method currently only dumps the table list to
     * logcat and always returns false.
     *
     * @param tableName target table for the insert (unused while disabled)
     * @param card      card to persist (unused while disabled)
     * @return false — insertion is currently disabled
     */
    public boolean persistCard(String tableName, Card card) {
        SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
        String user_id = pref.getString("user_id", "");
        Log.i(TAG, "---- Persist Location");
        Log.i(TAG, "USER ID: " + user_id);
        SQLiteDatabase db = new CardOpenHelper(context).getReadableDatabase();
        // Debug aid: list all tables so schema problems are visible in logcat.
        Cursor cursor = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null);
        if (cursor.moveToFirst()) {
            while (!cursor.isAfterLast()) {
                Log.i(TAG, "Table Name=> " + cursor.getString(0));
                cursor.moveToNext();
            }
        }
        cursor.close();
        db.close();
        /* TODO re-enable once the schema issue is resolved:
        SQLiteDatabase db = new CardOpenHelper(context).getWritableDatabase();
        db.beginTransaction();
        ContentValues values = getContentValues(card);
        long rowId = db.insert(tableName, null, values);
        db.setTransactionSuccessful();
        db.endTransaction();
        db.close();
        return rowId > -1;
        */
        return false;
    }

    /** Maps a Card onto ContentValues for db.insert(). */
    private ContentValues getContentValues(Card card) {
        ContentValues values = new ContentValues();
        values.put("id", card.getId());
        values.put("created", card.getCreated());
        values.put("info", card.getInfo());
        values.put("location", card.getLocation());
        values.put("latitude", card.getLatitude());
        values.put("longitude", card.getLongitude());
        values.put("sharing_level", card.getSharing_level());
        values.put("location_level", card.getLocation_level());
        values.put("user_id", card.getUser_id());
        values.put("confirm", card.getConfirm());
        return values;
    }
}
| no message
| src/android/data/sqlite/SQLiteCardDAO.java | no message |
|
Java | apache-2.0 | 87d5892e35ad4b8cce859e806407cf7ed354f162 | 0 | onepf/OPFPush | /*
* Copyright 2012-2014 One Platform Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onepf.openpush;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import junit.framework.Assert;
import org.onepf.openpush.util.PackageUtils;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static org.onepf.openpush.OpenPushLog.LOGD;
import static org.onepf.openpush.OpenPushLog.LOGI;
import static org.onepf.openpush.OpenPushLog.LOGW;
/**
 * Helper class for managing push providers.
 * To get an instance, call {@link OpenPushHelper#getInstance(android.content.Context)}.
 *
 * @author Kirill Rozov
 * @since 04.09.2014
 */
public class OpenPushHelper {
// Shared-preferences key holding the name of the last successfully used provider.
static final String KEY_LAST_PROVIDER_NAME = "last_provider_name";
static final String PREF_NAME = "org.onepf.openpush";

// Lifecycle states stored in mState.
private static final int STATE_NONE = 0;
private static final int STATE_REGISTERING = 1;
private static final int STATE_WORKING = 2;
private static final int STATE_UNREGISTERING = 3;

// volatile is required: getInstance() reads this field outside any lock
// (double-checked locking is broken without it under the Java Memory Model).
@Nullable
private static volatile OpenPushHelper sInstance;

@NonNull
private final Context mAppContext;

@NonNull
private final SharedPreferences mPreferences;

@Nullable
private OpenPushListener mListener;

@Nullable
private BroadcastReceiver mPackageReceiver;

@Nullable
private PushProvider mCurrentProvider;

// final: the reference never changes, only the contained value does.
private final AtomicInteger mState = new AtomicInteger(STATE_NONE);

// volatile: init() and isInitDone() read this field without always holding
// mInitLock, so visibility across threads must be guaranteed.
private volatile Options mOptions;

private final Object mRegistrationLock = new Object();
private final Object mInitLock = new Object();
/**
 * Private: obtain instances via {@code getInstance(Context)}. Keeps only the
 * application context (never an Activity, avoiding leaks) and opens this
 * helper's private preferences file.
 */
private OpenPushHelper(@NonNull Context context) {
    mAppContext = context.getApplicationContext();
    mPreferences =
            mAppContext.getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
}
/**
 * Returns the process-wide singleton, creating it on first use.
 * <p>
 * The previous implementation used double-checked locking on a non-volatile
 * field, which is unsafe under the Java Memory Model (a thread could observe
 * a partially constructed instance). Synchronizing the whole lookup is
 * correct, and the cost is negligible for this call pattern.
 *
 * @param context any context; only the application context is retained
 */
public static OpenPushHelper getInstance(@NonNull Context context) {
    synchronized (OpenPushHelper.class) {
        if (sInstance == null) {
            sInstance = new OpenPushHelper(context);
        }
        return sInstance;
    }
}
/**
 * Create new instance of {@link OpenPushHelper}.
 * <b>Use for test purposes only!</b>
 *
 * @param context any context; only the application context is retained
 * @return New instance of {@link OpenPushHelper}.
 */
static OpenPushHelper getNewInstance(@NonNull Context context) {
    // Unconditionally replaces the singleton so each test starts clean.
    synchronized (OpenPushHelper.class) {
        sInstance = new OpenPushHelper(context);
    }
    return sInstance;
}
/**
 * @return true once {@code init(Options)} has been called (options are set).
 */
public boolean isInitDone() {
    synchronized (mInitLock) {
        return mOptions != null;
    }
}
/** @return true when a provider is selected and reports itself registered. */
public boolean isRegistered() {
    return mCurrentProvider != null && mCurrentProvider.isRegistered();
}
// Guard used by public entry points: fail fast if init() was never called.
private void checkInitDone() {
    if (!isInitDone()) {
        throw new OpenPushException("Before work with OpenPushHelper call init() first.");
    }
}
/**
 * One-time initialization; must precede register()/unregister().
 * A second call throws.
 *
 * @param options configuration: providers, recovery policy, etc.
 * @throws OpenPushException on repeated invocation
 */
public void init(@NonNull Options options) {
    // Double-checked write guarded by mInitLock. NOTE(review): the first,
    // unsynchronized read of mOptions relies on cross-thread visibility —
    // the field should be volatile; confirm against the field declaration.
    if (mOptions == null) {
        synchronized (mInitLock) {
            if (mOptions == null) {
                mOptions = options;
            }
        }
    } else {
        throw new OpenPushException("Attempt to init twice.");
    }
    // Attempt to resume the provider persisted by a previous run.
    initLastProvider();
    LOGI("Init done.");
}
/**
 * Restores the provider persisted by a previous run, if any.
 * Three outcomes: still registered -> resume WORKING; available but
 * unregistered -> start re-registration; unavailable -> reset state and,
 * when the options allow recovery, fall back to any available provider.
 */
private void initLastProvider() {
    final PushProvider lastProvider = getLastProvider();
    if (lastProvider == null) {
        return;
    }
    LOGI(String.format("Try restore last provider '%s'.", lastProvider));
    if (lastProvider.isAvailable()) {
        if (lastProvider.isRegistered()) {
            LOGI("Last provider running.");
            mCurrentProvider = lastProvider;
            mState.set(STATE_WORKING);
        } else {
            LOGI("Last provider need register.");
            mState.set(STATE_REGISTERING);
            // Registration could not even start: roll back state and forget
            // the persisted provider name.
            if (!registerProvider(lastProvider)) {
                mState.set(STATE_NONE);
                saveLastProvider(null);
            }
        }
    } else {
        lastProvider.onUnavailable();
        LOGI("Last provider isn't available.");
        reset();
        if (mOptions.isRecoverProvider()) {
            LOGI("Try register any available provider.");
            register();
        }
    }
}
/**
 * Installs the event listener, or removes it when null. The listener is
 * wrapped in MainThreadListenerWrapper (presumably marshals callbacks onto
 * the main thread — confirm against that class).
 */
public void setListener(@Nullable OpenPushListener l) {
    mListener = l == null ? null : new MainThreadListenerWrapper(l);
}
/**
 * Starts provider registration. No-op when registration is already in
 * progress; throws when unregistration is running or a provider is already
 * working.
 *
 * @throws OpenPushException when called before init(), during
 *         unregistration, or when already registered
 */
public void register() {
    checkInitDone();
    synchronized (mRegistrationLock) {
        switch (mState.get()) {
            case STATE_REGISTERING:
                // Already in progress; ignore the duplicate request.
                break;
            case STATE_NONE:
                mState.set(STATE_REGISTERING);
                // Prefer a provider whose host app is a system app, when the
                // options ask for it; otherwise walk the provider list.
                if (mOptions.isSystemPushPreferred()
                        && registerSystemPreferredProvider()) {
                    return;
                }
                registerNextProvider(null);
                break;
            case STATE_UNREGISTERING:
                throw new OpenPushException("Can't register while unregistration is running.");
            case STATE_WORKING:
                throw new OpenPushException("Attempt to register twice!");
        }
    }
}
/**
 * Tries to start registration with the first configured provider whose host
 * application is installed as a system app.
 *
 * @return true when such a provider accepted the registration attempt
 */
private boolean registerSystemPreferredProvider() {
    for (PushProvider candidate : mOptions.getProviders()) {
        boolean hostIsSystemApp =
                PackageUtils.isSystemApp(mAppContext, candidate.getHostAppPackage());
        if (hostIsSystemApp && registerProvider(candidate)) {
            return true;
        }
    }
    return false;
}
/**
 * Register first available provider. Iterate all provider from the next provider after
 * {@code lastProvider} param.
 *
 * @param lastProvider Last provider what check to register or null if has no.
 * @return True if find provider that can try to register, otherwise false.
 */
private boolean registerNextProvider(@Nullable PushProvider lastProvider) {
    int nextProviderIndex = 0;
    final List<PushProvider> providers = mOptions.getProviders();
    if (lastProvider != null) {
        int lastProviderIndex = providers.indexOf(lastProvider);
        // If lastProvider is not in the list (-1), start from the beginning.
        if (lastProviderIndex != -1) {
            nextProviderIndex = lastProviderIndex + 1;
        }
    }
    for (int providersCount = providers.size();
            nextProviderIndex < providersCount; ++nextProviderIndex) {
        if (registerProvider(providers.get(nextProviderIndex))) {
            return true;
        }
    }
    // Exhausted the list: leave the REGISTERING state and notify the listener.
    mState.set(STATE_NONE);
    LOGW("No more available providers.");
    if (mListener != null) {
        mListener.onNoAvailableProvider();
    }
    return false;
}
/**
 * Same that {@link #registerProvider(PushProvider, boolean)} with {@code registerNext} set to false.
 */
private boolean registerProvider(@NonNull PushProvider provider) {
    return registerProvider(provider, false);
}
/**
 * Starts registration on the given provider.
 *
 * @param provider        provider to register
 * @param tryRegisterNext when true and {@code provider} is unavailable, fall
 *                        through to the next configured provider
 * @return true when a registration attempt was started (for this provider or,
 *         with {@code tryRegisterNext}, a later one); false otherwise
 */
private boolean registerProvider(@NonNull PushProvider provider, boolean tryRegisterNext) {
    // Guard clause: unavailable providers are skipped (optionally chaining on).
    if (!provider.isAvailable()) {
        LOGI(String.format("Provider '%s' not available.", provider));
        return tryRegisterNext && registerNextProvider(provider);
    }
    LOGD(String.format("Try register %s.", provider));
    provider.register();
    return true;
}
/**
 * Starts unregistration of the current provider. No-op when unregistration
 * is already in progress; throws in every other invalid state.
 *
 * @throws OpenPushException when not initialized, not registered, or when
 *         registration is still in progress
 */
public void unregister() {
    checkInitDone();
    synchronized (mRegistrationLock) {
        if (!isRegistered()) {
            throw new OpenPushException("No one provider is registered!");
        }
        // isRegistered() above implies a non-null current provider.
        Assert.assertNotNull(mCurrentProvider);
        switch (mState.get()) {
            case STATE_WORKING:
                mState.set(STATE_UNREGISTERING);
                unregisterPackageChangeReceiver();
                mCurrentProvider.unregister();
                break;
            case STATE_UNREGISTERING:
                // Already in progress; ignore the duplicate request.
                break;
            case STATE_REGISTERING:
                throw new OpenPushException("Can't unregister when registration in progress.!");
            case STATE_NONE:
                throw new OpenPushException("Before to unregister you must register provider.!");
        }
    }
}
/** Detaches the package-change BroadcastReceiver, if one is installed. */
private void unregisterPackageChangeReceiver() {
    if (mPackageReceiver == null) {
        return;
    }
    mAppContext.unregisterReceiver(mPackageReceiver);
    mPackageReceiver = null;
}
/** @return the currently selected provider, or null when none is active. */
@Nullable
public PushProvider getCurrentProvider() {
    return mCurrentProvider;
}
/**
 * Looks up a configured provider by its name.
 *
 * @param providerName name to search for
 * @return the matching provider, or null when the options contain no
 *         provider with that name
 */
@Nullable
private PushProvider getProvider(@NonNull String providerName) {
    for (PushProvider candidate : mOptions.getProviders()) {
        if (providerName.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Reads the provider name persisted by a previous run and resolves it
 * against the configured providers. A stale or empty entry is removed from
 * the preferences and null is returned.
 */
@Nullable
private PushProvider getLastProvider() {
    if (mPreferences.contains(KEY_LAST_PROVIDER_NAME)) {
        String storedProviderName = mPreferences.getString(KEY_LAST_PROVIDER_NAME, null);
        if (!TextUtils.isEmpty(storedProviderName)) {
            PushProvider provider = getProvider(storedProviderName);
            if (provider != null) {
                return provider;
            }
        }
        // Stored name is empty or no longer configured: drop the stale entry.
        mPreferences.edit().remove(KEY_LAST_PROVIDER_NAME).apply();
    }
    return null;
}
/**
 * Persists the name of the active provider, or clears the persisted name
 * when {@code provider} is null.
 */
private void saveLastProvider(@Nullable PushProvider provider) {
    final SharedPreferences.Editor editor = mPreferences.edit();
    if (provider != null) {
        editor.putString(KEY_LAST_PROVIDER_NAME, provider.getName());
    } else {
        editor.remove(KEY_LAST_PROVIDER_NAME);
    }
    editor.apply();
}
/**
 * Delivers an incoming push message to the listener, if any.
 *
 * @param providerName provider that received the message
 * @param extras       message payload; may be null
 */
public void onMessage(@NonNull String providerName, @Nullable Bundle extras) {
    // Fixed copy-paste bug: this log line previously read "onUnavailable(...)".
    LOGD(String.format("onMessage(providerName = %s).", providerName));
    if (mListener != null) {
        mListener.onMessage(providerName, extras);
    }
}
/**
 * Notifies the listener that the provider discarded pending messages.
 *
 * @param providerName  provider reporting the deletion
 * @param messagesCount number of deleted messages
 */
public void onDeletedMessages(@NonNull String providerName, int messagesCount) {
    LOGD(String.format("onDeletedMessages(providerName = %s,messagesCount = %d).",
            providerName, messagesCount));
    if (mListener != null) {
        mListener.onDeletedMessages(providerName, messagesCount);
    }
}
/**
 * Called when the named provider requests re-registration (e.g. its stored
 * state became invalid). Only honored when it is the current provider:
 * clears persisted state and restarts registration on the same provider.
 */
public void onNeedRetryRegister(@NonNull String providerName) {
    LOGD(String.format("onNeedRetryRegister(providerName = %s).", providerName));
    if (mCurrentProvider != null && mCurrentProvider.getName().equals(providerName)) {
        reset();
        mCurrentProvider.onAppStateChanged();
        mState.set(STATE_REGISTERING);
        // If registration could not start, fall back to the idle state.
        if (!registerProvider(mCurrentProvider, false)) {
            mState.set(STATE_NONE);
        }
    }
}
// Clears all persisted helper state and returns the state machine to NONE.
void reset() {
    mPreferences.edit().clear().apply();
    mState.set(STATE_NONE);
}
/**
 * Called when a provider becomes unavailable. Only reacts when the
 * reporting provider is the currently active one: clears local state,
 * notifies the listener and, when the options allow provider recovery,
 * restarts registration with the remaining providers.
 *
 * @param provider the provider reporting unavailability.
 */
public void onUnavailable(@NonNull PushProvider provider) {
    LOGD(String.format("onUnavailable(provider = %s).", provider));
    if (mCurrentProvider != null && mCurrentProvider.equals(provider)) {
        reset();
        mCurrentProvider.onUnavailable();
        mCurrentProvider = null;
        if (mListener != null) {
            mListener.onProviderBecameUnavailable(provider.getName());
        }
        if (mOptions.isRecoverProvider()) {
            register(); //Restart registration
        }
    }
}
/**
 * Entry point for asynchronous provider results. Dispatches to the
 * registration or unregistration handler depending on the current state;
 * a result is only legal while one of those operations is in flight.
 *
 * @param result outcome reported by a provider.
 * @throws UnsupportedOperationException when neither a registration nor an
 *         unregistration is currently running.
 */
public void onResult(Result result) {
    // Serialize with register()/unregister(), which use the same lock.
    synchronized (mRegistrationLock) {
        switch (mState.get()) {
            case STATE_REGISTERING:
                onRegistrationResult(result);
                break;
            case STATE_UNREGISTERING:
                onUnregistrationResult(result);
                break;
            default:
                throw new UnsupportedOperationException("New result can be handled only when" +
                        " registration or unregistration is running.");
        }
    }
}
/**
 * Handles the outcome of an unregistration attempt. On success all local
 * state is reset, the current provider is closed and the listener is
 * notified; on failure the listener receives the error code.
 *
 * @param result outcome of the unregistration reported by the provider.
 */
private void onUnregistrationResult(@NonNull Result result) {
    if (result.isSuccess()) {
        LOGI(String.format("Successfully unregister provider '%s'.", result.getProviderName()));
        reset();
        if (mCurrentProvider != null) {
            mCurrentProvider.close();
            mCurrentProvider = null;
        }
        if (mListener != null) {
            Assert.assertNotNull(result.getRegistrationId());
            mListener.onUnregistered(result.getProviderName(), result.getRegistrationId());
        }
    } else {
        // Fixed: log the failure even when no listener is attached.
        // Previously the LOGI call sat inside "else if (mListener != null)",
        // so unregistration errors vanished silently without a listener.
        LOGI(String.format("Error unregister provider '%s'.", result.getProviderName()));
        if (mListener != null) {
            final PushProvider provider = getProvider(result.getProviderName());
            if (provider != null) {
                mListener.onUnregistrationError(provider.getName(), result.getErrorCode());
            }
        }
    }
}
/**
 * Handles the outcome of a registration attempt. On success the helper
 * moves to {@code STATE_WORKING}, stores the provider as the last used one
 * and registers a package-change receiver for it (see
 * {@code PackageUtils.registerPackageChangeReceiver}). On a
 * non-recoverable failure the next available provider is tried.
 *
 * @param result outcome of the registration reported by the provider.
 */
private void onRegistrationResult(@NonNull Result result) {
    if (result.isSuccess()) {
        LOGI(String.format("Successfully register provider '%s'.", result.getProviderName()));
        LOGI(String.format("Register id '%s'.", result.getRegistrationId()));
        mState.set(STATE_WORKING);
        mCurrentProvider = getProvider(result.getProviderName());
        Assert.assertNotNull(mCurrentProvider);
        saveLastProvider(mCurrentProvider);
        Assert.assertNotNull(result.getRegistrationId());
        if (mListener != null) {
            mListener.onRegistered(result.getProviderName(), result.getRegistrationId());
        }
        mPackageReceiver =
                PackageUtils.registerPackageChangeReceiver(mAppContext, mCurrentProvider);
    } else {
        LOGI(String.format("Error register provider '%s'.", result.getProviderName()));
        PushProvider provider = getProvider(result.getProviderName());
        if (provider != null) {
            if (mListener != null) {
                mListener.onRegistrationError(provider.getName(), result.getErrorCode());
            }
            // A recoverable error leaves the state machine in
            // STATE_REGISTERING so the same provider may retry; otherwise
            // move on to the next candidate.
            if (!result.isRecoverableError()) {
                registerNextProvider(provider);
            }
        }
    }
}
/** Renders the helper's options, provider and state for debugging. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("OpenPushHelper{");
    sb.append("options=").append(mOptions);
    sb.append(", currentProvider=").append(mCurrentProvider);
    sb.append(", inited=").append(isInitDone());
    sb.append(", registered=").append(isRegistered());
    sb.append('}');
    return sb.toString();
}
}
| library/src/main/java/org/onepf/openpush/OpenPushHelper.java | /*
* Copyright 2012-2014 One Platform Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onepf.openpush;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import junit.framework.Assert;
import org.onepf.openpush.util.PackageUtils;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static org.onepf.openpush.OpenPushLog.LOGD;
import static org.onepf.openpush.OpenPushLog.LOGI;
import static org.onepf.openpush.OpenPushLog.LOGW;
/**
* Helper class for manage push providers.
* For get instance call {@link OpenPushHelper#getInstance(android.content.Context)}.\
*
* @author Kirill Rozov
* @since 04.09.2014
*/
public class OpenPushHelper {
static final String KEY_LAST_PROVIDER_NAME = "last_provider_name";
static final String PREF_NAME = "org.onepf.openpush";
private static final int STATE_NONE = 0;
private static final int STATE_REGISTERING = 1;
private static final int STATE_WORKING = 2;
private static final int STATE_UNREGISTERING = 3;
@Nullable
private static OpenPushHelper sInstance;
@NonNull
private final Context mAppContext;
@NonNull
private final SharedPreferences mPreferences;
@Nullable
private OpenPushListener mListener;
@Nullable
private BroadcastReceiver mPackageReceiver;
@Nullable
private PushProvider mCurrentProvider;
private AtomicInteger mState = new AtomicInteger(STATE_NONE);
private Options mOptions;
private final Object mRegistrationLock = new Object();
private final Object mInitLock = new Object();
private OpenPushHelper(@NonNull Context context) {
mAppContext = context.getApplicationContext();
mPreferences =
mAppContext.getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
}
/**
 * Returns the process-wide {@link OpenPushHelper} singleton, creating it on
 * first use. Only the application context is retained.
 *
 * Fixed: the previous double-checked locking was broken because
 * {@code sInstance} is not declared volatile, so a racing thread could
 * observe a partially constructed instance. A plain synchronized block is
 * correct; the lock cost is negligible for this access pattern.
 *
 * @param context any context; its application context is used internally.
 */
public static OpenPushHelper getInstance(@NonNull Context context) {
    synchronized (OpenPushHelper.class) {
        if (sInstance == null) {
            sInstance = new OpenPushHelper(context);
        }
        return sInstance;
    }
}
/**
* Create new instance of {@link OpenPushHelper}.
* <b>Use for test purposes only!</b>
*
* @param context
* @return New instance of {@link OpenPushHelper}.
*/
static OpenPushHelper getNewInstance(@NonNull Context context) {
synchronized (OpenPushHelper.class) {
sInstance = new OpenPushHelper(context);
}
return sInstance;
}
public boolean isInitDone() {
synchronized (mInitLock) {
return mOptions != null;
}
}
public boolean isRegistered() {
return mCurrentProvider != null && mCurrentProvider.isRegistered();
}
private void checkInitDone() {
if (!isInitDone()) {
throw new OpenPushException("Before work with OpenPushHelper call init() first.");
}
}
/**
 * Initializes the helper with the given options. Must be called exactly
 * once; subsequent calls throw.
 *
 * Fixed: the old check-then-act read {@code mOptions} outside the lock, so
 * two racing callers could both pass the outer null check — the loser then
 * silently skipped the "init twice" exception and still ran
 * {@code initLastProvider()} a second time. The whole check-and-set is now
 * performed under {@code mInitLock}.
 *
 * @param options configuration to use; must not be null.
 * @throws OpenPushException when the helper was already initialized.
 */
public void init(@NonNull Options options) {
    synchronized (mInitLock) {
        if (mOptions != null) {
            throw new OpenPushException("Attempt to init twice.");
        }
        mOptions = options;
    }
    initLastProvider();
    LOGI("Init done.");
}
private void initLastProvider() {
final PushProvider lastProvider = getLastProvider();
if (lastProvider == null) {
return;
}
LOGI(String.format("Try restore last provider '%s'.", lastProvider));
if (lastProvider.isAvailable()) {
if (lastProvider.isRegistered()) {
LOGI("Last provider running.");
mCurrentProvider = lastProvider;
mState.set(STATE_WORKING);
} else {
LOGI("Last provider need register.");
mState.set(STATE_REGISTERING);
if (!registerProvider(lastProvider)) {
mState.set(STATE_NONE);
saveLastProvider(null);
}
}
} else {
LOGI("Last provider isn't available.");
reset();
if (mOptions.isRecoverProvider()) {
LOGI("Try register any available provider.");
register();
}
}
}
public void setListener(@Nullable OpenPushListener l) {
mListener = l == null ? null : new MainThreadListenerWrapper(l);
}
public void register() {
checkInitDone();
synchronized (mRegistrationLock) {
switch (mState.get()) {
case STATE_REGISTERING:
break;
case STATE_NONE:
mState.set(STATE_REGISTERING);
if (mOptions.isSystemPushPreferred()
&& registerSystemPreferredProvider()) {
return;
}
registerNextProvider(null);
break;
case STATE_UNREGISTERING:
throw new OpenPushException("Can't register while unregistration is running.");
case STATE_WORKING:
throw new OpenPushException("Attempt to register twice!");
}
}
}
private boolean registerSystemPreferredProvider() {
for (PushProvider provider : mOptions.getProviders()) {
if (PackageUtils.isSystemApp(mAppContext, provider.getHostAppPackage())
&& registerProvider(provider)) {
return true;
}
}
return false;
}
/**
* Register first available provider. Iterate all provider from the next provider after
* {@code lastProvider} param.
*
* @param lastProvider Last provider what check to register or null if has no.
* @return True if find provider that can try to register, otherwise false.
*/
private boolean registerNextProvider(@Nullable PushProvider lastProvider) {
int nextProviderIndex = 0;
final List<PushProvider> providers = mOptions.getProviders();
if (lastProvider != null) {
int lastProviderIndex = providers.indexOf(lastProvider);
if (lastProviderIndex != -1) {
nextProviderIndex = lastProviderIndex + 1;
}
}
for (int providersCount = providers.size();
nextProviderIndex < providersCount; ++nextProviderIndex) {
if (registerProvider(providers.get(nextProviderIndex))) {
return true;
}
}
mState.set(STATE_NONE);
LOGW("No more available providers.");
if (mListener != null) {
mListener.onNoAvailableProvider();
}
return false;
}
/**
* Same that {@link #registerProvider(PushProvider, boolean)} with {@code registerNext} set to false.
*/
private boolean registerProvider(@NonNull PushProvider provider) {
return registerProvider(provider, false);
}
/**
* Start register provider.
*
* @param provider Provider for registration.
* @param tryRegisterNext Try to register next available push provider after the {@code provider},
* if the {@code provider} isn't available.
* @return If provider available and can start registration return true, otherwise - false.
*/
private boolean registerProvider(@NonNull PushProvider provider, boolean tryRegisterNext) {
if (provider.isAvailable()) {
LOGD(String.format("Try register %s.", provider));
provider.register();
return true;
}
LOGI(String.format("Provider '%s' not available.", provider));
return tryRegisterNext && registerNextProvider(provider);
}
public void unregister() {
checkInitDone();
synchronized (mRegistrationLock) {
if (!isRegistered()) {
throw new OpenPushException("No one provider is registered!");
}
Assert.assertNotNull(mCurrentProvider);
switch (mState.get()) {
case STATE_WORKING:
mState.set(STATE_UNREGISTERING);
unregisterPackageChangeReceiver();
mCurrentProvider.unregister();
break;
case STATE_UNREGISTERING:
break;
case STATE_REGISTERING:
throw new OpenPushException("Can't unregister when registration in progress.!");
case STATE_NONE:
throw new OpenPushException("Before to unregister you must register provider.!");
}
}
}
private void unregisterPackageChangeReceiver() {
if (mPackageReceiver != null) {
mAppContext.unregisterReceiver(mPackageReceiver);
mPackageReceiver = null;
}
}
@Nullable
public PushProvider getCurrentProvider() {
return mCurrentProvider;
}
/**
* Search provider by name in {@code options} and return in.
* If {@code} doesn't contain provider with described name return null.
*
* @param providerName Name of provider for search.
* @return Provider with described name or null if nothing have found.
*/
@Nullable
private PushProvider getProvider(@NonNull String providerName) {
for (PushProvider provider : mOptions.getProviders()) {
if (providerName.equals(provider.getName())) {
return provider;
}
}
return null;
}
@Nullable
private PushProvider getLastProvider() {
if (mPreferences.contains(KEY_LAST_PROVIDER_NAME)) {
String storedProviderName = mPreferences.getString(KEY_LAST_PROVIDER_NAME, null);
if (!TextUtils.isEmpty(storedProviderName)) {
PushProvider provider = getProvider(storedProviderName);
if (provider != null) {
return provider;
}
}
mPreferences.edit().remove(KEY_LAST_PROVIDER_NAME).apply();
}
return null;
}
private void saveLastProvider(@Nullable PushProvider provider) {
SharedPreferences.Editor editor = mPreferences.edit();
if (provider == null) {
editor.remove(KEY_LAST_PROVIDER_NAME);
} else {
editor.putString(KEY_LAST_PROVIDER_NAME, provider.getName());
}
editor.apply();
}
/**
 * Dispatches an incoming push message to the registered listener, if any.
 *
 * @param providerName name of the provider that delivered the message.
 * @param extras       optional message payload.
 */
public void onMessage(@NonNull String providerName, @Nullable Bundle extras) {
    // Fixed: previously logged "onUnavailable(...)" (copy-paste from another
    // callback), which made log analysis misleading.
    LOGD(String.format("onMessage(providerName = %s).", providerName));
    if (mListener != null) {
        mListener.onMessage(providerName, extras);
    }
}
public void onDeletedMessages(@NonNull String providerName, int messagesCount) {
LOGD(String.format("onDeletedMessages(providerName = %s,messagesCount = %d).",
providerName, messagesCount));
if (mListener != null) {
mListener.onDeletedMessages(providerName, messagesCount);
}
}
public void onNeedRetryRegister(@NonNull String providerName) {
LOGD(String.format("onNeedRetryRegister(providerName = %s).", providerName));
if (mCurrentProvider != null && mCurrentProvider.getName().equals(providerName)) {
reset();
mCurrentProvider.onAppStateChanged();
mState.set(STATE_REGISTERING);
if (!registerProvider(mCurrentProvider, false)) {
mState.set(STATE_NONE);
}
}
}
private void reset() {
mPreferences.edit().clear().apply();
mState.set(STATE_NONE);
}
public void onUnavailable(@NonNull PushProvider provider) {
LOGD(String.format("onUnavailable(provider = %s).", provider));
if (mCurrentProvider != null && mCurrentProvider.equals(provider)) {
reset();
mCurrentProvider.onUnavailable();
mCurrentProvider = null;
if (mListener != null) {
mListener.onProviderBecameUnavailable(provider.getName());
}
if (mOptions.isRecoverProvider()) {
register(); //Restart registration
}
}
}
public synchronized void onResult(Result result) {
switch (mState.get()) {
case STATE_REGISTERING:
onRegistrationResult(result);
break;
case STATE_UNREGISTERING:
onUnregistrationResult(result);
break;
default:
throw new UnsupportedOperationException("New result can be handled only when" +
" registration or unregistration is running.");
}
}
private void onUnregistrationResult(@NonNull Result result) {
if (result.isSuccess()) {
LOGI(String.format("Successfully unregister provider '%s'.", result.getProviderName()));
reset();
if (mCurrentProvider != null) {
mCurrentProvider.close();
mCurrentProvider = null;
}
if (mListener != null) {
Assert.assertNotNull(result.getRegistrationId());
mListener.onUnregistered(result.getProviderName(), result.getRegistrationId());
}
} else if (mListener != null) {
LOGI(String.format("Error unregister provider '%s'.", result.getProviderName()));
final PushProvider provider = getProvider(result.getProviderName());
if (provider != null) {
mListener.onUnregistrationError(provider.getName(), result.getErrorCode());
}
}
}
private void onRegistrationResult(@NonNull Result result) {
if (result.isSuccess()) {
LOGI(String.format("Successfully register provider '%s'.", result.getProviderName()));
LOGI(String.format("Register id '%s'.", result.getRegistrationId()));
mState.set(STATE_WORKING);
mCurrentProvider = getProvider(result.getProviderName());
Assert.assertNotNull(mCurrentProvider);
saveLastProvider(mCurrentProvider);
Assert.assertNotNull(result.getRegistrationId());
if (mListener != null) {
mListener.onRegistered(result.getProviderName(), result.getRegistrationId());
}
mPackageReceiver =
PackageUtils.registerPackageChangeReceiver(mAppContext, mCurrentProvider);
} else {
LOGI(String.format("Error register provider '%s'.", result.getProviderName()));
PushProvider provider = getProvider(result.getProviderName());
if (provider != null) {
if (mListener != null) {
mListener.onRegistrationError(provider.getName(), result.getErrorCode());
}
if (!result.isRecoverableError()) {
registerNextProvider(provider);
}
}
}
}
@Override
public String toString() {
return "OpenPushHelper{" +
"options=" + mOptions +
", currentProvider=" + mCurrentProvider +
", inited=" + isInitDone() +
", registered=" + isRegistered() +
'}';
}
}
| Fix bug with restore last provider.
| library/src/main/java/org/onepf/openpush/OpenPushHelper.java | Fix bug with restore last provider. |
|
Java | apache-2.0 | 213d68221d7d248166d326c053170794b070cd68 | 0 | davidegiannella/jackrabbit-oak,davidegiannella/jackrabbit-oak,davidegiannella/jackrabbit-oak,davidegiannella/jackrabbit-oak,davidegiannella/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.get;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newConcurrentHashSet;
import static com.google.common.util.concurrent.Futures.addCallback;
import static com.google.common.util.concurrent.Futures.immediateCancelledFuture;
import static com.google.common.util.concurrent.MoreExecutors.listeningDecorator;
import static java.io.File.createTempFile;
import static java.lang.Integer.MAX_VALUE;
import static java.lang.String.valueOf;
import static java.lang.System.getProperty;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.commons.io.FileUtils.deleteDirectory;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
import static org.apache.jackrabbit.oak.plugins.segment.CompactionMap.sum;
import static org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy.CleanupType.CLEAN_OLD;
import static org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy.MEMORY_THRESHOLD_DEFAULT;
import static org.apache.jackrabbit.oak.plugins.segment.file.FileStore.newFileStore;
import static org.junit.Assume.assumeTrue;
import static org.slf4j.helpers.MessageFormatter.arrayFormat;
import static org.slf4j.helpers.MessageFormatter.format;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import javax.annotation.Nonnull;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import com.google.common.base.Predicate;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableScheduledFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.commons.jmx.AnnotatedStandardMBean;
import org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy;
import org.apache.jackrabbit.oak.plugins.segment.compaction.DefaultCompactionStrategyMBean;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStoreGCMonitor;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.spi.whiteboard.CompositeRegistration;
import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
import org.apache.jackrabbit.oak.stats.Clock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is a longeivity test for SegmentMK compaction. The test schedules a number
* of readers, writers, a compactor and holds some references for a certain time.
* All of which can be interactively modified through the accompanying
* {@link SegmentCompactionITMBean}, the
* {@link org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategyMBean} and the
* {@link org.apache.jackrabbit.oak.plugins.segment.file.GCMonitorMBean}.
*
* TODO Leverage longeivity test support from OAK-2771 once we have it.
*/
public class SegmentCompactionIT {
private static final boolean PERSIST_COMPACTION_MAP = Boolean.getBoolean("persist-compaction-map");
/** Only run if explicitly asked to via -Dtest=SegmentCompactionIT */
private static final boolean ENABLED =
SegmentCompactionIT.class.getSimpleName().equals(getProperty("test"));
private static final Logger LOG = LoggerFactory.getLogger(SegmentCompactionIT.class);
private final MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
private final Random rnd = new Random();
private final ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(50);
private final ListeningScheduledExecutorService scheduler = listeningDecorator(executor);
private final FileStoreGCMonitor fileStoreGCMonitor = new FileStoreGCMonitor(Clock.SIMPLE);
private final TestGCMonitor gcMonitor = new TestGCMonitor(fileStoreGCMonitor);
private final Set<ListenableScheduledFuture<?>> writers = newConcurrentHashSet();
private final Set<ListenableScheduledFuture<?>> readers = newConcurrentHashSet();
private final Set<ListenableScheduledFuture<?>> references = newConcurrentHashSet();
private final SegmentCompactionITMBean segmentCompactionMBean = new SegmentCompactionITMBean();
private final CompactionStrategy compactionStrategy = new CompactionStrategy(
false, false, CLEAN_OLD, 60000, MEMORY_THRESHOLD_DEFAULT) {
@Override
public boolean compacted(@Nonnull Callable<Boolean> setHead) throws Exception {
return nodeStore.locked(setHead, lockWaitTime, SECONDS);
}
};
private File directory;
private FileStore fileStore;
private SegmentNodeStore nodeStore;
private Registration mBeanRegistration;
private volatile ListenableFuture<?> compactor = immediateCancelledFuture();
private volatile int lockWaitTime = 60;
private volatile int maxReaders = 10;
private volatile int maxWriters = 10;
private volatile long maxStoreSize = 200000000000L;
private volatile int maxBlobSize = 1000000;
private volatile int maxStringSize = 10000;
private volatile int maxReferences = 10;
private volatile int maxWriteOps = 10000;
private volatile int maxNodeCount = 1000;
private volatile int maxPropertyCount = 1000;
private volatile int nodeRemoveRatio = 10;
private volatile int propertyRemoveRatio = 10;
private volatile int nodeAddRatio = 40;
private volatile int addStringRatio = 20;
private volatile int addBinaryRatio = 20;
private volatile int compactionInterval = 1;
private volatile boolean stopping;
private volatile Reference rootReference;
private volatile long fileStoreSize;
/**
 * Signals the main test loop (which waits on this object's monitor in
 * {@code run()}) to terminate.
 */
public synchronized void stop() {
    stopping = true;
    notifyAll();
}
/** Schedules {@code count} additional random readers. */
public void addReaders(int count) {
    for (int remaining = count; remaining > 0; remaining--) {
        scheduleReader();
    }
}
/** Cancels up to {@code count} of the currently scheduled readers. */
public void removeReaders(int count) {
    remove(readers, count);
}
/** Schedules {@code count} additional random writers. */
public void addWriters(int count) {
    for (int remaining = count; remaining > 0; remaining--) {
        scheduleWriter();
    }
}
/** Cancels up to {@code count} of the currently scheduled writers. */
public void removeWriters(int count) {
    remove(writers, count);
}
/** Cancels up to {@code count} of the currently held references. */
public void removeReferences(int count) {
    remove(references, count);
}
/**
 * Cancels up to {@code count} of the given scheduled tasks. The futures
 * are not removed from the set here; completion callbacks registered at
 * scheduling time take care of that.
 */
private static void remove(Set<ListenableScheduledFuture<?>> ops, int count) {
    int remaining = count;
    for (ListenableScheduledFuture<?> op : ops) {
        if (remaining <= 0) {
            break;
        }
        op.cancel(false);
        remaining--;
    }
}
/**
 * Registers {@code mBean} with the platform MBean server and returns a
 * {@link Registration} whose {@code unregister()} removes it again.
 * Failures during removal are logged rather than propagated.
 *
 * @param mBean      the MBean instance to register.
 * @param objectName JMX name to register the MBean under.
 * @return a handle that unregisters the MBean when invoked.
 */
private Registration registerMBean(Object mBean, final ObjectName objectName)
        throws NotCompliantMBeanException, InstanceAlreadyExistsException,
        MBeanRegistrationException {
    mBeanServer.registerMBean(mBean, objectName);
    return new Registration(){
        @Override
        public void unregister() {
            try {
                mBeanServer.unregisterMBean(objectName);
            } catch (Exception e) {
                LOG.error("Error unregistering Segment Compaction MBean", e);
            }
        }
    };
}
@Before
public void setUp() throws IOException, MalformedObjectNameException, NotCompliantMBeanException,
InstanceAlreadyExistsException, MBeanRegistrationException {
assumeTrue(ENABLED);
scheduler.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
fileStoreGCMonitor.run();
}
}, 1, 1, SECONDS);
directory = createTempFile(getClass().getSimpleName(), "dir", new File("target"));
directory.delete();
directory.mkdir();
fileStore = newFileStore(directory).withGCMonitor(gcMonitor).create();
nodeStore = new SegmentNodeStore(fileStore);
compactionStrategy.setPersistCompactionMap(PERSIST_COMPACTION_MAP);
fileStore.setCompactionStrategy(compactionStrategy);
CacheStats segmentCacheStats = fileStore.getTracker().getSegmentCacheStats();
CacheStats stringCacheStats = fileStore.getTracker().getStringCacheStats();
List<Registration> registrations = newArrayList();
registrations.add(registerMBean(segmentCompactionMBean,
new ObjectName("IT:TYPE=Segment Compaction")));
registrations.add(registerMBean(new DefaultCompactionStrategyMBean(compactionStrategy),
new ObjectName("IT:TYPE=Compaction Strategy")));
registrations.add(registerMBean(fileStoreGCMonitor,
new ObjectName("IT:TYPE=GC Monitor")));
registrations.add(registerMBean(segmentCacheStats,
new ObjectName("IT:TYPE=" + segmentCacheStats.getName())));
if (stringCacheStats != null) {
registrations.add(registerMBean(stringCacheStats,
new ObjectName("IT:TYPE=" + stringCacheStats.getName())));
}
mBeanRegistration = new CompositeRegistration(registrations);
}
/**
 * Unregisters the MBeans, cancels all scheduled tasks, closes the store
 * and deletes the working directory. Fields are null-checked so this is
 * safe when {@code setUp()} was skipped (the test only runs when
 * explicitly enabled). I/O errors during cleanup are logged, not
 * rethrown, so they do not mask test failures.
 */
@After
public void tearDown() {
    try {
        if (mBeanRegistration != null) {
            mBeanRegistration.unregister();
        }
        remove(writers, MAX_VALUE);
        remove(readers, MAX_VALUE);
        remove(references, MAX_VALUE);
        scheduler.shutdown();
        if (fileStore != null) {
            fileStore.close();
        }
        if (directory != null) {
            deleteDirectory(directory);
        }
    } catch (IOException e) {
        LOG.error("Error cleaning directory", e);
    }
}
@Test
public void run() throws InterruptedException {
scheduleSizeMonitor();
scheduleCompactor();
addReaders(maxReaders);
addWriters(maxWriters);
synchronized (this) {
while (!stopping) {
wait();
}
}
}
/**
 * Samples the file store size into {@code fileStoreSize} once a minute.
 * Writers consult that field to switch to delete-only mode when the store
 * grows past {@code maxStoreSize}.
 */
private void scheduleSizeMonitor() {
    scheduler.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            fileStoreSize = fileStore.size();
        }
    }, 1, 1, MINUTES);
}
private synchronized void scheduleCompactor() {
LOG.info("Scheduling compaction after {} minutes", compactionInterval);
compactor.cancel(false);
compactor = scheduler.schedule((new Compactor(fileStore, gcMonitor)), compactionInterval, MINUTES);
addCallback(compactor, new FutureCallback<Object>() {
@Override
public void onSuccess(Object result) {
scheduleCompactor();
}
@Override
public void onFailure(Throwable t) {
segmentCompactionMBean.error("Compactor error", t);
}
});
}
private void scheduleWriter() {
if (writers.size() < maxWriters) {
final RandomWriter writer = new RandomWriter(rnd, nodeStore, rnd.nextInt(maxWriteOps), "W" + rnd.nextInt(5));
final ListenableScheduledFuture<Void> futureWriter = scheduler.schedule(
writer, rnd.nextInt(30), SECONDS);
writers.add(futureWriter);
addCallback(futureWriter, new FutureCallback<Void>() {
@Override
public void onSuccess(Void result) {
writers.remove(futureWriter);
if (!futureWriter.isCancelled()) {
scheduleWriter();
}
}
@Override
public void onFailure(Throwable t) {
writer.cancel();
writers.remove(futureWriter);
segmentCompactionMBean.error("Writer error", t);
}
});
}
}
private void scheduleReader() {
if (readers.size() < maxReaders) {
final RandomReader<?> reader = rnd.nextBoolean()
? new RandomNodeReader(rnd, nodeStore)
: new RandomPropertyReader(rnd, nodeStore);
final ListenableScheduledFuture<?> futureReader = scheduler.schedule(
reader, rnd.nextInt(30), SECONDS);
readers.add(futureReader);
addCallback(futureReader, new FutureCallback<Object>() {
@Override
public void onSuccess(Object node) {
readers.remove(futureReader);
if (!futureReader.isCancelled()) {
if (rnd.nextBoolean()) {
scheduleReference(node);
} else {
scheduleReader();
}
}
}
@Override
public void onFailure(Throwable t) {
reader.cancel();
readers.remove(futureReader);
segmentCompactionMBean.error("Node reader error", t);
}
});
}
}
/**
 * Holds a strong reference to {@code object} for a random delay (up to 600
 * seconds) to simulate clients pinning content in memory, then releases it
 * and schedules a new reader. When the {@code maxReferences} limit is
 * reached, a reader is scheduled instead.
 *
 * Fixed: the success/failure callbacks previously called
 * {@code references.remove(reference)} — but the set contains
 * {@code futureReference}, not the {@link Reference}, so entries were
 * never removed and the {@code maxReferences} limit filled up permanently.
 * Both callbacks now remove {@code futureReference}, matching
 * {@code scheduleWriter()}/{@code scheduleReader()}.
 */
private void scheduleReference(Object object) {
    if (references.size() < maxReferences) {
        final Reference reference = new Reference(object);
        final ListenableScheduledFuture<?> futureReference = scheduler.schedule(
                reference, rnd.nextInt(600), SECONDS);
        references.add(futureReference);
        addCallback(futureReference, new FutureCallback<Object>() {
            @Override
            public void onSuccess(Object result) {
                references.remove(futureReference);
                if (!futureReference.isCancelled()) {
                    scheduleReader();
                }
            }

            @Override
            public void onFailure(Throwable t) {
                reference.run();
                references.remove(futureReference);
                segmentCompactionMBean.error("Reference error", t);
            }
        });
    } else {
        scheduleReader();
    }
}
private class RandomWriter implements Callable<Void> {
private final Random rnd;
private final NodeStore nodeStore;
private final int opCount;
private final String itemPrefix;
private volatile boolean cancelled;
RandomWriter(Random rnd, NodeStore nodeStore, int opCount, String itemPrefix) {
this.rnd = rnd;
this.nodeStore = nodeStore;
this.opCount = opCount;
this.itemPrefix = itemPrefix;
}
public void cancel() {
cancelled = true;
}
@Override
public Void call() throws IOException, CommitFailedException {
NodeBuilder root = nodeStore.getRoot().builder();
for (int k = 0; k < opCount && !cancelled; k++) {
modify(nodeStore, root);
}
if (!cancelled) {
nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);
}
return null;
}
private void modify(NodeStore nodeStore, NodeBuilder nodeBuilder) throws IOException {
int p0 = nodeRemoveRatio;
int p1 = p0 + propertyRemoveRatio;
int p2 = p1 + nodeAddRatio;
int p3 = p2 + addStringRatio;
double p = p3 + addBinaryRatio;
boolean deleteOnly = fileStoreSize > maxStoreSize;
double k = rnd.nextDouble();
if (k < p0/p) {
chooseRandomNode(nodeBuilder).remove();
} else if (k < p1/p) {
removeRandomProperty(chooseRandomNode(nodeBuilder));
} else if (k < p2/p && !deleteOnly) {
addRandomNode(nodeBuilder);
} else if (k < p3/p && !deleteOnly) {
addRandomValue(nodeBuilder);
} else if (!deleteOnly) {
addRandomBlob(nodeStore, nodeBuilder);
}
}
private NodeBuilder chooseRandomNode(NodeBuilder nodeBuilder) {
NodeBuilder childBuilder = nodeBuilder;
for (int k = 0; k < rnd.nextInt(1000); k++) {
childBuilder = randomStep(nodeBuilder, nodeBuilder = childBuilder);
}
return childBuilder;
}
private NodeBuilder chooseRandomNode(NodeBuilder nodeBuilder, Predicate<NodeBuilder> predicate) {
NodeBuilder childBuilder = chooseRandomNode(nodeBuilder);
while (!predicate.apply(childBuilder)) {
childBuilder = randomStep(nodeBuilder, nodeBuilder = childBuilder);
}
return childBuilder;
}
private NodeBuilder randomStep(NodeBuilder parent, NodeBuilder node) {
int count = (int) node.getChildNodeCount(Long.MAX_VALUE);
int k = rnd.nextInt(count + 1);
if (k == 0) {
return parent;
} else {
String name = get(node.getChildNodeNames(), k - 1);
return node.getChildNode(name);
}
}
private void removeRandomProperty(NodeBuilder nodeBuilder) {
int count = (int) nodeBuilder.getPropertyCount();
if (count > 0) {
PropertyState property = get(nodeBuilder.getProperties(), rnd.nextInt(count));
nodeBuilder.removeProperty(property.getName());
}
}
private void addRandomNode(NodeBuilder nodeBuilder) {
chooseRandomNode(nodeBuilder, new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getChildNodeCount(maxNodeCount) < maxNodeCount;
}
}).setChildNode('N' + itemPrefix + rnd.nextInt(maxNodeCount));
}
/**
 * Sets a random alphabetic string property named {@code P<itemPrefix><k>}
 * on a randomly chosen node that still has capacity (fewer than
 * {@code maxPropertyCount} properties).
 */
private void addRandomValue(NodeBuilder nodeBuilder) {
// Only target nodes that have not yet reached the property-count limit.
Predicate<NodeBuilder> hasPropertyCapacity = new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getPropertyCount() < maxPropertyCount;
}
};
NodeBuilder target = chooseRandomNode(nodeBuilder, hasPropertyCapacity);
target.setProperty('P' + itemPrefix + rnd.nextInt(maxPropertyCount),
randomAlphabetic(rnd.nextInt(maxStringSize)));
}
/**
 * Sets a random binary property named {@code B<itemPrefix><k>} on a randomly
 * chosen node that still has capacity (fewer than {@code maxPropertyCount}
 * properties).
 *
 * @throws IOException if creating the blob fails
 */
private void addRandomBlob(NodeStore nodeStore, NodeBuilder nodeBuilder) throws IOException {
// Only target nodes that have not yet reached the property-count limit.
Predicate<NodeBuilder> hasPropertyCapacity = new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getPropertyCount() < maxPropertyCount;
}
};
NodeBuilder target = chooseRandomNode(nodeBuilder, hasPropertyCapacity);
target.setProperty('B' + itemPrefix + rnd.nextInt(maxPropertyCount),
createBlob(nodeStore, rnd.nextInt(maxBlobSize)));
}
/**
 * Creates a blob of {@code size} random bytes in the given store.
 *
 * @param nodeStore store used to create the blob
 * @param size      number of random bytes
 * @return the created blob
 * @throws IOException if blob creation fails
 */
private Blob createBlob(NodeStore nodeStore, int size) throws IOException {
byte[] data = new byte[size];
// Use this writer's Random instead of allocating a fresh, time-seeded
// instance on every call: consistent with all other random decisions in
// this class and reproducible when the writer's Random is seeded.
rnd.nextBytes(data);
return nodeStore.createBlob(new ByteArrayInputStream(data));
}
}
/**
 * Base class for reader tasks that walk randomly through the tree and return
 * a randomly chosen item. Subclasses implement {@code call()} in terms of
 * {@link #chooseRandomNode} and {@link #chooseRandomProperty}.
 */
private abstract static class RandomReader<T> implements Callable<T> {
protected final Random rnd;
protected final NodeStore nodeStore;
// Set by cancel(); checked between walk steps so a running walk stops early.
protected volatile boolean cancelled;
RandomReader(Random rnd, NodeStore nodeStore) {
this.rnd = rnd;
this.nodeStore = nodeStore;
}
/** Requests that an in-progress random walk terminates early. */
public void cancel() {
cancelled = true;
}
/**
 * Takes one random step: with probability 1/(childCount + 1) steps back to
 * {@code parent}, otherwise descends into a uniformly chosen child.
 */
private NodeState randomStep(NodeState parent, NodeState node) {
int count = (int) node.getChildNodeCount(Long.MAX_VALUE);
int k = rnd.nextInt(count + 1);
if (k == 0) {
return parent;
} else {
String name = get(node.getChildNodeNames(), k - 1);
return node.getChildNode(name);
}
}
/**
 * Random walk of random length starting at {@code parent}; stops early when
 * {@link #cancel()} has been called. NOTE(review): the bound
 * {@code rnd.nextInt(1000)} is re-evaluated each iteration, so walk lengths
 * are not uniformly distributed.
 */
protected final NodeState chooseRandomNode(NodeState parent) {
NodeState child = parent;
for (int k = 0; k < rnd.nextInt(1000) && !cancelled; k++) {
// Embedded assignment advances the tracked parent to the current child
// before the next step is taken.
child = randomStep(parent, parent = child);
}
return child;
}
/**
 * Returns a uniformly chosen property of {@code node}, or {@code null} when
 * the node has no properties.
 */
protected final PropertyState chooseRandomProperty(NodeState node) {
int count = (int) node.getPropertyCount();
if (count > 0) {
return get(node.getProperties(), rnd.nextInt(count));
} else {
return null;
}
}
}
/** Reader task returning a randomly chosen node below the current root. */
private static class RandomNodeReader extends RandomReader<NodeState> {
RandomNodeReader(Random rnd, NodeStore nodeStore) {
super(rnd, nodeStore);
}
@Override
public NodeState call() throws Exception {
return chooseRandomNode(nodeStore.getRoot());
}
}
/**
 * Reader task returning a randomly chosen property of a randomly chosen node,
 * or {@code null} when that node has no properties.
 */
private static class RandomPropertyReader extends RandomReader<PropertyState> {
RandomPropertyReader(Random rnd, NodeStore nodeStore) {
super(rnd, nodeStore);
}
@Override
public PropertyState call() throws Exception {
return chooseRandomProperty(chooseRandomNode(nodeStore.getRoot()));
}
}
/**
 * Keeps an object (e.g. a previously read node state or the root) strongly
 * reachable for some time. Running the task drops the reference so the
 * referent becomes eligible for garbage collection.
 */
private static class Reference implements Runnable {
// volatile: cleared by the scheduler thread, read implicitly by the GC.
private volatile Object referent;
Reference(Object referent) {
this.referent = referent;
}
@Override
public void run() {
referent = null;
}
}
/**
 * Periodic task triggering compaction of the file store. Compaction only runs
 * when a cleanup has taken place since the previous run, as tracked by the
 * accompanying {@code TestGCMonitor}.
 */
private static class Compactor implements Runnable {
private final FileStore store;
private final TestGCMonitor monitor;
Compactor(FileStore store, TestGCMonitor monitor) {
this.store = store;
this.monitor = monitor;
}
@Override
public void run() {
// Guard clause: skip compaction entirely until a cleanup has happened.
if (!monitor.isCleaned()) {
LOG.info("Not running compaction as no cleanup has taken place");
return;
}
LOG.info("Running compaction");
monitor.resetCleaned();
store.maybeCompact(true);
}
}
/**
 * {@link GCMonitor} decorator used by this test: echoes all GC messages to
 * stdout, tracks whether a cleanup has occurred since the last compaction
 * (consulted by {@code Compactor}) and records the wall-clock time of the
 * last compaction.
 */
private static class TestGCMonitor implements GCMonitor {
private final GCMonitor delegate;
// Starts out true so the very first compaction run is not skipped.
private volatile boolean cleaned = true;
// System.currentTimeMillis() of the most recent compacted() callback.
private volatile long lastCompacted;
TestGCMonitor(GCMonitor delegate) {
this.delegate = delegate;
}
@Override
public void info(String message, Object... arguments) {
System.out.println(arrayFormat(message, arguments).getMessage());
delegate.info(message, arguments);
}
@Override
public void warn(String message, Object... arguments) {
System.out.println(arrayFormat(message, arguments).getMessage());
delegate.warn(message, arguments);
}
@Override
public void error(String message, Exception exception) {
System.out.println(format(message, exception).getMessage());
delegate.error(message, exception);
}
@Override
public void skipped(String reason, Object... arguments) {
// A skipped GC run also flags 'cleaned' — presumably so compaction is
// re-attempted on the next cycle rather than blocked forever; confirm.
cleaned = true;
System.out.println(arrayFormat(reason, arguments).getMessage());
delegate.skipped(reason, arguments);
}
@Override
public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
delegate.compacted(segmentCounts, recordCounts, compactionMapWeights);
lastCompacted = System.currentTimeMillis();
}
@Override
public void cleaned(long reclaimedSize, long currentSize) {
cleaned = true;
delegate.cleaned(reclaimedSize, currentSize);
}
/** Whether a cleanup (or skipped run) has occurred since resetCleaned(). */
public boolean isCleaned() {
return cleaned;
}
/** Clears the cleaned flag; called right before compaction starts. */
public void resetCleaned() {
cleaned = false;
}
/** Timestamp of the last compaction, 0 if none has happened yet. */
public long getLastCompacted() {
return lastCompacted;
}
}
private class SegmentCompactionITMBean extends AnnotatedStandardMBean implements SegmentCompactionMBean {
private String lastError;
SegmentCompactionITMBean() {
super(SegmentCompactionMBean.class);
}
@Override
public void stop() {
SegmentCompactionIT.this.stop();
}
@Override
public void setCorePoolSize(int corePoolSize) {
executor.setCorePoolSize(corePoolSize);
}
@Override
public int getCorePoolSize() {
return executor.getCorePoolSize();
}
@Override
public void setCompactionInterval(int minutes) {
if (compactionInterval != minutes) {
compactionInterval = minutes;
scheduleCompactor();
}
}
@Override
public int getCompactionInterval() {
return compactionInterval;
}
@Override
public String getLastCompaction() {
return valueOf(new Date(gcMonitor.getLastCompacted()));
}
@Override
public void setLockWaitTime(int seconds) {
lockWaitTime = seconds;
}
@Override
public int getLockWaitTime() {
return lockWaitTime;
}
@Override
public void setMaxReaders(int count) {
checkArgument(count >= 0);
maxReaders = count;
if (count > readers.size()) {
addReaders(count - readers.size());
} else {
removeReaders(readers.size() - count);
}
}
@Override
public int getMaxReaders() {
return maxReaders;
}
@Override
public void setMaxWriters(int count) {
checkArgument(count >= 0);
maxWriters = count;
if (count > writers.size()) {
addWriters(count - writers.size());
} else {
removeWriters(writers.size() - count);
}
}
@Override
public int getMaxWriters() {
return maxWriters;
}
@Override
public void setMaxStoreSize(long size) {
maxStoreSize = size;
}
@Override
public long getMaxStoreSize() {
return maxStoreSize;
}
@Override
public void setMaxStringSize(int size) {
maxStringSize = size;
}
@Override
public int getMaxStringSize() {
return maxStringSize;
}
@Override
public void setMaxBlobSize(int size) {
maxBlobSize = size;
}
@Override
public int getMaxBlobSize() {
return maxBlobSize;
}
@Override
public void setMaxReferences(int count) {
checkArgument(count >= 0);
maxReferences = count;
if (count < references.size()) {
removeReferences(references.size() - count);
}
}
@Override
public int getMaxReferences() {
return maxReferences;
}
@Override
public void setMaxWriteOps(int count) {
checkArgument(count >= 0);
maxWriteOps = count;
}
@Override
public int getMaxWriteOps() {
return maxWriteOps;
}
@Override
public void setMaxNodeCount(int count) {
checkArgument(count >= 0);
maxNodeCount = count;
}
@Override
public int getMaxNodeCount() {
return maxNodeCount;
}
@Override
public void setMaxPropertyCount(int count) {
checkArgument(count >= 0);
maxPropertyCount = count;
}
@Override
public int getMaxPropertyCount() {
return maxPropertyCount;
}
@Override
public void setNodeRemoveRatio(int ratio) {
nodeRemoveRatio = ratio;
}
@Override
public int getNodeRemoveRatio() {
return nodeRemoveRatio;
}
@Override
public void setPropertyRemoveRatio(int ratio) {
propertyRemoveRatio = ratio;
}
@Override
public int getPropertyRemoveRatio() {
return propertyRemoveRatio;
}
@Override
public void setNodeAddRatio(int ratio) {
nodeAddRatio = ratio;
}
@Override
public int getNodeAddRatio() {
return nodeAddRatio;
}
@Override
public void setAddStringRatio(int ratio) {
addStringRatio = ratio;
}
@Override
public int getAddStringRatio() {
return addStringRatio;
}
@Override
public void setAddBinaryRatio(int ratio) {
addBinaryRatio = ratio;
}
@Override
public int getAddBinaryRatio() {
return addBinaryRatio;
}
@Override
public void setRootReference(boolean set) {
// Fix: the previous 'if (set && rootReference == null) ... else null'
// cleared an existing reference whenever set == true was passed again.
// Now a repeated 'true' is a no-op and only 'false' clears the reference.
if (set) {
if (rootReference == null) {
rootReference = new Reference(nodeStore.getRoot());
}
} else {
rootReference = null;
}
}
@Override
public boolean getRootReference() {
return rootReference != null;
}
@Override
public boolean getPersistCompactionMap() {
return compactionStrategy.getPersistCompactionMap();
}
@Override
public int getReaderCount() {
return readers.size();
}
@Override
public int getWriterCount() {
return writers.size();
}
@Override
public int getReferenceCount() {
return references.size();
}
@Override
public long getFileStoreSize() {
return fileStoreSize;
}
private CompactionMap getCompactionMap() {
return fileStore.getTracker().getCompactionMap();
}
@Override
public long getCompactionMapWeight() {
return sum(getCompactionMap().getEstimatedWeights());
}
@Override
public long getSegmentCount() {
return sum(getCompactionMap().getSegmentCounts());
}
@Override
public long getRecordCount() {
return sum(getCompactionMap().getRecordCounts());
}
@Override
public int getCompactionMapDepth() {
return getCompactionMap().getDepth();
}
@Override
public String getLastError() {
return lastError;
}
void error(String message, Throwable t) {
if (!(t instanceof CancellationException)) {
StringWriter sw = new StringWriter();
sw.write(message + ": ");
t.printStackTrace(new PrintWriter(sw));
lastError = sw.toString();
LOG.error(message, t);
}
}
}
}
| oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentCompactionIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.get;
import static com.google.common.collect.Sets.newConcurrentHashSet;
import static com.google.common.util.concurrent.Futures.addCallback;
import static com.google.common.util.concurrent.Futures.immediateCancelledFuture;
import static com.google.common.util.concurrent.MoreExecutors.listeningDecorator;
import static java.io.File.createTempFile;
import static java.lang.Integer.MAX_VALUE;
import static java.lang.String.valueOf;
import static java.lang.System.getProperty;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.commons.io.FileUtils.deleteDirectory;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
import static org.apache.jackrabbit.oak.plugins.segment.CompactionMap.sum;
import static org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy.CleanupType.CLEAN_OLD;
import static org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy.MEMORY_THRESHOLD_DEFAULT;
import static org.apache.jackrabbit.oak.plugins.segment.file.FileStore.newFileStore;
import static org.junit.Assume.assumeTrue;
import static org.slf4j.helpers.MessageFormatter.arrayFormat;
import static org.slf4j.helpers.MessageFormatter.format;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.util.Date;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import javax.annotation.Nonnull;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import com.google.common.base.Predicate;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableScheduledFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.commons.jmx.AnnotatedStandardMBean;
import org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategy;
import org.apache.jackrabbit.oak.plugins.segment.compaction.DefaultCompactionStrategyMBean;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStoreGCMonitor;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.spi.whiteboard.CompositeRegistration;
import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
import org.apache.jackrabbit.oak.stats.Clock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This is a longevity test for SegmentMK compaction. The test schedules a number
* of readers, writers, a compactor and holds some references for a certain time.
* All of which can be interactively modified through the accompanying
* {@link SegmentCompactionITMBean}, the
* {@link org.apache.jackrabbit.oak.plugins.segment.compaction.CompactionStrategyMBean} and the
* {@link org.apache.jackrabbit.oak.plugins.segment.file.GCMonitorMBean}.
*
 * TODO Leverage longevity test support from OAK-2771 once we have it.
*/
public class SegmentCompactionIT {
private static final boolean PERSIST_COMPACTION_MAP = Boolean.getBoolean("persist-compaction-map");
/** Only run if explicitly asked to via -Dtest=SegmentCompactionIT */
private static final boolean ENABLED =
SegmentCompactionIT.class.getSimpleName().equals(getProperty("test"));
private static final Logger LOG = LoggerFactory.getLogger(SegmentCompactionIT.class);
private final MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
private final Random rnd = new Random();
private final ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(50);
private final ListeningScheduledExecutorService scheduler = listeningDecorator(executor);
private final FileStoreGCMonitor fileStoreGCMonitor = new FileStoreGCMonitor(Clock.SIMPLE);
private final TestGCMonitor gcMonitor = new TestGCMonitor(fileStoreGCMonitor);
private final Set<ListenableScheduledFuture<?>> writers = newConcurrentHashSet();
private final Set<ListenableScheduledFuture<?>> readers = newConcurrentHashSet();
private final Set<ListenableScheduledFuture<?>> references = newConcurrentHashSet();
private final SegmentCompactionITMBean segmentCompactionMBean = new SegmentCompactionITMBean();
private final CompactionStrategy compactionStrategy = new CompactionStrategy(
false, false, CLEAN_OLD, 60000, MEMORY_THRESHOLD_DEFAULT) {
@Override
public boolean compacted(@Nonnull Callable<Boolean> setHead) throws Exception {
return nodeStore.locked(setHead, lockWaitTime, SECONDS);
}
};
private File directory;
private FileStore fileStore;
private SegmentNodeStore nodeStore;
private Registration mBeanRegistration;
private volatile ListenableFuture<?> compactor = immediateCancelledFuture();
private volatile int lockWaitTime = 60;
private volatile int maxReaders = 10;
private volatile int maxWriters = 10;
private volatile long maxStoreSize = 200000000000L;
private volatile int maxBlobSize = 1000000;
private volatile int maxStringSize = 10000;
private volatile int maxReferences = 10;
private volatile int maxWriteOps = 10000;
private volatile int maxNodeCount = 1000;
private volatile int maxPropertyCount = 1000;
private volatile int nodeRemoveRatio = 10;
private volatile int propertyRemoveRatio = 10;
private volatile int nodeAddRatio = 40;
private volatile int addStringRatio = 20;
private volatile int addBinaryRatio = 20;
private volatile int compactionInterval = 1;
private volatile boolean stopping;
private volatile Reference rootReference;
private volatile long fileStoreSize;
public synchronized void stop() {
stopping = true;
notifyAll();
}
public void addReaders(int count) {
for (int c = 0; c < count; c++) {
scheduleReader();
}
}
public void removeReaders(int count) {
remove(readers, count);
}
public void addWriters(int count) {
for (int c = 0; c < count; c++) {
scheduleWriter();
}
}
public void removeWriters(int count) {
remove(writers, count);
}
public void removeReferences(int count) {
remove(references, count);
}
private static void remove(Set<ListenableScheduledFuture<?>> ops, int count) {
Iterator<ListenableScheduledFuture<?>> it = ops.iterator();
while (it.hasNext() && count-- > 0) {
it.next().cancel(false);
}
}
private Registration registerMBean(Object mBean, final ObjectName objectName)
throws NotCompliantMBeanException, InstanceAlreadyExistsException,
MBeanRegistrationException {
mBeanServer.registerMBean(mBean, objectName);
return new Registration(){
@Override
public void unregister() {
try {
mBeanServer.unregisterMBean(objectName);
} catch (Exception e) {
LOG.error("Error unregistering Segment Compaction MBean", e);
}
}
};
}
@Before
public void setUp() throws IOException, MalformedObjectNameException, NotCompliantMBeanException,
InstanceAlreadyExistsException, MBeanRegistrationException {
assumeTrue(ENABLED);
mBeanRegistration = new CompositeRegistration(
registerMBean(segmentCompactionMBean, new ObjectName("IT:TYPE=Segment Compaction")),
registerMBean(new DefaultCompactionStrategyMBean(compactionStrategy),
new ObjectName("IT:TYPE=Compaction Strategy")),
registerMBean(fileStoreGCMonitor, new ObjectName("IT:TYPE=GC Monitor")));
scheduler.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
fileStoreGCMonitor.run();
}
}, 1, 1, SECONDS);
directory = createTempFile(getClass().getSimpleName(), "dir", new File("target"));
directory.delete();
directory.mkdir();
fileStore = newFileStore(directory).withGCMonitor(gcMonitor).create();
nodeStore = new SegmentNodeStore(fileStore);
compactionStrategy.setPersistCompactionMap(PERSIST_COMPACTION_MAP);
fileStore.setCompactionStrategy(compactionStrategy);
}
@After
public void tearDown() {
try {
if (mBeanRegistration != null) {
mBeanRegistration.unregister();
}
remove(writers, MAX_VALUE);
remove(readers, MAX_VALUE);
remove(references, MAX_VALUE);
scheduler.shutdown();
if (fileStore != null) {
fileStore.close();
}
if (directory != null) {
deleteDirectory(directory);
}
} catch (IOException e) {
LOG.error("Error cleaning directory", e);
}
}
@Test
public void run() throws InterruptedException {
scheduleSizeMonitor();
scheduleCompactor();
addReaders(maxReaders);
addWriters(maxWriters);
synchronized (this) {
while (!stopping) {
wait();
}
}
}
private void scheduleSizeMonitor() {
scheduler.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
fileStoreSize = fileStore.size();
}
}, 1, 1, MINUTES);
}
private synchronized void scheduleCompactor() {
LOG.info("Scheduling compaction after {} minutes", compactionInterval);
compactor.cancel(false);
compactor = scheduler.schedule((new Compactor(fileStore, gcMonitor)), compactionInterval, MINUTES);
addCallback(compactor, new FutureCallback<Object>() {
@Override
public void onSuccess(Object result) {
scheduleCompactor();
}
@Override
public void onFailure(Throwable t) {
segmentCompactionMBean.error("Compactor error", t);
}
});
}
private void scheduleWriter() {
if (writers.size() < maxWriters) {
final RandomWriter writer = new RandomWriter(rnd, nodeStore, rnd.nextInt(maxWriteOps), "W" + rnd.nextInt(5));
final ListenableScheduledFuture<Void> futureWriter = scheduler.schedule(
writer, rnd.nextInt(30), SECONDS);
writers.add(futureWriter);
addCallback(futureWriter, new FutureCallback<Void>() {
@Override
public void onSuccess(Void result) {
writers.remove(futureWriter);
if (!futureWriter.isCancelled()) {
scheduleWriter();
}
}
@Override
public void onFailure(Throwable t) {
writer.cancel();
writers.remove(futureWriter);
segmentCompactionMBean.error("Writer error", t);
}
});
}
}
private void scheduleReader() {
if (readers.size() < maxReaders) {
final RandomReader<?> reader = rnd.nextBoolean()
? new RandomNodeReader(rnd, nodeStore)
: new RandomPropertyReader(rnd, nodeStore);
final ListenableScheduledFuture<?> futureReader = scheduler.schedule(
reader, rnd.nextInt(30), SECONDS);
readers.add(futureReader);
addCallback(futureReader, new FutureCallback<Object>() {
@Override
public void onSuccess(Object node) {
readers.remove(futureReader);
if (!futureReader.isCancelled()) {
if (rnd.nextBoolean()) {
scheduleReference(node);
} else {
scheduleReader();
}
}
}
@Override
public void onFailure(Throwable t) {
reader.cancel();
readers.remove(futureReader);
segmentCompactionMBean.error("Node reader error", t);
}
});
}
}
/**
 * Holds a strong reference to {@code object} for a random amount of time
 * (up to 10 minutes) before releasing it and scheduling a new reader.
 * When the reference cap is reached, a reader is scheduled instead.
 */
private void scheduleReference(Object object) {
if (references.size() < maxReferences) {
final Reference reference = new Reference(object);
final ListenableScheduledFuture<?> futureReference = scheduler.schedule(
reference, rnd.nextInt(600), SECONDS);
references.add(futureReference);
addCallback(futureReference, new FutureCallback<Object>() {
@Override
public void onSuccess(Object result) {
// Fix: must remove the future that was added to the set. The old
// code removed 'reference' (never present in the set of futures),
// so 'references' grew until maxReferences permanently blocked
// scheduling of new references.
references.remove(futureReference);
if (!futureReference.isCancelled()) {
scheduleReader();
}
}
@Override
public void onFailure(Throwable t) {
reference.run();
references.remove(futureReference);
segmentCompactionMBean.error("Reference error", t);
}
});
} else {
scheduleReader();
}
}
private class RandomWriter implements Callable<Void> {
private final Random rnd;
private final NodeStore nodeStore;
private final int opCount;
private final String itemPrefix;
private volatile boolean cancelled;
RandomWriter(Random rnd, NodeStore nodeStore, int opCount, String itemPrefix) {
this.rnd = rnd;
this.nodeStore = nodeStore;
this.opCount = opCount;
this.itemPrefix = itemPrefix;
}
public void cancel() {
cancelled = true;
}
@Override
public Void call() throws IOException, CommitFailedException {
NodeBuilder root = nodeStore.getRoot().builder();
for (int k = 0; k < opCount && !cancelled; k++) {
modify(nodeStore, root);
}
if (!cancelled) {
nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);
}
return null;
}
private void modify(NodeStore nodeStore, NodeBuilder nodeBuilder) throws IOException {
int p0 = nodeRemoveRatio;
int p1 = p0 + propertyRemoveRatio;
int p2 = p1 + nodeAddRatio;
int p3 = p2 + addStringRatio;
double p = p3 + addBinaryRatio;
boolean deleteOnly = fileStoreSize > maxStoreSize;
double k = rnd.nextDouble();
if (k < p0/p) {
chooseRandomNode(nodeBuilder).remove();
} else if (k < p1/p) {
removeRandomProperty(chooseRandomNode(nodeBuilder));
} else if (k < p2/p && !deleteOnly) {
addRandomNode(nodeBuilder);
} else if (k < p3/p && !deleteOnly) {
addRandomValue(nodeBuilder);
} else if (!deleteOnly) {
addRandomBlob(nodeStore, nodeBuilder);
}
}
private NodeBuilder chooseRandomNode(NodeBuilder nodeBuilder) {
NodeBuilder childBuilder = nodeBuilder;
for (int k = 0; k < rnd.nextInt(1000); k++) {
childBuilder = randomStep(nodeBuilder, nodeBuilder = childBuilder);
}
return childBuilder;
}
private NodeBuilder chooseRandomNode(NodeBuilder nodeBuilder, Predicate<NodeBuilder> predicate) {
NodeBuilder childBuilder = chooseRandomNode(nodeBuilder);
while (!predicate.apply(childBuilder)) {
childBuilder = randomStep(nodeBuilder, nodeBuilder = childBuilder);
}
return childBuilder;
}
private NodeBuilder randomStep(NodeBuilder parent, NodeBuilder node) {
int count = (int) node.getChildNodeCount(Long.MAX_VALUE);
int k = rnd.nextInt(count + 1);
if (k == 0) {
return parent;
} else {
String name = get(node.getChildNodeNames(), k - 1);
return node.getChildNode(name);
}
}
private void removeRandomProperty(NodeBuilder nodeBuilder) {
int count = (int) nodeBuilder.getPropertyCount();
if (count > 0) {
PropertyState property = get(nodeBuilder.getProperties(), rnd.nextInt(count));
nodeBuilder.removeProperty(property.getName());
}
}
private void addRandomNode(NodeBuilder nodeBuilder) {
chooseRandomNode(nodeBuilder, new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getChildNodeCount(maxNodeCount) < maxNodeCount;
}
}).setChildNode('N' + itemPrefix + rnd.nextInt(maxNodeCount));
}
private void addRandomValue(NodeBuilder nodeBuilder) {
chooseRandomNode(nodeBuilder, new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getPropertyCount() < maxPropertyCount;
}
})
.setProperty('P' + itemPrefix + rnd.nextInt(maxPropertyCount),
randomAlphabetic(rnd.nextInt(maxStringSize)));
}
private void addRandomBlob(NodeStore nodeStore, NodeBuilder nodeBuilder) throws IOException {
chooseRandomNode(nodeBuilder, new Predicate<NodeBuilder>() {
@Override
public boolean apply(NodeBuilder builder) {
return builder.getPropertyCount() < maxPropertyCount;
}
})
.setProperty('B' + itemPrefix + rnd.nextInt(maxPropertyCount),
createBlob(nodeStore, rnd.nextInt(maxBlobSize)));
}
/**
 * Creates a blob of {@code size} random bytes in the given store.
 *
 * @param nodeStore store used to create the blob
 * @param size      number of random bytes
 * @return the created blob
 * @throws IOException if blob creation fails
 */
private Blob createBlob(NodeStore nodeStore, int size) throws IOException {
byte[] data = new byte[size];
// Use this writer's Random instead of allocating a fresh, time-seeded
// instance on every call: consistent with all other random decisions in
// this class and reproducible when the writer's Random is seeded.
rnd.nextBytes(data);
return nodeStore.createBlob(new ByteArrayInputStream(data));
}
}
private abstract static class RandomReader<T> implements Callable<T> {
protected final Random rnd;
protected final NodeStore nodeStore;
protected volatile boolean cancelled;
RandomReader(Random rnd, NodeStore nodeStore) {
this.rnd = rnd;
this.nodeStore = nodeStore;
}
public void cancel() {
cancelled = true;
}
private NodeState randomStep(NodeState parent, NodeState node) {
int count = (int) node.getChildNodeCount(Long.MAX_VALUE);
int k = rnd.nextInt(count + 1);
if (k == 0) {
return parent;
} else {
String name = get(node.getChildNodeNames(), k - 1);
return node.getChildNode(name);
}
}
protected final NodeState chooseRandomNode(NodeState parent) {
NodeState child = parent;
for (int k = 0; k < rnd.nextInt(1000) && !cancelled; k++) {
child = randomStep(parent, parent = child);
}
return child;
}
protected final PropertyState chooseRandomProperty(NodeState node) {
int count = (int) node.getPropertyCount();
if (count > 0) {
return get(node.getProperties(), rnd.nextInt(count));
} else {
return null;
}
}
}
private static class RandomNodeReader extends RandomReader<NodeState> {
RandomNodeReader(Random rnd, NodeStore nodeStore) {
super(rnd, nodeStore);
}
@Override
public NodeState call() throws Exception {
return chooseRandomNode(nodeStore.getRoot());
}
}
private static class RandomPropertyReader extends RandomReader<PropertyState> {
RandomPropertyReader(Random rnd, NodeStore nodeStore) {
super(rnd, nodeStore);
}
@Override
public PropertyState call() throws Exception {
return chooseRandomProperty(chooseRandomNode(nodeStore.getRoot()));
}
}
private static class Reference implements Runnable {
private volatile Object referent;
Reference(Object referent) {
this.referent = referent;
}
@Override
public void run() {
referent = null;
}
}
private static class Compactor implements Runnable {
private final FileStore fileStore;
private final TestGCMonitor gcMonitor;
Compactor(FileStore fileStore, TestGCMonitor gcMonitor) {
this.fileStore = fileStore;
this.gcMonitor = gcMonitor;
}
@Override
public void run() {
if (gcMonitor.isCleaned()) {
LOG.info("Running compaction");
gcMonitor.resetCleaned();
fileStore.maybeCompact(true);
} else {
LOG.info("Not running compaction as no cleanup has taken place");
}
}
}
private static class TestGCMonitor implements GCMonitor {
private final GCMonitor delegate;
private volatile boolean cleaned = true;
private volatile long lastCompacted;
TestGCMonitor(GCMonitor delegate) {
this.delegate = delegate;
}
@Override
public void info(String message, Object... arguments) {
System.out.println(arrayFormat(message, arguments).getMessage());
delegate.info(message, arguments);
}
@Override
public void warn(String message, Object... arguments) {
System.out.println(arrayFormat(message, arguments).getMessage());
delegate.warn(message, arguments);
}
@Override
public void error(String message, Exception exception) {
System.out.println(format(message, exception).getMessage());
delegate.error(message, exception);
}
@Override
public void skipped(String reason, Object... arguments) {
cleaned = true;
System.out.println(arrayFormat(reason, arguments).getMessage());
delegate.skipped(reason, arguments);
}
@Override
public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
delegate.compacted(segmentCounts, recordCounts, compactionMapWeights);
lastCompacted = System.currentTimeMillis();
}
@Override
public void cleaned(long reclaimedSize, long currentSize) {
cleaned = true;
delegate.cleaned(reclaimedSize, currentSize);
}
public boolean isCleaned() {
return cleaned;
}
public void resetCleaned() {
cleaned = false;
}
public long getLastCompacted() {
return lastCompacted;
}
}
private class SegmentCompactionITMBean extends AnnotatedStandardMBean implements SegmentCompactionMBean {
private String lastError;
SegmentCompactionITMBean() {
super(SegmentCompactionMBean.class);
}
@Override
public void stop() {
SegmentCompactionIT.this.stop();
}
@Override
public void setCorePoolSize(int corePoolSize) {
executor.setCorePoolSize(corePoolSize);
}
@Override
public int getCorePoolSize() {
return executor.getCorePoolSize();
}
@Override
public void setCompactionInterval(int minutes) {
if (compactionInterval != minutes) {
compactionInterval = minutes;
scheduleCompactor();
}
}
@Override
public int getCompactionInterval() {
return compactionInterval;
}
@Override
public String getLastCompaction() {
return valueOf(new Date(gcMonitor.getLastCompacted()));
}
@Override
public void setLockWaitTime(int seconds) {
lockWaitTime = seconds;
}
@Override
public int getLockWaitTime() {
return lockWaitTime;
}
@Override
public void setMaxReaders(int count) {
checkArgument(count >= 0);
maxReaders = count;
if (count > readers.size()) {
addReaders(count - readers.size());
} else {
removeReaders(readers.size() - count);
}
}
@Override
public int getMaxReaders() {
return maxReaders;
}
@Override
public void setMaxWriters(int count) {
checkArgument(count >= 0);
maxWriters = count;
if (count > writers.size()) {
addWriters(count - writers.size());
} else {
removeWriters(writers.size() - count);
}
}
@Override
public int getMaxWriters() {
return maxWriters;
}
@Override
public void setMaxStoreSize(long size) {
maxStoreSize = size;
}
@Override
public long getMaxStoreSize() {
return maxStoreSize;
}
@Override
public void setMaxStringSize(int size) {
maxStringSize = size;
}
@Override
public int getMaxStringSize() {
return maxStringSize;
}
@Override
public void setMaxBlobSize(int size) {
maxBlobSize = size;
}
@Override
public int getMaxBlobSize() {
return maxBlobSize;
}
@Override
public void setMaxReferences(int count) {
checkArgument(count >= 0);
maxReferences = count;
if (count < references.size()) {
removeReferences(references.size() - count);
}
}
@Override
public int getMaxReferences() {
return maxReferences;
}
@Override
public void setMaxWriteOps(int count) {
checkArgument(count >= 0);
maxWriteOps = count;
}
@Override
public int getMaxWriteOps() {
return maxWriteOps;
}
@Override
public void setMaxNodeCount(int count) {
checkArgument(count >= 0);
maxNodeCount = count;
}
/** @return the configured maximum node count. */
@Override
public int getMaxNodeCount() {
    return maxNodeCount;
}
/** Set the maximum property count; must be non-negative. */
@Override
public void setMaxPropertyCount(int count) {
    checkArgument(count >= 0);
    maxPropertyCount = count;
}
/** @return the configured maximum property count. */
@Override
public int getMaxPropertyCount() {
    return maxPropertyCount;
}
/** Set the node-remove ratio used by the random content generator. */
@Override
public void setNodeRemoveRatio(int ratio) {
    nodeRemoveRatio = ratio;
}
/** @return the configured node-remove ratio. */
@Override
public int getNodeRemoveRatio() {
    return nodeRemoveRatio;
}
/** Set the property-remove ratio used by the random content generator. */
@Override
public void setPropertyRemoveRatio(int ratio) {
    propertyRemoveRatio = ratio;
}
/** @return the configured property-remove ratio. */
@Override
public int getPropertyRemoveRatio() {
    return propertyRemoveRatio;
}
/** Set the node-add ratio used by the random content generator. */
@Override
public void setNodeAddRatio(int ratio) {
    nodeAddRatio = ratio;
}
/** @return the configured node-add ratio. */
@Override
public int getNodeAddRatio() {
    return nodeAddRatio;
}
/** Set the add-string ratio used by the random content generator. */
@Override
public void setAddStringRatio(int ratio) {
    addStringRatio = ratio;
}
/** @return the configured add-string ratio. */
@Override
public int getAddStringRatio() {
    return addStringRatio;
}
/** Set the add-binary ratio used by the random content generator. */
@Override
public void setAddBinaryRatio(int ratio) {
    addBinaryRatio = ratio;
}
/** @return the configured add-binary ratio. */
@Override
public int getAddBinaryRatio() {
    return addBinaryRatio;
}
/**
 * Pin or release a reference to the current root node state.
 * <p>
 * Passing {@code true} ensures a reference is held (creating one only if
 * absent); passing {@code false} releases any held reference.
 * <p>
 * Bug fix: the original {@code if (set && rootReference == null) ... else
 * rootReference = null} cleared the reference when {@code set} was
 * {@code true} but a reference already existed, so a second
 * {@code setRootReference(true)} silently released the pin.
 */
@Override
public void setRootReference(boolean set) {
    if (set) {
        if (rootReference == null) {
            rootReference = new Reference(nodeStore.getRoot());
        }
    } else {
        rootReference = null;
    }
}
/** @return {@code true} iff a reference to the root node state is currently held. */
@Override
public boolean getRootReference() {
    return rootReference != null;
}
/** @return whether the compaction strategy persists the compaction map (delegated). */
@Override
public boolean getPersistCompactionMap() {
    return compactionStrategy.getPersistCompactionMap();
}
/** @return current number of active readers. */
@Override
public int getReaderCount() {
    return readers.size();
}
/** @return current number of active writers. */
@Override
public int getWriterCount() {
    return writers.size();
}
/** @return current number of held references. */
@Override
public int getReferenceCount() {
    return references.size();
}
/** @return last recorded size of the file store in bytes. */
@Override
public long getFileStoreSize() {
    return fileStoreSize;
}
/** Fetch the current compaction map from the file store's tracker. */
private CompactionMap getCompactionMap() {
    return fileStore.getTracker().getCompactionMap();
}
/** @return total estimated weight of the compaction map (sum over its generations). */
@Override
public long getCompactionMapWeight() {
    return sum(getCompactionMap().getEstimatedWeights());
}
/** @return total segment count tracked by the compaction map (sum over its generations). */
@Override
public long getSegmentCount() {
    return sum(getCompactionMap().getSegmentCounts());
}
/** @return total record count tracked by the compaction map (sum over its generations). */
@Override
public long getRecordCount() {
    return sum(getCompactionMap().getRecordCounts());
}
/** @return depth of the compaction map (delegated). */
@Override
public int getCompactionMapDepth() {
    return getCompactionMap().getDepth();
}
/** @return the last recorded error message and stack trace, as captured by {@code error}. */
@Override
public String getLastError() {
    return lastError;
}
/**
 * Record an error for later retrieval via {@code getLastError} and log it.
 * Cancellations are ignored: they are an expected consequence of shutting
 * down scheduled tasks, not failures.
 */
void error(String message, Throwable t) {
    if (t instanceof CancellationException) {
        return;
    }
    StringWriter buffer = new StringWriter();
    buffer.write(message + ": ");
    t.printStackTrace(new PrintWriter(buffer));
    lastError = buffer.toString();
    LOG.error(message, t);
}
}
}
| OAK-2849: Improve revision gc on SegmentMK
Register CacheStatsMBean for segment and string cache
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1691221 13f79535-47bb-0310-9956-ffa450edef68
| oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentCompactionIT.java | OAK-2849: Improve revision gc on SegmentMK Register CacheStatsMBean for segment and string cache |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.