diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..13566b81b018ad684f3a35fee301741b2734c8f4 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000000000000000000000000000000000000..105ce2da2d6447d11dfe32bfb846c3d5b199fc99 --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000000000000000000000000000000000000..d1e22ecb89619a9c2dcf51a28d891a196d2462a0 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000000000000000000000000000000000000..625180ecfb2afd997085383b0626edaf5a4b792e --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/storydalle.iml b/.idea/storydalle.iml new file mode 100644 index 0000000000000000000000000000000000000000..8b8c395472a5a6b3598af42086e590417ace9933 --- /dev/null +++ b/.idea/storydalle.iml @@ -0,0 +1,12 @@ + + + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000000000000000000000000000000000000..94a25f7f4cb416c083d265558da75d457237d671 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/1.3B/config.yaml b/1.3B/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..58e7d3f5f77ac8b49beb49c84882c1fee032cde3 --- /dev/null +++ b/1.3B/config.yaml @@ -0,0 +1,38 @@ +dataset: + tokenizer_type: CharBPE 
+ context_length: 64 + image_resolution: 256 + +stage1: + type: vqgan + embed_dim: 256 + n_embed: 16384 + hparams: + double_z: False + z_channels: 256 + resolution: 256 + in_channels: 3 + out_ch: 3 + ch: 128 + ch_mult: [1, 1, 2, 2, 4] + num_res_blocks: 2 + attn_resolutions: [16] + pdrop: 0.0 + +stage2: + type: transformer1d + vocab_size_txt: 16384 + vocab_size_img: 16384 + hparams: + embed_dim: 1536 + n_layers: 42 + n_heads: 24 + n_dense_layers: 42 + ctx_len_img: 256 + ctx_len_txt: 64 + embd_pdrop: 0.0 + resid_pdrop: 0.0 + attn_pdrop: 0.0 + mlp_bias: True + attn_bias: True + gelu_use_approx: False diff --git a/1.3B/tokenizer/bpe-16k-merges.txt b/1.3B/tokenizer/bpe-16k-merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..f921f1bd4bd5006f70f02f12932219ceced6119c --- /dev/null +++ b/1.3B/tokenizer/bpe-16k-merges.txt @@ -0,0 +1,14426 @@ +#version: 0.2 - Trained by `huggingface/tokenizers` +t h +i n +th e +e r +a n +o n +a r +r e +s t +a t +in g +i t +e r +r o +o r +o f +e n +e d +i n +a l +an d +o n +e s +o u +p er +a c +i l +s on +o r +t o +i c +per son +h o +i on +d e +a n +r a +a t +c o +l o +i s +a l +l e +a s +c h +u r +a s +i s +i g +u n +l e +b e +a m +i r +t o +s e +w it +a g +wit h +r i +e n +f or +i m +w h +s t +l u +t s +at ion +l i +n e +s h +w e +b o +a p +a d +p o +v e +m o +t e +s i +d i +l y +l a +s e +c t +s p +b u +b y +c on +st r +en t +er s +a r +m e +i t +a y +t i +s u +al l +v e +d s +a b +o l +o c +v i +f ro +d ing +in t +co m +a k +th e +e l +q u +ig h +n o +d o +p ho +p ro +a u +c e +w as +s c +2 0 +h a +s s +f e +v er +p ar +t h +ou n +ag e +f or +th at +fro m +en t +e t +p l +s ig +c h +il lu +m e +i on +illu str +p h +w or +ro o +er e +c ar +c e +in g +illustr ation +g ra +d e +o o +ur e +t er +ac k +ar t +t r +at e +i c +v er +th is +co l +t y +in e +e x +it e +c u +on e +p re +p e +it y +m an +g ro +s ho +f u +ar e +de sig +oc k +ac k +b r +i r +ou r +s s +d u +re e +roo m +st ock +h e +d ay +g e +at ed +th er +b 
l +1 9 +igh t +s o +t ra +m u +20 1 +t a +a d +l d +f f +m b +re d +t er +w n +al l +ac e +i es +h ou +pho to +m ar +p a +m an +m a +p u +c re +m ent +g re +g u +ou t +p or +t e +un d +h is +c i +l as +a f +an d +r u +b ack +g e +at ion +y o +t u +ic e +desig n +p le +b e +k s +d re +u p +a v +wh ite +t ing +p la +ar d +p r +k e +c or +it s +gro und +k ing +d er +an t +d er +s er +oo d +am e +ne w +h e +h i +d ra +k e +p ic +ab le +d is +yo u +gra ph +ing s +t ed +pho to +ar y +m on +lo w +ak e +m in +i ve +il l +ct or +st or +w at +back ground +ou r +st er +ic h +en ts +b a +st y +g o +l es +l l +a m +hou se +on g +t im +il l +vi e +f ir +e v +ur ing +bu il +m or +re s +t w +m i +o k +p s +is h +we d +st u +ct ion +f t +m s +d ed +f ree +w o +y s +sh ir +l and +c k +st e +s k +de n +int er +o l +ol d +v ing +u s +ac h +f i +wh ich +fu l +al s +ou t +an g +p p +y e +bl ack +j u +ro y +s o +ve ctor +on s +s a +h er +m y +j e +wed ding +w ere +il e +in e +as t +en g +mb er +al ly +as h +f in +f l +ac t +c t +l l +sp e +m en +ho me +ro u +g ar +c lo +photo graph +ar s +ad e +in t +c lu +or s +g h +ou s +s m +ch e +re n +y our +r an +ic al +g i +c ent +shir t +b i +w ing +wor ld +ha ve +tw o +w ay +si de +u s +vie w +i l +r y +p at +d uring +g o +o p +i de +u m +h as +sh i +en ce +the ir +fir st +is t +l ine +no t +bu t +ch ar +g ir +at t +al e +n ing +i an +w s +n s +c an +e l +p i +a ir +n u +o ff +ag es +an im +t ure +e le +i a +sty le +lo w +o ther +lo c +b as +se t +ha d +m at +n or +0 0 +l ar +t t +ap er +s y +ar t +j o +al ty +af ter +d it +b lu +po s +im age +an s +ation s +be au +si c +w i +c an +w ar +ou ld +l s +c oun +n o +tim e +h u +lo o +y ing +p aper +photograph y +roy alty +oun d +com p +o ver +f ash +pa int +t ri +b ri +b ir +buil ding +c ity +w all +in s +i d +t at +p e +an ce +c las +a st +m p +the y +v ed +st re +b ro +fash ion +f o +to p +l it +te l +t ion +com m +s ou +i al +am er +c ro +t ur +mor e +at u +g ame +blu e +in to +f low +car to +be 
st +v is +at ing +ide as +w in +il y +f e +w all +an t +b le +r it +b re +wh o +ce p +so me +d y +c as +ri st +f f +ra l +ation al +de c +photo s +b er +m er +en d +ho tel +c ra +20 0 +w al +s es +b all +r ic +c k +m ade +in k +tat to +is h +ac h +i or +wh ere +al so +e ar +carto on +f am +n er +gir l +mo st +b ed +ag a +s ur +at es +col le +w a +u l +n ed +ve l +ro n +ab out +f il +sc ho +ch rist +wh en +g en +g h +h el +t re +beau ti +c l +s un +st o +an y +wat er +im ages +ar e +t able +i z +w ood +at ive +en s +pl an +a ir +v es +or t +ic k +an ds +wor k +beauti ful +m as +wo man +t es +w ill +no w +we en +bo ok +it ch +ye ar +la r +n y +d on +o s +ta in +pla y +2 0 +in di +d a +c ur +pro du +ch il +amer ic +be en +dre ss +p en +d ge +f un +in clu +t ro +c ar +b ur +e st +v int +th rou +vint age +n i +ver y +w er +at e +c es +s il +li fe +ho w +sh e +ig h +b ack +shi p +be t +w e +s im +be ach +s ion +x t +str u +m er +k i +sm all +mo vi +ion s +li ke +ar ch +pe o +fro n +ic s +l on +lo ve +in ing +is e +h ar +t on +the se +peo ple +scho ol +t or +f ri +christ mas +do or +f lo +n ational +m ake +h and +m ay +m ents +c ho +mo der +m es +c ap +for m +un i +be d +par k +stor y +g las +h igh +g r +g s +k itch +h or +p ri +m il +it ed +d ar +t le +m ak +l ight +b ar +col or +th er +it e +z e +ne y +wall paper +1 8 +m oun +1 0 +gre en +am p +d o +c er +pr int +f ace +ho l +fron t +r e +gar den +th ing +th ere +tr u +paint ing +th ou +lo go +w oo +h al +qu o +bo o +moder n +at er +h er +bu s +ap p +e p +n ight +l ong +g et +wh ile +li ving +t ree +throu gh +b or +dra wing +tu res +re s +po ster +te m +f fe +tr an +wh at +ti c +ur y +as y +f ood +wo men +er y +a ther +ss es +th ree +kitch en +su m +as s +c ts +fam ily +ic k +p es +y oun +tr y +d en +do g +st a +qu e +on ly +t y +l in +f oo +man y +l ed +st ic +pre s +o ff +ac t +or ig +d es +c au +k ed +r ing +a il +re ci +pat ter +stre et +con cep +nor th +e k +par t +w ould +li c +de cor +l igh +ap e +c ou +b an +am 
es +par ty +f ur +se a +t al +mu sic +lo g +for e +te am +t en +th an +201 9 +lit tle +po l +at ure +gre at +loc ated +s ing +p an +st ri +us ed +ser v +ro ad +bo ard +cu te +st s +youn g +fo re +u e +le ss +bus ine +l and +lar ge +i ous +f ol +pl ace +ba by +w rit +th o +s on +w il +at s +mo de +it ies +fil m +c ed +ar ound +bet ween +ct ed +e t +bed room +c le +ri ver +it ion +s le +t en +be fore +v o +el d +orig in +por t +b al +un der +a son +ha ir +ju st +v al +f er +c a +americ an +s al +do wn +v en +t in +me di +in sp +nu mber +t ak +pic tures +ir e +in ed +f ac +inter ior +pr in +are a +s qu +ch es +sp ace +s it +in st +aga inst +su p +dar k +char ac +ki ds +l an +c y +s we +c am +sho w +concep t +c our +movi e +pl ac +se l +d ent +st er +lon don +b ra +he ad +s si +lo ok +ch an +is ol +le y +st an +ba th +ct ure +e p +di ff +f ar +a z +re ad +por tra +s co +e st +ac ti +c ake +bir th +le t +or y +tatto o +pic ture +clas sic +v al +ce ss +se con +k et +le as +m as +ye ars +k no +out f +dec or +sou th +we st +p al +n am +ce le +pla y +ne ar +ar ing +arch ite +fun ny +isol ated +g old +st ar +se ason +ser ies +ver s +quo tes +birth day +we ll +p ur +glas s +m is +f ra +flow ers +g ood +j un +the m +colle ction +h app +a k +ce ss +charac ter +s oc +19 9 +u se +sum mer +n it +f ic +he al +or e +o ver +par t +l ing +ke t +w ard +v en +pro je +fi eld +r ight +p ink +off ice +anim als +d r +to wn +g ol +b er +st ed +int er +wi th +busine ss +desig ns +d d +e u +h igh +i f +it al +spe ci +be ing +re l +pre mi +hi m +ph one +ion al +e s +e se +gen er +u ary +r un +g er +sc ri +cele br +si ze +at h +s li +fu ll +s w +su b +be g +c all +u p +patter n +loo king +sk y +ish ed +comp le +br it +for m +ho o +hi story +h and +en d +flow er +tra vel +car s +bi g +land sc +co ver +j ack +gro up +co o +g er +l a +e ach +secon d +ke y +com e +st ate +d in +bath room +w ar +dis ney +dre sses +bo x +dit ion +s ale +cra ft +in d +f la +w n +off ic +f our +origin al +e y +flo or +s 
en +wat er +diff er +un ited +chil dre +tatto os +vi de +m ale +w inter +ap pe +c ir +o b +t it +re cor +in a +sho p +1 2 +te xt +ev ent +v ar +ve lo +0 00 +bo y +c al +re leas +m ing +0 s +d ri +we l +g an +s h +le ft +f ran +gh t +c at +au str +j ap +d a +happ y +st ar +sig n +la ke +sel f +foo t +austr al +childre n +g a +win do +bo th +m ag +cu sto +portra it +f re +g ames +di re +woo den +sc en +c le +stu di +en ing +co stu +cent er +c ame +h ere +201 8 +el low +m a +sp or +be ds +he art +ap art +v ill +col or +sy m +th en +ad v +clo se +l ater +n a +is land +c o +5 0 +for d +d ic +sy ste +k o +p h +e g +l ish +sc re +out door +si ve +de velo +qu i +s ed +c y +k er +se e +hol ding +ne ss +we ar +t ter +per i +c el +t ter +wh e +f le +d ro +pi e +fol low +il ing +in v +1 5 +sh e +3 d +reci pes +ma p +i e +su ch +atu res +archite cture +war ds +b en +3 0 +anim al +cas e +e ve +m ed +s po +al ong +mode l +in es +un der +sty les +st i +g a +f ts +fur nit +furnit ure +t ake +y ellow +e asy +cu l +car d +t al +mb ers +1 1 +kno wn +20 20 +is o +er ed +ba g +m ill +e y +po pu +d ig +w l +bo dy +st ers +coun try +las t +comp any +cau se +po ol +av y +ne xt +b att +thou gh +fir e +op en +ro ck +e ment +se ver +ffe e +ma in +cre ative +por t +d ents +c ast +re sta +resta ur +19 8 +st ra +b s +on al +on d +fore st +le g +t ting +ss ed +c ru +re g +e mp +e ts +bri dge +it al +ren ch +lu x +c la +co ffee +ic on +sho es +c li +an other +or gan +per s +ear ly +vide o +inclu ding +d ol +per fe +pro te +f ig +us ing +al bu +i um +su l +af ric +clu b +a in +o per +f all +te x +roo ms +fe at +19 6 +feat uring +h un +le ad +st one +ele ct +ch amp +i o +da ys +so me +v il +con tin +insp ir +sit ting +ch ur +is m +s ing +e m +l am +i de +differ ent +sim ple +y ard +we aring +a th +indi an +brit ish +pp ed +me tal +h on +wa ys +en ds +19 7 +c ks +c r +sc i +l ad +n e +t an +as e +pl ate +co ol +sym bo +mar ch +sh el +eng land +outf its +b and +ro w +ho li +vis ion +cent ury +develo p 
+c ab +log y +n ature +s in +pp ing +et te +in st +moun tain +ta ins +dit ional +f ish +h y +su c +st ick +comm un +se p +ri c +l im +m ent +landsc ape +ic es +st ing +b b +in side +n atu +f rench +c ould +gir ls +con si +eu ro +fin d +f it +natu ral +he ad +tim es +b at +b ar +act ion +bre ak +ri l +m al +de l +al ity +d an +s at +e an +on es +stan ding +sh ort +syste m +w ings +201 7 +il s +ap ril +s ite +u al +ta il +mo ther +s ong +pu b +st ates +f ast +n a +con stru +fl at +d ers +2 5 +av es +hu man +restaur ant +el s +l er +p et +pub lic +qu e +tre es +b lo +li ve +i e +at or +te ch +en ti +sp ring +il ity +ep iso +for ce +pp er +mu se +perfe ct +m y +m ic +f ant +sever al +we ek +1 6 +a p +col ors +sa id +be cause +1 8 +s ame +w ho +th y +e d +cu p +le ather +bri de +gra ph +co ver +is ed +per for +fe sti +pro vi +sho t +le ar +re ce +fant asy +on line +albu m +su per +bro wn +v ic +d ou +chur ch +an ge +sil ver +roy al +ne w +1 3 +1 4 +t v +e very +sho ws +tran s +mil it +jun e +indi a +ju ly +bo at +di d +boo ks +z z +i i +d ings +er ing +au gu +1 0 +g al +s ket +in du +r ac +f ound +do wn +call ed +jack et +cou ple +muse um +c er +t ers +c a +an ci +p lu +e ast +re si +tr o +fu n +ss ion +t ab +go ver +s ol +201 6 +ex am +uni vers +ou l +n ame +can ad +l ace +ther n +pl ant +j an +se en +o wn +19 4 +premi ere +ic i +foot ball +po wer +st ill +fin al +iz ed +ar ch +hal f +re tur +b un +re al +po int +th ings +t a +f ive +d ining +play ed +e ver +buil t +apart ment +al i +no ve +wor king +be l +pl an +serv ice +ak es +chil d +am az +mak ing +ct ive +f ru +mu l +beau ty +v ers +pla ying +st ation +per son +ho we +do m +howe ver +writ ing +fla g +l ands +sing le +oc to +pic al +r ace +v as +s mo +hi stor +f a +t ren +lo t +h ands +milit ary +i red +pres ent +th ir +l y +stu dents +ti es +festi val +ne ws +plac es +g all +gi ft +in cre +t our +con tro +sep te +t our +septe mber +gover n +v in +t ty +c at +s an +sil hou +te st +h ind +hal lo +lux ury +ra in +ho 
t +hallo ween +c ing +sho wing +min i +su n +mu ch +sp on +custo m +res ort +studi o +fe male +an a +mo on +ack s +1 7 +proje ct +hor se +su it +f av +lin es +201 5 +spor ts +t ar +de s +st on +acti v +p il +ti ps +ll ing +squ are +co tt +follow ing +ne ed +gre y +h am +out side +d on +1 7 +be hind +e ds +p ra +ge st +der ed +eng lish +ic le +ti ons +bir d +e ting +stor e +can vas +ha ir +wor k +bo wl +shi ps +air s +mar ket +ar my +cho col +ro und +or ange +st age +be came +s now +de scri +episo de +g y +holi day +b ay +pu bl +is ing +windo w +ap pro +l ist +ch ing +graph ic +ma j +r ings +bo tt +cl ou +inter national +ph il +wat ch +s au +la y +ine se +a bo +sin ce +ill ed +di e +rece i +ev en +oc ean +mill ion +g lo +augu st +t ou +to ge +su g +1 00 +jo h +tra ditional +an ese +chocol ate +ti l +ve ly +ch inese +toge ther +v ol +cu t +u res +we e +clas s +i m +me d +octo ber +am s +plan ts +pre si +p er +b ou +in f +gol den +k es +sun set +reci pe +2 4 +wal k +fi re +d i +s n +pie ce +sp ort +un til +2 1 +ag ue +com b +c rit +ear th +austral ia +wor ks +m ach +bas ed +e f +mat ch +ve h +d ance +qu ar +de f +o il +costu me +jap anese +ac ro +pa int +h all +movi es +cast le +p as +sle eve +u k +b le +p t +bo w +loo ks +ag s +ag o +hel p +y wood +stick er +ch ick +ar ri +ch i +ha m +ish ing +fl or +pl ans +lo s +photo graph +p ass +e ffe +re ad +ill s +anim e +re tro +it u +mor ning +e ver +fra me +clo thing +19 5 +201 4 +cro ss +m ens +art ist +h at +sh ar +ct ions +re sul +act ers +char acters +do gs +play er +inspir ation +po w +f oun +man ag +nove mber +do ors +s y +releas ed +h d +igh ts +sli m +com ing +st ars +m en +s ical +de st +o t +tru ck +at o +i st +silhou ette +en ded +l ate +me mor +ch air +wal king +read y +color ful +ad o +abo ve +dra wings +ver sion +to ok +constru ction +a way +ang el +gener al +heal thy +ver se +ail s +d ate +act or +ha ving +1 9 +ab str +1 6 +dra wn +contin u +av ail +str y +bu l +en jo +an ts +ever y +we ster +speci al +jan 
uary +cre am +gr and +dd le +li es +with out +mo ti +dig ital +tho se +wor d +sc ul +univers ity +ll ywood +m ou +th s +ar ts +is su +fi ed +de ce +f res +fe atures +s am +ci l +cover ed +di st +br an +2 2 +ho mes +or ies +abstr act +anci ent +gall ery +m p +gu ide +di am +m on +chan ge +fe w +c ent +cre ate +popu lar +a y +c akes +o d +dra g +pol ice +te le +com pu +sh oul +ligh ts +avail able +le ague +ver t +v ac +cer e +st and +com ic +ro r +pre ss +al though +k a +ig n +se arch +gu it +mo tor +pos ition +d am +tem por +fres h +cent ral +tak en +loc al +uni que +air craft +ex ter +far m +f it +an ti +ll s +v an +tra ining +vic tor +hel d +fe br +c amer +febr uary +americ a +to day +tro pical +amaz ing +l or +c ast +dece mber +dre am +thir d +acro ss +wester n +me mbers +spe c +p ack +4 0 +j our +ol s +beg an +pl es +au th +con ne +ho me +re mo +pos ed +ad s +tu red +ta in +re ma +sci ence +ie ty +de li +or der +tu m +pur ple +la un +du e +z e +mo b +th ers +ne ver +com pe +con cer +si x +perfor man +ch ina +form ed +symbo l +la y +201 3 +scul p +g ed +appe ar +n avy +ro un +coun ty +y or +h ill +mak e +t un +po st +wi de +ate ly +wil d +ch ang +chick en +it ing +k now +i d +pa ir +glas ses +bi ke +so ft +scen e +m ir +ate st +n ails +ar ti +sh ould +lu tion +f ile +to wer +co at +batt le +hou ses +f el +e mb +t ly +ar ds +co ast +str ic +ey e +desig ned +sp ar +m ac +2 3 +pro gra +vil le +maj or +f ly +paint ed +be li +si ons +ro man +je wel +le a +vill age +ti al +ac cess +f ather +f igh +t il +r y +tra in +tit le +ru ssi +a wards +m aga +l ap +201 2 +19 3 +angel es +maga z +in ts +w on +po sters +to o +mi ddle +r s +n ers +n al +fu l +b loo +ma ster +ch ed +with in +champ ion +on ed +an n +moun tains +bas e +m ad +x ic +de ta +s day +ser ved +b ak +bor n +a x +govern ment +sp i +e du +tr ack +break fast +mat er +or ing +heal th +com ics +produ ction +ss ing +p lo +car e +w ine +se le +sup er +run ning +dra w +go ing +d ence +tempor ary +po lit +s and +afric 
an +stor m +h an +w ant +jewel ry +se qu +2 8 +st ron +p age +e f +z o +hi b +shoul der +colle ge +scre en +k ing +pa pers +mi x +2 7 +ger man +to ys +sho wer +g y +for t +form ation +prin cess +so f +ap ple +b an +re spon +le aves +for mer +all s +a de +ru n +sw im +fri ends +edu c +p ack +o d +be ar +pu t +cere mon +a ster +commun ity +g as +se cre +s l +d y +g es +1 5 +na il +am ong +ac cor +f ab +r a +s ch +cra fts +me xic +qu ality +presi dent +insp ired +fu ture +en ed +vie ws +fer ence +n ur +bo ys +que en +pro fe +v ir +ran ge +exter ior +ve ge +con tain +gra y +br ight +int ed +vi l +f ic +p un +buil dings +ex peri +f air +canad a +i phone +car e +ge o +read ing +g n +fru it +ing ton +2 6 +bir ds +on ce +c and +sp ir +ra in +ty pe +bu y +in n +b a +val ley +swe et +fig ure +h ang +fran ce +o w +af e +li br +cour t +st atu +r on +com es +hoo d +a i +bri d +par is +po p +water color +pre tty +hair styles +bott le +ho llywood +mon ey +ne ck +mar vel +compu ter +co p +ber ry +lo st +proje cts +inclu ded +201 1 +st ad +te a +sup port +develop ment +boo ts +spec ies +su pp +nam ed +me t +au gh +de ath +magaz ine +al ter +pro per +te e +pr act +h ard +ste el +se am +mach ine +pos it +201 0 +wall s +mob ile +no w +br and +im port +min u +u t +g oo +ide a +camer a +si ble +ic les +ne l +paint ings +en ces +ne t +g an +cur ren +st art +fam ous +play ers +ra di +i als +bu tter +ste p +p ed +w ars +cent re +cep tion +pre par +recei ved +ele ments +qu est +ach ed +k y +le vel +exam ple +at i +o f +jap an +gar d +2 9 +g am +the at +stor age +de ad +sh or +me l +ex ten +do es +in de +w in +to y +d oc +cre ated +int en +l ation +flor al +6 0 +ev ents +sket ch +plac ed +t on +no v +ent ly +cl im +fri end +sh ap +dou ble +th an +medi a +se e +bu d +recor d +performan ce +in it +m c +dr in +car ri +tak ing +drag on +r ise +aga in +le tter +g ive +g un +g or +th u +adv ent +outf it +g ing +tab les +f act +ceremon y +sh ape +min i +w on +hoo die +l at +cro w +di y +w r +t 
ation +ne t +che ck +d augh +accor ding +sa ve +sim il +me mber +z ed +ou s +dis play +ch ic +con temporary +te mp +in ation +offic ial +ko re +seam less +vis it +stad ium +oc i +motor cy +al low +mar ri +m ic +ass oci +ital y +ro of +medi um +an ing +ke ep +gra ss +patter ns +y er +ey es +wall papers +bet ter +ic a +ex pe +1 3 +to m +costu mes +buil d +e ss +fre e +writ ten +tem ple +k m +ful ly +art work +guit ar +sat ur +bu s +mon th +ser ve +e qui +sp ac +euro pe +decor ation +the me +l eng +h ur +libr ary +t or +a ward +l ion +bl an +stu dent +w re +1 4 +ra m +as h +fav or +w ol +swim ming +pic s +releas e +pri v +far m +be come +a w +car pet +co ck +tech no +au to +we ight +soc ial +z y +vi a +con t +contro l +ac i +ir on +ev ening +iz ation +au tum +us a +m it +d ru +fl ying +make up +mak es +at a +de ss +gard ens +co lo +cir cle +ag ed +s our +f oc +c amp +ach ing +di ag +acti ve +austral ian +ent al +t t +cle ar +bas ket +l ou +an nu +h en +in stru +av ing +pres ent +ph y +g le +bo x +o es +re por +s a +re view +of ten +back yard +sk ir +basket ball +ck et +produ cts +ic ally +200 9 +autum n +ligh ting +m i +produ ct +cu p +al ist +s ne +se ction +b on +sk in +ra il +activ ities +s an +e sc +alter n +des k +afric a +th es +access ories +il ed +rou te +c in +po se +dess er +dis h +sho w +ent ion +comple te +la be +j er +suc cess +pat io +diam ond +s and +al le +u su +var ious +la u +gro un +windo ws +re search +car ds +joh n +f ine +tra ve +n ar +b ags +yor k +sur roun +wor ds +ph y +ex hib +der ing +en vi +tru cks +ro le +mas k +bre ad +her o +p ast +me t +min im +shir ts +ger many +di stric +ann oun +200 8 +k en +king dom +go t +sou thern +lad y +wee k +ele g +ne ck +pre vi +lo ad +s is +di vision +clo thes +fab ric +mu g +it h +g ri +int ro +k in +prin ts +color ing +ast er +op ening +fe et +g ran +0 0 +envi ron +de ser +a c +b al +ct ing +sur face +gi fts +f r +ne ar +lear n +rain bow +ital ian +w an +l un +ma id +b our +m ur +gra du +fri day +e mer 
+tra il +par ts +ent er +cy cl +neck lace +pu pp +ch ap +gu e +lor d +chic ago +comm on +fri end +intro du +mat ic +ang le +peri od +8 0 +ho sp +sig ned +t ch +med ical +pl at +ke ts +ous ly +d ry +pal ace +y a +e aster +mag ic +priv ate +ti e +he avy +we b +e dition +s ure +stron g +ve ment +as se +air port +c is +daugh ter +sculp ture +gu est +b ab +al ways +pro ble +ul t +sm iling +mu st +br ick +statu e +f an +theat re +cir cu +care er +po t +g l +shir e +ch airs +inst all +r ang +tr a +re ally +euro pe +w ra +bas e +sa w +se ven +ad es +con st +champion ship +en er +me ss +tem plate +anti que +re fle +re st +sk y +elect ric +re ception +ac tu +win d +ob je +indu stri +s now +t ile +co lu +annu al +sun day +sne ak +i b +ce iling +son gs +nu mbers +re ve +ant a +ad ded +gi ving +f ishing +sig ns +sk u +profe ssi +at tr +at temp +eng ine +ad vert +an y +leng th +nor thern +ap par +ac ade +st and +up on +import ant +sa ys +sp an +ti ve +europe an +a u +plan e +gi ven +re gu +l atest +p it +plu s +ado w +di t +w ns +y ed +ci al +illustr ations +comm er +as ian +im pro +in o +le ad +la w +distric t +ep ing +w ent +ar m +for ces +lar gest +se as +me tri +v a +ru stic +h ab +beg in +w ro +ol y +favor ite +cas ual +doc u +satur day +a er +bo t +g hou +fi f +pla stic +ra y +ct i +su it +in te +je ans +x im +le go +tak es +wi fe +de ep +ad d +clas s +g ate +sur vi +serv ices +cam pa +coo k +din ner +200 7 +eng ine +fl ight +educ ation +woo d +wro te +gre eting +techno logy +en a +an o +th ink +mar ine +st af +au di +l o +base ball +mar y +m m +s che +f an +ste ad +n er +sol di +gol f +pos es +lead ing +s se +al y +inclu des +re present +foo t +h a +decor ations +simil ar +e i +e as +cla im +mi d +spe ak +me as +dro p +k ar +ty pes +ghou t +k in +throu ghout +metri c +eg y +veh icle +cour se +descri bed +groun ds +ag ing +ing er +mi d +inclu de +ro me +y ou +k en +high way +ex pl +wil li +m un +de ci +emp lo +tex ture +mar k +graph y +ho ld +sh ed +stru cture +stu dy +c 
ri +e at +am a +ent ry +who le +proper ty +thou ght +est ab +to tal +de sp +act re +a vi +cor por +sho pping +ge ar +c ats +photograph er +as ed +col our +ap s +ent ran +co ron +bi o +are as +vill a +decor ating +pan ts +rel ig +che ese +ct ors +h im +che ster +st a +tele vision +fur ther +3 1 +bre ak +brid al +l in +desig ner +aster n +vac ation +ban k +eleg ant +pie ces +si a +me eting +st ay +ener gy +har d +we b +tri p +o x +d or +ad ed +k n +clou ds +f ar +cor ner +bal lo +me mes +hang ing +bat man +mon ths +be er +experi ence +tin y +ur ban +k ind +sim p +sug ar +apart ments +de ck +ni fic +b y +k u +mar t +ch all +the tic +lap top +n g +wh y +whe el +plan et +te en +c it +ro ll +un it +col d +po ints +f ans +dri ve +li fe +bud get +def en +is s +br ac +w ers +ic u +st ands +col on +inf lu +nam ent +19 0 +landsc ap +pro cess +li ed +mode ls +f lu +an e +fo x +ev ed +ge or +co sp +e dit +pu s +s lo +gre ek +w ash +ma y +for ms +ab ility +tan k +in ve +er a +cul ture +p tion +in formation +cosp lay +in ch +sto p +c afe +entran ce +im p +un ion +cre w +k a +c m +z a +cott age +some thing +qu il +enjo y +re qui +hu m +man u +pre ssion +e cu +suc cess +inv ol +1 st +deta ils +comp an +sho wn +b by +pl ic +gal ax +p on +n on +al a +s old +att ack +sku ll +war m +cap tain +b ad +s ound +j a +yo ga +f our +ul tim +ar m +consi dered +staf f +b ble +e ag +retur n +ic ons +ach es +h i +cre te +d un +ge l +sen ior +quo te +deser t +ge ts +ill ing +li d +b es +low er +ll ed +su ff +ex er +continu ed +lim ited +mis sion +pla ys +jo b +b ad +tt ers +bl es +l t +com man +mic ro +est ate +coo king +se ts +reg ion +cho o +progra m +sp a +pr ice +dire ctor +soc cer +fin ished +concer t +t s +down town +th ro +att le +pos sible +se par +li sted +200 6 +m b +ch er +rel ation +lea f +act ors +gre en +f illed +mu sc +n ice +sh u +e m +r is +tur n +desp ite +bur g +prin ce +las h +ch ain +al most +ri es +loc ation +professi onal +tri bu +advent ure +de fin +k y +pro mo +h it +stu ff 
+com par +ag u +hosp ital +ele ph +re co +t ig +colo red +ch ie +mou th +ge tting +v an +de fe +inv it +g room +i ds +n ic +hor ses +is lands +skir t +campa ign +ac y +c all +produ ced +industri al +resul t +fire place +mul ti +we st +col our +ter min +ex per +fe atu +pat h +ro man +at ors +n av +mix ed +sa fe +bar n +ch art +swe ater +ber t +log ical +del ic +ca p +dis played +c les +til es +be low +20 0 +gu n +veh icles +to wards +o thers +5 00 +dr ink +lan gu +bro n +d ney +ent ed +tu tor +me al +featu red +p ng +a es +eng ag +ic ks +cook ies +p ment +rac ing +cour te +wil d +val ent +mu sical +bro ad +histor ic +i ally +mo ved +sneak ers +part ici +phil i +sc ar +farm house +star ted +d r +pal m +ga ve +busine ss +rou ght +we ll +tri es +in stead +se ssion +russi an +e dge +pen cil +blo ck +mir ror +wat ches +win ning +fe der +courte sy +r at +se n +f on +cro wn +ti onal +cott on +e ight +la bor +d c +af ter +publ ished +sup por +z en +us es +wa ve +estab l +r ice +issu e +ho st +as si +galax y +part ment +pat i +ri ding +e qu +re di +d one +inde pen +7 0 +minu tes +an ato +motorcy cle +mi x +ad dition +down load +e con +ban ner +do ctor +sty lish +do min +comm is +t all +spe ed +sho e +ar i +d ly +ou gh +e ating +ath le +roman tic +ex plo +engag ement +eng ers +th ur +span ish +sug ge +spe e +fe el +premi um +cre atures +part icu +mexic o +qu et +re du +18 9 +e astern +clo se +class room +con crete +hol ds +hou rs +bu tt +z er +op ened +ing redi +y l +di ed +ll a +or ity +e x +indu stry +lear ning +be an +gi ant +br ing +econ o +diag ram +s ay +gro wing +t te +clo ck +r o +ju st +polit ical +1 2 +am i +wor l +so on +go wn +graph ics +3 5 +re la +pu mp +w ed +ren dering +gar age +vi du +r are +friend ly +da ily +web site +emp ty +emp ire +de v +equi pment +commer cial +butter fly +se y +a we +clo sed +wor th +tran spar +run s +al p +indi vidu +b la +a ff +tre at +al o +me et +con ference +n d +effe ct +shap ed +person al +co ach +inter est +b it +cre ek +ac hi 
+st ick +ro ck +form ing +sal ad +en ding +gra de +p ing +ne u +fil ms +tu al +ac ry +coun t +or n +at ory +ment al +de tail +s la +cle an +e ded +secre t +ne igh +mo ve +con dition +ch ev +d ant +ri de +a ver +s or +h in +tren ds +ag re +crow d +b il +she et +d im +publ ic +de mo +pas s +mil es +do ing +fel t +bl ack +v s +re fer +f er +te en +wan ted +s ons +p y +k i +eg g +sof a +s av +mil k +develop ed +appear ed +ex ecu +aes thetic +cam p +bran ch +scen es +fit ness +z i +b ath +re al +sta in +pe ts +p aci +ent u +de partment +t land +b roo +s ent +av en +m o +we ather +ab and +bot tom +wol f +a h +form al +fe ature +sk i +ch et +ill a +t ition +b ig +int ing +t i +e th +go al +den ti +four th +arti sts +con tr +acry lic +b ac +t ic +cap ital +r i +le tter +b ic +k ne +ne eds +sig nific +bloo d +comm it +ro i +an ni +thou se +en cy +den tial +200 5 +kore an +blo g +mu s +appar el +var iety +ar ms +b ing +man ga +chie f +tru e +on g +bor der +pi zz +s its +li e +e ss +u pper +ev al +ve t +thur sday +anato my +pre ss +en ough +si t +iz e +b is +se tt +h es +sing ap +ve gan +clou d +cre dit +soc iety +f y +br u +transpar ent +de pic +histor ical +as ia +bo w +gu ard +sing er +li er +gu ar +e ps +every thing +emb roi +nor th +be ll +ite ms +pr inted +bo ats +wed ne +ct u +ack ed +li t +be y +il er +squ ad +him self +con test +k it +sp ain +se at +ste l +inve st +back pack +provi ded +le ss +ran ked +di e +po st +eg gs +z er +ir us +sy dney +p a +m r +fair y +n ames +2 nd +sc ale +medi eval +celebr ity +sou th +col l +ll er +her it +da vi +wedne sday +min d +har b +safe ty +ct ly +th re +n at +dol l +n as +con ver +sc ra +h o +lad ies +e ce +tu es +mon day +singap ore +coron av +vin yl +st y +success ful +lar ger +den im +u ps +st un +ev es +m ight +co vers +b ang +d n +d an +coronav irus +fi el +f ail +ch ic +b ill +tues day +labe l +announ ced +sour ce +di es +retur ned +brac ele +eleph ant +flo oring +b on +bas ket +tur ned +dit ions +al and +rou gh +cro chet 
+comple ted +br us +moun t +paci fic +n ative +co st +sh adow +mer maid +k ong +d den +k it +ate g +d ation +gy m +cap e +n is +previ ous +wit ch +rail way +st airs +p ir +drin ks +gra di +sket ches +igh t +al d +pro vin +har ry +p and +f al +m al +re sc +pow er +4 5 +k id +f ted +be t +for ward +aer ial +re ar +vo lu +t roo +se cur +var i +tra ff +o t +no te +ki e +p en +to ron +s ar +bur y +bo ston +si des +9 0 +ro cks +vers ary +stre ets +ligh t +advert ising +toron to +te ac +net work +actre ss +ser y +mater ial +dan cing +qu es +nur sery +canad ian +am ic +tu ral +pract ice +y u +scre en +lit er +d or +pe ar +phili pp +co ast +decor ative +se tting +herit age +hur ric +fig ures +re fu +li ber +ze aland +radi o +i ans +fact ory +me dit +reco gn +bi ble +v oc +pizz a +awe some +anim ation +le ast +wee ks +ve l +art icle +landscap ing +stand ard +mar ble +sau ce +coun cil +so lid +ca the +f air +sy n +pur ch +y l +od le +sp r +anni versary +sh are +che ap +recor ded +dre ssed +wr ite +oun ds +go d +st at +foc us +bb it +langu age +inst itu +provi de +compe tition +c ities +li ons +hand made +y et +fin ish +al u +i re +ro bo +s ac +dest roy +in j +victor y +r d +manag er +so lution +f alls +i v +relation ship +gr and +dist ance +off ers +fac ts +exhib ition +p el +secur ity +n ic +lam p +ho le +dra l +bra in +mo ving +ben ch +acade my +sh ore +environ ment +to ps +i ble +ei ther +f ence +con tain +writ er +whe els +dis cu +enter tain +enti al +c ine +desser t +aband oned +200 4 +spir it +f ight +el ing +a war +ch am +letter ing +ho p +to dd +7 5 +j am +st ated +cock tail +p ages +cab in +fic ation +hor iz +par ties +g ent +con du +sco tland +o ak +mon ster +re spe +k le +su s +ele ment +an g +comm and +figh ter +begin ning +uni verse +pro m +y our +le tters +on ic +to ols +week end +vi ol +offic er +re li +ingredi ents +he av +traff ic +business man +squad ron +lar ly +cathe dral +spee ch +ad o +ju mp +posit ive +din o +dam age +hor ror +p in +cel l +j an +ex pen 
+w ish +in dic +woo ds +is s +lo ss +russi a +curren t +pre v +per formed +dri ving +b rought +w aves +se lling +gradi ent +diff ic +de mon +n ine +e ding +bal con +po tat +bro ken +s anta +d ata +smo o +ch er +origin ally +ic y +war ri +4 0 +re public +co t +fra med +brit ain +g lu +h il +popu lation +ar ab +won der +k il +ra ther +activ ity +ap po +mo tor +manag ement +teac her +l as +ti an +di a +hu ge +vie t +z es +t ent +ter y +h our +wor ked +al ed +l en +home made +h ills +sli de +so lu +th in +am ent +ha w +sal t +ad op +in et +sig n +sp read +l ine +jour nal +lay out +b loc +cop y +ke y +par ents +en ced +oc cas +fle et +ac cep +ultim ate +symbo ls +g g +mon d +fran cis +deci ded +qu ick +comple x +spo t +celebr ate +eag le +plan ning +3 2 +h au +back grounds +stra w +sm ile +an gel +d am +out line +organ ization +mo vement +fac es +delic ious +acti ons +cli p +the mes +se ating +ex pla +coun tries +st ren +est im +f i +st e +m it +un ic +le g +am oun +un k +me chan +on a +sou p +v als +the ater +cho ice +mon o +k night +er ship +entertain ment +man chester +ali stic +moti v +sur pr +men u +cor n +th row +so lar +shel f +le mon +ad ult +3 0 +ne on +tren dy +r is +se attle +stren g +incre di +tre at +i denti +mp h +ear rings +c ise +tig er +repor ted +h us +hurric ane +sun glasses +al one +speci ally +ire land +l ink +l ers +n es +c ric +spac es +ent ire +qu art +ev entu +l oun +rel ated +fam il +victor ian +h ong +5 0 +b o +ge ous +pen dant +francis co +ca ve +x i +eventu ally +associ ation +ser ving +tech ni +ch a +mas s +gre ss +spe ct +r ich +ric s +pe ace +go es +to mat +dec is +ic ed +18 8 +stra ight +ch al +s er +ten nis +m ine +sil k +stor ies +im ag +i x +an aly +ste ps +bu bble +pi e +li st +k illed +pic tured +g en +tem per +p au +zo o +un its +w a +pi x +claim ed +al low +cab inet +19 th +ent y +tu s +tu be +mexic an +cru ise +press ure +te ams +lo dge +go d +cand y +vege tables +de er +d u +appear ance +ri bb +m my +ni ke +f au +f l +ex tra +sm 
art +some times +ex clu +lan e +in door +v ity +bic y +mo ment +k ings +20 00 +ta ined +i er +ar a +pres ents +ra ised +high est +im ent +k ers +rema ined +re as +t as +usu ally +follow ed +me tho +re ached +co w +cre ation +cau sed +mini ster +ta ining +pan el +ri er +p in +bu tter +wild life +ren t +le ts +ro c +or din +surroun ded +wal ks +cab ine +lin ed +celebr ation +aver age +foo ds +gu y +car r +su ite +off er +su its +geo metric +i ors +tour nament +recor ds +bron ze +s ke +lead er +gi ves +s wi +pump kin +re pe +bi e +bou quet +lea ve +c lar +i p +ci vil +ad d +n y +altern ate +work ers +establ ished +e ly +mo m +perfor ms +st ate +cre ating +c ran +p ig +i stic +te ly +pupp y +balcon y +ber ries +brus h +w ire +colu m +bro ther +ac cess +we ap +af fe +man s +choo se +200 3 +bra z +decor ated +illustr ated +bu ll +soldi ers +con serv +eng er +ho c +allow ed +commun ic +me dic +ment ary +br ig +t ted +sun rise +wo mens +3 6 +ce ment +cher ry +oper ation +ad ditional +lo bby +se wing +n ation +ad ap +5 th +zz le +regu lar +g lit +be at +si b +p ak +shel l +no tes +on to +me aning +pl ates +cas es +stre ssed +p sy +t ons +gun s +recor ding +hoc key +aven ue +lo ck +spi der +wel come +f s +wh is +quar ter +chang es +leg end +al ready +re nov +re l +o m +po tter +fi x +wa iting +li p +st ones +relig ious +go als +an ces +ri dge +p al +mu ral +pr o +laun ch +memor ial +p ond +pu l +gen cy +nu t +jo ined +cu tting +camp ing +min e +ish es +pri me +no ted +tur key +ro ses +pro bab +ar gu +ran ge +m ers +s ands +u r +glo bal +j ames +star ting +or i +p ine +b ill +gor geous +be e +pi ano +re r +b all +dis cover +sit u +hum or +sho o +l ying +christ ian +si an +che e +str ation +go th +jun gle +willi am +sele ction +con tribu +pre gn +3 rd +figh ting +k er +loun ge +me ans +por ch +you th +k o +bour ne +sa ying +r ights +re y +rela x +ing ham +ex c +glo be +ma int +att ends +ho pe +g la +sa int +no te +l ic +bey ond +f lash +butt on +mo tion +u mb +for ced +jour 
ney +t am +tra de +cal i +todd ler +est s +lun ch +ex tre +ac ts +lo vely +re port +ky o +pol ish +the ast +near by +blu e +p it +ir ish +le n +t ical +pe ak +portra its +stun ning +in i +s low +pos ing +3 3 +ben e +d ated +pil low +holi days +4 th +ou se +at ic +ac u +bed rooms +mo de +tran sport +sco o +ne eded +mar ke +the med +ctu ral +a head +go wns +mp ic +ear lier +f inger +we l +ig ra +e specially +li ves +joh n +it i +kne e +le ton +low s +marri ed +o tic +e vi +stick ers +p ic +par ade +ad mini +gro w +la w +life style +carto ons +tou ch +n am +th ail +drin king +200 1 +di vi +li ke +egy pt +ro lling +thail and +whe ther +tutor ial +requi red +sm ith +si ster +as es +ba i +ballo on +com mon +qu ick +inspir ational +w ich +f loo +d v +exer cise +to te +bel t +si zed +sun ny +gro ups +vir g +re alistic +organ ic +actu ally +to kyo +t an +st ance +cer amic +sle eping +cr is +sta ined +ow ner +m n +i son +tran spor +oper ations +li a +y on +d ir +studi os +valent ine +i ed +mis s +re d +usu al +multi ple +ig e +dol ls +pro ce +ir s +proble ms +scri pt +ap ol +com for +philipp ines +con tra +s ity +wat ching +d river +min t +g low +al ized +pack aging +fla v +si x +te aching +3 7 +he at +d ang +coun ter +m ig +shar k +18 6 +jer sey +lan tic +d ary +clo set +ma in +vo ice +near ly +du bai +chang ed +unic orn +pre ssed +bur gh +sal es +bracele t +ru g +il ities +m bl +pri mary +ar ies +pass ed +syste ms +cro ssing +your self +tr acks +gre atest +no vel +cra zy +ast ro +sou l +box es +ir ing +che f +effe cts +b c +ho ly +cor ps +re cent +stri bu +mas cot +co con +u t +fic tion +chil d +invit ation +cou ra +sub je +el l +fiel ds +le aving +grow th +uni form +partici p +se cu +fif th +ra bbit +par king +to ld +hel met +tal king +u es +en ge +it ch +re ference +sh er +proble m +oly mpic +cabine ts +crit ics +hus band +ribb on +sm all +at lantic +le gs +vide os +gu i +pri or +sn ake +st ro +ex i +o wl +ph ia +expl ore +signific ant +k nit +geor ge +wed dings +re gi 
+mic ha +car ib +sub sequ +troo ps +cric ket +mb ia +re views +gi an +ing ly +di stribu +close up +seas ons +to o +vis it +ist an +3 00 +ic e +ac ted +carib bean +f all +sho ot +mon u +it self +batt al +prin ci +con clu +alp ha +o id +in ches +invol ved +vert ical +200 2 +av engers +is es +ter m +r an +fe b +fu r +mess age +bou ti +chall enge +pol y +moun ted +inter view +ven ue +contain er +g il +probab ly +lon ger +bl on +call igra +coast al +wal es +mon key +ro ads +b in +sh ing +every one +st ant +ter ed +gu ests +marri age +fin ally +inter est +stre am +ber g +y ach +sp here +me at +mb le +chap ter +3 4 +c ers +jack ets +fro zen +val ue +sau r +par li +si on +su sp +ff ed +produ ce +re ti +b it +bar bie +he arts +marke ting +d ge +man d +p pers +bo o +to pped +di stressed +ele v +fin anci +foun dation +no on +sco red +sp ent +di et +pre ci +z one +over all +cocon ut +vi et +bas ic +sle eves +am p +p leas +b alls +expe cted +5 5 +re placed +hou ston +cam pus +re ach +ad ding +f act +le ve +g ers +scho ols +m ar +ca tho +bu dd +po cket +quart ers +tren d +gre ece +hon ey +pear l +sco pe +a del +crit ic +minu te +oc cur +ex cell +tra iler +r ate +tr ying +j ar +sh a +expen sive +st ly +wor n +to il +win d +ff s +cli part +ben ef +ne g +carr ying +mater ials +sci enti +ct ic +res our +k h +inter net +can al +gro w +exam ples +deli very +no thing +coo kie +de al +re se +bouti que +nu mer +b one +eas ily +less ons +mu sh +no min +bott les +some one +init ially +emplo ye +hel ic +pic k +co me +ve st +appro xim +catho lic +ga ther +desser ts +der y +te ch +beli eved +wre st +tit led +j or +per forming +do odle +amoun t +on y +ge tty +minim alist +cel l +bicy cle +blan k +soc ks +to ol +musc le +20 th +new sp +sco tt +be ard +wonder ful +her oes +alpha bet +quick ly +bak ing +ic ian +ion ed +4 8 +dino saur +im medi +f ying +se l +ho te +gener ally +tw in +s mart +pil ot +sel ves +mar ks +j ac +engine ering +auth or +b ars +imp act +dv d +is er +bir th +cur tain +vis 
ual +robo t +who se +a f +inter iors +am y +la wn +hab it +an ced +bo ards +p an +s ad +a e +fac ade +blan ket +p lot +individu al +ant ly +li qu +dire cted +helic op +treat ment +de st +fol k +b ands +ac ting +bu ff +st air +le v +lau gh +sky line +prote ction +d ates +f at +re serve +op port +ter race +dec o +v ase +e vil +elect ron +interest ing +th in +he ight +stain less +vol un +ar ena +supp lies +comb at +bo y +blon de +fre qu +oly m +sh ine +embroi dery +car ry +win ds +w are +calligra phy +cti vely +ter rit +al co +lay er +gr un +h amp +crit ical +il es +re qu +de sk +pi g +lan ding +mel bourne +m ill +install ation +m aps +con tains +umb re +ch el +o k +incre ase +la in +ve gas +cap tured +colour s +flo ating +ban ana +charac ter +4 2 +mou se +ther e +celebr ities +smo ke +ex cep +low ing +ant ine +inf an +d ale +br a +cent ly +at l +execu tive +like ly +hi p +fit ted +appe ar +e ase +th ick +invest ig +davi d +opport un +child hood +hi dden +ac e +en gra +audi ence +re cently +k ra +is le +wat ers +compan ies +desk top +sp in +hote ls +cer tain +ap plic +pa stel +t ale +t rou +t ary +issu es +shor ts +fl ags +id ent +n ap +s afe +as ked +viet nam +ly rics +sit es +she ep +be at +o m +a be +p le +cycl ing +phil adel +hon da +bu ll +pro posed +s are +high er +note book +cle aning +small er +n ia +provin ce +mat h +sa ur +f lies +ma xim +commit tee +ar range +un known +plat form +appe ars +philadel phia +o h +ad ul +z u +shi pping +comman der +respon se +offic ers +b oun +f ting +head s +di c +3 8 +iz ing +ap p +gar den +ph er +bl ouse +po e +ad or +d ad +hou se +dire ction +ep er +blo ss +lou is +ru b +bo ar +in se +so viet +co de +th us +li ver +comb ination +dra ma +co al +suit es +be ast +chan nel +nav al +ant i +li ft +me an +kore a +incre ased +leg al +or nam +al ing +mi ami +straw berry +batt le +pre school +a b +micha el +be coming +te ll +k iss +ti on +jo in +i ii +c able +egy p +po ten +particu larly +ther ap +ati ves +gu ys +ty po +d dy +just ice 
+ex pan +ex press +bi kes +el l +an gu +le gen +after noon +financi al +e th +6 5 +resi dential +t in +re gar +init ial +per form +i us +p ag +d ar +th ron +sc ore +lo ft +son ic +ou red +p itch +ele ction +obje cts +j i +remo ved +pe an +separ ate +bas ement +sa il +ra pi +au to +4 7 +tro phy +beli eve +oc t +glit ter +ent er +s burg +is ra +am ing +lo ts +hun ting +sk ills +rou s +out doors +18 7 +ol der +present ed +ni e +al es +introdu ced +o wn +hi king +clim bing +b am +thin king +dol lar +ine ss +resc ue +deta iled +ste am +w ide +pun k +dis hes +4 1 +ran ch +brig ade +per cent +power ful +bo mb +vis ible +p ap +li z +to yo +wor ker +goth ic +tu b +je ep +bo tan +corpor ate +je well +m ical +rema ins +c ateg +b m +pro file +as ing +contin ue +iss ance +photograph ic +3 9 +decis ion +n ings +bur n +ph ones +du tch +screen shot +tt a +w ise +ic onic +cou ch +st itch +lo ved +s wit +quest ions +t ap +worl ds +m am +ber lin +state ment +hel ped +mal ay +const itu +si de +cup cakes +an s +arri ved +tour ist +le e +feder al +dit ion +diffic ult +sho ts +vege table +scott ish +big gest +liver pool +benef its +att ention +cla y +ag a +battal ion +f low +sche du +tur tle +fru its +neigh bor +1 50 +alle n +ne cess +can yon +gre ater +met res +fro g +9 9 +provi des +toil et +toyo ta +ap le +ke ys +dire ct +cor al +fig ur +phy sical +co pper +po or +ra w +mine craft +pro of +6 th +stu ffed +s ink +br igh +champion ships +reas ons +h it +m ass +vel vet +bun ny +co in +shel ves +6 0 +do es +go s +n ight +mak er +does n +co zy +no v +du ck +evi dence +f en +stri ped +pen gu +au lt +f a +199 9 +or dered +oper a +associ ated +hen ry +da y +tt es +volun te +che ss +im per +e ter +infan try +br un +wra p +di an +k s +shap es +s que +and er +altern ative +cor re +sch e +g host +archite ctural +attemp t +con ditions +li ps +si zes +mo di +de b +fol low +de but +previ ously +vie wed +er e +w as +surroun ding +ar row +le o +pic k +ad a +free dom +att end +par d +ra il +spi ral +ne 
st +in fin +dec lar +egyp tian +ligh tly +sle ep +bra ss +m ps +ak s +esc ape +s k +ra t +run way +plan ned +sh ak +cal end +quil t +shi eld +ac com +su pre +leve ls +ur a +approxim ately +form ers +famil ies +ev o +pur pose +we ars +med al +en ix +y ards +adul ts +the m +grun ge +bo b +soldi er +jack son +al ex +dim en +ri d +tour s +ru g +im pres +4 9 +pati ent +por tu +tal k +fac ing +resi dents +sk in +o le +fo li +champ ion +institu te +carri ed +simp ly +fon t +del hi +resul ts +a quar +mu mb +smart phone +concep tual +ing ing +4 00 +champ ions +thou sands +cy cle +com position +man sion +ju ice +v ent +glu ten +ve ter +ch ron +4 4 +wash ington +de c +mar ked +in ci +c i +bur ger +v it +ag on +ve ctors +mat ter +jewell ery +r ig +oc cu +ro bert +sen se +de te +them selves +se as +lo g +flo ors +9 5 +r ical +he ar +cont ent +b ound +dre ssing +ru les +plu sh +ch rome +c on +cer ti +ti er +bro thers +cine ma +al o +ess ential +c ad +5 2 +fan cy +gener ation +st oc +win ner +sh ade +dam aged +2 8 +so le +l il +constru cted +supp ly +contr act +can cer +in ations +ag en +ak a +ric ul +mumb ai +alco ho +pu zzle +memor ies +progra m +emer gency +cle ar +colu mbia +j et +tre as +par ks +coll age +ct ures +lo aded +col labor +le dge +st ery +c ure +g al +a qu +fin ding +pupp ies +mo stly +cand le +s oun +work shop +ro t +parli ament +di dn +gra ff +ct ional +celebr ates +4 3 +cor e +calend ar +tra y +cul tural +pla in +and a +who le +v ine +p ay +gradu ation +for d +motorcy cles +i j +b ake +mon t +s word +am ed +high ly +fu el +defen se +indepen dent +sa iling +b i +j az +wa it +o live +in o +al ong +t ac +enjo ying +to wn +er ran +m ad +ack ing +be ige +sk i +ma x +ren der +premi er +ch or +al i +che st +bat tery +4 6 +along side +fur n +umbre lla +and er +offic ials +air plane +n au +char ge +graff iti +descri ption +199 8 +7 th +un usual +present ation +excell ent +le s +ag ricul +wor e +z el +dist in +e ye +co ok +pat ri +ce des +re plac +ho g +be ef +d all +si 
r +cl ick +streng th +do me +incredi ble +le m +s ent +5 1 +ri son +por sche +la b +appo inted +braz il +n ation +cir ca +v ention +laun ched +demo cr +sele cted +fail ed +r ack +fel l +auth or +au stri +s ad +ty pical +beg ins +typo graphy +l ac +ro ws +de dic +is lam +cl in +so il +w ic +exten ded +quest ion +influ ence +ne ther +dis covered +jo int +f c +helicop ter +w ast +tru st +g ur +car ni +wal k +ou ts +por tion +h at +se ll +i stan +br ands +do t +i da +mer cedes +arm or +medit erran +se eds +ve y +ide al +po le +yl or +p ine +studi es +scri p +ali en +gu l +amaz on +comple tely +p c +off ered +au g +moti onal +boo k +8 0 +sing ing +curren tly +ang ry +gam ing +chap el +ck y +st al +re ct +mediterran ean +remo ve +under water +whe el +u d +se x +spo on +a e +scri ption +sp lit +lon g +qu ite +h all +mil lions +5 4 +pass ing +yach t +ta g +scar f +foun tain +fre sh +fun ction +de pression +at mo +mar sh +e o +mit ted +bak ed +t one +govern or +demon str +bill board +ob serv +imper ial +re ason +rema ining +k ill +bal let +visit ors +destroy ed +6 4 +pr is +de p +do me +laun dry +to ward +head quarters +ac ci +co s +car bon +estim ated +pu r +th ai +exclu sive +p s +g ary +ballo ons +be ll +us ers +ru le +fu tur +bro w +tri angle +new ly +fan ta +in nov +we al +she pher +bur ning +for mu +ta ste +en s +me an +sam ple +un is +re ly +1 1 +ham mer +see ing +supre me +be ars +dre ams +con ven +mat ching +install ed +ru ral +pan els +l ag +reve aled +m att +techni ques +cali for +lan ter +in c +appro ach +l ack +r ated +s af +attr active +fall ing +qu ali +ven ice +jun ior +ox ford +bre ed +produ cer +r h +pow der +li ved +fer red +super hero +an cy +fe eling +de m +b and +come dy +sc ent +cu sh +dall as +re pair +m all +horiz on +compar ed +f ill +pr inting +o logy +be y +unis ex +ici ans +cr y +y ers +par alle +t ics +h ats +dire ctly +defe ated +bran ches +int ended +cour ty +m ile +co w +p ure +weap ons +ll ig +19 20 +bas s +u g +prote ct +k un +kno w +van 
cou +y ment +d son +199 7 +ac he +vancou ver +car ved +bo ld +break ing +pos sib +wre ath +mart in +w an +coun ter +z ing +tt i +sl ice +pra ised +resi dence +g lam +com fort +call ing +ati vely +chang ing +me tro +nas a +py ra +sep t +comfor table +ow ned +sun g +pa sta +di ac +warri or +sur g +cur tains +stre t +volu me +la id +phy s +w ick +transpor tation +bm w +fore ign +p se +er al +bou ght +formu la +pa in +buff alo +pho enix +ve sse +ter ms +asse mb +rug by +war dro +5 6 +18 th +nether lands +par ad +reg iment +cover ing +pu ll +tra ins +can not +dis ease +s or +op tions +0 1 +univers al +comb ined +ch ance +episo des +w y +jaz z +newsp aper +e ing +lo tus +hon or +who m +ster ling +posit ions +char les +bam boo +jump ing +pl ant +en coura +ur ies +be ha +co ol +lat ter +re vo +bo ho +me tal +o z +jo ker +pat ro +mo sa +o on +gr ill +in ner +potat o +d ur +don e +ta ylor +work out +fl ic +medic ine +n inten +clas si +numer ous +ess ay +sh ades +rist s +john son +tho mas +mo sque +ninten do +no se +re comm +ill er +in done +particu lar +spe ctive +hand le +199 6 +tar get +u se +o ven +col lar +wh ale +ad ing +c av +illu min +pro pos +ath ers +re member +bre eds +ru s +econo mic +conne cted +kil o +read s +me me +harb or +can di +cit iz +stat us +ke eping +ic ing +suit able +lo gos +m aple +rock y +ak ed +cro ss +i ke +n ity +h an +che ck +gu il +6 8 +pr e +lu m +ro lls +s wing +re ment +s lightly +gi e +sc and +min or +l ars +my tho +fri ed +g one +nur se +ch i +n ations +in y +k is +att ached +sig nature +199 5 +con fir +pau l +mon it +k ni +bal ance +inte llig +pak istan +lin col +whole sale +sum mit +wardro be +for k +el la +g on +gra m +maint a +c en +awar ded +ssi ons +anim ated +albu ms +ke pt +du ke +mu slim +fanta stic +bu ck +tur quo +turquo ise +suppor ted +as s +tri ed +restaur ants +leo pard +ri ver +indepen dence +immedi ately +any thing +chall eng +analy sis +ak i +air y +deli ver +pp y +bar cel +sc i +beg in +rub ber +roo f +fic ial +pow ered 
+commis sion +h l +la b +wh it +h ap +pos ite +subje ct +tter y +k ins +bo l +aquar ium +assemb ly +c s +atl anta +ar ctic +broo k +gro ve +pro cess +form at +dest ination +be l +ent ered +9 th +ex otic +poten tial +ti p +shoo ting +me als +so ph +se ven +ro pe +or ial +ir d +bloc ks +de pen +lim e +cap ture +nor mal +c d +beha vi +water fall +he dge +clim ate +chev ro +ster ed +sur f +cont act +co ins +califor nia +ar rang +the ory +organ is +n ba +k ay +sim s +mis sing +revo lution +docu ment +sho ps +w ash +ar med +goo gle +de ad +g az +al e +cro p +lead ers +ab ly +mo o +die go +ste in +monu ment +god de +ci vil +speci fic +woo l +re p +tech n +courty ard +cat ch +islam ic +line ar +adv ant +y ar +v amp +ff in +fa ith +ad vis +ssi ve +att ended +te eth +ser ious +to wel +t ar +continu es +be e +memor y +photograph s +ast ed +obje ct +ex change +bri des +pi er +barcel ona +inte gr +cau ght +5 3 +cu ts +nu tr +shar ed +lead s +nar row +back drop +or nament +chevro let +new born +am el +clas sical +moti ve +avi ation +b ones +se ed +tun nel +in su +be comes +stor y +olym pics +li p +en e +in fe +categ ory +in er +fe ar +199 4 +thu mb +horizon tal +resour ces +b at +agre ed +ser ves +sto red +tur ns +fau x +loo ked +han g +ing u +op y +com posed +on tar +beat les +an ch +re n +worl d +ta ils +de termin +jour n +bran ding +zo diac +futur istic +for tun +tri bal +issu ed +n fl +stru ctures +mini ature +pla za +v in +re tri +reg ional +em per +smo king +sp rings +c ig +over head +r ist +g roo +shor tly +ic an +sim ul +ma inten +ris k +shu tter +j a +l ations +tr e +ontar io +mat ches +cor n +mainten ance +ef for +bath rooms +as signed +o ts +pi pe +garden ing +differ ence +adv anced +id as +e co +leg is +ident ity +s age +ri a +allow s +chev y +hedge hog +ar o +than ks +co m +vie wers +own ers +foot age +de g +therap y +cle an +lux e +od les +e igh +tour ism +ef fort +ff y +av oid +sat in +reli ef +stair case +i sts +196 0s +197 0s +tou rists +la p +sing les +compe 
tit +do dge +tro it +boo t +prev ent +lat in +ske leton +s ri +cas ino +de troit +mul ti +ren a +ta x +re tail +ol a +sto res +cow boy +cur ly +f ish +ma il +con gress +dar k +organ ized +tw enty +ag ency +cup cake +rou gh +arri ves +ru ins +dec al +impro ve +n on +pa id +sof tw +ad idas +ga in +at ar +dru m +zer o +fac ility +car go +pe ter +k at +rena issance +minim al +e house +cap ac +fra mes +al t +du ty +mosa ic +it a +roo ts +air man +off en +ff le +sh es +bun k +hou sing +ly n +we t +bla zer +pe t +me adow +cal m +sugge sted +7 0 +bu ted +pas ses +con verse +knit ting +sand wich +metal lic +5 7 +softw are +5 8 +than ks +e ff +eth nic +manu fact +thanks giving +fl am +lo ves +sac red +relig ion +ike a +lo vers +6 00 +show ed +blu es +str ing +ri o +sc oring +ta i +be ij +bl in +neg ative +de l +fic tional +ac le +arri vals +te ddy +beij ing +dol ph +north west +na p +wal t +tra dition +gr ant +chan deli +t ors +ha i +un a +custo mer +ma c +ang els +mas sive +war ning +ele mentary +jo y +mag ical +ig an +bo rough +harb our +d ating +vol can +ro y +k el +mas ks +le ans +colon ial +too th +199 2 +glo ves +diam on +deg ree +tex tured +trans formers +on ia +virg in +smoo th +bu ying +secre tary +re stor +nor way +sy l +parad ise +s ap +cro at +bo llywood +sol o +foot wear +fin als +contra st +wa ii +chee se +lincol n +ite m +pra yer +ro ad +world wide +k ir +y land +ig er +angu lar +f is +ado ws +h n +as k +w is +tum bl +neighbor hood +fac e +tomat o +kitch ens +vis ited +f ing +y m +en e +shutter stock +an kle +fore ver +photograph ed +cam bridge +isra el +r ising +pu b +hol der +r ation +a id +bo ss +introdu ction +er o +6 6 +r hy +sn acks +de ter +199 0 +shepher d +writ ers +se x +ju mp +organ iz +con vention +or ch +bed ding +go ds +effe ctive +en ess +mag ne +bo dies +environ mental +ro cket +ve ments +che er +pa irs +or a +gi a +st ations +la kes +tur ning +ou ter +smoo th +me ter +we ak +e ar +sn ack +bus y +chel sea +v o +car t +vis iting +mil an +tre k 
+clo th +mi st +zel da +lin en +prote in +botan ical +tu p +h its +maj ority +cu ps +spo ts +d ying +pen insu +ligh thouse +pir ate +cli ff +liqu id +u n +att an +carni val +wa i +we ird +in k +purch ase +decor ate +che mical +st ack +be aches +vamp ire +wra pped +opportun ity +con fi +ric hard +wast e +emper or +g ment +hand some +tex ts +exten sion +sare e +soun d +know ledge +2 4 +bra ids +8 5 +car ol +al ism +divi ded +ment ed +pe ach +arab ic +po inting +pi er +bu cket +pregn ant +tr y +hin du +stra p +am ph +conne ct +ak ing +exper im +sur f +goo ds +diamon ds +temper ature +hel ping +re aching +sh i +ag ne +lead ership +advent ures +au di +o re +go at +stri ke +tou ch +pr ices +me ters +gra in +ru m +rac es +vir us +itu de +me ets +po lar +rece ive +alu min +ffe l +re ality +t ang +se al +loc ations +fe eding +char m +malay sia +person nel +hel ps +figh ters +any one +tri m +de vil +dra matic +i el +dre w +con fe +ad mi +chi p +secon ds +sm iles +mo ments +spac e +cu s +bo he +ist ics +d ental +re solution +5 9 +ter rier +stre ss +se ats +assi stant +con flic +jor dan +or che +call s +s u +pic nic +hard wood +du st +than k +suc ce +make over +char ts +dra ft +bus h +pass enger +su bur +199 3 +a i +li x +engine er +po ttery +prepar e +ab solu +so ap +t ape +re u +cla im +ornam ents +ven e +teen age +199 1 +profe ss +cop ies +s end +circu lar +nu ts +de par +evo lution +performan ces +ent ing +kni fe +archite ct +mad rid +liber ty +broad cast +bad ge +ma xi +195 0s +sou theast +kn it +sc ary +scen ery +dis plays +inter pre +m ate +educ ational +d le +exi sting +bri ef +run ner +im e +auth ority +7 7 +att acks +hi p +j o +le st +suff ered +ast s +virgin ia +cand les +8 th +g i +lay ers +instru ments +necess ary +ang les +k shire +sh ang +carri er +k u +197 0 +vi c +ex p +fil ming +7 6 +pack age +ig n +be side +six th +electron ic +ele ss +sequ ence +dedic ated +g ru +re main +emb lem +is n +wrest ling +ban ks +tutor ials +il ers +saf ari +ge ly +spo ke +ac cent 
+box ing +re treat +mu st +tra l +celebr ating +subsequ ently +pine apple +van ity +t wel +mar ina +arti stic +heav en +conne ction +ac cu +j ack +some where +peninsu la +neu tral +con cent +paralle l +n is +c ade +comp on +mon t +bohe mi +ho t +pol icy +no t +tru th +o val +develo p +refle ction +er ies +hun dre +me ant +d al +w ants +por k +adv ance +pu tting +pean ut +bl ing +per spective +speak s +colour ful +ab bey +dis co +mo sco +mar ines +adv ice +clas ses +arch i +ear ned +maxim um +gro wn +dome stic +7 2 +offic ially +ici ent +in corpor +bis hop +c age +ss y +allow ing +metho d +plan ets +phil o +situ ation +fac ed +i o +f allen +sour ces +atmo sphere +mer ry +over looking +t act +sh aring +respon sible +ny c +alcoho l +pat ch +i bi +ric k +d ges +mis s +mosco w +ta x +champ agne +admini stration +ador able +e sp +b ond +re in +quar ter +sc rat +wa ist +pand a +el f +gu es +in spe +bac on +audi o +pick up +su stain +light ning +pri son +cap it +wor st +air lines +v ice +tri ple +198 0s +d airy +el and +l is +pro min +sam u +apol is +arri val +c c +sand als +le y +prepar ed +or ders +op tion +je sus +ph ase +gues thouse +you tube +fav our +lic en +mo du +speak ing +fe ather +meas u +p ile +or leans +l ily +tomat oes +zer land +swit zerland +c king +il le +as pe +renov ation +willi ams +r ho +communic ation +f are +arrang ed +dang er +ve land +f ame +access ory +hundre ds +con sul +ric h +tri al +re ef +198 9 +foun ded +mo l +dro ps +week ly +mo tel +arrange ment +man or +por ce +refer red +cre amy +dy nam +j as +ci a +th or +occas ion +e ttes +9 0s +tab let +i v +st ive +g in +v ul +lo y +pres ence +chi ps +under ground +lo se +ab les +pass engers +fl an +potat oes +wh i +ene my +porce lain +re min +custo mers +la ven +bro ke +mytho logy +be ans +r ating +sw iss +godde ss +tim ber +ac coun +dro pped +lo gi +hy dro +inv as +sam sung +6 3 +play ground +consi der +solu tions +me try +en der +hun t +ha ven +ar ed +sal mon +0 2 +me sh +s se +circu it +i ved +mo 
roc +dest inations +sun shine +territ ory +leg acy +ra ff +inter nal +photo shop +stre tch +ati vity +t x +b age +psy cho +ac compan +lo ver +cou ples +ol dest +actre sses +0 9 +b ag +tw ice +redu ced +as is +int h +jo bs +k ick +prote ctive +sculp tures +fl yer +propos al +effor ts +off ering +st ol +reve al +laven der +f illing +0 3 +il li +str a +ph ar +k im +econo my +vis ions +s ight +cel ls +legen ds +lu s +play station +hor se +7 8 +a a +tw ist +e e +j en +pre dic +star ts +k an +au st +fer r +hand bags +w ron +vi br +de plo +ar gent +al ised +brook lyn +inj ury +ang er +n ast +de t +un able +pri de +le ment +simp sons +mat es +depic ting +stri p +invit ations +bloss om +k al +slo pe +ag gre +avi an +stan g +gl aci +k ish +man n +ill ery +far mer +loo se +b bles +je wish +ch rist +ed ward +warri ors +defe at +re present +j am +spo tted +st ood +0 5 +to wers +mu stang +manag ed +lam ps +re vi +po land +fore ground +pan or +ch u +ing e +mar ath +profess or +consi der +trave ling +f las +cra fted +con su +te am +t ire +arti ficial +cle veland +cast ing +ste amp +6 2 +att acked +mir ro +steamp unk +pe pper +tal es +pe ter +fac ilities +pengu in +mal es +con taining +0 8 +die sel +man i +sp ray +sun flower +may or +weal th +op ens +r ally +my stery +d red +r ine +ish ment +desig n +cra sh +s ab +goo d +st en +capac ity +sum ed +al lo +te lls +respe ctively +si oned +accom mo +oc e +circu s +scand in +ele cted +cele bs +advert is +br e +ag ent +cal cul +us er +custo mi +lar gely +ke e +d ock +skin ny +k ok +u lar +ke eper +provi ding +in sur +di um +bohemi an +ep ic +excep t +me m +8 00 +cel tic +gal o +rail road +ac count +har mon +prote st +mo od +presi dential +af p +d ent +m at +awar eness +valent ines +cra f +6 1 +oc cup +cri min +choo sing +dep th +ar es +surpr ise +suppor ting +f at +iv ory +snow y +gar lic +ho sted +el se +str ateg +kitch en +infin ity +represent ing +196 9 +re cru +ph an +der ly +j ones +distribu tion +ch ro +occur red +gul f +ar sen +ff 
on +liter ature +ti ed +mer ch +budd h +pro gress +sal on +ordin ary +sle y +se mi +south west +bb c +prepar ing +ph ra +ac hu +bill ion +br ings +ap art +dou gh +qui et +d al +recei ving +tas k +retur ns +init i +thou gh +and re +196 8 +danger ous +cheese cake +dev ice +ex tr +scienti fic +person alized +see ms +h ill +chandeli er +meas ure +my ster +t end +ur o +sat is +sk ate +per man +relax ing +cri me +wr ist +gradu ate +origin als +tre ats +ren tal +bl ur +ten ant +app li +6 7 +e c +me tro +0 4 +du blin +0 7 +be ar +super man +con vers +mo ck +he ard +reti red +van illa +hil ar +wh ite +tion ary +up coming +cycl one +mo le +plan es +0 6 +hu mans +portu gal +c out +spir it +disco very +9 6 +vo te +docu ments +employe es +astro no +applic ation +survi val +chan el +post card +st icks +ron au +tr in +s no +fo g +d ding +ro asted +fu sion +br inging +a board +inst ant +qu est +cir cles +proper ties +pol o +tal ks +gen der +declar ed +f ou +ro d +ver sions +dre am +la ying +ans wer +tr unk +ast ronau +dark ness +al li +pris on +f ellow +manu fac +mo ves +198 0 +import ance +agre ement +coun try +pro mis +19 20 +clo sure +re je +motiv ational +p m +u ts +ka waii +str ange +o s +and ro +flor ida +redu ce +fin ds +hu man +sche me +mach ines +f ake +re model +re sting +tr actor +attemp ted +th r +u al +se af +swe den +ri des +me ly +ra ise +loo p +mess ages +bal i +ru p +whe at +ant on +198 4 +ep y +be n +re public +in scription +te ach +pri ze +pa th +fir m +represent ed +archite cts +athle tic +pla id +or ts +fran k +nutr ition +7 1 +dy nast +democr atic +m d +pit ch +b eng +str ate +nu clear +embroi dered +te sting +ri an +thre at +extre me +thu mb +farm ers +tex as +retur ning +i ra +actu s +cy lin +meas ures +motiv ation +friend ship +def ence +tu l +ing o +b log +contin ental +o sc +s in +tar ian +gri d +and y +polit an +s ounds +il i +coo ker +arch ae +purch ased +in um +stri pes +st ure +fe ed +g ings +u ms +resul ted +g ar +out let +et ings +elect rical +pa 
ired +8 8 +se tup +e din +squ ad +fi red +w ishes +exhib it +g at +chi ffon +squ ir +cin nam +car d +k its +lan ka +conserv ation +mush room +techn ical +ju dge +bra d +extre mely +ag er +gam e +commis sioned +bang kok +s nap +mode ling +a h +sen s +y an +mis sed +oper ating +dis count +s witch +ch y +head ed +g ging +appe al +in dig +equ al +pre d +ap o +cr ack +f its +re stric +t au +196 7 +av oc +me ga +there fore +cho sen +gol a +can nes +par ked +m enti +e ast +an ia +po em +ra i +198 6 +t ally +impres sive +ali sts +restor ation +engra ving +fe stive +every day +shor t +v ine +speak er +p ad +sh ri +198 7 +pit ts +ro se +p add +ro ns +wood working +accep ted +ra ys +ti ally +2 5 +tem plates +mar s +st ation +ex tra +pic ally +nor theast +h acks +lo co +boy friend +v ans +m ist +pro gre +cre ature +t ag +m au +expan sion +indone sia +hun ter +i th +les son +on na +gent le +li eu +spirit ual +me hn +cout ure +mehn di +mis sions +tur kish +ga ined +j el +roman ce +we a +per gola +supp lied +rel atively +brides maid +silhou ettes +lo sing +attemp ts +bo mb +c actus +repor ts +op in +198 8 +exer cis +skir ts +cinnam on +17 th +st amp +z ar +se ctions +promo tion +wron g +jun ction +n or +ch o +bel le +l b +ri der +cau cas +z ard +196 0 +wash ing +chal k +char ity +plat inum +refu sed +2 9 +g au +her self +k g +youn ger +194 0 +exercis es +main ly +o li +labor atory +ex pression +jo kes +d war +br illi +o ak +arsen al +horiz on +w aving +austri a +il a +mode st +st ages +mon sters +c ci +ho stel +wal let +she ets +part ner +port ers +dis c +nav ig +sand y +strate gy +twel ve +g t +den ver +ap h +cas h +bak ery +o let +ste m +expe dition +e di +habit at +su e +cro ssed +af ford +7 9 +tex tile +ne ed +iso metric +ven ues +country side +po ols +pri mar +man h +co ordin +g inger +resul ting +shang hai +p ad +hun dred +trans fer +c attle +on ing +ad just +con ce +ap preci +uni forms +auto matic +go al +st an +di ving +stru ck +design ated +ex posed +patro l +ir a +o red 
+2 50 +under stand +u de +w ins +cry st +ty pically +pal ette +fic ations +198 5 +pro to +bro s +reg ions +a im +auto mobile +leg gings +fe min +el derly +en na +n s +musc les +con sole +sty ling +condu cted +ra ising +li zed +cho se +consi sts +emer ald +bo eing +intellig ence +m ise +hon ey +yor kshire +bu g +8 0s +en o +bo dy +ent ary +mel on +6 9 +gi raff +app lied +remo te +gre w +las er +pir ates +de sh +teen th +expla ined +ar ab +po p +war ehouse +ten der +spar k +196 6 +hilar ious +prote cted +mad onna +fe athers +scandin avian +edin burgh +l ens +te al +person ality +shel ter +replac ement +log ist +pow ers +ma ster +clo ser +bur gun +pa vil +k ur +ch e +possib ly +194 2 +lieu tenant +shar es +196 5 +oper ated +ferr ari +mus ician +ultim ately +afford able +fl at +vill as +bri stol +mu d +progra ms +pyra mid +zo mb +reu ters +face book +guit ars +g ad +vir tual +gol d +do ts +insur ance +am ster +affe cted +le ban +mb er +att ri +e ments +col oured +ligh ted +dec als +her bs +hap pen +en n +broad way +hu l +actu al +amster dam +3 8 +dir t +per h +surg ery +te es +pen n +pi ed +197 5 +si sters +cre ates +res orts +gir l +offic es +tan ks +j i +dri ed +ra p +incre asing +vege tarian +sp end +7 4 +ce dar +col lap +manh attan +nat ur +pregn ancy +f ine +sa y +wide ly +ro se +cau sing +mon tre +claim s +modi fied +port land +spr ing +el im +tran sm +voc als +perh aps +in ary +mat te +ha un +peri o +appro ved +mole cu +re pl +sp ice +o pp +colle ctions +z oo +burgun dy +bun ch +for um +tribu te +individu als +j agu +194 5 +comm ented +ne go +de luxe +lanter n +confir med +sp lash +se arch +can cel +po ke +bu t +op posite +re ments +mic h +primar ily +be es +to ms +dir ty +kin ds +hy bri +al lied +haw k +m ingham +w ick +ta il +fl ag +ra id +mi di +ri vers +ran dom +ul ty +t ens +poke mon +sever e +foc used +ra ge +perfe ctly +rese arch +ob served +advant age +ath s +shar p +fil es +laugh ing +ro ller +bal d +sound track +de e +clo wn +bab ies +kin der +196 4 +n 
c +he els +19 50 +en tic +anch or +mand ala +t ine +m l +tro users +ra ble +auth entic +happ iness +7 3 +a o +f ers +194 4 +al ber +13 th +favour ite +seaf ood +ac cur +con ve +ite ly +seven th +ken ne +por ts +inter section +van ia +be ads +d ron +ic king +secon dary +pla yo +carol ina +spe ak +cru iser +thou sand +ment ation +resi dent +b in +dan cer +labe ls +ri f +swe dish +simil ar +light weight +co sta +kitchen ette +sa ints +9 2 +po sted +conver ted +roo t +gal a +art illery +defin ition +s aving +s al +im a +ad dress +be ver +s me +sli ding +re stored +zi p +inse cts +e ars +exten sive +s low +ra i +dition ally +dem and +our s +bu l +gar ten +card board +ly n +identi fied +se a +dou b +it es +ot to +swi m +bun galo +tt le +lear ned +mich igan +nego ti +develop ing +ck er +cock tails +flu ffy +success fully +contro lled +hard ware +c ies +u ss +mbl ed +197 9 +d av +sh ake +cris is +ar ian +gather ing +dynast y +in ju +heav ily +y an +9 7 +invas ion +m rs +ma the +flav or +k illing +bri ef +bas in +pack ed +wal ker +survi ve +bl end +n an +bir mingham +kit ten +statu es +swe at +9 8 +mo on +n ights +au ction +x box +mar io +pitts burgh +di ary +ion e +anton io +ac res +influ enced +re presents +aust in +194 1 +sun light +mur als +de pot +lip stick +marath on +defin ed +ter ri +fran ch +cir cul +ck ed +jam a +colle cted +bo mber +admi ral +wood land +br is +g ang +ra sp +techni que +mi a +termin al +st on +lad der +pl enty +son y +re ss +the sis +regar ding +i ron +8 7 +sou ght +butter flies +nov a +w agon +ac re +har vest +eng aged +won der +ar ia +ben z +jo se +char ming +st ating +em ph +quar antine +la ws +dan cers +hor n +appro pri +nis san +princi pal +te sts +it ation +mater nity +desig ners +vi king +di p +arri ve +montre al +lux ur +vol k +il ton +out standing +co sts +mun ici +experi enced +g lowing +cop y +de mic +instru ctions +br and +fi x +k ent +avoc ado +av atar +fash ion +bre e +fu sed +up date +9 4 +har ley +al em +c ils +caucas ian +un like 
+vo ted +brid ges +we igh +key board +t ack +vo y +je ts +jer us +i k +ex ce +gre g +thron es +dec ade +en sure +list ening +promin ent +nomin ated +ha y +f u +h as +inspir ing +m s +cau ses +jerus alem +i an +heal th +rain y +sue de +de stri +common ly +s ick +dru m +ho ok +re action +pix ar +op tim +le ice +boo th +197 2 +leice ster +pe destri +t ism +ay a +landscap es +m are +t d +employe e +vo gue +in ser +cu is +ei ffel +loc k +3 60 +scen ic +pavil ion +f as +plan ting +gui des +car amel +hu a +16 th +san ct +8 1 +enjo yed +secre ts +poe try +trans ferred +con vin +conflic t +son g +re place +vi olet +shi ft +al mond +sk ies +coo ked +qu ir +ph e +sa x +bow ls +pro ud +offen sive +ab ilities +cur ved +defen sive +dru g +ent ering +magne tic +pu sh +tit les +teach ers +cran e +expl oring +b ers +vie wing +sen ate +hand writing +follow s +colum n +in sul +li fting +app ing +bull dog +equi pped +bul let +ug ly +g as +head ing +ti de +car ving +por table +di ans +9 1 +stri king +p ione +v or +ar te +af gh +st am +he ar +de man +po d +deliver ed +ham ilton +k new +rough ly +colum ns +fa bul +volk sw +bur n +bel gi +stri pe +p acks +smooth ie +1 20 +te ach +swi ft +pol lution +fl our +pas sion +se ssions +int on +fo am +per fu +con do +ste ph +hal o +volksw agen +fin ishing +m age +com memor +stra w +c ze +m t +te lling +fol ding +bar rel +dimen sions +to mb +nau tical +insp ire +con e +shri mp +lu cky +y el +li est +rela x +bow ling +drag ons +kinder garten +ex pand +com pos +sco tt +fon ts +har rison +bas is +ch im +are ly +car l +i ously +sal v +or tho +fire works +impro ved +ru bs +car din +un ve +si vely +oun ded +b bed +197 3 +metro politan +ris es +wi d +medit ation +we ls +cal if +acu lar +lau gh +f old +199 0s +resi stance +p ou +gu cci +pro ved +al b +bu ffe +roof top +8 6 +dr a +z a +agricul ture +gr illed +197 1 +ac ou +c air +198 2 +cush ion +treas ure +t ick +thre at +st able +he ights +whis key +9 3 +for get +subsequ ent +198 3 +dist ingu +cu ba 
+fabul ous +elect ric +cr ying +ac qu +c kets +v i +orche stra +ch ase +ga p +adop ted +sustain able +b au +re serv +scoo p +luxur ious +man go +fer ry +orig in +mechan ical +clin ic +re verse +7 00 +d d +thron e +n ou +p ale +wal nut +j im +co ats +edit or +to e +be aded +bl ind +gar ian +gener ated +b ap +sh adows +la m +vibr ant +cro sses +life time +char ged +l ated +reti rement +il ian +promo ted +194 3 +stru gg +boar ding +furn ished +sc at +y am +be am +tor pe +promo te +sugge st +se cure +bi a +c ents +cam el +histor ian +athle tes +guar an +le is +fac ial +bel ly +d f +frequ ently +cre ator +gather ed +sele ct +fail ure +em ir +w i +ear th +swe e +197 6 +fle x +mini stry +virg in +mush rooms +stan ces +shi p +under standing +corre ct +up gra +particip ated +gen e +val u +al ps +dri vers +tra ils +fl ame +hip ster +clu bs +y ne +mix ing +ti res +g ab +v is +al tar +po ts +belgi um +de al +ban e +popu lar +vol ley +1 000 +brilli ant +ho sts +e motional +pil lows +susp en +it o +pack ing +glaci er +lo ving +glam our +mar kets +volley ball +croat ia +i eld +ste ak +l i +di plo +retri ever +ess enti +de cre +ho g +guar dian +pu p +bene ath +ch ris +thu si +8 2 +alex ander +tumbl r +inju red +bro ch +schedu led +ren dered +re ver +green house +ip ad +min ne +bor ne +stoc k +d na +en thusi +dre ss +tim ore +de stru +dec ades +ori ental +ma son +eng ines +scoo ter +lin er +con tained +197 4 +pe aked +nor we +relation ships +be lon +sky scra +tre ated +illumin ated +193 0s +pro mp +alumin um +clo sing +win ners +g ather +acci dent +in ated +g ates +bal timore +guar dians +sc out +o v +o y +s r +bar o +e bay +thu r +mainta ined +cre st +bro ad +colle cti +op en +t ight +saur us +pu lled +gran ite +su m +docu mentary +sp icy +fing ers +fore sts +bl ade +carri es +shel ls +ex pressed +clas sy +cas ser +hi ke +po oh +o tt +go w +da is +high lights +ath ens +197 8 +p y +f und +contribu ted +by z +sw an +micro phone +ti ble +determin ed +v y +conclu ded +squir rel +ar re 
+pre y +appear ances +im ate +repl ica +h en +ma h +lon ely +ne ver +cul t +out break +t ant +tap e +dy e +arri ving +ti v +he aring +enti rely +8 4 +del ta +he el +ts man +mur der +mp er +char coal +sto ve +cra b +scho ol +fif a +ell ite +blog ger +ap ples +197 7 +lim it +perman ent +re pu +maint ain +show cas +t to +ch airman +po pe +convers ation +mus icians +wheel chair +though ts +o logical +ex tin +sat ellite +ass ass +buffe t +begin ners +p ound +polit ics +swe ar +en velo +ra s +g ly +du al +eag les +bene fit +pract ical +ro les +ir an +dis pu +scienti sts +can opy +otto man +norwe gian +sh ining +ba y +u til +mo red +par ag +at i +hel l +pro per +consi st +blu sh +vesse l +legis l +ev ol +8 9 +pr ise +ir o +deter mine +casser ole +bul b +accompan ied +ra ms +neigh bour +p sy +author ities +3 5 +fi ber +thir ty +remo val +use ful +dri ven +budd ha +astronau t +hall way +monit or +clou dy +10 th +wil son +pe ac +regi ster +cuis ine +vis ed +americ ans +super ior +mono chrome +rain fall +1920 s +cent uries +cham ber +jagu ar +k am +sh iny +que ens +ass ault +a x +cur ry +el ite +gra b +bu st +respe ct +mill er +inci dent +den mark +republic an +cycl ed +g gle +leban on +or al +193 9 +m ere +19 30 +le x +tru ly +ma je +e mo +l ington +sur vey +to wns +pic ked +ho les +la ho +ter rain +a j +expand ed +fix ed +f lix +ra d +bak er +196 3 +l ining +tt en +nar r +g gy +che mi +mathe mat +leis ure +n hl +achi eve +produ cers +i ded +ch a +cu t +requ est +on ion +candi date +re creation +te sted +cre epy +sl ices +no table +net flix +8 3 +in nings +par k +appe aring +brun ch +tri ps +athle te +opp on +op position +pro mise +le w +illustr ator +ok laho +acou stic +bu n +val ues +h ound +ac qui +prepar ation +spac ious +n ate +syl vania +wo ven +admini str +hybri d +ev acu +may be +m 2 +concep ts +mean while +th under +co d +te dly +giraff e +glas gow +close ly +est one +prin ce +ho mer +carri age +emir ates +dru ms +or i +metho ds +cast les +lab or +pur su +girl 
friend +al ive +snow man +re quest +lon gest +de buted +fun ctional +butt ons +un g +198 1 +so mer +m tv +iz es +constitu tion +tw i +acade mic +fashion able +mi ke +ex it +g ans +far ms +sli p +hi pp +re ign +ti m +k an +tur t +interest ed +ar thur +l an +re vol +re cycled +ch in +kn ights +tape stry +ato mic +g ate +cat ching +de an +sa il +bis cu +dra wers +house hold +du vet +alo gue +appo int +me mb +descri bes +tr ac +gen us +resi den +perfu me +sto pped +t ags +bre ath +cre dit +say ings +n ad +foli o +ne l +a isle +ru sh +gar n +not ice +adjust able +d ice +pri vac +broo k +liter ary +tri bun +exc ited +privac y +n ash +v ig +w ill +al leg +h ire +s i +wi g +la bra +ct ion +re volu +il ey +el le +10 1 +e co +k r +in ers +pr int +s wa +al ities +public ation +refer ences +ll ers +ha wai +au ro +ing ed +re con +s am +dol lars +a ired +mp tion +s ville +blur red +ex pos +hul k +istan bul +air e +reve als +r ant +con secu +clim b +cig are +cy ber +pak istan +ul tra +sty lized +k iller +sc u +sh ab +ff ins +cu b +val ent +pe ace +fi a +sub marine +the less +sa i +head phones +figur ine +tw il +eas ier +super market +deb ate +weap on +busine sses +water front +emb ar +man sions +attr action +apol lo +occu pied +sp in +foun der +twil ight +mag nific +pig e +ad ds +k not +man ic +eigh th +defin itely +he im +pakistan i +ch en +bre eding +rac er +t ations +to l +ti red +cheer ful +an ce +p per +tra sh +man ual +celebr ated +gro cer +cor d +w ounded +ev a +recogn ized +ig u +bu cks +ap oc +esc ap +pu ts +al ph +ac id +oo l +war ner +pun ch +repe at +inter active +don ald +sh a +invest ment +wind sor +behavi or +menti oned +colle ctor +commun ities +ta sty +water proof +ma sters +st itch +spec im +pil lar +expl or +pe dia +ton gue +volunte ers +ab ad +ba ham +rain forest +kis sing +e at +to ast +ten ed +break s +colon y +vers ity +tour ing +expos ure +sy mp +colon el +pu mp +be an +se t +su me +sho ck +e liz +nur sing +cott ages +cro cod +guit ar +ar ily +o ther +ic 
eland +diam eter +character istics +opin ion +consecu tive +pitch er +fil med +land mark +ac ce +pan try +nash ville +ri ders +ro ts +cor ri +han na +brow se +ti a +car di +bracele ts +e dit +fran k +la va +de si +announ cement +hel lo +b ite +tri u +bor gh +g lad +list en +w ake +sp ices +buil der +in si +app ly +corpor ation +cav al +mer cur +sup porters +ce me +az z +pati ents +lam borgh +bre ast +soc i +ar i +touch down +labra dor +s ic +er ie +star ry +saur s +quarter back +pop corn +w iring +op er +consi sted +li on +gen u +sau sage +e ge +ex ist +f actor +in come +alber ta +fact ors +tick et +rect angular +chemi stry +sp l +explo rer +volunte er +se ated +mo ore +exper t +pon y +surf ing +walk way +haw ks +eliz abe +d its +g li +ic ated +h ad +lamborgh ini +sex ual +produ cing +argent ina +bu bbles +h s +suspen sion +hu b +over sized +delic ate +franch ise +hawai ian +ar mored +ab led +pan demic +pro hib +achi eved +af t +instru ment +ver si +enn ial +mar k +mid night +rhy th +er o +dron e +tou gh +particip ate +co ve +act ly +wat ched +wel sh +sig nal +con feder +spect acular +mo ther +heal ing +pri x +fic ally +consi sting +yu mmy +lu sh +k ol +li sting +sp her +cli ent +196 2 +ta p +sym ph +requ ire +air line +2 1st +ro lled +sp ending +op posed +apoc aly +v on +to tally +en ni +res ol +st itu +fle ece +voc al +v able +ren ce +agu es +slow ly +bloo m +collabor ation +pe g +part ner +cos met +mu gs +gy mn +do s +hu sky +an sw +car rot +t z +po sts +lac es +par rot +s c +j r +ate au +flow ing +acqui red +sl iced +daugh ters +ver ses +h ms +se es +de mon +big ger +n one +game play +12 th +or gan +hen na +mi dd +fl ames +cor respon +d ged +hu t +sto wn +pix el +ex actly +cosmet ics +to s +mb o +der ness +comp act +peac ock +star ring +w au +in doors +can e +water melon +regar ded +th read +pa w +adv oc +sc h +g ing +zomb ie +ke l +cru st +tre ss +refle cted +s by +sugge sts +n az +produ ctions +partici pants +fe els +tri m +or gans +mo thers +rel ations +cur 
ve +de gre +as king +sk ating +person alised +win nie +lin ks +ar o +kn oc +de als +cou l +compe te +end ers +po ckets +ex ist +ke eps +195 7 +m ing +illu sion +att end +195 5 +stu dying +hil ton +s per +situ ated +li sts +4 9 +hu ll +b low +cl er +ra m +sh rine +fou ght +ne ls +par ent +195 6 +car b +ex pres +octo pus +ad or +ato p +r ates +ne i +paint er +sneak er +b lowing +fish er +see m +our ing +cent er +free way +citiz ens +mercur y +ad dic +jo ining +phra se +th a +dra wer +schedu le +degre es +ken ya +f red +ro be +me lo +pl ated +achi e +c ash +y n +wor ship +el a +art icles +queens land +tu m +continu ous +fal se +jose ph +di versity +me ts +goo se +t ones +min ster +vie wer +on ist +pass age +agricul tural +w we +sc an +kun st +bor ders +certi fied +arch ive +manic ure +na h +cap s +differ ences +ou l +bro wn +15 th +b ills +de on +sh all +no odles +s man +rou tes +bl ed +mix ture +blue berry +19 00 +im mig +dolph in +iv y +phan tom +hu dson +k on +victor ia +4 x +sig ning +viet nam +cylin der +u pho +st acked +hamp ton +occu r +ev ing +li an +qu er +de pu +war hammer +19 14 +alo g +engra ved +conver tible +alli ance +be c +tri be +bloss o +pro motional +ste ering +viol ence +stro ke +program me +broch ure +o rous +il o +le mon +sanct uary +vietnam ese +en de +ri x +at t +fe el +sail or +s ort +dri ves +pro mo +bl in +ge ek +ash ed +compon ents +mon ast +edit orial +o ma +orch id +ce l +wid th +ne west +7 0s +n inj +mar tial +me at +ss o +tran sit +pre p +recogn ition +car dig +ne ath +far ming +ab il +ha v +treat y +bungalo w +survi v +dis ne +phar mac +aff air +per th +popular ity +bu gs +bac ter +le af +fl ights +gu ine +acce ssi +caval ry +con version +c r +col a +straw berries +bar rier +cam bo +per sian +curren cy +9 00 +pi d +lay ered +bb q +sty led +195 9 +t m +cha ins +stan ley +auro ra +man ia +gl en +comp ass +ar cade +t rench +ste st +sil ver +mac ro +rang er +investig ation +an cing +cre dits +com pr +m ach +w ig +mirro rs +fun ctions +g 
our +an na +we ed +cro w +cze ch +micro soft +sur ve +ch un +meas uring +s at +he art +le aning +argu ed +explo sion +gan g +dav is +li fted +com pla +sh am +ge sture +haun ted +so le +au dit +l td +so li +poe ms +cap able +et sy +fin ed +sil ent +steph en +braz ilian +gentle man +m c +stand ards +st im +crit ic +con tour +desig ning +mak ers +chick ens +thro wing +experi ences +s ock +fa stest +arrange ments +bas kets +buddh ist +ac o +log an +identi fy +emplo yed +ban g +tig ers +col i +im possible +tai wan +ha ired +p ounds +v ol +aw ak +port folio +195 8 +ban ners +att ending +conne cting +critic ized +emplo yment +a w +l one +sett lement +chil i +under neath +floo ding +rou tine +tur re +do w +he m +an ian +ke to +shab by +en han +ceme tery +p ent +kay ak +ox y +e do +en z +cu be +gran ted +relax ed +releas es +celebr ations +aff airs +trou ble +ce o +surpr ised +le x +land o +ver n +tic kets +n ick +x l +is her +inst all +davi dson +flas h +ma ids +196 1 +19 18 +writ es +or ted +lu min +ander son +rapi d +elev ation +sau di +trave ls +ib ility +speci fically +tur b +s lu +dri ve +s add +fal con +chal k +dang er +n ag +ther ed +o le +mo th +bur ied +re new +cy clo +har ve +an ium +me l +z ers +loc ks +ninj a +side walk +up dated +tim eless +dan ish +separ ated +po inted +stron g +loco motive +g m +appropri ate +showcas ing +ho st +14 th +guitar ist +im pression +mu ffins +ste ve +hair cut +v at +ang ered +w right +cli ffs +wis dom +tul le +2 3 +ad jac +e uro +sp el +flow ering +jama ica +p df +b art +bu re +ol ds +s as +la ke +se oul +o e +d j +ski ing +pra ir +b art +feel ings +d ame +high lands +wi zard +loo m +fav or +challeng es +l ang +over coat +vesse ls +pu dding +adjac ent +sc ape +stor ms +scre ening +kilo metres +lew is +yar n +other wise +be lls +commis sion +upho l +se ly +wh ere +gen re +re hear +orig ami +se gment +ca ster +aw a +andro id +mad ison +chalk board +sle eved +sub way +suff ering +coo per +1920 x +aspe cts +gra m +pre ce +gu ards +pol 
ka +vill ages +sav ed +bris bane +dis ci +sib ility +1 10 +my self +ent it +tr ig +ab sor +gl ory +ja b +ze bra +absolu tely +tact ical +oc h +gi ants +engine ers +rich mond +or lando +min ing +famil i +y d +genu ine +matt ress +ra s +bl u +symp toms +v c +myster ious +co s +ol i +adap ted +alp ine +clar k +bio graphy +hit ting +concer ns +fri es +edi ble +fil ter +was n +pal let +194 8 +rapi dly +on o +o at +tri als +ar c +sha w +ab c +sk ate +tr icks +o ph +man ila +n j +p g +legen dary +tribun e +pro ve +aw are +en chan +si ding +box er +tw ins +do x +tra ding +reje cted +re ven +sh allow +fro sting +inv ited +wil derness +9 0 +hi de +ta h +wa x +re inde +ac cents +art an +festi vals +lou is +mul tic +assi stance +des ire +credit ed +show case +log ies +patter ned +recomm ended +appli ances +dino saurs +n iger +tra ined +soph ist +dr ill +t is +kno ws +inten sity +communic ations +const ant +compar ison +ru b +10 0 +tan e +electric ity +re fr +mar gar +hand bag +en coun +c is +conven i +me te +hu g +t issue +cal gary +ad y +spr ink +quil ting +ch ill +magaz ines +high land +ven dor +195 3 +cour ses +amph ibi +ap a +ru gs +snow fla +dor m +att ire +brief ly +che er +fo il +bloo ming +pract ices +disne yland +ac les +char lie +ci vic +di al +sap ph +revi val +fra g +dist ur +tur bo +r v +r in +am bas +hu gging +gr in +pou ch +inter change +camp er +2 7 +f lip +hi red +tex tures +destru ction +r ing +sof t +fe st +on o +per u +str o +br icks +ging er +kno ts +sub stan +tun e +il ation +cre scent +ill es +ele ven +byz antine +e lo +vir tu +k ang +o asis +fol ded +le t +ser ver +mat t +archi ves +po ly +spor ting +fin ance +susp ended +lag oon +partner ship +m art +town ship +capit ol +lu ck +discu ssion +vac ations +cra w +me tr +m in +ho bbit +temper atures +ge on +medi es +oat meal +exist ence +an o +lun ar +tur n +matt ers +a ys +in ate +co smo +mill s +chur ches +cab ins +samu rai +ph eno +stu ck +sp ag +he tti +ta xi +tra ditions +dev ices +tele phone +c as 
+inf o +seas ide +bu ses +sche mes +ginger bread +alph a +reinde er +foli age +critic ism +rasp berry +cam ou +t wit +ar k +bro c +exper iment +mon te +chron icle +an ne +occur s +se ctor +di verse +ambas sad +diag no +camou fl +3 50 +mor row +194 6 +ch ry +te s +tr ick +i k +repu tation +pol is +sse x +sub mitted +spag hetti +se dan +fun ds +mer ly +recei ves +home less +hair style +spo t +manufact uring +r al +di vine +ran k +plat ter +femin ine +19 17 +fl ats +wi re +gui ded +att or +natur ally +plant ation +ta wa +my th +ro ss +descri be +somer set +for merly +crow ds +inse ct +fi res +magnific ent +ed ges +appoint ment +co oper +b ly +arm our +di ve +conclu sion +1 30 +adop table +da e +baham as +pal eo +195 4 +lo ading +ton y +guine a +ab u +ex hau +tele scope +ad am +discu ss +an i +vis its +m igh +nove ls +floo d +s ak +ligh ter +broc coli +ma ur +photograph ers +dynam ic +ri m +entit led +le eds +pu lling +yo gur +2 6 +ed mon +tab le +for tress +west in +requi res +sc or +reco very +sign age +bar k +art a +j ar +vo d +exper ts +incorpor ated +vit am +p ouring +fun dra +viol in +suggest ions +bean ie +flo at +bu la +du chess +mono gram +survi ved +pra ise +yogur t +hoo p +ter ing +char ges +distin ct +th ames +an ne +alex and +cardig an +jo e +rel ative +fire fighters +que ens +man ner +m son +exc av +turt les +at tic +colle ct +happen ed +ef a +organis ms +oak land +squ ares +lam b +fine st +pu g +electron ics +be ams +gr atu +innov ative +sam ples +sh im +lo af +imp le +cre ativity +tem ples +por tal +comb ine +we lls +ate ver +cryst als +u efa +wh atever +ap est +an as +sou ven +batt les +kenne dy +je ff +as ingly +bra ve +deli ver +is ation +am ong +hy po +among st +part ners +st o +bre w +impro vement +back s +nei ther +inj uries +gen e +bath tub +we ak +shar ks +represent ative +rit z +copy right +scat tered +do ctors +innov ation +pol ished +mu s +li ked +bi ker +seas onal +ra ven +c af +ut ility +t amp +incre asingly +regu larly +depic ted +stor s 
+pe ar +it ions +ru p +su spe +193 8 +knit ted +11 th +bear ded +grocer y +beauti fully +al ley +bang la +la c +no tic +gra pes +pu er +phys ics +cla us +penn sylvania +corri dor +mar i +tra iner +langu ages +well ington +gla m +om bre +on ions +contro ls +b oul +pra gue +compos er +ass y +pl ying +dra g +d é +condu ct +w ag +pe tro +193 7 +pan ic +193 6 +pe a +op tical +kit ty +kit tens +pl ed +skate board +trave lling +trans lation +ot tawa +won ders +depar ture +ou ting +gu ess +mass age +fre der +ro w +signific antly +sk ill +gu er +veter an +ent ist +frank lin +bri an +ortho dox +happ en +al an +ver te +inten se +sw ir +survi ving +cre ws +twit ter +moroc co +po dium +194 7 +depic ts +st ool +eleph ants +reven ge +i est +comple tion +sci entist +contro versi +j im +crimin al +au ck +gra vel +er son +bas il +professi on +prair ie +ear liest +fin land +hol ders +modu lar +ba thing +av o +night mare +y o +hol low +trans ition +volcan o +emo tions +bre ath +inclu sive +panor amic +e ter +tran sit +con gratu +pengu ins +auck land +m im +gy psy +t u +ar men +pan cakes +hun gary +pre hen +y e +aqu a +sele ctive +c ited +contain ers +tu lip +de leg +torpe do +ff les +yo da +ear l +pro pag +ru led +baro que +arre sted +19 15 +s late +mag no +no stal +ball room +pean uts +ass ist +wol ves +brigh ton +monast ery +dete ctive +cat er +lab our +car ey +swe ets +lo sses +sk il +veter ans +sp ell +der by +concer ned +corn wall +m als +stra ps +ingredi ent +industri es +peter sburg +drive way +mo ss +ff ield +shel ving +ye ster +camer as +e e +o hi +que bec +di ves +19 10 +accessi ble +m ally +x mas +adap tation +geo metry +b end +oxy gen +so phy +edit ing +v ent +crocod ile +lo west +nu mb +tal ent +publ ishing +w er +pur se +mo da +j ig +roun ded +ru st +te ens +fa ster +str anger +bur st +gre ens +nor folk +jo y +h ex +in er +ssi l +er up +al arm +char le +cour ts +spin ach +b n +me tic +hand les +clear ly +bre a +esc o +el ed +vi enna +dress er +miss ile +ic ide +fo ssil 
+cti ves +ra jas +mor gan +plan ted +cal f +orig ins +v ard +expla in +pre serve +attr actions +must ard +is ers +hal i +app ed +neigh b +eff icient +shoul ders +dis co +anti ques +enter prise +flam ingo +o ro +bu g +bon us +resc u +prepar es +so da +sm ash +sto rey +indic ated +opportun ities +occas ions +rect angle +cool er +u kra +pa y +gra mmy +mat ically +portu gu +thumb nail +philo sophy +sequ ences +flow s +compu ters +pa per +hun garian +ste ep +bon sai +to morrow +pic king +por ted +je di +ca a +black board +accep t +cyber punk +por tions +gi b +ic h +colle ctive +childre ns +pione er +e cli +i ff +dest roy +ar en +indi ans +tor to +advertis ement +y mouth +an ic +ro ast +hun g +rang ers +di visions +represent atives +sta yed +h ate +at ly +migh ty +chi hu +exter nal +ne pal +si ege +hol land +si ber +ad ams +el der +fa x +portugu ese +scrat ch +nat i +circu m +symph ony +or b +sc o +nor mally +bar be +tex tiles +th or +moo se +pedestri an +el ine +poly g +puer to +mo ck +com pl +pan ama +accom pl +cad il +ac a +cu cu +suit case +ach ment +quil ts +el i +com plic +pre view +hi ding +19 16 +to pic +bel gian +pheno men +sy rup +indig en +elizabe th +dwar f +c ry +char ter +distin ctive +1 200 +graph ical +pat ing +sco res +pos se +pil gri +tau ght +estab lish +ukra ine +do ve +water colour +co on +tu x +dec line +00 x +provin cial +g ent +ric a +ab un +cas a +mam mals +i g +t z +cu e +pre served +em br +th ri +u k +ent ers +195 2 +aer o +lu gg +st ics +co ffin +pur poses +car av +tu lips +cul tiv +gar bage +indigen ous +cent ers +es is +geo graphic +read er +wash ed +att itude +respon sibility +de aths +imag ine +renov ated +mil ky +ab er +campa ig +am bi +ba h +bru ce +camoufl age +ra mp +wra pping +lugg age +en em +threat ened +tr un +ur s +common wealth +n inth +su shi +par tially +ra dar +west minster +liz ard +depu ty +wil low +ra mad +sta in +fit ting +gad gets +coul dn +dais y +classi fication +lu m +loc ked +re medies +end angered +tar ot +od y +whis 
ky +integr ated +sapph ire +gre etings +ju ras +common s +juras sic +ie ties +cre ations +min er +j ay +ev alu +s ist +the astern +ko i +cou pe +deplo yed +n i +mo vements +respon ded +wor thy +pu zz +a z +du cks +plan ner +search ing +vod ka +transm is +m ack +7 5 +un t +sh rubs +cas ual +yester day +h agen +po et +arch y +hun g +yl on +j on +auth ors +re gency +par am +mechan ic +she ffield +re x +n d +th under +v ast +ker ala +beng al +al lies +to x +c é +da i +tac o +classi fied +ohi o +studi ed +continu ing +hung ry +clu ster +boo st +cin cin +mun ich +geor gia +fis her +hel mets +zi pper +arab ia +exc iting +casual ties +cat ering +chihu a +nel son +ton ight +al er +sare es +gra ve +demon stration +k ata +for bi +ben jam +teen ager +cigare tte +ge m +see king +rat ings +fun ding +sophist icated +p i +hor ns +ob vi +process ing +pre st +lil ac +sub stitu +conserv ative +pi a +minim um +magno lia +squ ash +me ss +cam o +revolu tionary +new castle +eleg ance +hipp ie +gall eries +marri ott +16 0 +co ca +ma z +certi fic +dolph ins +gor don +spe ed +be lla +che cking +enem ies +a pers +pe ppers +commun ist +tamp a +or ium +hy dr +p ins +on ym +fash ioned +lin ked +cincin nati +buil ders +re pairs +wh il +do g +trin ity +dun es +ar rows +z z +fran cis +up loaded +sequ el +cle ver +el lation +cit y +qu ant +pen cils +invol ving +lay ed +con vo +plac ement +194 9 +whil st +b ach +di vor +resour ce +refr iger +tra p +cap su +der man +mar it +equi valent +expla ins +descri bing +pig s +ther mal +pi pes +stru ggle +at us +elect ions +fortun e +land ed +ser iously +to pics +re production +cambo dia +k l +moder n +envelo pe +cor ve +battle ship +in ning +be aring +cu bs +auto motive +bou lev +inter cep +boulev ard +par as +go ssi +ter ies +lanter ns +sten cil +pan ther +wh ist +ra f +19 19 +ag ents +wel ding +h r +lo b +trans formed +195 1 +arab ian +7 6 +ro s +en cies +lan es +pige on +st all +t uring +gra ms +de sc +ob tained +ne bula +2 2 +speak ers +organiz ations 
+crack er +benjam in +myth ical +trans form +co ws +gro ws +sp an +research ers +inten si +too th +im mun +confi dence +foun d +k ick +ro b +ban k +pen alty +j ars +astrono my +ne o +comm ents +ax is +vent ure +ri pe +19 29 +roc king +ro wing +den se +dire ctions +maje stic +ne ll +tam il +walk through +bri des +attor ney +ty pho +sto mach +circul ation +un esco +ho ward +destroy er +50 th +un ity +su stained +av a +her o +193 5 +m ck +ack now +par ish +phy si +triu mph +gymn ast +famili ar +p ated +mo d +h art +read ers +vers y +contro versy +gradu ally +per su +au x +gen esis +merch and +att i +war ming +tab ly +ef s +hot test +wi i +th igh +do odles +dis aster +tes la +com mu +andre w +her b +can s +inf ant +ion ally +int y +tra ilers +d at +skil let +const ellation +ther mo +d h +mar king +res in +ns w +or nate +edmon ton +th ick +run ners +ku ala +ill ness +narr ative +int imate +pit er +sett led +l ished +deci de +neck laces +v ous +indi an +com prehen +tw ood +b ility +pl er +tr on +afgh an +se ek +car rots +stri ps +pil ots +qu is +bangla desh +enter taining +eye bro +afghan istan +194 0s +4 5 +an th +se min +whi pped +ren amed +trou t +ambassad or +hol m +mur ray +v re +commit ted +for ti +suff icient +r acks +colle cting +sp ur +osc ar +emph as +requi rements +pleas ant +represent ation +co l +prison ers +4 8 +mo squ +bu mper +har a +mo tors +du o +depen ding +z el +je wel +bas es +f ate +ra v +comp ound +ho u +ve il +hal f +ak er +et ary +ner d +affe ct +e o +ban quet +b are +pe as +ho bb +hand ed +appro ached +mor al +feder ation +explor ation +do zen +she er +applic ations +fil m +ade la +cool ing +reserv o +spe cul +re modeling +ma ya +sim on +la ys +ang ler +dec lined +chihua hua +si ded +vers at +green ery +sett ings +kr ish +re mark +sto ps +com b +do or +dro me +indic ate +onym ous +s ag +par o +x on +bi d +fin ale +ford shire +c al +f 1 +fr inge +po ps +nor man +tt on +essenti als +pu mps +ma ze +flav ors +adela ide +f ails +mon keys +bul le +n 
caa +be ats +stri kes +3 6 +br ack +15 00 +cho ices +ru sty +ju piter +mor ris +plan k +up grade +e la +ro ber +med als +boar d +be gun +par achu +spect rum +or bit +story line +no ble +plan ter +pro ps +beli eves +ri g +soft ball +mal ta +estim ate +don ated +bu lls +su ssex +cair o +distingu ished +no ise +cab bage +popu lations +oper ate +fire fighter +pa ths +pre ssing +asse mbled +pu shing +spark ling +appar ently +blosso ms +s ni +an x +glam orous +gossi p +jo ke +ap ron +expe ct +k or +me tre +preci ous +b red +sp y +pe pper +c aves +f ast +wit ne +geo graphy +acade mia +moti f +h p +pro fit +graph er +mill enni +ch ile +ant ici +pri est +high light +sen ator +en cing +sand wic +hoo ded +sno opy +portra yed +ch at +inter se +vi vi +4x 4 +s aga +a il +w art +never theless +re claimed +c l +tal lest +harmon y +valu able +k on +sci ences +exp o +pump kins +au l +accur ate +bra ided +ger mans +craf tsman +ash es +am id +wal ter +an n +e u +par ka +athle tics +oklaho man +s our +tr ic +du sk +sur rey +transmis sion +du ties +en ham +re tained +ro bin +sta ying +o i +unve iled +ir is +net t +ul a +dru gs +indian apolis +ho bby +journ alist +i op +th ly +bio logy +establ ishment +l ig +a bu +re cycling +gene tic +pi ke +cer tain +f ed +refle ct +b ach +ran ts +fil ip +der ived +well ness +tux edo +la sted +e labor +ph is +wau kee +as ide +per mission +ho e +s q +tru mp +audi o +4 k +i ro +at las +conven tional +possib ility +gour met +sen sit +plu m +sp rou +as su +193 2 +sn akes +pla ins +l on +co pen +see med +se g +war fare +attri buted +18 0 +peace ful +new port +effe ctively +wire less +tri bes +concer n +similar ly +v ases +wi res +do wns +need le +philo soph +le f +tra iners +p ill +streng th +em brac +am ar +pred ator +cris p +e ats +t als +plac ing +wh el +vo tes +bun dle +k entu +ad ditionally +eth iop +an k +st or +her ald +roman ia +thro ws +bre w +r c +co sm +ma x +go tten +o y +ro ver +present ing +v as +am end +fo x +shak e +w ander +to pper +elect 
ro +care fully +rat io +wh im +n ine +print able +re aches +roo ster +mp y +moder ate +mbl ing +p o +radi o +claim ing +y man +pu ff +dev on +la sting +mar tin +cli ents +fif ty +no tre +don key +moroc can +voy age +ro s +gra vity +bud apest +gu m +d ness +par ker +l ty +distribu ted +b li +lo ads +bir ch +lum pur +scre ens +residen ces +copen hagen +ser en +corve tte +wal ked +cho ir +sh ut +metr ical +wh ales +m n +mon arch +compe ting +dead ly +p ace +fo ss +medi an +stret ched +so red +bul gar +doub t +har vard +ag as +incredi bles +jump suit +mand al +ra p +je an +geor gian +se ls +ex act +bug atti +in kle +cat cher +i den +fri dge +193 3 +austri an +on cé +manu script +re sume +cush ions +bu d +tit ude +m ation +er ra +ad ju +ce il +po ppy +lim its +real ism +orch ard +k icks +cor k +re mn +si e +pe aks +li ve +d il +e a +disci pl +w u +li ver +pa stry +cru isers +appro aching +health care +mem phis +bi bl +inde x +he l +strateg ic +pre historic +spon ge +chor us +excep tional +inter face +remo ving +be spoke +bo ws +fab rics +controversi al +an thro +pa ints +ty pe +organis ation +ph ic +st ur +mon thly +den sity +bulle tin +u pr +en su +apocaly pse +ra bb +accoun ts +promis ed +in n +le on +mad agas +st le +over view +plu g +introdu cing +e bo +e cle +gymnast ics +bibl ical +i di +k ite +kar a +pal ms +let tu +lo an +im eter +jig saw +attr acted +imag ination +bar re +sadd le +re covered +si onal +dis playing +speci al +trans formation +so x +typho on +sur round +cro ps +techno logies +after math +bey oncé +for cement +pe i +all er +speci alist +fair y +aff ili +tu rer +be ad +fo ster +work place +h ag +ye ar +cel er +merch ant +ar ray +mil waukee +to m +st ir +de vo +lou d +c or +back packs +st el +princi ples +boun dary +un cle +concer ts +tak e +slo pes +guil d +resi stant +g ig +in je +sp ra +gen eva +indi es +sle ek +shoo ts +s mi +ta sting +sequ in +after wards +te ars +tho mp +fle w +filip ino +pra ying +att acking +upgra ded +for th +yel low 
+manufac turer +krish na +la h +glu e +eff ici +home coming +bure au +ri val +vi sta +pro p +pro spe +cos metic +liber al +at er +sh er +di aries +sp are +p vc +a is +consi dering +fas cin +h ay +w ad +him al +hon our +ira q +ish i +fore arm +dist ant +honey moon +tri angles +wide spread +bo lt +193 1 +occas ionally +c run +ru sse +can non +hand written +tu bes +ornam ental +succe eded +lettu ce +bi an +any where +nas car +manufac tured +boo m +c il +asse ss +dre ad +ak e +fe males +mo re +cow boys +fun eral +ic o +re fre +sug gest +i ana +se same +pl ymouth +sil ence +gla ze +tim ber +10 80 +robin son +in der +kentu cky +ch ers +la zy +water ing +defen der +i pur +de o +19 28 +emer ged +per ed +domin ated +b ent +ac celer +experim ental +spi derman +wa its +robo ts +mor tal +fl ick +pre ferred +b u +charle ston +su b +part icles +my an +clu tch +class ics +ce ster +mat s +set ts +sc roll +se mi +19 12 +cover age +mon go +certain ly +od d +hang s +millenni um +ad mitted +pu shed +thomp son +bat ting +in fra +sub tle +h is +psy che +kol kata +sku lls +immedi ate +aggre ssive +un less +forbi dden +ra pper +fl are +o bl +su icide +mer ged +p is +accommo dation +sum mary +h art +organ ised +slo th +tack le +minne apolis +myan mar +t all +administr ative +se ash +request ed +compan ion +rac e +commer ce +in ery +some what +ad mir +re sso +we aving +ge e +happ ens +bever age +a im +gu s +puzz les +fur ious +mass achu +ri ed +ado be +fix tures +ru ler +shake spe +k yo +mess enger +s ings +bit es +kh an +fen der +by r +accu sed +ber n +har dy +repor tedly +drum mer +v ary +as ks +ceil ings +e li +o mega +on i +w w +rep tiles +cur ious +regi stered +le h +re called +confe tti +massachu setts +nor dic +har ris +ri vi +ne ts +tor n +se ctional +weak ened +absolu te +ki d +madagas car +b av +k az +m bles +con vic +cit rus +w y +dis abled +def end +pa ste +cor ners +secu red +f ies +tol er +g no +bas ics +at o +gla zed +re form +form ations +d ak +he ating +nomin ation +as ser 
+brew ery +en se +gra ham +her d +diffic ulty +sur fer +influ ential +o ct +ro lex +per sons +dis si +flo yd +plant ers +ta way +gar land +fr act +oper ational +d t +beat en +19 27 +ast on +st air +par tial +bar ber +ve lt +rema inder +nap ole +el se +sc iss +marit ime +co b +wor m +char ger +carl ton +3 7 +co ated +di abe +min eral +mi ds +vine yard +air bus +im pressed +elev ated +hamp shire +au tism +char ging +n bc +su d +ch in +pl ing +ston e +u als +w ight +pe st +conne ctions +coron a +h u +ino is +il legal +dan iel +cris py +s wal +roo se +9 11 +w im +bee tle +over night +stru ctural +nu cle +dra ws +vic tim +deli vers +replac ing +philipp ine +per cent +consider able +in al +sk a +law yer +ecle ctic +star bucks +lim estone +ar n +proto type +fi g +wre ck +cul tures +law rence +encoura ged +cer am +high lighted +river side +ch at +po ds +u a +pleas ure +inter state +wal i +star red +th y +d ad +every where +cran berry +is an +spar kle +ex tend +compon ent +ecli pse +ba st +marsh all +sli des +simp son +193 4 +expe ct +bur ns +mo to +dor a +ur ers +b d +f o +playo ff +en amel +pic ks +hav ana +strong ly +ju n +floo ded +su zu +n an +po ker +ent re +fun ni +con fron +aro o +aspe ct +er ia +sl o +i do +ev en +ac es +re fers +sho re +suppor ts +sens ory +advertis ements +19 13 +mag ni +se w +log ic +rhyth m +din er +roose velt +panor ama +bra ke +gat sby +g ay +l le +po is +na i +cu ban +spo o +cock pit +grand father +can ter +cere al +coun ting +territ ories +ter rac +appe ti +dimen sional +program ming +bacter ia +labe led +mic key +sandwic hes +cru sh +sse y +g lor +to p +sol ve +st ir +de lic +ven ess +mc car +vene tian +buck le +c ch +go ats +sc ious +est a +city scape +audio book +mic hel +cardi ff +no odle +sum mer +carav an +thro wn +e v +dest iny +on y +pla que +sc ales +r arely +satur n +ff er +citiz en +can oe +sup posed +hoo ks +psyche delic +pri mit +bor o +rajas than +l ings +cho pped +trave ler +con fused +compl im +incre ases +appro val +rang 
oli +ver sus +f in +fe ver +fir ing +reservo ir +do ct +termin us +pu ma +ro t +co aches +3 9 +histor ians +ans as +else where +s lot +re be +perio ds +den ied +u ally +ge ars +contin ent +begin ner +d as +wick er +sen ding +2 5th +19 26 +tri o +ar k +roun ds +audit orium +stron ger +t ill +de scen +ramad an +ho mo +air borne +ro ds +do d +de layed +nu t +dire ctors +pyra mids +pat ent +est ates +west ward +te x +lock down +no ting +v u +lor ds +em my +car ter +ab road +o ta +over whel +mel ting +mother board +an ge +ag ers +g love +mu m +gre atly +in her +com posite +don t +sher iff +candi dates +alter ed +repe ated +cap tion +monu ments +ali ens +ear ring +e po +sur real +attemp ting +wonder land +marsh mal +c ca +al am +ju ven +dra w +mar yland +gaz ebo +esp resso +star ter +russe ll +ab s +extra ordinary +gate way +con text +back ed +victim s +f ry +comple ting +tech nic +extin ct +4 7 +de ll +proper ly +ever green +patri otic +si erra +equ ation +e den +flex ible +ca b +interpre tation +ham mock +comb o +m ite +con dition +d ley +g ic +den cy +influ ences +pand ora +is lam +measu red +center piece +customi zed +w rought +di wali +lis bon +p u +co a +gro up +di alogue +ran king +beli ef +1 25 +gra pe +amp ton +interest s +scra p +clean er +brus sels +a ha +y n +mic ro +f action +po lar +for cing +all ion +il it +u te +nor se +appro aches +land fall +twi sted +cha os +playo ffs +se ga +a g +th ie +str ings +per si +fac ulty +vege tation +sc al +sou ls +my th +your s +s ant +pi st +strateg ies +ox ide +ign ment +archae ological +19 11 +ta j +co aching +as ph +flag ship +1 40 +w illing +am en +do ver +ser ge +mul der +domin ant +visit or +trave led +lad y +drag on +qu ad +canter bury +le an +o graphy +tr actors +screen shots +sme ll +bb ing +frequ ency +barbe cue +ch and +recei ver +ni k +cu b +qu in +clas h +flo ck +abs ence +w avy +in ver +acknow le +g len +mar sh +fairy tale +for ty +jan e +bree ze +collecti ble +ge m +dec ks +fiel der +main land +attr act 
+licen se +car p +se i +ll is +ach i +certific ate +fi er +men swear +spon sored +re gre +bo t +ru ffle +christ ian +competit ive +miner als +subje cts +si bil +experim ents +kim ono +quir ky +tim e +ceram ics +s ler +en h +person a +confi dent +po les +19 25 +cau li +fron tier +achie vement +ho pes +po t +enchan ted +n ick +as sumed +fe ast +fav ors +web sites +ste wart +vis ual +oper ator +diag rams +pla ster +pr inter +cater pillar +infra structure +con es +rivi era +je wels +th i +v ines +du mp +execu ted +re bell +end less +p ens +sm alle +sla ve +exp and +min d +alcoho lic +me etings +f ist +the ore +eng age +spring field +refre shing +t art +qui z +arm chair +funni est +man tle +bil ly +logi sts +sibil ities +fun ky +es y +chun ky +is les +int ric +pro tag +sa iled +gen ius +inn oc +fun dam +repor ter +inde ed +mn i +de vi +v ag +wit cher +pet als +gre y +un fortun +sto len +mat rix +part ly +siber ian +bul k +rebell ion +smalle st +u x +up side +sch u +newsp apers +det ached +é e +am bul +eco logy +munici pal +ag ara +stu mp +pro pel +f ights +er ina +bas il +discu ssed +wic kets +kang aroo +ro e +bur berry +consist ent +fr ig +quali fied +sav an +di ver +spon ge +ham mer +nap kin +chron icles +edit ed +se ller +patri ots +pre de +over seas +cab les +co aster +19 21 +o aks +bomb ar +infe ction +ro ger +conne cts +by e +cauli flower +do w +munici pal +indic ates +j on +cosm ic +reg im +numb ered +w oul +al ex +todd lers +parachu te +in age +ju li +prede cess +re ta +asph alt +spor t +ral ph +phili p +out put +it ive +ato ms +remo vable +x y +app er +observ ation +oce ans +eng a +in flat +st ical +ste am +colle agues +bar bar +grand mother +commer ci +bloc k +loc als +enh ance +re construction +cu ff +back ing +cl inton +croat ian +sponge bob +woul dn +af raid +mini atures +refle cting +o slo +subur ban +z hou +rang es +frequ ent +ga e +vat ican +con gre +car ic +hy un +primit ive +re cep +pen ny +pat ches +ut c +lob ster +wal mart +entre pre +savan nah 
+water falls +ay an +hyun dai +4 6 +scho lars +ven us +spoo ky +gro ss +shakespe are +stre et +quart z +he ated +impres sion +ap ps +19 22 +attri bu +ra d +tor n +ex ception +evol ved +s bury +con qu +mal dives +cadil lac +con sum +hydro gen +ack er +form ally +lo ch +tou ching +brid ge +pp e +wra ps +g el +g aga +qu ets +gau ge +hin di +m ics +goal keeper +show ers +p our +coun ts +produ ces +do o +arti facts +mand atory +gener ations +versat ile +dan del +al as +de le +wa it +sh av +space craft +pix ie +te ar +pol ic +sail ors +so lo +chee tah +us b +fresh water +navig ation +fresh ly +al ban +cardin al +un expected +de ar +musc ular +en counter +o tta +inv ent +che cks +are st +ag ree +invol vement +skyscra per +pe el +d ach +assi sted +met als +bur ned +radi ation +torto ise +lit ion +dee med +sc outs +hun ger +en cyclo +den o +nove lty +simul tane +volcan ic +vari ations +rescu ed +lo o +spe ars +re ll +d in +clear ance +cru ci +flu id +9 25 +dandel ion +wa yne +jane iro +a ve +at on +co ck +stock holm +refle ctions +appar ent +convo y +g b +vi i +tou ches +hol ly +colum bus +spin ning +capsu le +s ack +bra id +scrip ture +pupp et +fo yer +remark able +bl ast +je ff +d ging +gam er +park way +pat rick +burn t +pear ls +indi e +legis lation +gent le +mar ker +dro ve +percent age +t ment +propag anda +space ship +convin ced +comman ded +moon light +am ate +licen sed +pe ek +tr ilo +gl as +dent ist +ur us +to pping +fi er +initi ative +station ery +padd le +re venue +o p +do main +b ats +ver e +br is +sto ols +cele st +scul ly +cin der +wo ol +na issance +viol ent +indone sian +spot light +e z +hit ler +challeng ing +inser t +tr ans +ner vous +au ton +ic ul +amend ment +ey el +fi at +isra eli +attend ance +ion ing +don ut +wi der +sweat ers +pro n +fro st +exi sted +o o +glo ssy +par enting +gran de +ambul ance +substan tial +re served +me adows +bul bs +stat istics +yach ts +allo y +answ ers +al bert +sc an +bal anced +obje ctive +ow ls +her cu +in expensive 
+ang lo +fair ly +mah al +in formed +coura ge +pa iring +mat ched +bour bon +am al +s p +sol it +hobb ies +so fia +p oured +is bn +sket ch +decis ions +st es +ta vern +stret ching +li thu +du rable +gly ph +as c +gru mpy +wa ffle +ash a +gr o +occup ation +no ok +partici pating +cucu mber +technic ian +ri p +nation ally +scre w +install ing +chu ck +recon naissance +lay outs +hur t +shu ttle +dis eas +ill o +e quest +enjo ys +ense mble +e pi +dis gu +es sex +micro wave +rai ders +trilo gy +o reg +vul ner +dream works +adop tion +comp ilation +fil ed +p t +á n +indi ana +li kes +l p +im et +ph u +sur ge +muse ums +coun t +po ver +organ izing +ment ion +appreci ation +re viewers +collap se +j ur +o deon +to bac +hosp ital +key chain +to x +co al +ear n +em bo +business woman +te c +sur render +ro deo +li v +no body +tac os +vi be +ta iled +ke st +pu lls +tw enti +spir its +confeder ate +conve ction +mat the +cch ini +suc cul +psycho logy +on going +bi zar +sho res +mar ry +proce sses +ne ys +bre wing +hy d +amoun ts +n ile +s light +30 th +gre e +vari able +inflat able +zu cchini +reg ard +cla d +ho ped +bach el +and ers +lumin ous +j u +blin ds +in ity +demo lished +pre cis +cinder ella +d ash +190 5 +ja w +nostal gia +micro scope +an te +con spir +suzu ki +190 9 +polic ies +tra m +ske le +vil la +show room +jun k +board walk +6 0s +discu ssing +where as +in habit +ban anas +k ate +nap les +uphol stery +ó n +n ies +ry an +ban kno +per ry +play ful +argu ment +pa ws +c iting +tra m +min es +fin ishes +be ach +bloo ms +flick r +mo to +to ll +ob st +defe ating +main stream +vari ation +vari ant +k ash +mon aco +190 8 +sail boat +o ils +ident ical +de bris +coun ted +sp las +ca de +m ous +m itch +po m +sco tch +a ver +n ish +gar rison +bi kin +ni agara +maur it +ul ties +organ ize +own ership +s logan +sciss ors +de ep +fra gran +vitam in +bi king +dé cor +ce ments +slo v +con s +ous ness +jel ly +ex tent +spo ken +er ror +blu r +le cture +aqu atic +neighbour hood 
+biscu its +ri hanna +el m +r he +te ed +ble ssed +liter ally +ar rest +public ations +1920x 1080 +dar ies +vac u +instru ctor +cancel led +ch ak +me g +nu tri +fa b +rang ing +resol ve +re bel +ag o +bath s +recor dings +christian ity +institu tion +dre aming +stu d +blo wn +dou glas +consu mer +tobac co +int enti +gra zing +ill inois +foss ils +val ve +war ren +ri pped +19 23 +19 24 +stick y +sub si +dan ces +h are +re fined +bru shes +lex us +z o +mp et +sp ru +comb ines +ato m +dor f +equest rian +en closed +deta iling +civil ization +fisher man +bulgar ia +st amp +af ro +ste al +de scent +vo ting +bal tic +rel ation +whim sical +s ake +he li +stu res +embr ace +b ren +re t +su bar +hi jab +tro l +imag ery +in land +emer ging +elev ator +kh aki +recomm end +o b +al ignment +loc ally +fif teen +coast line +ody ssey +st il +in augu +art works +cur ves +there after +manufact urers +sing ers +aim ed +ent ation +mon k +earth qu +inaugu ral +impro vements +chim ney +re mix +assi sts +liqu or +assess ment +man eu +wed ge +nic hol +po k +go a +clo cks +pract icing +sac ra +pal est +ve g +vi ral +bomb s +mu ffin +compe ted +battle ships +co bra +ex plan +end or +arch es +song writer +y p +mar a +pas sport +am use +mat ure +pos session +ro ll +bor ing +glo ss +celest ial +ke vin +oppon ents +di stan +ko ala +pra desh +jump er +o x +z an +par amount +her bal +stron gest +super natural +ra y +sul li +gor ge +hau l +fen cing +sur re +po int +ge tt +indic ating +nur ses +ton a +pitch ed +3 3 +ar ct +camp bell +pa yment +re memb +j ak +dro pping +mber g +i que +t illa +ca th +refer ee +g lou +us able +compe tes +hau s +v ital +pic tu +public ity +quali fying +cap turing +y es +ssi e +symbo li +re lev +st ine +pl ywood +scho lar +pur suit +close st +go sla +ri bs +caf é +mou red +relax ation +infin ite +refriger ator +an them +int ention +le ments +k ia +gu ide +wi ki +sur faces +iz er +k ane +memor able +bi o +deb t +g ged +ho lo +fa ith +youn gest +less er +guil ty +mer y 
+vol vo +pack ages +thre w +boun daries +sun flowers +def ended +fra ming +sent ence +aler t +domin ican +ri e +lu ke +po ll +ph s +symbo lic +cli pping +azz i +sal sa +bad ges +k ill +st ag +ceremon ies +flor a +s nail +fair ies +arm ies +we mb +mir acle +colo mbia +lis a +thie f +thumb s +so vere +jo ins +r ink +as er +ten th +ster n +vin cent +preci pit +av al +cash mere +merchand ise +le o +cou p +7 8 +bu sh +prote sters +bad ass +f ton +off shore +ish a +ten sion +la d +promo ting +groo ming +co coa +pe l +shu tters +dach sh +wemb ley +j ol +k out +pa ying +pr a +super vis +mon te +p ills +ho sting +la kers +hel p +ey ed +theat rical +co t +dis missed +cr ack +sulli van +re built +tru mpet +land er +vil lain +human ity +ho ping +hon or +fe e +ble don +rup ted +el li +du bbed +sketch book +mean ing +hi ve +tar gets +colon ies +dad dy +du l +exclu sively +freder ick +g ently +ju dic +10 5 +yu gosla +pap ar +u to +gradu ates +introdu ce +m ama +re store +al ab +be ag +coo p +hy per +fa il +desc end +re fer +contro ller +organiz er +je ws +comman ding +redu ction +k ap +gradu ated +inst ance +smo ked +distric ts +observ atory +v ault +man age +pi res +coll ins +lin d +diseas es +en ne +def ine +au burn +wr angler +mar oon +gor illa +real is +skyscra pers +maz da +s ights +ran ks +restric ted +t age +sil ic +se aled +oreg on +o lo +r ats +be ating +po d +to tt +win ery +mal a +alumin ium +sweat shirt +surpr ising +bo realis +hoo ds +hang er +mechan ism +to w +tu dor +chry sler +ba za +hand ling +every body +emb assy +civil ian +mo ld +hur st +parli am +chen nai +bachel or +vi an +sw ick +hum ming +amuse ment +str and +dar win +rapi ds +d ure +k ai +t ents +rac coon +leh enga +p p +⁄ 4 +re sp +n ab +co d +elect rons +live stock +wit ness +roo fs +subur b +wim bledon +ra af +con scious +en forcement +ps d +p ines +ma i +miss is +an a +cu mb +can on +diffic ulties +re viewer +pe dest +custo ms +zoo m +swee the +aw k +ha z +col li +lat te +com fy +cur ated +reli able 
+b t +lil ies +ho w +mess y +wea ve +ro tat +vo ices +kno wing +foun ding +alb any +veg gie +su v +organ ism +fab ric +cra shed +in san +legisl ative +y weight +th on +tact ics +gra ss +ax e +hon ors +el k +ca es +coo lest +5 5 +fl a +symbo l +bat ter +carri ers +mean ings +guaran tee +papar azzi +im pe +d ash +m und +re taining +crow ded +oppon ent +mitch ell +man kind +land marks +f ford +encoura ge +du m +ma hog +exten sions +deliver ing +pen n +specim ens +prote cting +bizar re +mahog any +bo mbers +sym metry +serge ant +k ath +u ma +hab its +po tted +per ce +cle aring +fru str +tar ge +sor ry +profession als +war ts +var ieties +ri ot +leg ion +syn drome +atl ant +mont go +le ys +as y +gib son +success or +wol ver +ru ption +she ar +my stic +189 0 +pred ators +negoti ations +ho pe +ob tain +exten ding +mist akes +subar u +l s +bang s +wick et +a o +le igh +cur b +cast s +desi red +mag net +lou vre +s ins +on ally +dis pos +us age +must ache +res que +measu rements +x ton +5 0s +dra in +cig ar +tra ditionally +shor ter +arct ica +tott enham +oy ster +as sign +pro secu +premi ered +midd le +cu bes +redu cing +sh re +net works +kar ate +an kara +cla w +d hab +ol or +loc ker +da ve +basil ica +constru ct +et a +j uni +l c +n z +up set +pra yers +kn ock +190 7 +cosmo s +hon ored +buddh ism +it iner +categ ories +cardin als +t asse +tun a +kyo to +e k +o sa +leas h +elabor ate +hand crafted +divor ce +mo ist +son ville +b arely +pat ag +mach inery +win ding +cou sin +comple x +pictu resque +rit ual +b its +manag ing +ol k +pe ment +inter action +190 4 +k b +t ney +per ched +be half +kis ses +expres sions +depar ted +ade qu +190 6 +fascin ating +o be +groun d +bloc ked +fall out +o v +bo sn +goth am +ma har +identi fication +amate ur +comprehen sive +y al +sh am +renov ations +cat ches +notic ed +a 3 +bat ch +gri zz +pon ds +mainta ining +pl un +tra pped +mer cy +for gotten +m me +en or +fur b +an te +ph i +proce dure +u i +u ae +p ant +qu an +bat teries +q atar +re 
union +end a +is a +un successful +ball erina +g c +spor ty +refle cts +ti bet +men to +reas on +fo ggy +bur ton +gr inch +nic kel +gram mar +no tably +var sity +accommo date +montgo mery +har ness +adop t +mar ching +bo k +sco tia +mock up +tox ic +re furb +2 d +l g +sh aft +hyd range +scri bed +conver t +w at +ve loc +su res +favor able +advant ages +s field +arm ament +brides maids +rif le +p r +ta stes +sta g +cat alog +sha wl +bur gers +high ways +lau rel +plat forms +accu mul +eter nal +ido l +z im +me lan +coun ties +inte lle +neu ro +niger ia +inhabit ants +w inged +en to +over lay +sla b +indig o +ac on +as pen +f ir +ex quis +cu tter +squ ee +expla ining +sil ly +tr ace +vers ace +pill ars +gi o +cal ories +specim en +h alls +fu dge +t ir +fe wer +ap plying +part ments +l u +cl am +hy de +n ig +p ant +par aly +ant arctica +che st +zz y +elo pement +wiki pedia +j oc +bou quets +proce du +pover ty +f li +i vely +loc ate +ber ke +ib u +paro dy +lef to +ev ans +tempor arily +real ized +commission er +n ativity +compar e +3 2 +distan cing +pra y +willi am +kel ly +jeff erson +en closure +check ed +complic ated +p ac +san g +simp li +mur phy +re v +sensit ive +as i +trave lled +post cards +dispu te +s ari +less ly +line up +concent ration +mc donald +incredi bly +nou ve +ll ama +d ine +re acts +peo ples +mit sub +jer se +s worth +ur i +bu shes +treat ments +scrip ts +mas cu +sol ved +intellig ent +jim my +3 ⁄4 +bio logical +de light +pe bble +fle sh +mist ake +m ington +ro r +sp it +heav yweight +ju icy +moti fs +po m +inspe ction +er ic +hel m +hun t +lo re +fi esta +air ways +sal t +f ishes +was p +encoun tered +phu ket +or ang +br o +develo pers +ge ms +magni fying +mitsub ishi +o da +s nor +po ll +pa ved +tibet an +ni el +roof ing +speed way +ge ons +team mates +vacu um +head band +or ly +de e +in ian +fro gs +smo ky +behavi our +pro sper +observ ations +al titude +ben ches +shak er +meat balls +instru mental +t n +18 k +des per +is e +melo dy +om ous 
+jerse ys +ju dges +laun ches +volu mes +exi sts +team mate +pedest al +biscu it +baza ar +ro rs +surviv ors +b r +in put +prote sts +cr acked +bad ly +m u +fer n +invol ves +200 0s +mel ted +rail ways +audi ences +cor gi +war saw +arch ed +intric ate +conserv atory +path way +4 50 +play room +exam ination +fing er +encyclo pedia +br y +up stairs +c bs +inter medi +spe l +a ids +trans lated +constitu tional +stu pid +refle ctive +e cho +ar io +lu cas +dar ker +cru shed +vivi d +tur ner +bar n +hy att +hou sed +esc al +il o +ser bia +scre am +prison er +gu ate +thir teen +st ap +recogn ize +rehear sal +t igh +de vil +yam aha +re st +roc kets +j ab +be ir +se als +thro at +o a +con greg +pet ite +it ted +co tt +slo ven +author ized +trol ley +t ist +ad ri +go spel +dhab i +o me +p all +naz i +anthro po +sweethe art +r ington +ke es +swe ep +cli ps +k ills +tit an +stat ic +demon s +de pos +produ ctive +us d +as parag +comple ment +tan a +cook book +d ors +m oul +pur ses +3 4 +s ar +so l +190 3 +ar moured +be sides +tal ented +hol i +24 x +h n +parag ra +sof as +bicy cles +spru ce +g lim +colle ctors +develop er +tor y +et t +rein forced +dri ft +body building +ag an +character istic +nouve au +tra de +prev ented +mathemat ics +di or +cent ered +da h +te st +di aper +star fish +jan e +leng ths +eat en +st ability +ad er +beir ut +g he +day tona +ro gers +i ster +tas mania +legisl ature +hercu les +vene zu +anx iety +z illa +re sts +ne s +sto le +vo iced +bent ley +o ats +bl ender +m ir +ra z +g hi +wor ried +ex am +fav our +7 50 +co co +un limited +char ms +4 80 +scra p +is ion +com ment +k ale +t ights +pre serv +roy ce +mode led +g age +den ali +memb ership +laun ching +molecu lar +l acro +she ds +po me +fun ction +mirro red +laugh ter +at ro +go th +cal yp +asparag us +ero sion +protag onist +mu da +jel ly +av on +eng aging +bu ck +ig nor +roun d +cti vity +deman ds +promp ted +re now +mar ia +gil bert +fundra iser +in visible +ch en +tr acking +all ig +mas quer 
+ben der +r t +de mo +re signed +ch ateau +new ton +renow ned +patag onia +sk ater +battle field +ere cted +subur bs +pome gran +milit ia +cri b +trave lers +du m +chal et +breath taking +effici ency +sw amp +pon y +v p +ear ning +spr int +rap tor +terri ble +hy gi +pharmac y +enhan ced +dachsh und +sy ria +demo lition +habit ats +s f +mon s +z ones +ato on +ar ium +be loved +el ig +bl ings +bul ary +re tro +ho se +mal ibu +condu cting +sh red +tor ch +z ens +do tted +ju mps +war m +progre ssive +po ison +str at +mu r +moti ves +guaran teed +remn ants +f athers +atlant is +c ms +clo ck +bla ze +h ack +buil ds +p ies +ra ins +sp y +met all +00 1 +torn ado +beag le +flan k +fra me +190 1 +di ver +laugh s +abu se +m ys +ri b +fe e +south western +pre fer +wall ed +dead pool +mo or +si blings +villa ins +ela stic +awk ward +m g +v a +fo to +ven om +mix er +ast ing +instru ction +niel sen +h ence +ing le +des ks +quar ant +institu tions +character ized +wh ar +home stay +molecu les +desig nation +cr ate +fl yers +che esy +mb s +sla m +alexand ria +dis pen +cross over +meaning ful +co g +ste ms +fig u +person ally +chi ang +west wood +r hin +excell ence +wy nd +lo u +my r +but ler +bro wns +cl an +so y +ste el +comb inations +dy ed +stro kes +len non +exten ds +mus ic +hau te +so m +ul tra +origin ated +weal thy +im posed +ser ial +hill side +wynd ham +hard cover +wi dow +an es +ro tation +iso to +radi us +con go +ang led +competit ions +lan caster +190 2 +appeti zer +road ster +is ian +fe ar +e mail +mob ility +declar ation +d ded +be th +cam aro +deli ber +hosp it +lo l +cur ls +seg ments +an onymous +wood stock +zi g +gir ly +deal er +clear ed +y a +tro y +dig it +veloc ity +p ar +ste pping +neck line +bikin i +til ed +g ins +wal d +ten ing +six teen +em bell +berke ley +2 20 +ri val +collecti bles +obvi ous +o vers +dun e +sic ily +ro gue +fi ji +relev ant +k aw +40 k +figur ines +anth ony +oun ce +bro och +home stead +me mo +associ ate +tas ks +dis order +play house 
+han oi +ru pt +sand stone +cri mes +cen sus +sud den +simultane ously +len ses +dit ch +bor g +al ization +ser ie +sal em +john ny +open er +fic i +p ads +le tte +se c +pul p +princi ple +har vey +defen ding +victor ies +safe ly +divi de +ski p +verte br +at ri +lacro sse +j ade +mechan ics +scu ba +z en +se ized +sla ves +can ber +ate ful +ber muda +bin ary +j ury +ren tals +on ian +plic ity +signific ance +isol ation +6 6 +bi son +scar ed +collap sed +al fred +ne p +hor ri +j ason +cour thouse +arn old +sudden ly +blue berries +master piece +tran qu +consu mption +ch asing +graph s +paint ers +tri angular +sket ching +perio dic +co inci +ali ke +dra ke +fur ry +docu mentation +snowfla kes +quarant ined +g lee +be aver +ren o +un finished +ter ra +fl u +mil k +bru shed +ac cent +dra u +wo w +sho cked +cl one +turre ts +exquis ite +pro ph +ish ings +ka h +silic one +in fer +g ou +reci pi +pok é +re cess +lu ci +ow a +oli ver +r and +cam ps +li ers +oper ates +ble ssing +campaig ns +7 9 +as a +pag e +ste w +char lo +contribu tions +wor d +pol ye +hun ters +sign als +addic tion +re ed +lu ct +9 60 +el an +tin a +re luct +no tt +dor set +v ings +bir d +ga ining +pig gy +con figur +ver de +souven ir +lo ve +stret ches +comp ounds +f ade +ethiop ia +diabe tes +se dim +dou bles +gun dam +rug ged +p sal +man tel +ac ne +lo l +el ds +bun s +elim inated +car at +refu ge +man ship +pul se +canber ra +e motion +comm entary +res ale +avi er +stitch es +s m +pl as +escap ed +esc ort +turb ine +bo bby +in hab +for ge +itiner ary +he sive +elect ron +t artan +fa ye +ro chester +ch au +up dates +cru z +hon est +don uts +destroy ers +expan ding +eyebro ws +pe can +ru ling +beli ev +tai pei +masquer ade +lemon ade +thr iller +restric tions +e cho +wick ed +remin is +enor mous +co le +brea thing +p tic +gu in +kir k +sh y +ki m +fl ir +ham burger +as ym +win ni +ty ler +stam ps +success ion +o tter +cub ic +ass orted +univers ities +l ate +sp reading +tele graph +rh ino +bang al 
+furn ishings +qu it +beli eving +sequ ins +sto pping +mel t +demonstr ated +on ey +chro no +fier ce +maurit ius +co des +ti des +jelly fish +ab a +pe as +wind mill +alleg ed +ric o +dur ham +quan tum +poké mon +fre ight +po orly +is ley +adv ices +k or +moo dy +d ently +t oned +re spir +gri p +su bl +loc king +sing ton +pp i +gon na +hu gs +emph asis +du s +pre n +shi va +camer on +ax les +posit ioned +foc al +surg ical +pe te +mix es +dev ast +fragran ce +recogn ised +state ments +helicop ters +ino a +t weed +re lay +predic ted +p am +ja il +prince sses +ju sti +wel coming +was her +ad dresses +cont ents +he mis +y ang +astro logy +loy al +swe at +atmo spher +no ir +air planes +fa una +ar do +fl ed +ro wed +mo ist +nor man +team work +lit ter +god zilla +te ap +du ffel +win es +da h +im ported +li fts +com pati +t te +re actions +pa sture +be long +sp ine +rad ical +de aling +rabb its +do zens +po odle +ru m +inter views +leg it +jam as +in au +chie fs +a bulary +ash i +cap tures +chur chill +compati ble +er n +ent a +cup board +mun ition +public ly +w and +brand ed +it z +ag encies +for mul +turre t +is cus +fix ture +ori ented +hall mark +c un +awak ens +man i +ant on +deser ted +botan ic +mete oro +k re +la byr +di str +syn thetic +e ous +to wels +18 00 +under stood +thin ks +wan ting +bangal ore +labyr inth +g ill +mo ms +scre ws +vari ed +arch bishop +speci alized +critic ised +ra ils +un dated +grand ma +circum stances +mo d +tre k +glad i +on ry +ri ms +mar l +ste pped +dar in +hog warts diff --git a/1.3B/tokenizer/bpe-16k-vocab.json b/1.3B/tokenizer/bpe-16k-vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..a67f45a6990c0471746d9965ea4d1ebb75b151db --- /dev/null +++ b/1.3B/tokenizer/bpe-16k-vocab.json @@ -0,0 +1 @@ 
+{"[PAD]":0,"[UNK]":1,"!":2,"\"":3,"#":4,"$":5,"%":6,"&":7,"'":8,"(":9,")":10,"*":11,"+":12,",":13,"-":14,".":15,"/":16,"0":17,"1":18,"2":19,"3":20,"4":21,"5":22,"6":23,"7":24,"8":25,"9":26,":":27,";":28,"<":29,"=":30,">":31,"?":32,"@":33,"[":34,"\\":35,"]":36,"^":37,"_":38,"`":39,"a":40,"b":41,"c":42,"d":43,"e":44,"f":45,"g":46,"h":47,"i":48,"j":49,"k":50,"l":51,"m":52,"n":53,"o":54,"p":55,"q":56,"r":57,"s":58,"t":59,"u":60,"v":61,"w":62,"x":63,"y":64,"z":65,"{":66,"|":67,"}":68,"~":69,"¡":70,"¢":71,"£":72,"¤":73,"¥":74,"§":75,"©":76,"«":77,"¬":78,"®":79,"°":80,"±":81,"²":82,"³":83,"´":84,"µ":85,"¶":86,"·":87,"¹":88,"º":89,"»":90,"¼":91,"½":92,"¾":93,"¿":94,"×":95,"ß":96,"à":97,"á":98,"â":99,"ã":100,"ä":101,"å":102,"æ":103,"ç":104,"è":105,"é":106,"ê":107,"ë":108,"ì":109,"í":110,"î":111,"ï":112,"ð":113,"ñ":114,"ò":115,"ó":116,"ô":117,"õ":118,"ö":119,"÷":120,"ø":121,"ù":122,"ú":123,"û":124,"ü":125,"ý":126,"þ":127,"ÿ":128,"ā":129,"ă":130,"ą":131,"ć":132,"č":133,"đ":134,"ē":135,"ė":136,"ę":137,"ě":138,"ğ":139,"ġ":140,"ħ":141,"ĩ":142,"ī":143,"ı":144,"ľ":145,"ł":146,"ń":147,"ņ":148,"ň":149,"ŋ":150,"ō":151,"ŏ":152,"ő":153,"œ":154,"ř":155,"ś":156,"ş":157,"š":158,"ţ":159,"ť":160,"ũ":161,"ū":162,"ŭ":163,"ů":164,"ų":165,"ŵ":166,"ŷ":167,"ź":168,"ż":169,"ž":170,"ƒ":171,"ơ":172,"ư":173,"ǎ":174,"ǐ":175,"ǒ":176,"ǔ":177,"ǫ":178,"ș":179,"ț":180,"ɐ":181,"ɑ":182,"ɒ":183,"ɔ":184,"ɕ":185,"ɖ":186,"ə":187,"ɛ":188,"ɜ":189,"ɟ":190,"ɡ":191,"ɣ":192,"ɨ":193,"ɪ":194,"ɫ":195,"ɬ":196,"ɯ":197,"ɲ":198,"ɾ":199,"ʀ":200,"ʁ":201,"ʂ":202,"ʃ":203,"ʊ":204,"ʋ":205,"ʌ":206,"ʒ":207,"ʔ":208,"ʕ":209,"ʰ":210,"ʲ":211,"ʷ":212,"ʻ":213,"ʼ":214,"ʾ":215,"ʿ":216,"ˀ":217,"ˈ":218,"ˌ":219,"ː":220,"˚":221,"ˠ":222,"ˤ":223,"́":224,"̃":225,"̄":226,"̆":227,"̇":228,"̈":229,"̊":230,"̍":231,"̝":232,"̞":233,"̠":234,"̥":235,"̧":236,"̩":237,"̪":238,"̯":239,"͡":240,"ά":241,"έ":242,"ή":243,"ί":244,"α":245,"β":246,"γ":247,"δ":248,"ε":249,"ζ":250,"η":251,"θ":252,"ι":253,"κ":254,"λ":255,"μ":256,"ν":257,"ξ":258,"ο":259,"π":260,"ρ":261,"ς
":262,"σ":263,"τ":264,"υ":265,"φ":266,"χ":267,"ψ":268,"ω":269,"ό":270,"ύ":271,"ώ":272,"ϕ":273,"а":274,"б":275,"в":276,"г":277,"д":278,"е":279,"ж":280,"з":281,"и":282,"й":283,"к":284,"л":285,"м":286,"н":287,"о":288,"п":289,"р":290,"с":291,"т":292,"у":293,"ф":294,"х":295,"ц":296,"ч":297,"ш":298,"щ":299,"ъ":300,"ы":301,"ь":302,"э":303,"ю":304,"я":305,"ё":306,"і":307,"ј":308,"ћ":309,"ա":310,"բ":311,"գ":312,"դ":313,"ե":314,"թ":315,"ի":316,"լ":317,"կ":318,"հ":319,"ղ":320,"մ":321,"յ":322,"ն":323,"ո":324,"պ":325,"ս":326,"վ":327,"տ":328,"ր":329,"ց":330,"ւ":331,"ք":332,"ְ":333,"ִ":334,"ֵ":335,"ֶ":336,"ַ":337,"ָ":338,"ֹ":339,"ּ":340,"א":341,"ב":342,"ג":343,"ד":344,"ה":345,"ו":346,"ז":347,"ח":348,"ט":349,"י":350,"כ":351,"ל":352,"ם":353,"מ":354,"ן":355,"נ":356,"ס":357,"ע":358,"פ":359,"צ":360,"ק":361,"ר":362,"ש":363,"ת":364,"ء":365,"أ":366,"إ":367,"ئ":368,"ا":369,"ب":370,"ة":371,"ت":372,"ث":373,"ج":374,"ح":375,"خ":376,"د":377,"ذ":378,"ر":379,"ز":380,"س":381,"ش":382,"ص":383,"ض":384,"ط":385,"ع":386,"غ":387,"ف":388,"ق":389,"ك":390,"ل":391,"م":392,"ن":393,"ه":394,"و":395,"ى":396,"ي":397,"َ":398,"ُ":399,"ِ":400,"ّ":401,"ْ":402,"ک":403,"ی":404,"ं":405,"अ":406,"क":407,"ग":408,"च":409,"ज":410,"ट":411,"ण":412,"त":413,"द":414,"ध":415,"न":416,"प":417,"ब":418,"भ":419,"म":420,"य":421,"र":422,"ल":423,"व":424,"श":425,"ष":426,"स":427,"ह":428,"ा":429,"ि":430,"ी":431,"ु":432,"ू":433,"ृ":434,"े":435,"ो":436,"्":437,"ক":438,"ল":439,"া":440,"্":441,"க":442,"ம":443,"ர":444,"ா":445,"ி":446,"்":447,"ก":448,"ข":449,"ง":450,"จ":451,"ช":452,"ด":453,"ต":454,"ท":455,"น":456,"บ":457,"ป":458,"พ":459,"ม":460,"ย":461,"ร":462,"ล":463,"ว":464,"ศ":465,"ส":466,"ห":467,"อ":468,"ะ":469,"ั":470,"า":471,"ิ":472,"ี":473,"ุ":474,"ู":475,"เ":476,"แ":477,"่":478,"้":479,"์":480,"་":481,"င":482,"ဆ":483,"န":484,"ပ":485,"ရ":486,"်":487,"ြ":488,"ა":489,"დ":490,"ე":491,"ვ":492,"ი":493,"ლ":494,"მ":495,"ნ":496,"რ":497,"ს":498,"უ":499,"ხ":500,"ក":501,"ត":502,"ន":503,"ព":504,"ម":505,"រ":506,"ស":507,"ា":508,"្":509,"ᵻ":510,"ḍ":511,"ḥ
":512,"ḷ":513,"ḻ":514,"ṃ":515,"ṅ":516,"ṇ":517,"ṉ":518,"ṛ":519,"ṣ":520,"ṭ":521,"ạ":522,"ả":523,"ấ":524,"ầ":525,"ẩ":526,"ậ":527,"ắ":528,"ẵ":529,"ế":530,"ễ":531,"ệ":532,"ị":533,"ọ":534,"ố":535,"ồ":536,"ổ":537,"ỗ":538,"ộ":539,"ớ":540,"ờ":541,"ở":542,"ợ":543,"ụ":544,"ủ":545,"ứ":546,"ử":547,"ữ":548,"ự":549,"ỳ":550,"ỹ":551,"ἀ":552,"ἄ":553,"ἐ":554,"ἔ":555,"ἰ":556,"ἱ":557,"ὁ":558,"ὐ":559,"ὑ":560,"ὴ":561,"ὶ":562,"ὸ":563,"ᾶ":564,"ῆ":565,"ῖ":566,"ῥ":567,"ῦ":568,"ῶ":569,"‐":570,"‑":571,"–":572,"—":573,"―":574,"‘":575,"’":576,"“":577,"”":578,"„":579,"†":580,"‡":581,"•":582,"…":583,"‰":584,"′":585,"″":586,"※":587,"⁄":588,"₂":589,"₣":590,"₤":591,"₩":592,"€":593,"₱":594,"₹":595,"ℓ":596,"№":597,"⅓":598,"⅔":599,"←":600,"↑":601,"→":602,"↓":603,"⇌":604,"⇒":605,"∂":606,"∈":607,"∑":608,"−":609,"∗":610,"∙":611,"√":612,"∞":613,"∩":614,"∴":615,"≈":616,"≠":617,"≡":618,"≤":619,"≥":620,"⊕":621,"⊗":622,"⊙":623,"⋅":624,"─":625,"│":626,"█":627,"▲":628,"►":629,"◄":630,"◊":631,"○":632,"◦":633,"☆":634,"☉":635,"☠":636,"☮":637,"♀":638,"♂":639,"♠":640,"♣":641,"♥":642,"♦":643,"♪":644,"♭":645,"♯":646,"⟨":647,"⟩":648,"ⴰ":649,"、":650,"〈":651,"〉":652,"「":653,"」":654,"〜":655,"あ":656,"い":657,"う":658,"え":659,"お":660,"か":661,"が":662,"き":663,"く":664,"ぐ":665,"け":666,"こ":667,"さ":668,"し":669,"じ":670,"す":671,"せ":672,"た":673,"だ":674,"ち":675,"っ":676,"つ":677,"て":678,"で":679,"と":680,"ど":681,"な":682,"に":683,"の":684,"は":685,"ば":686,"ひ":687,"ふ":688,"へ":689,"ぽ":690,"ま":691,"み":692,"め":693,"も":694,"ゃ":695,"や":696,"ゆ":697,"よ":698,"ら":699,"り":700,"る":701,"れ":702,"を":703,"ん":704,"ァ":705,"ア":706,"ィ":707,"イ":708,"ウ":709,"ェ":710,"エ":711,"ォ":712,"オ":713,"カ":714,"ガ":715,"キ":716,"ギ":717,"ク":718,"グ":719,"ケ":720,"ゲ":721,"コ":722,"ゴ":723,"サ":724,"ザ":725,"シ":726,"ジ":727,"ス":728,"ズ":729,"セ":730,"ゼ":731,"ソ":732,"タ":733,"ダ":734,"チ":735,"ッ":736,"ツ":737,"テ":738,"デ":739,"ト":740,"ド":741,"ナ":742,"ニ":743,"ネ":744,"ノ":745,"ハ":746,"バ":747,"パ":748,"ヒ":749,"ビ":750,"ピ":751,"フ":752,"ブ":753,"プ":754,"ベ":755,"ペ":756,"ホ":757,"ボ":758,"ポ":759,"マ":760,"ミ":761,"ム
":762,"メ":763,"モ":764,"ャ":765,"ヤ":766,"ュ":767,"ユ":768,"ョ":769,"ラ":770,"リ":771,"ル":772,"レ":773,"ロ":774,"ワ":775,"ン":776,"ヴ":777,"・":778,"ー":779,"一":780,"三":781,"上":782,"下":783,"不":784,"世":785,"个":786,"中":787,"丸":788,"主":789,"之":790,"事":791,"二":792,"五":793,"井":794,"京":795,"人":796,"代":797,"伊":798,"会":799,"伝":800,"作":801,"僕":802,"元":803,"光":804,"全":805,"八":806,"公":807,"六":808,"内":809,"写":810,"前":811,"剣":812,"劉":813,"動":814,"北":815,"十":816,"千":817,"华":818,"南":819,"印":820,"原":821,"古":822,"台":823,"史":824,"司":825,"合":826,"同":827,"名":828,"君":829,"周":830,"和":831,"四":832,"国":833,"國":834,"園":835,"在":836,"地":837,"型":838,"城":839,"堂":840,"場":841,"塊":842,"士":843,"夏":844,"外":845,"夜":846,"夢":847,"大":848,"天":849,"太":850,"女":851,"子":852,"字":853,"学":854,"守":855,"安":856,"宗":857,"宝":858,"宮":859,"家":860,"寺":861,"小":862,"少":863,"山":864,"島":865,"川":866,"州":867,"市":868,"師":869,"平":870,"年":871,"式":872,"張":873,"後":874,"心":875,"思":876,"恋":877,"愛":878,"成":879,"我":880,"戦":881,"所":882,"手":883,"政":884,"教":885,"文":886,"新":887,"方":888,"旋":889,"日":890,"明":891,"星":892,"春":893,"時":894,"書":895,"月":896,"有":897,"朝":898,"木":899,"本":900,"李":901,"村":902,"来":903,"東":904,"松":905,"林":906,"校":907,"機":908,"歌":909,"正":910,"武":911,"殿":912,"氏":913,"民":914,"水":915,"永":916,"泉":917,"法":918,"流":919,"海":920,"火":921,"無":922,"物":923,"狗":924,"王":925,"生":926,"田":927,"界":928,"白":929,"百":930,"的":931,"皇":932,"真":933,"眼":934,"石":935,"社":936,"神":937,"祭":938,"秋":939,"空":940,"第":941,"紅":942,"編":943,"美":944,"者":945,"聖":946,"能":947,"艦":948,"良":949,"色":950,"花":951,"草":952,"華":953,"葉":954,"藤":955,"行":956,"術":957,"西":958,"見":959,"記":960,"詩":961,"語":962,"説":963,"議":964,"谷":965,"賦":966,"赤":967,"路":968,"軍":969,"転":970,"遁":971,"道":972,"郎":973,"部":974,"里":975,"野":976,"金":977,"銀":978,"長":979,"門":980,"院":981,"集":982,"雪":983,"雲":984,"青":985,"音":986,"風":987,"香":988,"馬":989,"高":990,"鬼":991,"魂":992,"魔":993,"黄":994,"鼓":995,"龍":996,"fi":997,"fl":998,"!":999,"~":1000,"":1001,"h":1002,"t":1003,"1":1004,"s":1005,"3":1006,"d":1007,"7":1008,"r":1009,"x":101
0,"4":1011,"e":1012,"y":1013,"g":1014,"o":1015,"ς":1016,"ズ":1017,"v":1018,"յ":1019,"®":1020,"n":1021,"a":1022,"0":1023,"ウ":1024,"ō":1025,"8":1026,"6":1027,"9":1028,"ا":1029,"w":1030,"c":1031,"k":1032,"l":1033,"b":1034,"i":1035,"m":1036,"λ":1037,"2":1038,"u":1039,"é":1040,"p":1041,"f":1042,"ก":1043,"ي":1044,"5":1045,"ć":1046,"ʀ":1047,"z":1048,"ɪ":1049,"ó":1050,"ì":1051,"č":1052,"j":1053,"さ":1054,"ι":1055,"ö":1056,"י":1057,"ν":1058,"":1059,"q":1060,"ा":1061,"ю":1062,"ρ":1063,"ʰ":1064,"þ":1065,"¥":1066,"ר":1067,"♥":1068,"è":1069,"η":1070,"ə":1071,"ト":1072,"ß":1073,"о":1074,"ý":1075,"£":1076,"α":1077,"ː":1078,"န":1079,"ק":1080,"ी":1081,"ル":1082,"म":1083,"ś":1084,"م":1085,"φ":1086,"☆":1087,"ナ":1088,"м":1089,"ī":1090,"ה":1091,"会":1092,"©":1093,"て":1094,"न":1095,"í":1096,"ç":1097,"á":1098,"ग":1099,"ú":1100,"а":1101,"ʃ":1102,"н":1103,"ն":1104,"ε":1105,"ı":1106,"ù":1107,"グ":1108,"ド":1109,"リ":1110,"ب":1111,"õ":1112,"ʷ":1113,"ï":1114,"ā":1115,"ɕ":1116,"ן":1117,"ع":1118,"ɑ":1119,"ο":1120,"ί":1121,"原":1122,"е":1123,"よ":1124,"の":1125,"ג":1126,"°":1127,"ー":1128,"и":1129,"տ":1130,"ม":1131,"ن":1132,"ь":1133,"◊":1134,"к":1135,"ě":1136,"կ":1137,"р":1138,"ɫ":1139,"州":1140,"を":1141,"ى":1142,"る":1143,"ם":1144,"シ":1145,"่":1146,"й":1147,"ë":1148,"部":1149,"ศ":1150,"र":1151,"ン":1152,"し":1153,"υ":1154,"ÿ":1155,"ु":1156,"ท":1157,"ḻ":1158,"δ":1159,"ɛ":1160,"ぐ":1161,"င":1162,"θ":1163,"դ":1164,"ت":1165,"ل":1166,"ʲ":1167,"≤":1168,"ク":1169,"ʊ":1170,"→":1171,"ã":1172,"文":1173,"я":1174,"ស":1175,"क":1176,"л":1177,"ð":1178,"š":1179,"ر":1180,"ṃ":1181,"キ":1182,"ム":1183,"台":1184,"ü":1185,"ィ":1186,"ง":1187,"ス":1188,"ष":1189,"ж":1190,"ł":1191,"រ":1192,"ण":1193,"ミ":1194,"ц":1195,"ɔ":1196,"ّ":1197,"ב":1198,"ė":1199,"ş":1200,"ǒ":1201,"ヤ":1202,"ի":1203,"国":1204,"テ":1205,"г":1206,"خ":1207,"ʒ":1208,"ż":1209,"サ":1210,"و":1211,"ة":1212,"ɾ":1213,"ъ":1214,"コ":1215,"ɐ":1216,"ή":1217,"な":1218,"ē":1219,"ø":1220,"一":1221,"劉":1222,"ū":1223,"ă":1224,"ä":1225,"ف":1226,"β":1227,"¢":1228,"ŋ":1229,"ս":1230,"া":1231,"å":1232,"
〉":1233,"в":1234,"σ":1235,"と":1236,"り":1237,"↓":1238,"à":1239,"♭":1240,"ր":1241,"ת":1242,"स":1243,"デ":1244,"ò":1245,"̝":1246,"ĩ":1247,"ά":1248,"ព":1249,"ל":1250,"ツ":1251,"ḥ":1252,"ว":1253,"ˠ":1254,"\\":1255,"ิ":1256,"⁄":1257,"ज":1258,"ア":1259,"â":1260,"に":1261,"æ":1262,"イ":1263,"ك":1264,"ラ":1265,"ん":1266,"ό":1267,"ა":1268,"त":1269,"ћ":1270,"д":1271,"レ":1272,"د":1273,"ノ":1274,"郎":1275,"う":1276,"ὶ":1277,"ʿ":1278,"ח":1279,"ź":1280,"ベ":1281,"め":1282,"ត":1283,"プ":1284,"द":1285,"ケ":1286,"¤":1287,"ต":1288,"ʾ":1289,"ブ":1290,"ʻ":1291,"±":1292,"น":1293,"κ":1294,"ч":1295,"♠":1296,"ê":1297,"π":1298,";":1299,"ז":1300,"ี":1301,"¬":1302,"ि":1303,"ñ":1304,"ら":1305,"ध":1306,"ʁ":1307,"ń":1308,"€":1309,"ម":1310,"∂":1311,"͡":1312,"ủ":1313,"ヒ":1314,"ˤ":1315,"א":1316,"界":1317,"ő":1318,"т":1319,"タ":1320,"ء":1321,"ɲ":1322,"じ":1323,"お":1324,"ნ":1325,"⅔":1326,"ղ":1327,"ω":1328,"ň":1329,"ビ":1330,"ল":1331,"ḷ":1332,"ា":1333,"マ":1334,"ʌ":1335,"ы":1336,"ន":1337,"ம":1338,"ǔ":1339,"ô":1340,"ǐ":1341,"ल":1342,"水":1343,"נ":1344,"ξ":1345,"ą":1346,"ế":1347,"星":1348,"с":1349,"ย":1350,"こ":1351,"भ":1352,"動":1353,"î":1354,"γ":1355,"神":1356,"た":1357,"♯":1358,"♪":1359,"լ":1360,"ゼ":1361,"ų":1362,"ز":1363,"ガ":1364,"ه":1365,"≈":1366,"գ":1367,"×":1368,"№":1369,"э":1370,"ち":1371,"東":1372,"ħ":1373,"ů":1374,"エ":1375,"ば":1376,"ɡ":1377,"↑":1378,"ط":1379,"์":1380,"च":1381,"χ":1382,"у":1383,"ɨ":1384,"カ":1385,"ց":1386,"ǫ":1387,"☮":1388,"ா":1389,"₹":1390,"ダ":1391,"ح":1392,"⊙":1393,"す":1394,"ʼ":1395,"え":1396,"ក":1397,"ș":1398,"̊":1399,"ử":1400,"学":1401,"ד":1402,"ш":1403,"み":1404,"س":1405,"金":1406,"ギ":1407,"˚":1408,"‰":1409,"े":1410,"ج":1411,"ψ":1412,"≥":1413,"ע":1414,"]":1415,"ו":1416,"ू":1417,"¡":1418,"ɣ":1419,"龍":1420,"έ":1421,"メ":1422,"ं":1423,"∑":1424,"ž":1425,"ʕ":1426,"‡":1427,"ネ":1428,"い":1429,"明":1430,"説":1431,"ŏ":1432,"─":1433,"ֹ":1434,"ț":1435,"・":1436,"ɜ":1437,"ह":1438,"ュ":1439,"ま":1440,"松":1441,"º":1442,"け":1443,"უ":1444,"ユ":1445,"^":1446,"ị":1447,"堂":1448,"”":1449,"~":1450,"ʔ":1451,"!":1452,"オ":1453,"า":1454,"་
":1455,"ワ":1456,"ი":1457,"ब":1458,"ţ":1459,"ジ":1460,"旋":1461,"賦":1462,"∞":1463,"ῦ":1464,"者":1465,"葉":1466,"ġ":1467,"华":1468,"フ":1469,"来":1470,"∴":1471,"ض":1472,"←":1473,"ァ":1474,"○":1475,"ᾶ":1476,"ɯ":1477,"ש":1478,"第":1479,"│":1480,"ũ":1481,"長":1482,"ṛ":1483,"心":1484,"ṭ":1485,"’":1486,"ŭ":1487,"х":1488,"်":1489,"ט":1490,"₤":1491,"्":1492,"チ":1493,"ล":1494,"ハ":1495,"全":1496,"有":1497,"ی":1498,"đ":1499,"व":1500,"ช":1501,"ɟ":1502,"☠":1503,"க":1504,"ṇ":1505,"モ":1506,"⊕":1507,"ე":1508,"დ":1509,"古":1510,"̈":1511,"ư":1512,"ǎ":1513,"ṉ":1514,"μ":1515,"場":1516,"バ":1517,"三":1518,"ق":1519,"も":1520,"ッ":1521,"師":1522,"時":1523,"з":1524,"₂":1525,"で":1526,"₣":1527,"せ":1528,"ボ":1529,"木":1530,"τ":1531,"ɒ":1532,"ք":1533,"ồ":1534,"ľ":1535,"路":1536,"編":1537,"転":1538,"ỹ":1539,"記":1540,"ס":1541,"印":1542,"ห":1543,"ứ":1544,"_":1545,"ὴ":1546,"っ":1547,"く":1548,"ּ":1549,"ˀ":1550,"û":1551,"і":1552,"ั":1553,"ζ":1554,"♂":1555,"श":1556,"ổ":1557,"█":1558,"(":1559,"►":1560,"少":1561,"谷":1562,"ւ":1563,"ύ":1564,"ி":1565,"\"":1566,"ř":1567,"良":1568,"ạ":1569,"§":1570,"ᵻ":1571,"名":1572,"ˌ":1573,"ự":1574,"ゆ":1575,"香":1576,"б":1577,"=":1578,"ロ":1579,"&":1580,"思":1581,"☉":1582,"ش":1583,"物":1584,"ḍ":1585,"ُ":1586,"か":1587,"ṣ":1588,"მ":1589,"黄":1590,"∈":1591,"œ":1592,"十":1593,"้":1594,"µ":1595,"̩":1596,"事":1597,"̇":1598,"狗":1599,"ร":1600,"ơ":1601,"き":1602,"ệ":1603,"ğ":1604,"¾":1605,"ด":1606,"'":1607,"ェ":1608,"字":1609,"ရ":1610,"พ":1611,"寺":1612,"—":1613,"̪":1614,"無":1615,"は":1616,"が":1617,"„":1618,"∩":1619,"্":1620,"ヴ":1621,"ृ":1622,"太":1623,"や":1624,"ِ":1625,"ę":1626,"外":1627,"∙":1628,"石":1629,"ַ":1630,"ἀ":1631,"п":1632,"後":1633,"朝":1634,"大":1635,"♀":1636,"艦":1637,"赤":1638,"校":1639,"ɬ":1640,"≠":1641,"ա":1642,">":1643,"君":1644,"聖":1645,"僕":1646,"³":1647,"女":1648,"ლ":1649,"َ":1650,"ụ":1651,"ả":1652,"ố":1653,"白":1654,"ォ":1655,"◦":1656,"市":1657,"内":1658,"語":1659,"·":1660,"つ":1661,"ს":1662,"ộ":1663,"⊗":1664,"本":1665,"ो":1666,"ƒ":1667,"民":1668,"李":1669,"य":1670,"れ":1671,"鼓":1672,"ザ":1673,"ť":1674,"へ":1675,"ˈ":1676,"中"
:1677,"ἱ":1678,"ϕ":1679,"प":1680,"「":1681,"რ":1682,"ỗ":1683,"魔":1684,"ث":1685,"″":1686,"春":1687,"城":1688,"公":1689,"平":1690,"南":1691,"“":1692,"ョ":1693,"機":1694,"村":1695,"在":1696,"ゲ":1697,"ู":1698,"子":1699,"ῆ":1700,"島":1701,"園":1702,"¿":1703,"ポ":1704,"•":1705,"永":1706,"京":1707,"ک":1708,"♦":1709,"⟩":1710,"藤":1711,"ვ":1712,"ص":1713,"四":1714,"个":1715,"ป":1716,"บ":1717,"ソ":1718,"̞":1719,"馬":1720,"社":1721,"愛":1722,"作":1723,"պ":1724,"二":1725,"田":1726,"眼":1727,"♣":1728,"‘":1729,"世":1730,"鬼":1731,"剣":1732,"上":1733,"ひ":1734,"ர":1735,"◄":1736,"院":1737,"井":1738,"代":1739,"武":1740,"ё":1741,"王":1742,"ф":1743,"恋":1744,"ذ":1745,"光":1746,"ट":1747,"ո":1748,"夜":1749,"ỳ":1750,"張":1751,"戦":1752,"ْ":1753,"天":1754,"!":1755,"ะ":1756,"հ":1757,"▲":1758,"´":1759,"ʂ":1760,"ὸ":1761,"ข":1762,"ῖ":1763,"̆":1764,"ֵ":1765,"火":1766,"%":1767,"伝":1768,"щ":1769,"₩":1770,"銀":1771,"、":1772,"⅓":1773,"國":1774,"術":1775,"բ":1776,"ホ":1777,"ἰ":1778,"ふ":1779,"̠":1780,"/":1781,"士":1782,"ক":1783,"ở":1784,",":1785,"จ":1786,"夏":1787,"五":1788,"|":1789,"山":1790,"̧":1791,"ữ":1792,"―":1793,"殿":1794,"<":1795,"ŷ":1796,"ʋ":1797,"生":1798,"ј":1799,"ɖ":1800,"½":1801,"ャ":1802,"だ":1803,"@":1804,"小":1805,"−":1806,"$":1807,"*":1808,"+":1809,"成":1810,"写":1811,"不":1812,"宮":1813,"宝":1814,"祭":1815,"ọ":1816,"空":1817,"」":1818,"司":1819,"氏":1820,"我":1821,"海":1822,"-":1823,"色":1824,"ὁ":1825,"高":1826,"ど":1827,"西":1828,"青":1829,"草":1830,"北":1831,"̯":1832,"真":1833,"書":1834,"泉":1835,"秋":1836,"»":1837,")":1838,"}":1839,"…":1840,"ⴰ":1841,"地":1842,"野":1843,"‑":1844,"`":1845,"華":1846,"′":1847,"⋅":1848,"議":1849,"塊":1850,"ợ":1851,"雪":1852,"日":1853,"紅":1854,"〜":1855,"ส":1856,"川":1857,"̄":1858,"詩":1859,"史":1860,"音":1861,"前":1862,":":1863,"門":1864,"宗":1865,"和":1866,"ئ":1867,"方":1868,"雲":1869,"道":1870,"式":1871,"ե":1872,"家":1873,"†":1874,"周":1875,"∗":1876,"²":1877,"ֶ":1878,"–":1879,"⟨":1880,"遁":1881,"#":1882,"{":1883,"մ":1884,"‐":1885,"下":1886,"ώ":1887,"法":1888,"魂":1889,"美":1890,"¹":1891,"元":1892,"្":1893,"的":1894,"手":1895,"千":1896,"林":1897,"غ":1898,"里":1
899,"花":1900,"集":1901,"√":1902,"́":1903,"型":1904,"≡":1905,"丸":1906,"מ":1907,"風":1908,"新":1909,"〈":1910,"歌":1911,"六":1912,"之":1913,"百":1914,"流":1915,"₱":1916,"⇒":1917,"்":1918,"¶":1919,"能":1920,"主":1921,"վ":1922,"ゴ":1923,"ニ":1924,"正":1925,"所":1926,"八":1927,"軍":1928,"教":1929,"行":1930,"⇌":1931,"̍":1932,"安":1933,"ぽ":1934,"人":1935,"伊":1936,"夢":1937,"̥":1938,"皇":1939,"«":1940,"̃":1941,"政":1942,"守":1943,"÷":1944,"年":1945,"同":1946,"?":1947,"月":1948,"~":1949,".":1950,"ხ":1951,"ῶ":1952,"[":1953,"合":1954,"¼":1955,"※":1956,"ℓ":1957,"見":1958,"th":1959,"in":1960,"the":1961,"er":1962,"an":1963,"on":1964,"ar":1965,"re":1966,"st":1967,"at":1968,"ing":1969,"it":1970,"er":1971,"ro":1972,"or":1973,"of":1974,"en":1975,"ed":1976,"in":1977,"al":1978,"and":1979,"on":1980,"es":1981,"ou":1982,"per":1983,"ac":1984,"il":1985,"son":1986,"or":1987,"to":1988,"ic":1989,"person":1990,"ho":1991,"ion":1992,"de":1993,"an":1994,"ra":1995,"at":1996,"co":1997,"lo":1998,"is":1999,"al":2000,"le":2001,"as":2002,"ch":2003,"ur":2004,"as":2005,"is":2006,"ig":2007,"un":2008,"le":2009,"be":2010,"am":2011,"ir":2012,"to":2013,"se":2014,"wit":2015,"ag":2016,"with":2017,"ri":2018,"en":2019,"for":2020,"im":2021,"wh":2022,"st":2023,"lu":2024,"ts":2025,"ation":2026,"li":2027,"ne":2028,"sh":2029,"we":2030,"bo":2031,"ap":2032,"ad":2033,"po":2034,"ve":2035,"mo":2036,"te":2037,"si":2038,"di":2039,"ly":2040,"la":2041,"se":2042,"ct":2043,"sp":2044,"bu":2045,"by":2046,"con":2047,"str":2048,"ent":2049,"ers":2050,"ar":2051,"me":2052,"it":2053,"ay":2054,"ti":2055,"su":2056,"all":2057,"ve":2058,"ds":2059,"ab":2060,"ol":2061,"oc":2062,"vi":2063,"fro":2064,"ding":2065,"int":2066,"com":2067,"ak":2068,"the":2069,"el":2070,"qu":2071,"igh":2072,"no":2073,"do":2074,"pho":2075,"pro":2076,"au":2077,"ce":2078,"was":2079,"sc":2080,"20":2081,"ha":2082,"ss":2083,"fe":2084,"ver":2085,"par":2086,"th":2087,"oun":2088,"age":2089,"for":2090,"that":2091,"from":2092,"ent":2093,"et":2094,"pl":2095,"sig":2096,"ch":2097,"illu":2098,"me":2099,"ion":2100
,"illustr":2101,"ph":2102,"wor":2103,"roo":2104,"ere":2105,"car":2106,"ce":2107,"ing":2108,"illustration":2109,"gra":2110,"de":2111,"oo":2112,"ure":2113,"ter":2114,"ack":2115,"art":2116,"tr":2117,"ate":2118,"ic":2119,"ver":2120,"this":2121,"col":2122,"ty":2123,"ine":2124,"ex":2125,"ite":2126,"cu":2127,"one":2128,"pre":2129,"pe":2130,"ity":2131,"man":2132,"gro":2133,"sho":2134,"fu":2135,"are":2136,"desig":2137,"ock":2138,"ack":2139,"br":2140,"ir":2141,"our":2142,"ss":2143,"du":2144,"ree":2145,"room":2146,"stock":2147,"he":2148,"day":2149,"ge":2150,"ated":2151,"ther":2152,"bl":2153,"19":2154,"ight":2155,"so":2156,"tra":2157,"mu":2158,"201":2159,"ta":2160,"ad":2161,"ld":2162,"ff":2163,"mb":2164,"red":2165,"ter":2166,"wn":2167,"all":2168,"ace":2169,"ies":2170,"hou":2171,"photo":2172,"mar":2173,"pa":2174,"man":2175,"ma":2176,"pu":2177,"cre":2178,"ment":2179,"gre":2180,"gu":2181,"out":2182,"por":2183,"te":2184,"und":2185,"his":2186,"ci":2187,"las":2188,"af":2189,"and":2190,"ru":2191,"back":2192,"ge":2193,"ation":2194,"yo":2195,"tu":2196,"ice":2197,"design":2198,"ple":2199,"be":2200,"ks":2201,"dre":2202,"up":2203,"av":2204,"white":2205,"ting":2206,"pla":2207,"ard":2208,"pr":2209,"ke":2210,"cor":2211,"its":2212,"ground":2213,"king":2214,"der":2215,"ant":2216,"der":2217,"ser":2218,"ood":2219,"ame":2220,"new":2221,"he":2222,"hi":2223,"dra":2224,"ke":2225,"pic":2226,"able":2227,"dis":2228,"you":2229,"graph":2230,"ings":2231,"ted":2232,"photo":2233,"ary":2234,"mon":2235,"low":2236,"ake":2237,"min":2238,"ive":2239,"ill":2240,"ctor":2241,"stor":2242,"wat":2243,"background":2244,"our":2245,"ster":2246,"ich":2247,"ents":2248,"ba":2249,"sty":2250,"go":2251,"les":2252,"ll":2253,"am":2254,"house":2255,"ong":2256,"tim":2257,"ill":2258,"vie":2259,"fir":2260,"ev":2261,"uring":2262,"buil":2263,"mor":2264,"res":2265,"tw":2266,"mi":2267,"ok":2268,"ps":2269,"ish":2270,"wed":2271,"stu":2272,"ction":2273,"ft":2274,"ms":2275,"ded":2276,"free":2277,"wo":2278,"ys":2279,"shir":2280,"land":2281,"ck
":2282,"ste":2283,"sk":2284,"den":2285,"inter":2286,"ol":2287,"old":2288,"ving":2289,"us":2290,"ach":2291,"fi":2292,"which":2293,"ful":2294,"als":2295,"out":2296,"ang":2297,"pp":2298,"ye":2299,"black":2300,"ju":2301,"roy":2302,"so":2303,"vector":2304,"ons":2305,"sa":2306,"her":2307,"my":2308,"je":2309,"wedding":2310,"were":2311,"ile":2312,"ine":2313,"ast":2314,"eng":2315,"mber":2316,"ally":2317,"ash":2318,"fin":2319,"fl":2320,"act":2321,"ct":2322,"ll":2323,"spe":2324,"men":2325,"home":2326,"rou":2327,"gar":2328,"clo":2329,"photograph":2330,"ars":2331,"ade":2332,"int":2333,"clu":2334,"ors":2335,"gh":2336,"ous":2337,"sm":2338,"che":2339,"ren":2340,"your":2341,"ran":2342,"ical":2343,"gi":2344,"cent":2345,"shirt":2346,"bi":2347,"wing":2348,"world":2349,"have":2350,"two":2351,"way":2352,"side":2353,"us":2354,"view":2355,"il":2356,"ry":2357,"pat":2358,"during":2359,"go":2360,"op":2361,"ide":2362,"um":2363,"has":2364,"shi":2365,"ence":2366,"their":2367,"first":2368,"ist":2369,"line":2370,"not":2371,"but":2372,"char":2373,"gir":2374,"att":2375,"ale":2376,"ning":2377,"ian":2378,"ws":2379,"ns":2380,"can":2381,"el":2382,"pi":2383,"air":2384,"nu":2385,"off":2386,"ages":2387,"anim":2388,"ture":2389,"ele":2390,"ia":2391,"style":2392,"low":2393,"other":2394,"loc":2395,"bas":2396,"set":2397,"had":2398,"mat":2399,"nor":2400,"00":2401,"lar":2402,"tt":2403,"aper":2404,"sy":2405,"art":2406,"jo":2407,"alty":2408,"after":2409,"dit":2410,"blu":2411,"pos":2412,"image":2413,"ans":2414,"ations":2415,"beau":2416,"sic":2417,"wi":2418,"can":2419,"war":2420,"ould":2421,"ls":2422,"coun":2423,"no":2424,"time":2425,"hu":2426,"loo":2427,"ying":2428,"paper":2429,"photography":2430,"royalty":2431,"ound":2432,"comp":2433,"over":2434,"fash":2435,"paint":2436,"tri":2437,"bri":2438,"bir":2439,"building":2440,"city":2441,"wall":2442,"ins":2443,"id":2444,"tat":2445,"pe":2446,"ance":2447,"clas":2448,"ast":2449,"mp":2450,"they":2451,"ved":2452,"stre":2453,"bro":2454,"fashion":2455,"fo":2456,"top":2457,"lit":2
458,"tel":2459,"tion":2460,"comm":2461,"sou":2462,"ial":2463,"amer":2464,"cro":2465,"tur":2466,"more":2467,"atu":2468,"game":2469,"blue":2470,"into":2471,"flow":2472,"carto":2473,"best":2474,"vis":2475,"ating":2476,"ideas":2477,"win":2478,"ily":2479,"fe":2480,"wall":2481,"ant":2482,"ble":2483,"rit":2484,"bre":2485,"who":2486,"cep":2487,"some":2488,"dy":2489,"cas":2490,"rist":2491,"ff":2492,"ral":2493,"ational":2494,"dec":2495,"photos":2496,"ber":2497,"mer":2498,"end":2499,"hotel":2500,"cra":2501,"200":2502,"wal":2503,"ses":2504,"ball":2505,"ric":2506,"ck":2507,"made":2508,"ink":2509,"tatto":2510,"ish":2511,"ach":2512,"ior":2513,"where":2514,"also":2515,"ear":2516,"cartoon":2517,"fam":2518,"ner":2519,"girl":2520,"most":2521,"bed":2522,"aga":2523,"sur":2524,"ates":2525,"colle":2526,"wa":2527,"ul":2528,"ned":2529,"vel":2530,"ron":2531,"about":2532,"fil":2533,"scho":2534,"christ":2535,"when":2536,"gen":2537,"gh":2538,"hel":2539,"tre":2540,"beauti":2541,"cl":2542,"sun":2543,"sto":2544,"any":2545,"water":2546,"images":2547,"are":2548,"table":2549,"iz":2550,"wood":2551,"ative":2552,"ens":2553,"plan":2554,"air":2555,"ves":2556,"ort":2557,"ick":2558,"ands":2559,"work":2560,"beautiful":2561,"mas":2562,"woman":2563,"tes":2564,"will":2565,"now":2566,"ween":2567,"book":2568,"itch":2569,"year":2570,"lar":2571,"ny":2572,"don":2573,"os":2574,"tain":2575,"play":2576,"20":2577,"indi":2578,"da":2579,"cur":2580,"produ":2581,"chil":2582,"americ":2583,"been":2584,"dress":2585,"pen":2586,"dge":2587,"fun":2588,"inclu":2589,"tro":2590,"car":2591,"bur":2592,"est":2593,"vint":2594,"throu":2595,"vintage":2596,"ni":2597,"very":2598,"wer":2599,"ate":2600,"ces":2601,"sil":2602,"life":2603,"how":2604,"she":2605,"igh":2606,"back":2607,"ship":2608,"bet":2609,"we":2610,"sim":2611,"beach":2612,"sion":2613,"xt":2614,"stru":2615,"mer":2616,"ki":2617,"small":2618,"movi":2619,"ions":2620,"like":2621,"arch":2622,"peo":2623,"fron":2624,"ics":2625,"lon":2626,"love":2627,"ining":2628,"ise":2629,"har":2630,"to
n":2631,"these":2632,"people":2633,"school":2634,"tor":2635,"fri":2636,"christmas":2637,"door":2638,"flo":2639,"national":2640,"make":2641,"hand":2642,"may":2643,"ments":2644,"cho":2645,"moder":2646,"mes":2647,"cap":2648,"form":2649,"uni":2650,"bed":2651,"park":2652,"story":2653,"glas":2654,"high":2655,"gr":2656,"gs":2657,"kitch":2658,"hor":2659,"pri":2660,"mil":2661,"ited":2662,"dar":2663,"tle":2664,"mak":2665,"light":2666,"bar":2667,"color":2668,"ther":2669,"ite":2670,"ze":2671,"ney":2672,"wallpaper":2673,"18":2674,"moun":2675,"10":2676,"green":2677,"amp":2678,"do":2679,"cer":2680,"print":2681,"face":2682,"hol":2683,"front":2684,"re":2685,"garden":2686,"thing":2687,"there":2688,"tru":2689,"painting":2690,"thou":2691,"logo":2692,"woo":2693,"hal":2694,"quo":2695,"boo":2696,"modern":2697,"ater":2698,"her":2699,"bus":2700,"app":2701,"ep":2702,"night":2703,"long":2704,"get":2705,"while":2706,"living":2707,"tree":2708,"through":2709,"bor":2710,"drawing":2711,"tures":2712,"res":2713,"poster":2714,"tem":2715,"ffe":2716,"tran":2717,"what":2718,"tic":2719,"ury":2720,"asy":2721,"food":2722,"women":2723,"ery":2724,"ather":2725,"sses":2726,"three":2727,"kitchen":2728,"sum":2729,"ass":2730,"cts":2731,"family":2732,"ick":2733,"pes":2734,"youn":2735,"try":2736,"den":2737,"dog":2738,"sta":2739,"que":2740,"only":2741,"ty":2742,"lin":2743,"foo":2744,"many":2745,"led":2746,"stic":2747,"pres":2748,"off":2749,"act":2750,"orig":2751,"des":2752,"cau":2753,"ked":2754,"ring":2755,"ail":2756,"reci":2757,"patter":2758,"street":2759,"concep":2760,"north":2761,"ek":2762,"part":2763,"would":2764,"lic":2765,"decor":2766,"ligh":2767,"ape":2768,"cou":2769,"ban":2770,"ames":2771,"party":2772,"fur":2773,"sea":2774,"tal":2775,"music":2776,"log":2777,"fore":2778,"team":2779,"ten":2780,"than":2781,"2019":2782,"little":2783,"pol":2784,"ature":2785,"great":2786,"located":2787,"sing":2788,"pan":2789,"stri":2790,"used":2791,"serv":2792,"road":2793,"board":2794,"cute":2795,"sts":2796,"young":2797,"fore":279
8,"ue":2799,"less":2800,"busine":2801,"land":2802,"large":2803,"ious":2804,"fol":2805,"place":2806,"baby":2807,"writ":2808,"tho":2809,"son":2810,"wil":2811,"ats":2812,"mode":2813,"ities":2814,"film":2815,"ced":2816,"around":2817,"between":2818,"cted":2819,"et":2820,"bedroom":2821,"cle":2822,"river":2823,"ition":2824,"sle":2825,"ten":2826,"before":2827,"vo":2828,"eld":2829,"origin":2830,"port":2831,"bal":2832,"under":2833,"ason":2834,"hair":2835,"just":2836,"val":2837,"fer":2838,"ca":2839,"american":2840,"sal":2841,"down":2842,"ven":2843,"tin":2844,"medi":2845,"insp":2846,"number":2847,"tak":2848,"pictures":2849,"ire":2850,"ined":2851,"fac":2852,"interior":2853,"prin":2854,"area":2855,"squ":2856,"ches":2857,"space":2858,"sit":2859,"inst":2860,"against":2861,"sup":2862,"dark":2863,"charac":2864,"kids":2865,"lan":2866,"cy":2867,"swe":2868,"cam":2869,"show":2870,"concept":2871,"cour":2872,"movie":2873,"plac":2874,"sel":2875,"dent":2876,"ster":2877,"london":2878,"bra":2879,"head":2880,"ssi":2881,"look":2882,"chan":2883,"isol":2884,"ley":2885,"stan":2886,"bath":2887,"cture":2888,"ep":2889,"diff":2890,"far":2891,"az":2892,"read":2893,"portra":2894,"sco":2895,"est":2896,"acti":2897,"cake":2898,"birth":2899,"let":2900,"ory":2901,"tattoo":2902,"picture":2903,"classic":2904,"val":2905,"cess":2906,"secon":2907,"ket":2908,"leas":2909,"mas":2910,"years":2911,"kno":2912,"outf":2913,"decor":2914,"south":2915,"west":2916,"pal":2917,"nam":2918,"cele":2919,"play":2920,"near":2921,"aring":2922,"archite":2923,"funny":2924,"isolated":2925,"gold":2926,"star":2927,"season":2928,"series":2929,"vers":2930,"quotes":2931,"birthday":2932,"well":2933,"pur":2934,"glass":2935,"mis":2936,"fra":2937,"flowers":2938,"good":2939,"jun":2940,"them":2941,"collection":2942,"happ":2943,"ak":2944,"cess":2945,"character":2946,"soc":2947,"199":2948,"use":2949,"summer":2950,"nit":2951,"fic":2952,"heal":2953,"ore":2954,"over":2955,"part":2956,"ling":2957,"ket":2958,"ward":2959,"ven":2960,"proje":2961,"field":296
2,"right":2963,"pink":2964,"office":2965,"animals":2966,"dr":2967,"town":2968,"gol":2969,"ber":2970,"sted":2971,"inter":2972,"with":2973,"business":2974,"designs":2975,"dd":2976,"eu":2977,"high":2978,"if":2979,"ital":2980,"speci":2981,"being":2982,"rel":2983,"premi":2984,"him":2985,"phone":2986,"ional":2987,"es":2988,"ese":2989,"gener":2990,"uary":2991,"run":2992,"ger":2993,"scri":2994,"celebr":2995,"size":2996,"ath":2997,"sli":2998,"full":2999,"sw":3000,"sub":3001,"beg":3002,"call":3003,"up":3004,"pattern":3005,"looking":3006,"sky":3007,"ished":3008,"comple":3009,"brit":3010,"form":3011,"hoo":3012,"history":3013,"hand":3014,"end":3015,"flower":3016,"travel":3017,"cars":3018,"big":3019,"landsc":3020,"cover":3021,"jack":3022,"group":3023,"coo":3024,"ger":3025,"la":3026,"each":3027,"second":3028,"key":3029,"come":3030,"state":3031,"din":3032,"bathroom":3033,"war":3034,"disney":3035,"dresses":3036,"box":3037,"dition":3038,"sale":3039,"craft":3040,"ind":3041,"fla":3042,"wn":3043,"offic":3044,"four":3045,"original":3046,"ey":3047,"floor":3048,"sen":3049,"water":3050,"differ":3051,"united":3052,"childre":3053,"tattoos":3054,"vide":3055,"male":3056,"winter":3057,"appe":3058,"cir":3059,"ob":3060,"tit":3061,"recor":3062,"ina":3063,"shop":3064,"12":3065,"text":3066,"event":3067,"var":3068,"velo":3069,"000":3070,"boy":3071,"cal":3072,"releas":3073,"ming":3074,"0s":3075,"dri":3076,"wel":3077,"gan":3078,"sh":3079,"left":3080,"fran":3081,"ght":3082,"cat":3083,"austr":3084,"jap":3085,"da":3086,"happy":3087,"star":3088,"sign":3089,"lake":3090,"self":3091,"foot":3092,"austral":3093,"children":3094,"ga":3095,"windo":3096,"both":3097,"mag":3098,"custo":3099,"portrait":3100,"fre":3101,"games":3102,"dire":3103,"wooden":3104,"scen":3105,"cle":3106,"studi":3107,"ening":3108,"costu":3109,"center":3110,"came":3111,"here":3112,"2018":3113,"ellow":3114,"ma":3115,"spor":3116,"beds":3117,"heart":3118,"apart":3119,"vill":3120,"color":3121,"sym":3122,"then":3123,"adv":3124,"close":3125,"later":31
26,"na":3127,"island":3128,"co":3129,"50":3130,"ford":3131,"dic":3132,"syste":3133,"ko":3134,"ph":3135,"eg":3136,"lish":3137,"scre":3138,"outdoor":3139,"sive":3140,"develo":3141,"qui":3142,"sed":3143,"cy":3144,"ker":3145,"see":3146,"holding":3147,"ness":3148,"wear":3149,"tter":3150,"peri":3151,"cel":3152,"tter":3153,"whe":3154,"fle":3155,"dro":3156,"pie":3157,"follow":3158,"iling":3159,"inv":3160,"15":3161,"she":3162,"3d":3163,"recipes":3164,"map":3165,"ie":3166,"such":3167,"atures":3168,"architecture":3169,"wards":3170,"ben":3171,"30":3172,"animal":3173,"case":3174,"eve":3175,"med":3176,"spo":3177,"along":3178,"model":3179,"ines":3180,"under":3181,"styles":3182,"sti":3183,"ga":3184,"fts":3185,"furnit":3186,"furniture":3187,"take":3188,"yellow":3189,"easy":3190,"cul":3191,"card":3192,"tal":3193,"mbers":3194,"11":3195,"known":3196,"2020":3197,"iso":3198,"ered":3199,"bag":3200,"mill":3201,"ey":3202,"popu":3203,"dig":3204,"wl":3205,"body":3206,"sters":3207,"country":3208,"last":3209,"company":3210,"cause":3211,"pool":3212,"avy":3213,"next":3214,"batt":3215,"though":3216,"fire":3217,"open":3218,"rock":3219,"ement":3220,"sever":3221,"ffee":3222,"main":3223,"creative":3224,"port":3225,"dents":3226,"cast":3227,"resta":3228,"restaur":3229,"198":3230,"stra":3231,"bs":3232,"onal":3233,"ond":3234,"forest":3235,"leg":3236,"tting":3237,"ssed":3238,"cru":3239,"reg":3240,"emp":3241,"ets":3242,"bridge":3243,"ital":3244,"rench":3245,"lux":3246,"cla":3247,"coffee":3248,"icon":3249,"shoes":3250,"cli":3251,"another":3252,"organ":3253,"pers":3254,"early":3255,"video":3256,"including":3257,"dol":3258,"perfe":3259,"prote":3260,"fig":3261,"using":3262,"albu":3263,"ium":3264,"sul":3265,"afric":3266,"club":3267,"ain":3268,"oper":3269,"fall":3270,"tex":3271,"rooms":3272,"feat":3273,"196":3274,"featuring":3275,"hun":3276,"lead":3277,"stone":3278,"elect":3279,"champ":3280,"io":3281,"days":3282,"some":3283,"vil":3284,"contin":3285,"inspir":3286,"sitting":3287,"chur":3288,"ism":3289,"sing":3290,"
em":3291,"lam":3292,"ide":3293,"different":3294,"simple":3295,"yard":3296,"wearing":3297,"ath":3298,"indian":3299,"british":3300,"pped":3301,"metal":3302,"hon":3303,"ways":3304,"ends":3305,"197":3306,"cks":3307,"cr":3308,"sci":3309,"lad":3310,"ne":3311,"tan":3312,"ase":3313,"plate":3314,"cool":3315,"symbo":3316,"march":3317,"shel":3318,"england":3319,"outfits":3320,"band":3321,"row":3322,"holi":3323,"vision":3324,"century":3325,"develop":3326,"cab":3327,"logy":3328,"nature":3329,"sin":3330,"pping":3331,"ette":3332,"inst":3333,"mountain":3334,"tains":3335,"ditional":3336,"fish":3337,"hy":3338,"suc":3339,"stick":3340,"commun":3341,"sep":3342,"ric":3343,"lim":3344,"ment":3345,"landscape":3346,"ices":3347,"sting":3348,"bb":3349,"inside":3350,"natu":3351,"french":3352,"could":3353,"girls":3354,"consi":3355,"euro":3356,"find":3357,"fit":3358,"natural":3359,"head":3360,"times":3361,"bat":3362,"bar":3363,"action":3364,"break":3365,"ril":3366,"mal":3367,"del":3368,"ality":3369,"dan":3370,"sat":3371,"ean":3372,"ones":3373,"standing":3374,"short":3375,"system":3376,"wings":3377,"2017":3378,"ils":3379,"april":3380,"site":3381,"ual":3382,"tail":3383,"mother":3384,"song":3385,"pub":3386,"states":3387,"fast":3388,"na":3389,"constru":3390,"flat":3391,"ders":3392,"25":3393,"aves":3394,"human":3395,"restaurant":3396,"els":3397,"ler":3398,"pet":3399,"public":3400,"que":3401,"trees":3402,"blo":3403,"live":3404,"ie":3405,"ator":3406,"tech":3407,"enti":3408,"spring":3409,"ility":3410,"episo":3411,"force":3412,"pper":3413,"muse":3414,"perfect":3415,"my":3416,"mic":3417,"fant":3418,"several":3419,"week":3420,"16":3421,"ap":3422,"colors":3423,"said":3424,"because":3425,"18":3426,"same":3427,"who":3428,"thy":3429,"ed":3430,"cup":3431,"leather":3432,"bride":3433,"graph":3434,"cover":3435,"ised":3436,"perfor":3437,"festi":3438,"provi":3439,"shot":3440,"lear":3441,"rece":3442,"fantasy":3443,"online":3444,"album":3445,"super":3446,"brown":3447,"vic":3448,"dou":3449,"church":3450,"ange":3451,"sil
ver":3452,"royal":3453,"new":3454,"13":3455,"14":3456,"tv":3457,"every":3458,"shows":3459,"trans":3460,"milit":3461,"june":3462,"india":3463,"july":3464,"boat":3465,"did":3466,"books":3467,"zz":3468,"ii":3469,"dings":3470,"ering":3471,"augu":3472,"10":3473,"gal":3474,"sket":3475,"indu":3476,"rac":3477,"found":3478,"down":3479,"called":3480,"jacket":3481,"couple":3482,"museum":3483,"cer":3484,"ters":3485,"ca":3486,"anci":3487,"plu":3488,"east":3489,"resi":3490,"tro":3491,"fun":3492,"ssion":3493,"tab":3494,"gover":3495,"sol":3496,"2016":3497,"exam":3498,"univers":3499,"oul":3500,"name":3501,"canad":3502,"lace":3503,"thern":3504,"plant":3505,"jan":3506,"seen":3507,"own":3508,"194":3509,"premiere":3510,"ici":3511,"football":3512,"power":3513,"still":3514,"final":3515,"ized":3516,"arch":3517,"half":3518,"retur":3519,"bun":3520,"real":3521,"point":3522,"things":3523,"ta":3524,"five":3525,"dining":3526,"played":3527,"ever":3528,"built":3529,"apartment":3530,"ali":3531,"nove":3532,"working":3533,"bel":3534,"plan":3535,"service":3536,"akes":3537,"child":3538,"amaz":3539,"making":3540,"ctive":3541,"fru":3542,"mul":3543,"beauty":3544,"vers":3545,"playing":3546,"station":3547,"person":3548,"howe":3549,"dom":3550,"however":3551,"writing":3552,"flag":3553,"lands":3554,"single":3555,"octo":3556,"pical":3557,"race":3558,"vas":3559,"smo":3560,"histor":3561,"fa":3562,"tren":3563,"lot":3564,"hands":3565,"military":3566,"ired":3567,"present":3568,"thir":3569,"ly":3570,"students":3571,"ties":3572,"festival":3573,"news":3574,"places":3575,"gall":3576,"gift":3577,"incre":3578,"tour":3579,"contro":3580,"septe":3581,"tour":3582,"september":3583,"govern":3584,"vin":3585,"tty":3586,"cat":3587,"san":3588,"silhou":3589,"test":3590,"hind":3591,"hallo":3592,"luxury":3593,"rain":3594,"hot":3595,"halloween":3596,"cing":3597,"showing":3598,"mini":3599,"sun":3600,"much":3601,"spon":3602,"custom":3603,"resort":3604,"studio":3605,"female":3606,"ana":3607,"moon":3608,"acks":3609,"17":3610,"project":3611
,"horse":3612,"suit":3613,"fav":3614,"lines":3615,"2015":3616,"sports":3617,"tar":3618,"des":3619,"ston":3620,"activ":3621,"pil":3622,"tips":3623,"lling":3624,"square":3625,"cott":3626,"following":3627,"need":3628,"grey":3629,"ham":3630,"outside":3631,"don":3632,"17":3633,"behind":3634,"eds":3635,"pra":3636,"gest":3637,"dered":3638,"english":3639,"icle":3640,"tions":3641,"bird":3642,"eting":3643,"store":3644,"canvas":3645,"hair":3646,"work":3647,"bowl":3648,"ships":3649,"airs":3650,"market":3651,"army":3652,"chocol":3653,"round":3654,"orange":3655,"stage":3656,"became":3657,"snow":3658,"descri":3659,"episode":3660,"gy":3661,"holiday":3662,"bay":3663,"publ":3664,"ising":3665,"window":3666,"appro":3667,"list":3668,"ching":3669,"graphic":3670,"maj":3671,"rings":3672,"bott":3673,"clou":3674,"international":3675,"phil":3676,"watch":3677,"sau":3678,"lay":3679,"inese":3680,"abo":3681,"since":3682,"illed":3683,"die":3684,"recei":3685,"even":3686,"ocean":3687,"million":3688,"glo":3689,"august":3690,"tou":3691,"toge":3692,"sug":3693,"100":3694,"joh":3695,"traditional":3696,"anese":3697,"chocolate":3698,"til":3699,"vely":3700,"chinese":3701,"together":3702,"vol":3703,"cut":3704,"ures":3705,"wee":3706,"class":3707,"im":3708,"med":3709,"october":3710,"ams":3711,"plants":3712,"presi":3713,"per":3714,"bou":3715,"inf":3716,"golden":3717,"kes":3718,"sunset":3719,"recipe":3720,"24":3721,"walk":3722,"fire":3723,"di":3724,"sn":3725,"piece":3726,"sport":3727,"until":3728,"21":3729,"ague":3730,"comb":3731,"crit":3732,"earth":3733,"australia":3734,"works":3735,"mach":3736,"based":3737,"ef":3738,"match":3739,"veh":3740,"dance":3741,"quar":3742,"def":3743,"oil":3744,"costume":3745,"japanese":3746,"acro":3747,"paint":3748,"hall":3749,"movies":3750,"castle":3751,"pas":3752,"sleeve":3753,"uk":3754,"ble":3755,"pt":3756,"bow":3757,"looks":3758,"ags":3759,"ago":3760,"help":3761,"ywood":3762,"sticker":3763,"chick":3764,"arri":3765,"chi":3766,"ham":3767,"ishing":3768,"flor":3769,"plans":3770,"los":
3771,"photograph":3772,"pass":3773,"effe":3774,"read":3775,"ills":3776,"anime":3777,"retro":3778,"itu":3779,"morning":3780,"ever":3781,"frame":3782,"clothing":3783,"195":3784,"2014":3785,"cross":3786,"mens":3787,"artist":3788,"hat":3789,"shar":3790,"ctions":3791,"resul":3792,"acters":3793,"characters":3794,"dogs":3795,"player":3796,"inspiration":3797,"pow":3798,"foun":3799,"manag":3800,"november":3801,"doors":3802,"sy":3803,"released":3804,"hd":3805,"ights":3806,"slim":3807,"coming":3808,"stars":3809,"men":3810,"sical":3811,"dest":3812,"ot":3813,"truck":3814,"ato":3815,"ist":3816,"silhouette":3817,"ended":3818,"late":3819,"memor":3820,"chair":3821,"walking":3822,"ready":3823,"colorful":3824,"ado":3825,"above":3826,"drawings":3827,"version":3828,"took":3829,"construction":3830,"away":3831,"angel":3832,"general":3833,"healthy":3834,"verse":3835,"ails":3836,"date":3837,"actor":3838,"having":3839,"19":3840,"abstr":3841,"16":3842,"drawn":3843,"continu":3844,"avail":3845,"stry":3846,"bul":3847,"enjo":3848,"ants":3849,"every":3850,"wester":3851,"special":3852,"january":3853,"cream":3854,"grand":3855,"ddle":3856,"lies":3857,"without":3858,"moti":3859,"digital":3860,"those":3861,"word":3862,"scul":3863,"university":3864,"llywood":3865,"mou":3866,"ths":3867,"arts":3868,"issu":3869,"fied":3870,"dece":3871,"fres":3872,"features":3873,"sam":3874,"cil":3875,"covered":3876,"dist":3877,"bran":3878,"22":3879,"homes":3880,"ories":3881,"abstract":3882,"ancient":3883,"gallery":3884,"mp":3885,"guide":3886,"diam":3887,"mon":3888,"change":3889,"few":3890,"cent":3891,"create":3892,"popular":3893,"ay":3894,"cakes":3895,"od":3896,"drag":3897,"police":3898,"tele":3899,"compu":3900,"shoul":3901,"lights":3902,"available":3903,"league":3904,"vert":3905,"vac":3906,"cere":3907,"stand":3908,"comic":3909,"ror":3910,"press":3911,"although":3912,"ka":3913,"ign":3914,"search":3915,"guit":3916,"motor":3917,"position":3918,"dam":3919,"tempor":3920,"fresh":3921,"central":3922,"taken":3923,"local":3924,"un
ique":3925,"aircraft":3926,"exter":3927,"farm":3928,"fit":3929,"anti":3930,"lls":3931,"van":3932,"training":3933,"victor":3934,"held":3935,"febr":3936,"camer":3937,"february":3938,"america":3939,"today":3940,"tropical":3941,"amazing":3942,"lor":3943,"cast":3944,"december":3945,"dream":3946,"third":3947,"across":3948,"western":3949,"members":3950,"spec":3951,"pack":3952,"40":3953,"jour":3954,"ols":3955,"began":3956,"ples":3957,"auth":3958,"conne":3959,"home":3960,"remo":3961,"posed":3962,"ads":3963,"tured":3964,"tain":3965,"rema":3966,"science":3967,"iety":3968,"deli":3969,"order":3970,"tum":3971,"purple":3972,"laun":3973,"due":3974,"ze":3975,"mob":3976,"thers":3977,"never":3978,"compe":3979,"concer":3980,"six":3981,"performan":3982,"china":3983,"formed":3984,"symbol":3985,"lay":3986,"2013":3987,"sculp":3988,"ged":3989,"appear":3990,"navy":3991,"roun":3992,"county":3993,"yor":3994,"hill":3995,"make":3996,"tun":3997,"post":3998,"wide":3999,"ately":4000,"wild":4001,"chang":4002,"chicken":4003,"iting":4004,"know":4005,"id":4006,"pair":4007,"glasses":4008,"bike":4009,"soft":4010,"scene":4011,"mir":4012,"atest":4013,"nails":4014,"arti":4015,"should":4016,"lution":4017,"file":4018,"tower":4019,"coat":4020,"battle":4021,"houses":4022,"fel":4023,"emb":4024,"tly":4025,"ards":4026,"coast":4027,"stric":4028,"eye":4029,"designed":4030,"spar":4031,"mac":4032,"23":4033,"progra":4034,"ville":4035,"major":4036,"fly":4037,"painted":4038,"beli":4039,"sions":4040,"roman":4041,"jewel":4042,"lea":4043,"village":4044,"tial":4045,"access":4046,"father":4047,"figh":4048,"til":4049,"ry":4050,"train":4051,"title":4052,"russi":4053,"awards":4054,"maga":4055,"lap":4056,"2012":4057,"193":4058,"angeles":4059,"magaz":4060,"ints":4061,"won":4062,"posters":4063,"too":4064,"middle":4065,"rs":4066,"ners":4067,"nal":4068,"ful":4069,"bloo":4070,"master":4071,"ched":4072,"within":4073,"champion":4074,"oned":4075,"ann":4076,"mountains":4077,"base":4078,"mad":4079,"xic":4080,"deta":4081,"sday":4082,"served
":4083,"bak":4084,"born":4085,"ax":4086,"government":4087,"spi":4088,"edu":4089,"track":4090,"breakfast":4091,"mater":4092,"oring":4093,"health":4094,"comics":4095,"production":4096,"ssing":4097,"plo":4098,"care":4099,"wine":4100,"sele":4101,"super":4102,"running":4103,"draw":4104,"going":4105,"dence":4106,"temporary":4107,"polit":4108,"sand":4109,"african":4110,"storm":4111,"han":4112,"want":4113,"jewelry":4114,"sequ":4115,"28":4116,"stron":4117,"page":4118,"ef":4119,"zo":4120,"hib":4121,"shoulder":4122,"college":4123,"screen":4124,"king":4125,"papers":4126,"mix":4127,"27":4128,"german":4129,"toys":4130,"shower":4131,"gy":4132,"fort":4133,"formation":4134,"princess":4135,"sof":4136,"apple":4137,"ban":4138,"respon":4139,"leaves":4140,"former":4141,"alls":4142,"ade":4143,"run":4144,"swim":4145,"friends":4146,"educ":4147,"pack":4148,"od":4149,"bear":4150,"put":4151,"ceremon":4152,"aster":4153,"community":4154,"gas":4155,"secre":4156,"sl":4157,"dy":4158,"ges":4159,"15":4160,"nail":4161,"among":4162,"accor":4163,"fab":4164,"ra":4165,"sch":4166,"crafts":4167,"mexic":4168,"quality":4169,"president":4170,"inspired":4171,"future":4172,"ened":4173,"views":4174,"ference":4175,"nur":4176,"boys":4177,"queen":4178,"profe":4179,"vir":4180,"range":4181,"exterior":4182,"vege":4183,"contain":4184,"gray":4185,"bright":4186,"inted":4187,"vil":4188,"fic":4189,"pun":4190,"buildings":4191,"experi":4192,"fair":4193,"canada":4194,"iphone":4195,"care":4196,"geo":4197,"reading":4198,"gn":4199,"fruit":4200,"ington":4201,"26":4202,"birds":4203,"once":4204,"cand":4205,"spir":4206,"rain":4207,"type":4208,"buy":4209,"inn":4210,"ba":4211,"valley":4212,"sweet":4213,"figure":4214,"hang":4215,"france":4216,"ow":4217,"afe":4218,"libr":4219,"court":4220,"statu":4221,"ron":4222,"comes":4223,"hood":4224,"ai":4225,"brid":4226,"paris":4227,"pop":4228,"watercolor":4229,"pretty":4230,"hairstyles":4231,"bottle":4232,"hollywood":4233,"money":4234,"neck":4235,"marvel":4236,"computer":4237,"cop":4238,"berry":423
9,"lost":4240,"projects":4241,"included":4242,"2011":4243,"stad":4244,"tea":4245,"support":4246,"development":4247,"boots":4248,"species":4249,"supp":4250,"named":4251,"met":4252,"augh":4253,"death":4254,"magazine":4255,"alter":4256,"proper":4257,"tee":4258,"pract":4259,"hard":4260,"steel":4261,"seam":4262,"machine":4263,"posit":4264,"2010":4265,"walls":4266,"mobile":4267,"now":4268,"brand":4269,"import":4270,"minu":4271,"ut":4272,"goo":4273,"idea":4274,"camera":4275,"sible":4276,"icles":4277,"nel":4278,"paintings":4279,"ences":4280,"net":4281,"gan":4282,"curren":4283,"start":4284,"famous":4285,"players":4286,"radi":4287,"ials":4288,"butter":4289,"step":4290,"ped":4291,"wars":4292,"centre":4293,"ception":4294,"prepar":4295,"received":4296,"elements":4297,"quest":4298,"ached":4299,"ky":4300,"level":4301,"example":4302,"ati":4303,"of":4304,"japan":4305,"gard":4306,"29":4307,"gam":4308,"theat":4309,"storage":4310,"dead":4311,"shor":4312,"mel":4313,"exten":4314,"does":4315,"inde":4316,"win":4317,"toy":4318,"doc":4319,"created":4320,"inten":4321,"lation":4322,"floral":4323,"60":4324,"events":4325,"sketch":4326,"placed":4327,"ton":4328,"nov":4329,"ently":4330,"clim":4331,"friend":4332,"shap":4333,"double":4334,"than":4335,"media":4336,"see":4337,"bud":4338,"record":4339,"performance":4340,"init":4341,"mc":4342,"drin":4343,"carri":4344,"taking":4345,"dragon":4346,"rise":4347,"again":4348,"letter":4349,"give":4350,"gun":4351,"gor":4352,"thu":4353,"advent":4354,"outfit":4355,"ging":4356,"tables":4357,"fact":4358,"ceremony":4359,"shape":4360,"mini":4361,"won":4362,"hoodie":4363,"lat":4364,"crow":4365,"diy":4366,"wr":4367,"tation":4368,"net":4369,"check":4370,"daugh":4371,"according":4372,"save":4373,"simil":4374,"member":4375,"zed":4376,"ous":4377,"display":4378,"chic":4379,"contemporary":4380,"temp":4381,"ination":4382,"official":4383,"kore":4384,"seamless":4385,"visit":4386,"stadium":4387,"oci":4388,"motorcy":4389,"allow":4390,"marri":4391,"mic":4392,"associ":4393,"italy":4
394,"roof":4395,"medium":4396,"aning":4397,"keep":4398,"grass":4399,"patterns":4400,"yer":4401,"eyes":4402,"wallpapers":4403,"better":4404,"ica":4405,"expe":4406,"13":4407,"tom":4408,"costumes":4409,"build":4410,"ess":4411,"free":4412,"written":4413,"temple":4414,"km":4415,"fully":4416,"artwork":4417,"guitar":4418,"satur":4419,"bus":4420,"month":4421,"serve":4422,"equi":4423,"spac":4424,"europe":4425,"decoration":4426,"theme":4427,"leng":4428,"hur":4429,"library":4430,"tor":4431,"award":4432,"lion":4433,"blan":4434,"student":4435,"wre":4436,"14":4437,"ram":4438,"ash":4439,"favor":4440,"wol":4441,"swimming":4442,"pics":4443,"release":4444,"priv":4445,"farm":4446,"become":4447,"aw":4448,"carpet":4449,"cock":4450,"techno":4451,"auto":4452,"weight":4453,"social":4454,"zy":4455,"via":4456,"cont":4457,"control":4458,"aci":4459,"iron":4460,"evening":4461,"ization":4462,"autum":4463,"usa":4464,"mit":4465,"dru":4466,"flying":4467,"makeup":4468,"makes":4469,"ata":4470,"dess":4471,"gardens":4472,"colo":4473,"circle":4474,"aged":4475,"sour":4476,"foc":4477,"camp":4478,"aching":4479,"diag":4480,"active":4481,"australian":4482,"ental":4483,"tt":4484,"clear":4485,"basket":4486,"lou":4487,"annu":4488,"hen":4489,"instru":4490,"aving":4491,"present":4492,"phy":4493,"gle":4494,"box":4495,"oes":4496,"repor":4497,"sa":4498,"review":4499,"often":4500,"backyard":4501,"skir":4502,"basketball":4503,"cket":4504,"products":4505,"ically":4506,"2009":4507,"autumn":4508,"lighting":4509,"mi":4510,"product":4511,"cup":4512,"alist":4513,"sne":4514,"section":4515,"bon":4516,"skin":4517,"rail":4518,"activities":4519,"san":4520,"esc":4521,"altern":4522,"desk":4523,"africa":4524,"thes":4525,"accessories":4526,"iled":4527,"route":4528,"cin":4529,"pose":4530,"desser":4531,"dish":4532,"show":4533,"ention":4534,"complete":4535,"labe":4536,"jer":4537,"success":4538,"patio":4539,"diamond":4540,"sand":4541,"alle":4542,"usu":4543,"various":4544,"lau":4545,"groun":4546,"windows":4547,"research":4548,"cards":454
9,"john":4550,"fine":4551,"trave":4552,"nar":4553,"bags":4554,"york":4555,"surroun":4556,"words":4557,"phy":4558,"exhib":4559,"dering":4560,"envi":4561,"trucks":4562,"role":4563,"mask":4564,"bread":4565,"hero":4566,"past":4567,"met":4568,"minim":4569,"shirts":4570,"germany":4571,"distric":4572,"announ":4573,"2008":4574,"ken":4575,"kingdom":4576,"got":4577,"southern":4578,"lady":4579,"week":4580,"eleg":4581,"neck":4582,"previ":4583,"load":4584,"sis":4585,"division":4586,"clothes":4587,"fabric":4588,"mug":4589,"ith":4590,"gri":4591,"intro":4592,"kin":4593,"prints":4594,"coloring":4595,"aster":4596,"opening":4597,"feet":4598,"gran":4599,"00":4600,"environ":4601,"deser":4602,"ac":4603,"bal":4604,"cting":4605,"surface":4606,"gifts":4607,"fr":4608,"near":4609,"learn":4610,"rainbow":4611,"italian":4612,"wan":4613,"lun":4614,"maid":4615,"bour":4616,"mur":4617,"gradu":4618,"friday":4619,"emer":4620,"trail":4621,"parts":4622,"enter":4623,"cycl":4624,"necklace":4625,"pupp":4626,"chap":4627,"gue":4628,"lord":4629,"chicago":4630,"common":4631,"friend":4632,"introdu":4633,"matic":4634,"angle":4635,"period":4636,"80":4637,"hosp":4638,"signed":4639,"tch":4640,"medical":4641,"plat":4642,"kets":4643,"ously":4644,"dry":4645,"palace":4646,"ya":4647,"easter":4648,"magic":4649,"private":4650,"tie":4651,"heavy":4652,"web":4653,"edition":4654,"sure":4655,"strong":4656,"vement":4657,"asse":4658,"airport":4659,"cis":4660,"daughter":4661,"sculpture":4662,"guest":4663,"bab":4664,"always":4665,"proble":4666,"ult":4667,"smiling":4668,"must":4669,"brick":4670,"statue":4671,"fan":4672,"theatre":4673,"circu":4674,"career":4675,"pot":4676,"gl":4677,"shire":4678,"chairs":4679,"install":4680,"rang":4681,"tra":4682,"really":4683,"europe":4684,"wra":4685,"base":4686,"saw":4687,"seven":4688,"ades":4689,"const":4690,"championship":4691,"ener":4692,"mess":4693,"template":4694,"antique":4695,"refle":4696,"rest":4697,"sky":4698,"electric":4699,"reception":4700,"actu":4701,"wind":4702,"obje":4703,"industri":4
704,"snow":4705,"tile":4706,"colu":4707,"annual":4708,"sunday":4709,"sneak":4710,"ib":4711,"ceiling":4712,"songs":4713,"numbers":4714,"reve":4715,"anta":4716,"added":4717,"giving":4718,"fishing":4719,"signs":4720,"sku":4721,"professi":4722,"attr":4723,"attemp":4724,"engine":4725,"advert":4726,"any":4727,"length":4728,"northern":4729,"appar":4730,"acade":4731,"stand":4732,"upon":4733,"important":4734,"says":4735,"span":4736,"tive":4737,"european":4738,"au":4739,"plane":4740,"given":4741,"regu":4742,"latest":4743,"pit":4744,"plus":4745,"adow":4746,"dit":4747,"wns":4748,"yed":4749,"cial":4750,"illustrations":4751,"commer":4752,"asian":4753,"impro":4754,"ino":4755,"lead":4756,"law":4757,"district":4758,"eping":4759,"went":4760,"arm":4761,"forces":4762,"largest":4763,"seas":4764,"metri":4765,"va":4766,"rustic":4767,"hab":4768,"begin":4769,"wro":4770,"oly":4771,"favorite":4772,"casual":4773,"docu":4774,"saturday":4775,"aer":4776,"bot":4777,"ghou":4778,"fif":4779,"plastic":4780,"ray":4781,"cti":4782,"suit":4783,"inte":4784,"jeans":4785,"xim":4786,"lego":4787,"takes":4788,"wife":4789,"deep":4790,"add":4791,"class":4792,"gate":4793,"survi":4794,"services":4795,"campa":4796,"cook":4797,"dinner":4798,"2007":4799,"engine":4800,"flight":4801,"education":4802,"wood":4803,"wrote":4804,"greeting":4805,"technology":4806,"ena":4807,"ano":4808,"think":4809,"marine":4810,"staf":4811,"audi":4812,"lo":4813,"baseball":4814,"mary":4815,"mm":4816,"sche":4817,"fan":4818,"stead":4819,"ner":4820,"soldi":4821,"golf":4822,"poses":4823,"leading":4824,"sse":4825,"aly":4826,"includes":4827,"represent":4828,"foot":4829,"ha":4830,"decorations":4831,"similar":4832,"ei":4833,"eas":4834,"claim":4835,"mid":4836,"speak":4837,"meas":4838,"drop":4839,"kar":4840,"types":4841,"ghout":4842,"kin":4843,"throughout":4844,"metric":4845,"egy":4846,"vehicle":4847,"course":4848,"described":4849,"grounds":4850,"aging":4851,"inger":4852,"mid":4853,"include":4854,"rome":4855,"you":4856,"ken":4857,"highway":4858,"expl":4
859,"willi":4860,"mun":4861,"deci":4862,"emplo":4863,"texture":4864,"mark":4865,"graphy":4866,"hold":4867,"shed":4868,"structure":4869,"study":4870,"cri":4871,"eat":4872,"ama":4873,"entry":4874,"whole":4875,"property":4876,"thought":4877,"estab":4878,"total":4879,"desp":4880,"actre":4881,"avi":4882,"corpor":4883,"shopping":4884,"gear":4885,"cats":4886,"photographer":4887,"ased":4888,"colour":4889,"aps":4890,"entran":4891,"coron":4892,"bio":4893,"areas":4894,"villa":4895,"decorating":4896,"pants":4897,"relig":4898,"cheese":4899,"ctors":4900,"him":4901,"chester":4902,"sta":4903,"television":4904,"further":4905,"31":4906,"break":4907,"bridal":4908,"lin":4909,"designer":4910,"astern":4911,"vacation":4912,"bank":4913,"elegant":4914,"pieces":4915,"sia":4916,"meeting":4917,"stay":4918,"energy":4919,"hard":4920,"web":4921,"trip":4922,"ox":4923,"dor":4924,"aded":4925,"kn":4926,"clouds":4927,"far":4928,"corner":4929,"ballo":4930,"memes":4931,"hanging":4932,"batman":4933,"months":4934,"beer":4935,"experience":4936,"tiny":4937,"urban":4938,"kind":4939,"simp":4940,"sugar":4941,"apartments":4942,"deck":4943,"nific":4944,"by":4945,"ku":4946,"mart":4947,"chall":4948,"thetic":4949,"laptop":4950,"ng":4951,"why":4952,"wheel":4953,"planet":4954,"teen":4955,"cit":4956,"roll":4957,"unit":4958,"cold":4959,"points":4960,"fans":4961,"drive":4962,"life":4963,"budget":4964,"defen":4965,"iss":4966,"brac":4967,"wers":4968,"icu":4969,"stands":4970,"colon":4971,"influ":4972,"nament":4973,"190":4974,"landscap":4975,"process":4976,"lied":4977,"models":4978,"flu":4979,"ane":4980,"fox":4981,"eved":4982,"geor":4983,"cosp":4984,"edit":4985,"pus":4986,"slo":4987,"greek":4988,"wash":4989,"may":4990,"forms":4991,"ability":4992,"tank":4993,"inve":4994,"era":4995,"culture":4996,"ption":4997,"information":4998,"cosplay":4999,"inch":5000,"stop":5001,"cafe":5002,"entrance":5003,"imp":5004,"union":5005,"crew":5006,"ka":5007,"cm":5008,"za":5009,"cottage":5010,"something":5011,"quil":5012,"enjoy":5013,"requi":501
4,"hum":5015,"manu":5016,"pression":5017,"ecu":5018,"success":5019,"invol":5020,"1st":5021,"details":5022,"compan":5023,"shown":5024,"bby":5025,"plic":5026,"galax":5027,"pon":5028,"non":5029,"ala":5030,"sold":5031,"attack":5032,"skull":5033,"warm":5034,"captain":5035,"bad":5036,"sound":5037,"ja":5038,"yoga":5039,"four":5040,"ultim":5041,"arm":5042,"considered":5043,"staff":5044,"bble":5045,"eag":5046,"return":5047,"icons":5048,"aches":5049,"hi":5050,"crete":5051,"dun":5052,"gel":5053,"senior":5054,"quote":5055,"desert":5056,"gets":5057,"illing":5058,"lid":5059,"bes":5060,"lower":5061,"lled":5062,"suff":5063,"exer":5064,"continued":5065,"limited":5066,"mission":5067,"plays":5068,"job":5069,"bad":5070,"tters":5071,"bles":5072,"lt":5073,"comman":5074,"micro":5075,"estate":5076,"cooking":5077,"sets":5078,"region":5079,"choo":5080,"program":5081,"spa":5082,"price":5083,"director":5084,"soccer":5085,"finished":5086,"concert":5087,"ts":5088,"downtown":5089,"thro":5090,"attle":5091,"possible":5092,"separ":5093,"listed":5094,"2006":5095,"mb":5096,"cher":5097,"relation":5098,"leaf":5099,"actors":5100,"green":5101,"filled":5102,"musc":5103,"nice":5104,"shu":5105,"em":5106,"ris":5107,"turn":5108,"despite":5109,"burg":5110,"prince":5111,"lash":5112,"chain":5113,"almost":5114,"ries":5115,"location":5116,"professional":5117,"tribu":5118,"adventure":5119,"defin":5120,"ky":5121,"promo":5122,"hit":5123,"stuff":5124,"compar":5125,"agu":5126,"hospital":5127,"eleph":5128,"reco":5129,"tig":5130,"colored":5131,"chie":5132,"mouth":5133,"getting":5134,"van":5135,"defe":5136,"invit":5137,"groom":5138,"ids":5139,"nic":5140,"horses":5141,"islands":5142,"skirt":5143,"campaign":5144,"acy":5145,"call":5146,"produced":5147,"industrial":5148,"result":5149,"fireplace":5150,"multi":5151,"west":5152,"colour":5153,"termin":5154,"exper":5155,"featu":5156,"path":5157,"roman":5158,"ators":5159,"nav":5160,"mixed":5161,"safe":5162,"barn":5163,"chart":5164,"sweater":5165,"bert":5166,"logical":5167,"delic":51
68,"cap":5169,"displayed":5170,"cles":5171,"tiles":5172,"below":5173,"200":5174,"gun":5175,"vehicles":5176,"towards":5177,"others":5178,"500":5179,"drink":5180,"langu":5181,"bron":5182,"dney":5183,"ented":5184,"tutor":5185,"meal":5186,"featured":5187,"png":5188,"aes":5189,"engag":5190,"icks":5191,"cookies":5192,"pment":5193,"racing":5194,"courte":5195,"wild":5196,"valent":5197,"musical":5198,"broad":5199,"historic":5200,"ially":5201,"moved":5202,"sneakers":5203,"partici":5204,"phili":5205,"scar":5206,"farmhouse":5207,"started":5208,"dr":5209,"palm":5210,"gave":5211,"business":5212,"rought":5213,"well":5214,"tries":5215,"instead":5216,"session":5217,"russian":5218,"edge":5219,"pencil":5220,"block":5221,"mirror":5222,"watches":5223,"winning":5224,"feder":5225,"courtesy":5226,"rat":5227,"sen":5228,"fon":5229,"crown":5230,"tional":5231,"cotton":5232,"eight":5233,"labor":5234,"dc":5235,"after":5236,"published":5237,"suppor":5238,"zen":5239,"uses":5240,"wave":5241,"establ":5242,"rice":5243,"issue":5244,"host":5245,"assi":5246,"galaxy":5247,"partment":5248,"pati":5249,"riding":5250,"equ":5251,"redi":5252,"done":5253,"indepen":5254,"70":5255,"minutes":5256,"anato":5257,"motorcycle":5258,"mix":5259,"addition":5260,"download":5261,"econ":5262,"banner":5263,"doctor":5264,"stylish":5265,"domin":5266,"commis":5267,"tall":5268,"speed":5269,"shoe":5270,"ari":5271,"dly":5272,"ough":5273,"eating":5274,"athle":5275,"romantic":5276,"explo":5277,"engagement":5278,"engers":5279,"thur":5280,"spanish":5281,"sugge":5282,"spee":5283,"feel":5284,"premium":5285,"creatures":5286,"particu":5287,"mexico":5288,"quet":5289,"redu":5290,"189":5291,"eastern":5292,"close":5293,"classroom":5294,"concrete":5295,"holds":5296,"hours":5297,"butt":5298,"zer":5299,"opened":5300,"ingredi":5301,"yl":5302,"died":5303,"lla":5304,"ority":5305,"ex":5306,"industry":5307,"learning":5308,"bean":5309,"giant":5310,"bring":5311,"econo":5312,"diagram":5313,"say":5314,"growing":5315,"tte":5316,"clock":5317,"ro":5318,"just
":5319,"political":5320,"12":5321,"ami":5322,"worl":5323,"soon":5324,"gown":5325,"graphics":5326,"35":5327,"rela":5328,"pump":5329,"wed":5330,"rendering":5331,"garage":5332,"vidu":5333,"rare":5334,"friendly":5335,"daily":5336,"website":5337,"empty":5338,"empire":5339,"dev":5340,"equipment":5341,"commercial":5342,"butterfly":5343,"sey":5344,"awe":5345,"closed":5346,"worth":5347,"transpar":5348,"runs":5349,"alp":5350,"individu":5351,"bla":5352,"aff":5353,"treat":5354,"alo":5355,"meet":5356,"conference":5357,"nd":5358,"effect":5359,"shaped":5360,"personal":5361,"coach":5362,"interest":5363,"bit":5364,"creek":5365,"achi":5366,"stick":5367,"rock":5368,"forming":5369,"salad":5370,"ending":5371,"grade":5372,"ping":5373,"neu":5374,"films":5375,"tual":5376,"acry":5377,"count":5378,"orn":5379,"atory":5380,"mental":5381,"detail":5382,"sla":5383,"clean":5384,"eded":5385,"secret":5386,"neigh":5387,"move":5388,"condition":5389,"chev":5390,"dant":5391,"ride":5392,"aver":5393,"sor":5394,"hin":5395,"trends":5396,"agre":5397,"crowd":5398,"bil":5399,"sheet":5400,"dim":5401,"public":5402,"demo":5403,"pass":5404,"miles":5405,"doing":5406,"felt":5407,"black":5408,"vs":5409,"refer":5410,"fer":5411,"teen":5412,"wanted":5413,"sons":5414,"py":5415,"ki":5416,"egg":5417,"sofa":5418,"sav":5419,"milk":5420,"developed":5421,"appeared":5422,"execu":5423,"aesthetic":5424,"camp":5425,"branch":5426,"scenes":5427,"fitness":5428,"zi":5429,"bath":5430,"real":5431,"stain":5432,"pets":5433,"paci":5434,"entu":5435,"department":5436,"tland":5437,"broo":5438,"sent":5439,"aven":5440,"mo":5441,"weather":5442,"aband":5443,"bottom":5444,"wolf":5445,"ah":5446,"formal":5447,"feature":5448,"ski":5449,"chet":5450,"illa":5451,"tition":5452,"big":5453,"inting":5454,"ti":5455,"eth":5456,"goal":5457,"denti":5458,"fourth":5459,"artists":5460,"contr":5461,"acrylic":5462,"bac":5463,"tic":5464,"capital":5465,"ri":5466,"letter":5467,"bic":5468,"kne":5469,"needs":5470,"signific":5471,"blood":5472,"commit":5473,"roi":5474,"ann
i":5475,"thouse":5476,"ency":5477,"dential":5478,"2005":5479,"korean":5480,"blog":5481,"mus":5482,"apparel":5483,"variety":5484,"arms":5485,"bing":5486,"manga":5487,"chief":5488,"true":5489,"ong":5490,"border":5491,"pizz":5492,"sits":5493,"lie":5494,"ess":5495,"upper":5496,"eval":5497,"vet":5498,"thursday":5499,"anatomy":5500,"press":5501,"enough":5502,"sit":5503,"ize":5504,"bis":5505,"sett":5506,"hes":5507,"singap":5508,"vegan":5509,"cloud":5510,"credit":5511,"society":5512,"fy":5513,"bru":5514,"transparent":5515,"depic":5516,"historical":5517,"asia":5518,"bow":5519,"guard":5520,"singer":5521,"lier":5522,"guar":5523,"eps":5524,"everything":5525,"embroi":5526,"north":5527,"bell":5528,"items":5529,"printed":5530,"boats":5531,"wedne":5532,"ctu":5533,"acked":5534,"lit":5535,"bey":5536,"iler":5537,"squad":5538,"himself":5539,"contest":5540,"kit":5541,"spain":5542,"seat":5543,"stel":5544,"invest":5545,"backpack":5546,"provided":5547,"less":5548,"ranked":5549,"die":5550,"post":5551,"eggs":5552,"zer":5553,"irus":5554,"sydney":5555,"pa":5556,"mr":5557,"fairy":5558,"names":5559,"2nd":5560,"scale":5561,"medieval":5562,"celebrity":5563,"south":5564,"coll":5565,"ller":5566,"herit":5567,"davi":5568,"wednesday":5569,"mind":5570,"harb":5571,"safety":5572,"ctly":5573,"thre":5574,"nat":5575,"doll":5576,"nas":5577,"conver":5578,"scra":5579,"ho":5580,"ladies":5581,"ece":5582,"tues":5583,"monday":5584,"singapore":5585,"coronav":5586,"vinyl":5587,"sty":5588,"successful":5589,"larger":5590,"denim":5591,"ups":5592,"stun":5593,"eves":5594,"might":5595,"covers":5596,"bang":5597,"dn":5598,"dan":5599,"coronavirus":5600,"fiel":5601,"fail":5602,"chic":5603,"bill":5604,"tuesday":5605,"label":5606,"announced":5607,"source":5608,"dies":5609,"returned":5610,"bracele":5611,"elephant":5612,"flooring":5613,"bon":5614,"basket":5615,"turned":5616,"ditions":5617,"aland":5618,"rough":5619,"crochet":5620,"completed":5621,"brus":5622,"mount":5623,"pacific":5624,"native":5625,"cost":5626,"shadow":5627,"merma
id":5628,"kong":5629,"dden":5630,"kit":5631,"ateg":5632,"dation":5633,"gym":5634,"cape":5635,"nis":5636,"previous":5637,"witch":5638,"railway":5639,"stairs":5640,"pir":5641,"drinks":5642,"gradi":5643,"sketches":5644,"ight":5645,"ald":5646,"provin":5647,"harry":5648,"pand":5649,"fal":5650,"mal":5651,"resc":5652,"power":5653,"45":5654,"kid":5655,"fted":5656,"bet":5657,"forward":5658,"aerial":5659,"rear":5660,"volu":5661,"troo":5662,"secur":5663,"vari":5664,"traff":5665,"ot":5666,"note":5667,"kie":5668,"pen":5669,"toron":5670,"sar":5671,"bury":5672,"boston":5673,"sides":5674,"90":5675,"rocks":5676,"versary":5677,"streets":5678,"light":5679,"advertising":5680,"toronto":5681,"teac":5682,"network":5683,"actress":5684,"sery":5685,"material":5686,"dancing":5687,"ques":5688,"nursery":5689,"canadian":5690,"amic":5691,"tural":5692,"practice":5693,"yu":5694,"screen":5695,"liter":5696,"dor":5697,"pear":5698,"philipp":5699,"coast":5700,"decorative":5701,"setting":5702,"heritage":5703,"hurric":5704,"figures":5705,"refu":5706,"liber":5707,"zealand":5708,"radio":5709,"ians":5710,"factory":5711,"medit":5712,"recogn":5713,"bible":5714,"voc":5715,"pizza":5716,"awesome":5717,"animation":5718,"least":5719,"weeks":5720,"vel":5721,"article":5722,"landscaping":5723,"standard":5724,"marble":5725,"sauce":5726,"council":5727,"solid":5728,"cathe":5729,"fair":5730,"syn":5731,"purch":5732,"yl":5733,"odle":5734,"spr":5735,"anniversary":5736,"share":5737,"cheap":5738,"recorded":5739,"dressed":5740,"write":5741,"ounds":5742,"god":5743,"stat":5744,"focus":5745,"bbit":5746,"language":5747,"institu":5748,"provide":5749,"competition":5750,"cities":5751,"lions":5752,"handmade":5753,"yet":5754,"finish":5755,"alu":5756,"ire":5757,"robo":5758,"sac":5759,"destroy":5760,"inj":5761,"victory":5762,"rd":5763,"manager":5764,"solution":5765,"falls":5766,"iv":5767,"relationship":5768,"grand":5769,"distance":5770,"offers":5771,"facts":5772,"exhibition":5773,"pel":5774,"security":5775,"nic":5776,"lamp":5777,"hole":57
78,"dral":5779,"brain":5780,"moving":5781,"bench":5782,"academy":5783,"shore":5784,"environment":5785,"tops":5786,"ible":5787,"either":5788,"fence":5789,"contain":5790,"writer":5791,"wheels":5792,"discu":5793,"entertain":5794,"ential":5795,"cine":5796,"dessert":5797,"abandoned":5798,"2004":5799,"spirit":5800,"fight":5801,"eling":5802,"awar":5803,"cham":5804,"lettering":5805,"hop":5806,"todd":5807,"75":5808,"jam":5809,"stated":5810,"cocktail":5811,"pages":5812,"cabin":5813,"fication":5814,"horiz":5815,"parties":5816,"gent":5817,"condu":5818,"scotland":5819,"oak":5820,"monster":5821,"respe":5822,"kle":5823,"sus":5824,"element":5825,"ang":5826,"command":5827,"fighter":5828,"beginning":5829,"universe":5830,"prom":5831,"your":5832,"letters":5833,"onic":5834,"tools":5835,"weekend":5836,"viol":5837,"officer":5838,"reli":5839,"ingredients":5840,"heav":5841,"traffic":5842,"businessman":5843,"squadron":5844,"larly":5845,"cathedral":5846,"speech":5847,"ado":5848,"jump":5849,"positive":5850,"dino":5851,"damage":5852,"horror":5853,"pin":5854,"cell":5855,"jan":5856,"expen":5857,"wish":5858,"indic":5859,"woods":5860,"iss":5861,"loss":5862,"russia":5863,"current":5864,"prev":5865,"performed":5866,"driving":5867,"brought":5868,"waves":5869,"selling":5870,"gradient":5871,"diffic":5872,"demon":5873,"nine":5874,"eding":5875,"balcon":5876,"potat":5877,"broken":5878,"santa":5879,"data":5880,"smoo":5881,"cher":5882,"originally":5883,"icy":5884,"warri":5885,"40":5886,"republic":5887,"cot":5888,"framed":5889,"britain":5890,"glu":5891,"hil":5892,"population":5893,"arab":5894,"wonder":5895,"kil":5896,"rather":5897,"activity":5898,"appo":5899,"motor":5900,"management":5901,"teacher":5902,"las":5903,"tian":5904,"dia":5905,"huge":5906,"viet":5907,"zes":5908,"tent":5909,"tery":5910,"hour":5911,"worked":5912,"aled":5913,"len":5914,"homemade":5915,"hills":5916,"slide":5917,"solu":5918,"thin":5919,"ament":5920,"haw":5921,"salt":5922,"adop":5923,"inet":5924,"sign":5925,"spread":5926,"line":5927,"jour
nal":5928,"layout":5929,"bloc":5930,"copy":5931,"key":5932,"parents":5933,"enced":5934,"occas":5935,"fleet":5936,"accep":5937,"ultimate":5938,"symbols":5939,"gg":5940,"mond":5941,"francis":5942,"decided":5943,"quick":5944,"complex":5945,"spot":5946,"celebrate":5947,"eagle":5948,"planning":5949,"32":5950,"hau":5951,"backgrounds":5952,"straw":5953,"smile":5954,"angel":5955,"dam":5956,"outline":5957,"organization":5958,"movement":5959,"faces":5960,"delicious":5961,"actions":5962,"clip":5963,"themes":5964,"seating":5965,"expla":5966,"countries":5967,"stren":5968,"estim":5969,"fi":5970,"ste":5971,"mit":5972,"unic":5973,"leg":5974,"amoun":5975,"unk":5976,"mechan":5977,"ona":5978,"soup":5979,"vals":5980,"theater":5981,"choice":5982,"mono":5983,"knight":5984,"ership":5985,"entertainment":5986,"manchester":5987,"alistic":5988,"motiv":5989,"surpr":5990,"menu":5991,"corn":5992,"throw":5993,"solar":5994,"shelf":5995,"lemon":5996,"adult":5997,"30":5998,"neon":5999,"trendy":6000,"ris":6001,"seattle":6002,"streng":6003,"incredi":6004,"treat":6005,"identi":6006,"mph":6007,"earrings":6008,"cise":6009,"tiger":6010,"reported":6011,"hus":6012,"hurricane":6013,"sunglasses":6014,"alone":6015,"specially":6016,"ireland":6017,"link":6018,"lers":6019,"nes":6020,"cric":6021,"spaces":6022,"entire":6023,"quart":6024,"eventu":6025,"loun":6026,"related":6027,"famil":6028,"victorian":6029,"hong":6030,"50":6031,"bo":6032,"geous":6033,"pendant":6034,"francisco":6035,"cave":6036,"xi":6037,"eventually":6038,"association":6039,"serving":6040,"techni":6041,"cha":6042,"mass":6043,"gress":6044,"spect":6045,"rich":6046,"rics":6047,"peace":6048,"goes":6049,"tomat":6050,"decis":6051,"iced":6052,"188":6053,"straight":6054,"chal":6055,"ser":6056,"tennis":6057,"mine":6058,"silk":6059,"stories":6060,"imag":6061,"ix":6062,"analy":6063,"steps":6064,"bubble":6065,"pie":6066,"list":6067,"killed":6068,"pictured":6069,"gen":6070,"temper":6071,"pau":6072,"zoo":6073,"units":6074,"wa":6075,"pix":6076,"claimed":6077,"allo
w":6078,"cabinet":6079,"19th":6080,"enty":6081,"tus":6082,"tube":6083,"mexican":6084,"cruise":6085,"pressure":6086,"teams":6087,"lodge":6088,"god":6089,"candy":6090,"vegetables":6091,"deer":6092,"du":6093,"appearance":6094,"ribb":6095,"mmy":6096,"nike":6097,"fau":6098,"fl":6099,"extra":6100,"smart":6101,"sometimes":6102,"exclu":6103,"lane":6104,"indoor":6105,"vity":6106,"bicy":6107,"moment":6108,"kings":6109,"2000":6110,"tained":6111,"ier":6112,"ara":6113,"presents":6114,"raised":6115,"highest":6116,"iment":6117,"kers":6118,"remained":6119,"reas":6120,"tas":6121,"usually":6122,"followed":6123,"metho":6124,"reached":6125,"cow":6126,"creation":6127,"caused":6128,"minister":6129,"taining":6130,"panel":6131,"rier":6132,"pin":6133,"butter":6134,"wildlife":6135,"rent":6136,"lets":6137,"roc":6138,"ordin":6139,"surrounded":6140,"walks":6141,"cabine":6142,"lined":6143,"celebration":6144,"average":6145,"foods":6146,"guy":6147,"carr":6148,"suite":6149,"offer":6150,"suits":6151,"geometric":6152,"iors":6153,"tournament":6154,"records":6155,"bronze":6156,"ske":6157,"leader":6158,"gives":6159,"swi":6160,"pumpkin":6161,"repe":6162,"bie":6163,"bouquet":6164,"leave":6165,"clar":6166,"ip":6167,"civil":6168,"add":6169,"ny":6170,"alternate":6171,"workers":6172,"established":6173,"ely":6174,"mom":6175,"performs":6176,"state":6177,"creating":6178,"cran":6179,"pig":6180,"istic":6181,"tely":6182,"puppy":6183,"balcony":6184,"berries":6185,"brush":6186,"wire":6187,"colum":6188,"brother":6189,"access":6190,"weap":6191,"affe":6192,"mans":6193,"choose":6194,"2003":6195,"braz":6196,"decorated":6197,"illustrated":6198,"bull":6199,"soldiers":6200,"conserv":6201,"enger":6202,"hoc":6203,"allowed":6204,"communic":6205,"medic":6206,"mentary":6207,"brig":6208,"tted":6209,"sunrise":6210,"womens":6211,"36":6212,"cement":6213,"cherry":6214,"operation":6215,"additional":6216,"lobby":6217,"sewing":6218,"nation":6219,"adap":6220,"5th":6221,"zzle":6222,"regular":6223,"glit":6224,"beat":6225,"sib":6226,"pak":62
27,"shell":6228,"notes":6229,"onto":6230,"meaning":6231,"plates":6232,"cases":6233,"stressed":6234,"psy":6235,"tons":6236,"guns":6237,"recording":6238,"hockey":6239,"avenue":6240,"lock":6241,"spider":6242,"welcome":6243,"fs":6244,"whis":6245,"quarter":6246,"changes":6247,"legend":6248,"already":6249,"renov":6250,"rel":6251,"om":6252,"potter":6253,"fix":6254,"waiting":6255,"lip":6256,"stones":6257,"religious":6258,"goals":6259,"ances":6260,"ridge":6261,"pal":6262,"mural":6263,"pro":6264,"launch":6265,"memorial":6266,"pond":6267,"pul":6268,"gency":6269,"nut":6270,"joined":6271,"cutting":6272,"camping":6273,"mine":6274,"ishes":6275,"prime":6276,"noted":6277,"turkey":6278,"roses":6279,"probab":6280,"argu":6281,"range":6282,"mers":6283,"sands":6284,"ur":6285,"global":6286,"james":6287,"starting":6288,"ori":6289,"pine":6290,"bill":6291,"gorgeous":6292,"bee":6293,"piano":6294,"rer":6295,"ball":6296,"discover":6297,"situ":6298,"humor":6299,"shoo":6300,"lying":6301,"christian":6302,"sian":6303,"chee":6304,"stration":6305,"goth":6306,"jungle":6307,"william":6308,"selection":6309,"contribu":6310,"pregn":6311,"3rd":6312,"fighting":6313,"ker":6314,"lounge":6315,"means":6316,"porch":6317,"youth":6318,"ko":6319,"bourne":6320,"saying":6321,"rights":6322,"rey":6323,"relax":6324,"ingham":6325,"exc":6326,"globe":6327,"maint":6328,"attends":6329,"hope":6330,"gla":6331,"saint":6332,"note":6333,"lic":6334,"beyond":6335,"flash":6336,"button":6337,"motion":6338,"umb":6339,"forced":6340,"journey":6341,"tam":6342,"trade":6343,"cali":6344,"toddler":6345,"ests":6346,"lunch":6347,"extre":6348,"acts":6349,"lovely":6350,"report":6351,"kyo":6352,"polish":6353,"theast":6354,"nearby":6355,"blue":6356,"pit":6357,"irish":6358,"len":6359,"tical":6360,"peak":6361,"portraits":6362,"stunning":6363,"ini":6364,"slow":6365,"posing":6366,"33":6367,"bene":6368,"dated":6369,"pillow":6370,"holidays":6371,"4th":6372,"ouse":6373,"atic":6374,"acu":6375,"bedrooms":6376,"mode":6377,"transport":6378,"scoo":6379,"neede
d":6380,"marke":6381,"themed":6382,"ctural":6383,"ahead":6384,"gowns":6385,"mpic":6386,"earlier":6387,"finger":6388,"wel":6389,"igra":6390,"especially":6391,"lives":6392,"john":6393,"iti":6394,"knee":6395,"leton":6396,"lows":6397,"married":6398,"otic":6399,"evi":6400,"stickers":6401,"pic":6402,"parade":6403,"admini":6404,"grow":6405,"law":6406,"lifestyle":6407,"cartoons":6408,"touch":6409,"nam":6410,"thail":6411,"drinking":6412,"2001":6413,"divi":6414,"like":6415,"egypt":6416,"rolling":6417,"thailand":6418,"whether":6419,"tutorial":6420,"required":6421,"smith":6422,"sister":6423,"ases":6424,"bai":6425,"balloon":6426,"common":6427,"quick":6428,"inspirational":6429,"wich":6430,"floo":6431,"dv":6432,"exercise":6433,"tote":6434,"belt":6435,"sized":6436,"sunny":6437,"groups":6438,"virg":6439,"realistic":6440,"organic":6441,"actually":6442,"tokyo":6443,"tan":6444,"stance":6445,"ceramic":6446,"sleeping":6447,"cris":6448,"stained":6449,"owner":6450,"mn":6451,"ison":6452,"transpor":6453,"operations":6454,"lia":6455,"yon":6456,"dir":6457,"studios":6458,"valentine":6459,"ied":6460,"miss":6461,"red":6462,"usual":6463,"multiple":6464,"ige":6465,"dolls":6466,"proce":6467,"irs":6468,"problems":6469,"script":6470,"apol":6471,"comfor":6472,"philippines":6473,"contra":6474,"sity":6475,"watching":6476,"driver":6477,"mint":6478,"glow":6479,"alized":6480,"packaging":6481,"flav":6482,"six":6483,"teaching":6484,"37":6485,"heat":6486,"dang":6487,"counter":6488,"mig":6489,"shark":6490,"186":6491,"jersey":6492,"lantic":6493,"dary":6494,"closet":6495,"main":6496,"voice":6497,"nearly":6498,"dubai":6499,"changed":6500,"unicorn":6501,"pressed":6502,"burgh":6503,"sales":6504,"bracelet":6505,"rug":6506,"ilities":6507,"mbl":6508,"primary":6509,"aries":6510,"passed":6511,"systems":6512,"crossing":6513,"yourself":6514,"tracks":6515,"greatest":6516,"novel":6517,"crazy":6518,"astro":6519,"soul":6520,"boxes":6521,"iring":6522,"chef":6523,"effects":6524,"bc":6525,"holy":6526,"corps":6527,"recent":6528,"s
tribu":6529,"mascot":6530,"cocon":6531,"ut":6532,"fiction":6533,"child":6534,"invitation":6535,"coura":6536,"subje":6537,"ell":6538,"fields":6539,"leaving":6540,"growth":6541,"uniform":6542,"particip":6543,"secu":6544,"fifth":6545,"rabbit":6546,"parking":6547,"told":6548,"helmet":6549,"talking":6550,"ues":6551,"enge":6552,"itch":6553,"reference":6554,"sher":6555,"problem":6556,"olympic":6557,"cabinets":6558,"critics":6559,"husband":6560,"ribbon":6561,"small":6562,"atlantic":6563,"legs":6564,"videos":6565,"gui":6566,"prior":6567,"snake":6568,"stro":6569,"exi":6570,"owl":6571,"phia":6572,"explore":6573,"significant":6574,"knit":6575,"george":6576,"weddings":6577,"regi":6578,"micha":6579,"carib":6580,"subsequ":6581,"troops":6582,"cricket":6583,"mbia":6584,"reviews":6585,"gian":6586,"ingly":6587,"distribu":6588,"closeup":6589,"seasons":6590,"too":6591,"visit":6592,"istan":6593,"300":6594,"ice":6595,"acted":6596,"caribbean":6597,"fall":6598,"shoot":6599,"monu":6600,"itself":6601,"battal":6602,"princi":6603,"conclu":6604,"alpha":6605,"oid":6606,"inches":6607,"involved":6608,"vertical":6609,"2002":6610,"avengers":6611,"ises":6612,"term":6613,"ran":6614,"feb":6615,"fur":6616,"message":6617,"bouti":6618,"challenge":6619,"poly":6620,"mounted":6621,"interview":6622,"venue":6623,"container":6624,"gil":6625,"probably":6626,"longer":6627,"blon":6628,"calligra":6629,"coastal":6630,"wales":6631,"monkey":6632,"roads":6633,"bin":6634,"shing":6635,"everyone":6636,"stant":6637,"tered":6638,"guests":6639,"marriage":6640,"finally":6641,"interest":6642,"stream":6643,"berg":6644,"yach":6645,"sphere":6646,"meat":6647,"mble":6648,"chapter":6649,"34":6650,"cers":6651,"jackets":6652,"frozen":6653,"value":6654,"saur":6655,"parli":6656,"sion":6657,"susp":6658,"ffed":6659,"produce":6660,"reti":6661,"bit":6662,"barbie":6663,"hearts":6664,"marketing":6665,"dge":6666,"mand":6667,"ppers":6668,"boo":6669,"topped":6670,"distressed":6671,"elev":6672,"financi":6673,"foundation":6674,"noon":6675,"scored":
6676,"spent":6677,"diet":6678,"preci":6679,"zone":6680,"overall":6681,"coconut":6682,"viet":6683,"basic":6684,"sleeves":6685,"amp":6686,"pleas":6687,"balls":6688,"expected":6689,"55":6690,"replaced":6691,"houston":6692,"campus":6693,"reach":6694,"adding":6695,"fact":6696,"leve":6697,"gers":6698,"schools":6699,"mar":6700,"catho":6701,"budd":6702,"pocket":6703,"quarters":6704,"trend":6705,"greece":6706,"honey":6707,"pearl":6708,"scope":6709,"adel":6710,"critic":6711,"minute":6712,"occur":6713,"excell":6714,"trailer":6715,"rate":6716,"trying":6717,"jar":6718,"sha":6719,"expensive":6720,"stly":6721,"worn":6722,"toil":6723,"wind":6724,"ffs":6725,"clipart":6726,"benef":6727,"neg":6728,"carrying":6729,"materials":6730,"scienti":6731,"ctic":6732,"resour":6733,"kh":6734,"internet":6735,"canal":6736,"grow":6737,"examples":6738,"delivery":6739,"nothing":6740,"cookie":6741,"deal":6742,"rese":6743,"boutique":6744,"numer":6745,"bone":6746,"easily":6747,"lessons":6748,"mush":6749,"nomin":6750,"bottles":6751,"someone":6752,"initially":6753,"employe":6754,"helic":6755,"pick":6756,"come":6757,"vest":6758,"approxim":6759,"catholic":6760,"gather":6761,"desserts":6762,"dery":6763,"tech":6764,"believed":6765,"wrest":6766,"titled":6767,"jor":6768,"performing":6769,"doodle":6770,"amount":6771,"ony":6772,"getty":6773,"minimalist":6774,"cell":6775,"bicycle":6776,"blank":6777,"socks":6778,"tool":6779,"muscle":6780,"20th":6781,"newsp":6782,"scott":6783,"beard":6784,"wonderful":6785,"heroes":6786,"alphabet":6787,"quickly":6788,"baking":6789,"ician":6790,"ioned":6791,"48":6792,"dinosaur":6793,"immedi":6794,"fying":6795,"sel":6796,"hote":6797,"generally":6798,"twin":6799,"smart":6800,"pilot":6801,"selves":6802,"marks":6803,"jac":6804,"engineering":6805,"author":6806,"bars":6807,"impact":6808,"dvd":6809,"iser":6810,"birth":6811,"curtain":6812,"visual":6813,"robot":6814,"whose":6815,"af":6816,"interiors":6817,"amy":6818,"lawn":6819,"habit":6820,"anced":6821,"boards":6822,"pan":6823,"sad":6824,"ae":
6825,"facade":6826,"blanket":6827,"plot":6828,"individual":6829,"antly":6830,"liqu":6831,"directed":6832,"helicop":6833,"treatment":6834,"dest":6835,"folk":6836,"bands":6837,"acting":6838,"buff":6839,"stair":6840,"lev":6841,"laugh":6842,"skyline":6843,"protection":6844,"dates":6845,"fat":6846,"reserve":6847,"opport":6848,"terrace":6849,"deco":6850,"vase":6851,"evil":6852,"electron":6853,"interesting":6854,"thin":6855,"height":6856,"stainless":6857,"volun":6858,"arena":6859,"supplies":6860,"combat":6861,"boy":6862,"blonde":6863,"frequ":6864,"olym":6865,"shine":6866,"embroidery":6867,"carry":6868,"winds":6869,"ware":6870,"calligraphy":6871,"ctively":6872,"territ":6873,"alco":6874,"layer":6875,"grun":6876,"hamp":6877,"critical":6878,"iles":6879,"requ":6880,"desk":6881,"pig":6882,"landing":6883,"melbourne":6884,"mill":6885,"installation":6886,"maps":6887,"contains":6888,"umbre":6889,"chel":6890,"ok":6891,"increase":6892,"lain":6893,"vegas":6894,"captured":6895,"colours":6896,"floating":6897,"banana":6898,"character":6899,"42":6900,"mouse":6901,"there":6902,"celebrities":6903,"smoke":6904,"excep":6905,"lowing":6906,"antine":6907,"infan":6908,"dale":6909,"bra":6910,"cently":6911,"atl":6912,"executive":6913,"likely":6914,"hip":6915,"fitted":6916,"appear":6917,"ease":6918,"thick":6919,"investig":6920,"david":6921,"opportun":6922,"childhood":6923,"hidden":6924,"ace":6925,"engra":6926,"audience":6927,"recently":6928,"kra":6929,"isle":6930,"waters":6931,"companies":6932,"desktop":6933,"spin":6934,"hotels":6935,"certain":6936,"applic":6937,"pastel":6938,"tale":6939,"trou":6940,"tary":6941,"issues":6942,"shorts":6943,"flags":6944,"ident":6945,"nap":6946,"safe":6947,"asked":6948,"vietnam":6949,"lyrics":6950,"sites":6951,"sheep":6952,"beat":6953,"om":6954,"abe":6955,"ple":6956,"cycling":6957,"philadel":6958,"honda":6959,"bull":6960,"proposed":6961,"sare":6962,"higher":6963,"notebook":6964,"cleaning":6965,"smaller":6966,"nia":6967,"province":6968,"math":6969,"saur":6970,"flies":697
1,"maxim":6972,"committee":6973,"arrange":6974,"unknown":6975,"platform":6976,"appears":6977,"philadelphia":6978,"oh":6979,"adul":6980,"zu":6981,"shipping":6982,"commander":6983,"response":6984,"officers":6985,"boun":6986,"fting":6987,"heads":6988,"dic":6989,"38":6990,"izing":6991,"app":6992,"garden":6993,"pher":6994,"blouse":6995,"poe":6996,"ador":6997,"dad":6998,"house":6999,"direction":7000,"eper":7001,"bloss":7002,"louis":7003,"rub":7004,"boar":7005,"inse":7006,"soviet":7007,"code":7008,"thus":7009,"liver":7010,"combination":7011,"drama":7012,"coal":7013,"suites":7014,"beast":7015,"channel":7016,"naval":7017,"anti":7018,"lift":7019,"mean":7020,"korea":7021,"increased":7022,"legal":7023,"ornam":7024,"aling":7025,"miami":7026,"strawberry":7027,"battle":7028,"preschool":7029,"ab":7030,"michael":7031,"becoming":7032,"tell":7033,"kiss":7034,"tion":7035,"join":7036,"iii":7037,"cable":7038,"egyp":7039,"poten":7040,"particularly":7041,"therap":7042,"atives":7043,"guys":7044,"typo":7045,"ddy":7046,"justice":7047,"expan":7048,"express":7049,"bikes":7050,"ell":7051,"angu":7052,"legen":7053,"afternoon":7054,"financial":7055,"eth":7056,"65":7057,"residential":7058,"tin":7059,"regar":7060,"initial":7061,"perform":7062,"ius":7063,"pag":7064,"dar":7065,"thron":7066,"score":7067,"loft":7068,"sonic":7069,"oured":7070,"pitch":7071,"election":7072,"objects":7073,"ji":7074,"removed":7075,"pean":7076,"separate":7077,"basement":7078,"sail":7079,"rapi":7080,"auto":7081,"47":7082,"trophy":7083,"believe":7084,"oct":7085,"glitter":7086,"enter":7087,"sburg":7088,"isra":7089,"aming":7090,"lots":7091,"hunting":7092,"skills":7093,"rous":7094,"outdoors":7095,"187":7096,"older":7097,"presented":7098,"nie":7099,"ales":7100,"introduced":7101,"own":7102,"hiking":7103,"climbing":7104,"bam":7105,"thinking":7106,"dollar":7107,"iness":7108,"rescue":7109,"detailed":7110,"steam":7111,"wide":7112,"punk":7113,"dishes":7114,"41":7115,"ranch":7116,"brigade":7117,"percent":7118,"powerful":7119,"bomb":7120,"v
isible":7121,"pap":7122,"liz":7123,"toyo":7124,"worker":7125,"gothic":7126,"tub":7127,"jeep":7128,"botan":7129,"corporate":7130,"jewell":7131,"mical":7132,"remains":7133,"categ":7134,"bm":7135,"profile":7136,"asing":7137,"continue":7138,"issance":7139,"photographic":7140,"39":7141,"decision":7142,"nings":7143,"burn":7144,"phones":7145,"dutch":7146,"screenshot":7147,"tta":7148,"wise":7149,"iconic":7150,"couch":7151,"stitch":7152,"loved":7153,"swit":7154,"questions":7155,"tap":7156,"worlds":7157,"mam":7158,"berlin":7159,"statement":7160,"helped":7161,"malay":7162,"constitu":7163,"side":7164,"cupcakes":7165,"ans":7166,"arrived":7167,"tourist":7168,"lee":7169,"federal":7170,"dition":7171,"difficult":7172,"shots":7173,"vegetable":7174,"scottish":7175,"biggest":7176,"liverpool":7177,"benefits":7178,"attention":7179,"clay":7180,"aga":7181,"battalion":7182,"flow":7183,"schedu":7184,"turtle":7185,"fruits":7186,"neighbor":7187,"150":7188,"allen":7189,"necess":7190,"canyon":7191,"greater":7192,"metres":7193,"frog":7194,"99":7195,"provides":7196,"toilet":7197,"toyota":7198,"aple":7199,"keys":7200,"direct":7201,"coral":7202,"figur":7203,"physical":7204,"copper":7205,"poor":7206,"raw":7207,"minecraft":7208,"proof":7209,"6th":7210,"stuffed":7211,"sink":7212,"brigh":7213,"championships":7214,"reasons":7215,"hit":7216,"mass":7217,"velvet":7218,"bunny":7219,"coin":7220,"shelves":7221,"60":7222,"does":7223,"gos":7224,"night":7225,"maker":7226,"doesn":7227,"cozy":7228,"nov":7229,"duck":7230,"evidence":7231,"fen":7232,"striped":7233,"pengu":7234,"ault":7235,"fa":7236,"1999":7237,"ordered":7238,"opera":7239,"associated":7240,"henry":7241,"day":7242,"ttes":7243,"volunte":7244,"chess":7245,"imper":7246,"eter":7247,"infantry":7248,"brun":7249,"wrap":7250,"dian":7251,"ks":7252,"shapes":7253,"sque":7254,"ander":7255,"alternative":7256,"corre":7257,"sche":7258,"ghost":7259,"architectural":7260,"attempt":7261,"conditions":7262,"lips":7263,"sizes":7264,"modi":7265,"deb":7266,"follow":7267,"debut
":7268,"previously":7269,"viewed":7270,"ere":7271,"was":7272,"surrounding":7273,"arrow":7274,"leo":7275,"pick":7276,"ada":7277,"freedom":7278,"attend":7279,"pard":7280,"rail":7281,"spiral":7282,"nest":7283,"infin":7284,"declar":7285,"egyptian":7286,"lightly":7287,"sleep":7288,"brass":7289,"mps":7290,"aks":7291,"escape":7292,"sk":7293,"rat":7294,"runway":7295,"planned":7296,"shak":7297,"calend":7298,"quilt":7299,"shield":7300,"accom":7301,"supre":7302,"levels":7303,"ura":7304,"approximately":7305,"formers":7306,"families":7307,"evo":7308,"purpose":7309,"wears":7310,"medal":7311,"enix":7312,"yards":7313,"adults":7314,"them":7315,"grunge":7316,"bob":7317,"soldier":7318,"jackson":7319,"alex":7320,"dimen":7321,"rid":7322,"tours":7323,"rug":7324,"impres":7325,"49":7326,"patient":7327,"portu":7328,"talk":7329,"facing":7330,"residents":7331,"skin":7332,"ole":7333,"foli":7334,"champion":7335,"institute":7336,"carried":7337,"simply":7338,"font":7339,"delhi":7340,"results":7341,"aquar":7342,"mumb":7343,"smartphone":7344,"conceptual":7345,"inging":7346,"400":7347,"champions":7348,"thousands":7349,"cycle":7350,"composition":7351,"mansion":7352,"juice":7353,"vent":7354,"gluten":7355,"veter":7356,"chron":7357,"44":7358,"washington":7359,"dec":7360,"marked":7361,"inci":7362,"ci":7363,"burger":7364,"vit":7365,"agon":7366,"vectors":7367,"matter":7368,"jewellery":7369,"rig":7370,"occu":7371,"robert":7372,"sense":7373,"dete":7374,"themselves":7375,"seas":7376,"log":7377,"floors":7378,"95":7379,"rical":7380,"hear":7381,"content":7382,"bound":7383,"dressing":7384,"rules":7385,"plush":7386,"chrome":7387,"con":7388,"certi":7389,"tier":7390,"brothers":7391,"cinema":7392,"alo":7393,"essential":7394,"cad":7395,"52":7396,"fancy":7397,"generation":7398,"stoc":7399,"winner":7400,"shade":7401,"damaged":7402,"28":7403,"sole":7404,"lil":7405,"constructed":7406,"supply":7407,"contract":7408,"cancer":7409,"inations":7410,"agen":7411,"aka":7412,"ricul":7413,"mumbai":7414,"alcoho":7415,"puzzle":7416,"m
emories":7417,"program":7418,"emergency":7419,"clear":7420,"columbia":7421,"jet":7422,"treas":7423,"parks":7424,"collage":7425,"ctures":7426,"loaded":7427,"collabor":7428,"ledge":7429,"stery":7430,"cure":7431,"gal":7432,"aqu":7433,"finding":7434,"puppies":7435,"mostly":7436,"candle":7437,"soun":7438,"workshop":7439,"rot":7440,"parliament":7441,"didn":7442,"graff":7443,"ctional":7444,"celebrates":7445,"43":7446,"core":7447,"calendar":7448,"tray":7449,"cultural":7450,"plain":7451,"anda":7452,"whole":7453,"vine":7454,"pay":7455,"graduation":7456,"ford":7457,"motorcycles":7458,"ij":7459,"bake":7460,"mont":7461,"sword":7462,"amed":7463,"highly":7464,"fuel":7465,"defense":7466,"independent":7467,"sailing":7468,"bi":7469,"jaz":7470,"wait":7471,"olive":7472,"ino":7473,"along":7474,"tac":7475,"enjoying":7476,"town":7477,"erran":7478,"mad":7479,"acking":7480,"beige":7481,"ski":7482,"max":7483,"render":7484,"premier":7485,"chor":7486,"ali":7487,"chest":7488,"battery":7489,"46":7490,"alongside":7491,"furn":7492,"umbrella":7493,"ander":7494,"officials":7495,"airplane":7496,"nau":7497,"charge":7498,"graffiti":7499,"description":7500,"1998":7501,"7th":7502,"unusual":7503,"presentation":7504,"excellent":7505,"les":7506,"agricul":7507,"wore":7508,"zel":7509,"distin":7510,"eye":7511,"cook":7512,"patri":7513,"cedes":7514,"replac":7515,"hog":7516,"beef":7517,"dall":7518,"sir":7519,"click":7520,"strength":7521,"dome":7522,"incredible":7523,"lem":7524,"sent":7525,"51":7526,"rison":7527,"porsche":7528,"lab":7529,"appointed":7530,"brazil":7531,"nation":7532,"circa":7533,"vention":7534,"launched":7535,"democr":7536,"selected":7537,"failed":7538,"rack":7539,"fell":7540,"author":7541,"austri":7542,"sad":7543,"typical":7544,"begins":7545,"typography":7546,"lac":7547,"rows":7548,"dedic":7549,"islam":7550,"clin":7551,"soil":7552,"wic":7553,"extended":7554,"question":7555,"influence":7556,"nether":7557,"discovered":7558,"joint":7559,"fc":7560,"helicopter":7561,"wast":7562,"trust":7563,"gur":7564,
"carni":7565,"walk":7566,"outs":7567,"portion":7568,"hat":7569,"sell":7570,"istan":7571,"brands":7572,"dot":7573,"ida":7574,"mercedes":7575,"armor":7576,"mediterran":7577,"seeds":7578,"vey":7579,"ideal":7580,"pole":7581,"ylor":7582,"pine":7583,"studies":7584,"scrip":7585,"alien":7586,"gul":7587,"amazon":7588,"completely":7589,"pc":7590,"offered":7591,"aug":7592,"motional":7593,"book":7594,"80":7595,"singing":7596,"currently":7597,"angry":7598,"gaming":7599,"chapel":7600,"cky":7601,"stal":7602,"rect":7603,"mediterranean":7604,"remove":7605,"underwater":7606,"wheel":7607,"ud":7608,"sex":7609,"spoon":7610,"ae":7611,"scription":7612,"split":7613,"long":7614,"quite":7615,"hall":7616,"millions":7617,"54":7618,"passing":7619,"yacht":7620,"tag":7621,"scarf":7622,"fountain":7623,"fresh":7624,"function":7625,"depression":7626,"atmo":7627,"marsh":7628,"eo":7629,"mitted":7630,"baked":7631,"tone":7632,"governor":7633,"demonstr":7634,"billboard":7635,"observ":7636,"imperial":7637,"reason":7638,"remaining":7639,"kill":7640,"ballet":7641,"visitors":7642,"destroyed":7643,"64":7644,"pris":7645,"dep":7646,"dome":7647,"laundry":7648,"toward":7649,"headquarters":7650,"acci":7651,"cos":7652,"carbon":7653,"estimated":7654,"pur":7655,"thai":7656,"exclusive":7657,"ps":7658,"gary":7659,"balloons":7660,"bell":7661,"users":7662,"rule":7663,"futur":7664,"brow":7665,"triangle":7666,"newly":7667,"fanta":7668,"innov":7669,"weal":7670,"shepher":7671,"burning":7672,"formu":7673,"taste":7674,"ens":7675,"mean":7676,"sample":7677,"unis":7678,"rely":7679,"11":7680,"hammer":7681,"seeing":7682,"supreme":7683,"bears":7684,"dreams":7685,"conven":7686,"matching":7687,"installed":7688,"rural":7689,"panels":7690,"lag":7691,"revealed":7692,"matt":7693,"techniques":7694,"califor":7695,"lanter":7696,"inc":7697,"approach":7698,"lack":7699,"rated":7700,"saf":7701,"attractive":7702,"falling":7703,"quali":7704,"venice":7705,"junior":7706,"oxford":7707,"breed":7708,"producer":7709,"rh":7710,"powder":7711,"lived":7712,
"ferred":7713,"superhero":7714,"ancy":7715,"feeling":7716,"dem":7717,"band":7718,"comedy":7719,"scent":7720,"cush":7721,"dallas":7722,"repair":7723,"mall":7724,"horizon":7725,"compared":7726,"fill":7727,"printing":7728,"ology":7729,"bey":7730,"unisex":7731,"icians":7732,"cry":7733,"yers":7734,"paralle":7735,"tics":7736,"hats":7737,"directly":7738,"defeated":7739,"branches":7740,"intended":7741,"courty":7742,"mile":7743,"cow":7744,"pure":7745,"weapons":7746,"llig":7747,"1920":7748,"bass":7749,"ug":7750,"protect":7751,"kun":7752,"know":7753,"vancou":7754,"yment":7755,"dson":7756,"1997":7757,"ache":7758,"vancouver":7759,"carved":7760,"bold":7761,"breaking":7762,"possib":7763,"wreath":7764,"martin":7765,"wan":7766,"counter":7767,"zing":7768,"tti":7769,"slice":7770,"praised":7771,"residence":7772,"glam":7773,"comfort":7774,"calling":7775,"atively":7776,"changing":7777,"metro":7778,"nasa":7779,"pyra":7780,"sept":7781,"comfortable":7782,"owned":7783,"sung":7784,"pasta":7785,"diac":7786,"warrior":7787,"surg":7788,"curtains":7789,"stret":7790,"volume":7791,"laid":7792,"phys":7793,"wick":7794,"transportation":7795,"bmw":7796,"foreign":7797,"pse":7798,"eral":7799,"bought":7800,"formula":7801,"pain":7802,"buffalo":7803,"phoenix":7804,"vesse":7805,"terms":7806,"assemb":7807,"rugby":7808,"wardro":7809,"56":7810,"18th":7811,"netherlands":7812,"parad":7813,"regiment":7814,"covering":7815,"pull":7816,"trains":7817,"cannot":7818,"disease":7819,"sor":7820,"options":7821,"01":7822,"universal":7823,"combined":7824,"chance":7825,"episodes":7826,"wy":7827,"jazz":7828,"newspaper":7829,"eing":7830,"lotus":7831,"honor":7832,"whom":7833,"sterling":7834,"positions":7835,"charles":7836,"bamboo":7837,"jumping":7838,"plant":7839,"encoura":7840,"uries":7841,"beha":7842,"cool":7843,"latter":7844,"revo":7845,"boho":7846,"metal":7847,"oz":7848,"joker":7849,"patro":7850,"mosa":7851,"oon":7852,"grill":7853,"inner":7854,"potato":7855,"dur":7856,"done":7857,"taylor":7858,"workout":7859,"flic":7860,"medic
ine":7861,"ninten":7862,"classi":7863,"numerous":7864,"essay":7865,"shades":7866,"rists":7867,"johnson":7868,"thomas":7869,"mosque":7870,"nintendo":7871,"nose":7872,"recomm":7873,"iller":7874,"indone":7875,"particular":7876,"spective":7877,"handle":7878,"1996":7879,"target":7880,"use":7881,"oven":7882,"collar":7883,"whale":7884,"ading":7885,"cav":7886,"illumin":7887,"propos":7888,"athers":7889,"remember":7890,"breeds":7891,"rus":7892,"economic":7893,"connected":7894,"kilo":7895,"reads":7896,"meme":7897,"harbor":7898,"candi":7899,"citiz":7900,"status":7901,"keeping":7902,"icing":7903,"suitable":7904,"logos":7905,"maple":7906,"rocky":7907,"aked":7908,"cross":7909,"ike":7910,"nity":7911,"han":7912,"check":7913,"guil":7914,"68":7915,"pre":7916,"lum":7917,"rolls":7918,"swing":7919,"rement":7920,"slightly":7921,"gie":7922,"scand":7923,"minor":7924,"lars":7925,"mytho":7926,"fried":7927,"gone":7928,"nurse":7929,"chi":7930,"nations":7931,"iny":7932,"kis":7933,"attached":7934,"signature":7935,"1995":7936,"confir":7937,"paul":7938,"monit":7939,"kni":7940,"balance":7941,"intellig":7942,"pakistan":7943,"lincol":7944,"wholesale":7945,"summit":7946,"wardrobe":7947,"fork":7948,"ella":7949,"gon":7950,"gram":7951,"mainta":7952,"cen":7953,"awarded":7954,"ssions":7955,"animated":7956,"albums":7957,"kept":7958,"duke":7959,"muslim":7960,"fantastic":7961,"buck":7962,"turquo":7963,"turquoise":7964,"supported":7965,"ass":7966,"tried":7967,"restaurants":7968,"leopard":7969,"river":7970,"independence":7971,"immediately":7972,"anything":7973,"challeng":7974,"analysis":7975,"aki":7976,"airy":7977,"deliver":7978,"ppy":7979,"barcel":7980,"sci":7981,"begin":7982,"rubber":7983,"roof":7984,"ficial":7985,"powered":7986,"commission":7987,"hl":7988,"lab":7989,"whit":7990,"hap":7991,"posite":7992,"subject":7993,"ttery":7994,"kins":7995,"bol":7996,"aquarium":7997,"assembly":7998,"cs":7999,"atlanta":8000,"arctic":8001,"brook":8002,"grove":8003,"process":8004,"format":8005,"destination":8006,"bel":8007,"en
tered":8008,"9th":8009,"exotic":8010,"potential":8011,"tip":8012,"shooting":8013,"meals":8014,"soph":8015,"seven":8016,"rope":8017,"orial":8018,"ird":8019,"blocks":8020,"depen":8021,"lime":8022,"capture":8023,"normal":8024,"cd":8025,"behavi":8026,"waterfall":8027,"hedge":8028,"climate":8029,"chevro":8030,"stered":8031,"surf":8032,"contact":8033,"coins":8034,"california":8035,"arrang":8036,"theory":8037,"organis":8038,"nba":8039,"kay":8040,"sims":8041,"missing":8042,"revolution":8043,"document":8044,"shops":8045,"wash":8046,"armed":8047,"google":8048,"dead":8049,"gaz":8050,"ale":8051,"crop":8052,"leaders":8053,"ably":8054,"moo":8055,"diego":8056,"stein":8057,"monument":8058,"godde":8059,"civil":8060,"specific":8061,"wool":8062,"rep":8063,"techn":8064,"courtyard":8065,"catch":8066,"islamic":8067,"linear":8068,"advant":8069,"yar":8070,"vamp":8071,"ffin":8072,"faith":8073,"advis":8074,"ssive":8075,"attended":8076,"teeth":8077,"serious":8078,"towel":8079,"tar":8080,"continues":8081,"bee":8082,"memory":8083,"photographs":8084,"asted":8085,"object":8086,"exchange":8087,"brides":8088,"pier":8089,"barcelona":8090,"integr":8091,"caught":8092,"53":8093,"cuts":8094,"nutr":8095,"shared":8096,"leads":8097,"narrow":8098,"backdrop":8099,"ornament":8100,"chevrolet":8101,"newborn":8102,"amel":8103,"classical":8104,"motive":8105,"aviation":8106,"bones":8107,"seed":8108,"tunnel":8109,"insu":8110,"becomes":8111,"story":8112,"olympics":8113,"lip":8114,"ene":8115,"infe":8116,"category":8117,"iner":8118,"fear":8119,"1994":8120,"thumb":8121,"horizontal":8122,"resources":8123,"bat":8124,"agreed":8125,"serves":8126,"stored":8127,"turns":8128,"faux":8129,"looked":8130,"hang":8131,"ingu":8132,"opy":8133,"composed":8134,"ontar":8135,"beatles":8136,"anch":8137,"ren":8138,"world":8139,"tails":8140,"determin":8141,"journ":8142,"branding":8143,"zodiac":8144,"futuristic":8145,"fortun":8146,"tribal":8147,"issued":8148,"nfl":8149,"structures":8150,"miniature":8151,"plaza":8152,"vin":8153,"retri":8154,"
regional":8155,"emper":8156,"smoking":8157,"springs":8158,"cig":8159,"overhead":8160,"rist":8161,"groo":8162,"shortly":8163,"ican":8164,"simul":8165,"mainten":8166,"risk":8167,"shutter":8168,"ja":8169,"lations":8170,"tre":8171,"ontario":8172,"matches":8173,"corn":8174,"maintenance":8175,"effor":8176,"bathrooms":8177,"assigned":8178,"ots":8179,"pipe":8180,"gardening":8181,"difference":8182,"advanced":8183,"idas":8184,"eco":8185,"legis":8186,"identity":8187,"sage":8188,"ria":8189,"allows":8190,"chevy":8191,"hedgehog":8192,"aro":8193,"thanks":8194,"com":8195,"viewers":8196,"owners":8197,"footage":8198,"deg":8199,"therapy":8200,"clean":8201,"luxe":8202,"odles":8203,"eigh":8204,"tourism":8205,"effort":8206,"ffy":8207,"avoid":8208,"satin":8209,"relief":8210,"staircase":8211,"ists":8212,"1960s":8213,"1970s":8214,"tourists":8215,"lap":8216,"singles":8217,"competit":8218,"dodge":8219,"troit":8220,"boot":8221,"prevent":8222,"latin":8223,"skeleton":8224,"sri":8225,"casino":8226,"detroit":8227,"multi":8228,"rena":8229,"tax":8230,"retail":8231,"ola":8232,"stores":8233,"cowboy":8234,"curly":8235,"fish":8236,"mail":8237,"congress":8238,"dark":8239,"organized":8240,"twenty":8241,"agency":8242,"cupcake":8243,"rough":8244,"arrives":8245,"ruins":8246,"decal":8247,"improve":8248,"non":8249,"paid":8250,"softw":8251,"adidas":8252,"gain":8253,"atar":8254,"drum":8255,"zero":8256,"facility":8257,"cargo":8258,"peter":8259,"kat":8260,"renaissance":8261,"minimal":8262,"ehouse":8263,"capac":8264,"frames":8265,"alt":8266,"duty":8267,"mosaic":8268,"ita":8269,"roots":8270,"airman":8271,"offen":8272,"ffle":8273,"shes":8274,"bunk":8275,"housing":8276,"lyn":8277,"wet":8278,"blazer":8279,"pet":8280,"meadow":8281,"calm":8282,"suggested":8283,"70":8284,"buted":8285,"passes":8286,"converse":8287,"knitting":8288,"sandwich":8289,"metallic":8290,"57":8291,"software":8292,"58":8293,"thanks":8294,"eff":8295,"ethnic":8296,"manufact":8297,"thanksgiving":8298,"flam":8299,"loves":8300,"sacred":8301,"religion":830
2,"ikea":8303,"lovers":8304,"600":8305,"showed":8306,"blues":8307,"string":8308,"rio":8309,"scoring":8310,"tai":8311,"beij":8312,"blin":8313,"negative":8314,"del":8315,"fictional":8316,"acle":8317,"arrivals":8318,"teddy":8319,"beijing":8320,"dolph":8321,"northwest":8322,"nap":8323,"walt":8324,"tradition":8325,"grant":8326,"chandeli":8327,"tors":8328,"hai":8329,"una":8330,"customer":8331,"mac":8332,"angels":8333,"massive":8334,"warning":8335,"elementary":8336,"joy":8337,"magical":8338,"igan":8339,"borough":8340,"harbour":8341,"dating":8342,"volcan":8343,"roy":8344,"kel":8345,"masks":8346,"leans":8347,"colonial":8348,"tooth":8349,"1992":8350,"gloves":8351,"diamon":8352,"degree":8353,"textured":8354,"transformers":8355,"onia":8356,"virgin":8357,"smooth":8358,"buying":8359,"secretary":8360,"restor":8361,"norway":8362,"syl":8363,"paradise":8364,"sap":8365,"croat":8366,"bollywood":8367,"solo":8368,"footwear":8369,"finals":8370,"contrast":8371,"waii":8372,"cheese":8373,"lincoln":8374,"item":8375,"prayer":8376,"road":8377,"worldwide":8378,"kir":8379,"yland":8380,"iger":8381,"angular":8382,"fis":8383,"adows":8384,"hn":8385,"ask":8386,"wis":8387,"tumbl":8388,"neighborhood":8389,"face":8390,"tomato":8391,"kitchens":8392,"visited":8393,"fing":8394,"ym":8395,"ene":8396,"shutterstock":8397,"ankle":8398,"forever":8399,"photographed":8400,"cambridge":8401,"israel":8402,"rising":8403,"pub":8404,"holder":8405,"ration":8406,"aid":8407,"boss":8408,"introduction":8409,"ero":8410,"66":8411,"rhy":8412,"snacks":8413,"deter":8414,"1990":8415,"shepherd":8416,"writers":8417,"sex":8418,"jump":8419,"organiz":8420,"convention":8421,"orch":8422,"bedding":8423,"gods":8424,"effective":8425,"eness":8426,"magne":8427,"bodies":8428,"environmental":8429,"rocket":8430,"vements":8431,"cheer":8432,"pairs":8433,"ora":8434,"gia":8435,"stations":8436,"lakes":8437,"turning":8438,"outer":8439,"smooth":8440,"meter":8441,"weak":8442,"ear":8443,"snack":8444,"busy":8445,"chelsea":8446,"vo":8447,"cart":8448,"visiti
ng":8449,"milan":8450,"trek":8451,"cloth":8452,"mist":8453,"zelda":8454,"linen":8455,"protein":8456,"botanical":8457,"tup":8458,"hits":8459,"majority":8460,"cups":8461,"spots":8462,"dying":8463,"peninsu":8464,"lighthouse":8465,"pirate":8466,"cliff":8467,"liquid":8468,"un":8469,"attan":8470,"carnival":8471,"wai":8472,"weird":8473,"ink":8474,"purchase":8475,"decorate":8476,"chemical":8477,"stack":8478,"beaches":8479,"vampire":8480,"wrapped":8481,"opportunity":8482,"confi":8483,"richard":8484,"waste":8485,"emperor":8486,"gment":8487,"handsome":8488,"texts":8489,"extension":8490,"saree":8491,"sound":8492,"knowledge":8493,"24":8494,"braids":8495,"85":8496,"carol":8497,"alism":8498,"divided":8499,"mented":8500,"peach":8501,"arabic":8502,"pointing":8503,"pier":8504,"bucket":8505,"pregnant":8506,"try":8507,"hindu":8508,"strap":8509,"amph":8510,"connect":8511,"aking":8512,"experim":8513,"surf":8514,"goods":8515,"diamonds":8516,"temperature":8517,"helping":8518,"reaching":8519,"shi":8520,"agne":8521,"leadership":8522,"adventures":8523,"audi":8524,"ore":8525,"goat":8526,"strike":8527,"touch":8528,"prices":8529,"meters":8530,"grain":8531,"rum":8532,"races":8533,"virus":8534,"itude":8535,"meets":8536,"polar":8537,"receive":8538,"alumin":8539,"ffel":8540,"reality":8541,"tang":8542,"seal":8543,"locations":8544,"feeding":8545,"charm":8546,"malaysia":8547,"personnel":8548,"helps":8549,"fighters":8550,"anyone":8551,"trim":8552,"devil":8553,"dramatic":8554,"iel":8555,"drew":8556,"confe":8557,"admi":8558,"chip":8559,"seconds":8560,"smiles":8561,"moments":8562,"space":8563,"cus":8564,"bohe":8565,"istics":8566,"dental":8567,"resolution":8568,"59":8569,"terrier":8570,"stress":8571,"seats":8572,"assistant":8573,"conflic":8574,"jordan":8575,"orche":8576,"calls":8577,"su":8578,"picnic":8579,"hardwood":8580,"dust":8581,"thank":8582,"succe":8583,"makeover":8584,"charts":8585,"draft":8586,"bush":8587,"passenger":8588,"subur":8589,"1993":8590,"ai":8591,"lix":8592,"engineer":8593,"pottery":8594,"
prepare":8595,"absolu":8596,"soap":8597,"tape":8598,"reu":8599,"claim":8600,"ornaments":8601,"vene":8602,"teenage":8603,"1991":8604,"profess":8605,"copies":8606,"send":8607,"circular":8608,"nuts":8609,"depar":8610,"evolution":8611,"performances":8612,"enting":8613,"knife":8614,"architect":8615,"madrid":8616,"liberty":8617,"broadcast":8618,"badge":8619,"maxi":8620,"1950s":8621,"southeast":8622,"knit":8623,"scary":8624,"scenery":8625,"displays":8626,"interpre":8627,"mate":8628,"educational":8629,"dle":8630,"existing":8631,"brief":8632,"runner":8633,"ime":8634,"authority":8635,"77":8636,"attacks":8637,"hip":8638,"jo":8639,"lest":8640,"suffered":8641,"asts":8642,"virginia":8643,"candles":8644,"8th":8645,"gi":8646,"layers":8647,"instruments":8648,"necessary":8649,"angles":8650,"kshire":8651,"shang":8652,"carrier":8653,"ku":8654,"1970":8655,"vic":8656,"exp":8657,"filming":8658,"76":8659,"package":8660,"ign":8661,"beside":8662,"sixth":8663,"electronic":8664,"eless":8665,"sequence":8666,"dedicated":8667,"gru":8668,"remain":8669,"emblem":8670,"isn":8671,"wrestling":8672,"banks":8673,"tutorials":8674,"ilers":8675,"safari":8676,"gely":8677,"spoke":8678,"accent":8679,"boxing":8680,"retreat":8681,"must":8682,"tral":8683,"celebrating":8684,"subsequently":8685,"pineapple":8686,"vanity":8687,"twel":8688,"marina":8689,"artistic":8690,"heaven":8691,"connection":8692,"accu":8693,"jack":8694,"somewhere":8695,"peninsula":8696,"neutral":8697,"concent":8698,"parallel":8699,"nis":8700,"cade":8701,"compon":8702,"mont":8703,"bohemi":8704,"hot":8705,"policy":8706,"not":8707,"truth":8708,"oval":8709,"develop":8710,"reflection":8711,"eries":8712,"hundre":8713,"meant":8714,"dal":8715,"wants":8716,"pork":8717,"advance":8718,"putting":8719,"peanut":8720,"bling":8721,"perspective":8722,"speaks":8723,"colourful":8724,"abbey":8725,"disco":8726,"mosco":8727,"marines":8728,"advice":8729,"classes":8730,"archi":8731,"earned":8732,"maximum":8733,"grown":8734,"domestic":8735,"72":8736,"officially":8737,"ic
ient":8738,"incorpor":8739,"bishop":8740,"cage":8741,"ssy":8742,"allowing":8743,"method":8744,"planets":8745,"philo":8746,"situation":8747,"faced":8748,"io":8749,"fallen":8750,"sources":8751,"atmosphere":8752,"merry":8753,"overlooking":8754,"tact":8755,"sharing":8756,"responsible":8757,"nyc":8758,"alcohol":8759,"patch":8760,"ibi":8761,"rick":8762,"dges":8763,"miss":8764,"moscow":8765,"tax":8766,"champagne":8767,"administration":8768,"adorable":8769,"esp":8770,"bond":8771,"rein":8772,"quarter":8773,"scrat":8774,"waist":8775,"panda":8776,"elf":8777,"gues":8778,"inspe":8779,"bacon":8780,"audio":8781,"pickup":8782,"sustain":8783,"lightning":8784,"prison":8785,"capit":8786,"worst":8787,"airlines":8788,"vice":8789,"triple":8790,"1980s":8791,"dairy":8792,"eland":8793,"lis":8794,"promin":8795,"samu":8796,"apolis":8797,"arrival":8798,"cc":8799,"sandals":8800,"ley":8801,"prepared":8802,"orders":8803,"option":8804,"jesus":8805,"phase":8806,"guesthouse":8807,"youtube":8808,"favour":8809,"licen":8810,"modu":8811,"speaking":8812,"feather":8813,"measu":8814,"pile":8815,"orleans":8816,"lily":8817,"tomatoes":8818,"zerland":8819,"switzerland":8820,"cking":8821,"ille":8822,"aspe":8823,"renovation":8824,"williams":8825,"rho":8826,"communication":8827,"fare":8828,"arranged":8829,"danger":8830,"veland":8831,"fame":8832,"accessory":8833,"hundreds":8834,"consul":8835,"rich":8836,"trial":8837,"reef":8838,"1989":8839,"founded":8840,"mol":8841,"drops":8842,"weekly":8843,"motel":8844,"arrangement":8845,"manor":8846,"porce":8847,"referred":8848,"creamy":8849,"dynam":8850,"jas":8851,"cia":8852,"thor":8853,"occasion":8854,"ettes":8855,"90s":8856,"tablet":8857,"iv":8858,"stive":8859,"gin":8860,"vul":8861,"loy":8862,"presence":8863,"chips":8864,"underground":8865,"lose":8866,"ables":8867,"passengers":8868,"flan":8869,"potatoes":8870,"whi":8871,"enemy":8872,"porcelain":8873,"remin":8874,"customers":8875,"laven":8876,"broke":8877,"mythology":8878,"beans":8879,"rating":8880,"swiss":8881,"goddess":8882
,"timber":8883,"accoun":8884,"dropped":8885,"logi":8886,"hydro":8887,"invas":8888,"samsung":8889,"63":8890,"playground":8891,"consider":8892,"solutions":8893,"metry":8894,"ender":8895,"hunt":8896,"haven":8897,"ared":8898,"salmon":8899,"02":8900,"mesh":8901,"sse":8902,"circuit":8903,"ived":8904,"moroc":8905,"destinations":8906,"sunshine":8907,"territory":8908,"legacy":8909,"raff":8910,"internal":8911,"photoshop":8912,"stretch":8913,"ativity":8914,"tx":8915,"bage":8916,"psycho":8917,"accompan":8918,"lover":8919,"couples":8920,"oldest":8921,"actresses":8922,"09":8923,"bag":8924,"twice":8925,"reduced":8926,"asis":8927,"inth":8928,"jobs":8929,"kick":8930,"protective":8931,"sculptures":8932,"flyer":8933,"proposal":8934,"efforts":8935,"offering":8936,"stol":8937,"reveal":8938,"lavender":8939,"filling":8940,"03":8941,"illi":8942,"stra":8943,"phar":8944,"kim":8945,"economy":8946,"visions":8947,"sight":8948,"cells":8949,"legends":8950,"lus":8951,"playstation":8952,"horse":8953,"78":8954,"aa":8955,"twist":8956,"ee":8957,"jen":8958,"predic":8959,"starts":8960,"kan":8961,"aust":8962,"ferr":8963,"handbags":8964,"wron":8965,"vibr":8966,"deplo":8967,"argent":8968,"alised":8969,"brooklyn":8970,"injury":8971,"anger":8972,"nast":8973,"det":8974,"unable":8975,"pride":8976,"lement":8977,"simpsons":8978,"mates":8979,"depicting":8980,"strip":8981,"invitations":8982,"blossom":8983,"kal":8984,"slope":8985,"aggre":8986,"avian":8987,"stang":8988,"glaci":8989,"kish":8990,"mann":8991,"illery":8992,"farmer":8993,"loose":8994,"bbles":8995,"jewish":8996,"christ":8997,"edward":8998,"warriors":8999,"defeat":9000,"represent":9001,"jam":9002,"spotted":9003,"stood":9004,"05":9005,"towers":9006,"mustang":9007,"managed":9008,"lamps":9009,"revi":9010,"poland":9011,"foreground":9012,"panor":9013,"chu":9014,"inge":9015,"marath":9016,"professor":9017,"consider":9018,"traveling":9019,"flas":9020,"crafted":9021,"consu":9022,"team":9023,"tire":9024,"artificial":9025,"cleveland":9026,"casting":9027,"steamp":9028
,"62":9029,"attacked":9030,"mirro":9031,"steampunk":9032,"pepper":9033,"tales":9034,"peter":9035,"facilities":9036,"penguin":9037,"males":9038,"containing":9039,"08":9040,"diesel":9041,"mani":9042,"spray":9043,"sunflower":9044,"mayor":9045,"wealth":9046,"opens":9047,"rally":9048,"mystery":9049,"dred":9050,"rine":9051,"ishment":9052,"design":9053,"crash":9054,"sab":9055,"good":9056,"sten":9057,"capacity":9058,"sumed":9059,"allo":9060,"tells":9061,"respectively":9062,"sioned":9063,"accommo":9064,"oce":9065,"circus":9066,"scandin":9067,"elected":9068,"celebs":9069,"advertis":9070,"bre":9071,"agent":9072,"calcul":9073,"user":9074,"customi":9075,"largely":9076,"kee":9077,"dock":9078,"skinny":9079,"kok":9080,"ular":9081,"keeper":9082,"providing":9083,"insur":9084,"dium":9085,"bohemian":9086,"epic":9087,"except":9088,"mem":9089,"800":9090,"celtic":9091,"galo":9092,"railroad":9093,"account":9094,"harmon":9095,"protest":9096,"mood":9097,"presidential":9098,"afp":9099,"dent":9100,"mat":9101,"awareness":9102,"valentines":9103,"craf":9104,"61":9105,"occup":9106,"crimin":9107,"choosing":9108,"depth":9109,"ares":9110,"surprise":9111,"supporting":9112,"fat":9113,"ivory":9114,"snowy":9115,"garlic":9116,"hosted":9117,"else":9118,"strateg":9119,"kitchen":9120,"infinity":9121,"representing":9122,"1969":9123,"recru":9124,"phan":9125,"derly":9126,"jones":9127,"distribution":9128,"chro":9129,"occurred":9130,"gulf":9131,"arsen":9132,"ffon":9133,"literature":9134,"tied":9135,"merch":9136,"buddh":9137,"progress":9138,"salon":9139,"ordinary":9140,"sley":9141,"semi":9142,"southwest":9143,"bbc":9144,"preparing":9145,"phra":9146,"achu":9147,"billion":9148,"brings":9149,"apart":9150,"dough":9151,"quiet":9152,"dal":9153,"receiving":9154,"task":9155,"returns":9156,"initi":9157,"though":9158,"andre":9159,"1968":9160,"dangerous":9161,"cheesecake":9162,"device":9163,"extr":9164,"scientific":9165,"personalized":9166,"seems":9167,"hill":9168,"chandelier":9169,"measure":9170,"myster":9171,"tend":9172,"u
ro":9173,"satis":9174,"skate":9175,"perman":9176,"relaxing":9177,"crime":9178,"wrist":9179,"graduate":9180,"originals":9181,"treats":9182,"rental":9183,"blur":9184,"tenant":9185,"appli":9186,"67":9187,"ec":9188,"metro":9189,"04":9190,"dublin":9191,"07":9192,"bear":9193,"superman":9194,"convers":9195,"mock":9196,"heard":9197,"retired":9198,"vanilla":9199,"hilar":9200,"white":9201,"tionary":9202,"upcoming":9203,"cyclone":9204,"mole":9205,"planes":9206,"06":9207,"humans":9208,"portugal":9209,"cout":9210,"spirit":9211,"discovery":9212,"96":9213,"vote":9214,"documents":9215,"employees":9216,"astrono":9217,"application":9218,"survival":9219,"chanel":9220,"postcard":9221,"sticks":9222,"ronau":9223,"trin":9224,"sno":9225,"fog":9226,"dding":9227,"roasted":9228,"fusion":9229,"bringing":9230,"aboard":9231,"instant":9232,"quest":9233,"circles":9234,"properties":9235,"polo":9236,"talks":9237,"gender":9238,"declared":9239,"fou":9240,"rod":9241,"versions":9242,"dream":9243,"laying":9244,"answer":9245,"trunk":9246,"astronau":9247,"darkness":9248,"alli":9249,"prison":9250,"fellow":9251,"manufac":9252,"moves":9253,"1980":9254,"importance":9255,"agreement":9256,"country":9257,"promis":9258,"1920":9259,"closure":9260,"reje":9261,"motivational":9262,"pm":9263,"uts":9264,"kawaii":9265,"strange":9266,"os":9267,"andro":9268,"florida":9269,"reduce":9270,"finds":9271,"human":9272,"scheme":9273,"machines":9274,"fake":9275,"remodel":9276,"resting":9277,"tractor":9278,"attempted":9279,"thr":9280,"ual":9281,"seaf":9282,"sweden":9283,"rides":9284,"mely":9285,"raise":9286,"loop":9287,"messages":9288,"bali":9289,"rup":9290,"wheat":9291,"anton":9292,"1984":9293,"epy":9294,"ben":9295,"republic":9296,"inscription":9297,"teach":9298,"prize":9299,"path":9300,"firm":9301,"represented":9302,"architects":9303,"athletic":9304,"plaid":9305,"orts":9306,"frank":9307,"nutrition":9308,"71":9309,"dynast":9310,"democratic":9311,"md":9312,"pitch":9313,"beng":9314,"strate":9315,"nuclear":9316,"embroidered":9317,"tes
ting":9318,"rian":9319,"threat":9320,"extreme":9321,"thumb":9322,"farmers":9323,"texas":9324,"returning":9325,"ira":9326,"actus":9327,"cylin":9328,"measures":9329,"motivation":9330,"friendship":9331,"defence":9332,"tul":9333,"ingo":9334,"blog":9335,"continental":9336,"osc":9337,"sin":9338,"tarian":9339,"grid":9340,"andy":9341,"politan":9342,"sounds":9343,"ili":9344,"cooker":9345,"archae":9346,"purchased":9347,"inum":9348,"stripes":9349,"sture":9350,"feed":9351,"gings":9352,"ums":9353,"resulted":9354,"gar":9355,"outlet":9356,"etings":9357,"electrical":9358,"paired":9359,"88":9360,"setup":9361,"edin":9362,"squad":9363,"fired":9364,"wishes":9365,"exhibit":9366,"gat":9367,"chiffon":9368,"squir":9369,"cinnam":9370,"card":9371,"kits":9372,"lanka":9373,"conservation":9374,"mushroom":9375,"technical":9376,"judge":9377,"brad":9378,"extremely":9379,"ager":9380,"game":9381,"commissioned":9382,"bangkok":9383,"snap":9384,"modeling":9385,"ah":9386,"sens":9387,"yan":9388,"missed":9389,"operating":9390,"discount":9391,"switch":9392,"chy":9393,"headed":9394,"gging":9395,"appeal":9396,"indig":9397,"equal":9398,"pred":9399,"apo":9400,"crack":9401,"fits":9402,"restric":9403,"tau":9404,"1967":9405,"avoc":9406,"mega":9407,"therefore":9408,"chosen":9409,"gola":9410,"cannes":9411,"parked":9412,"menti":9413,"east":9414,"ania":9415,"poem":9416,"rai":9417,"1986":9418,"tally":9419,"impressive":9420,"alists":9421,"restoration":9422,"engraving":9423,"festive":9424,"everyday":9425,"short":9426,"vine":9427,"speaker":9428,"pad":9429,"shri":9430,"1987":9431,"pitts":9432,"rose":9433,"padd":9434,"rons":9435,"woodworking":9436,"accepted":9437,"rays":9438,"tially":9439,"25":9440,"templates":9441,"mars":9442,"station":9443,"extra":9444,"pically":9445,"northeast":9446,"hacks":9447,"loco":9448,"boyfriend":9449,"vans":9450,"mist":9451,"progre":9452,"creature":9453,"tag":9454,"mau":9455,"expansion":9456,"indonesia":9457,"hunter":9458,"ith":9459,"lesson":9460,"onna":9461,"gentle":9462,"lieu":9463,"spiritual":
9464,"mehn":9465,"couture":9466,"mehndi":9467,"missions":9468,"turkish":9469,"gained":9470,"jel":9471,"romance":9472,"wea":9473,"pergola":9474,"supplied":9475,"relatively":9476,"bridesmaid":9477,"silhouettes":9478,"losing":9479,"attempts":9480,"bomb":9481,"cactus":9482,"reports":9483,"opin":9484,"1988":9485,"exercis":9486,"skirts":9487,"cinnamon":9488,"17th":9489,"stamp":9490,"zar":9491,"sections":9492,"promotion":9493,"wrong":9494,"junction":9495,"nor":9496,"cho":9497,"belle":9498,"lb":9499,"rider":9500,"caucas":9501,"zard":9502,"1960":9503,"washing":9504,"chalk":9505,"charity":9506,"platinum":9507,"refused":9508,"29":9509,"gau":9510,"herself":9511,"kg":9512,"younger":9513,"1940":9514,"exercises":9515,"mainly":9516,"oli":9517,"laboratory":9518,"expression":9519,"jokes":9520,"dwar":9521,"brilli":9522,"oak":9523,"arsenal":9524,"horizon":9525,"waving":9526,"austria":9527,"ila":9528,"modest":9529,"stages":9530,"monsters":9531,"cci":9532,"hostel":9533,"wallet":9534,"sheets":9535,"partner":9536,"porters":9537,"disc":9538,"navig":9539,"sandy":9540,"strategy":9541,"twelve":9542,"gt":9543,"denver":9544,"aph":9545,"cash":9546,"bakery":9547,"olet":9548,"stem":9549,"expedition":9550,"edi":9551,"habitat":9552,"sue":9553,"crossed":9554,"afford":9555,"79":9556,"textile":9557,"need":9558,"isometric":9559,"venues":9560,"countryside":9561,"pools":9562,"primar":9563,"manh":9564,"coordin":9565,"ginger":9566,"resulting":9567,"shanghai":9568,"pad":9569,"hundred":9570,"transfer":9571,"cattle":9572,"oning":9573,"adjust":9574,"conce":9575,"appreci":9576,"uniforms":9577,"automatic":9578,"goal":9579,"stan":9580,"diving":9581,"struck":9582,"designated":9583,"exposed":9584,"patrol":9585,"ira":9586,"ored":9587,"250":9588,"understand":9589,"ude":9590,"wins":9591,"cryst":9592,"typically":9593,"palette":9594,"fications":9595,"1985":9596,"proto":9597,"bros":9598,"regions":9599,"aim":9600,"automobile":9601,"leggings":9602,"femin":9603,"elderly":9604,"enna":9605,"ns":9606,"muscles":9607,"console":960
8,"styling":9609,"conducted":9610,"raising":9611,"lized":9612,"chose":9613,"consists":9614,"emerald":9615,"boeing":9616,"intelligence":9617,"mise":9618,"honey":9619,"yorkshire":9620,"bug":9621,"80s":9622,"eno":9623,"body":9624,"entary":9625,"melon":9626,"69":9627,"giraff":9628,"applied":9629,"remote":9630,"grew":9631,"laser":9632,"pirates":9633,"desh":9634,"teenth":9635,"explained":9636,"arab":9637,"pop":9638,"warehouse":9639,"tender":9640,"spark":9641,"1966":9642,"hilarious":9643,"protected":9644,"madonna":9645,"feathers":9646,"scandinavian":9647,"edinburgh":9648,"lens":9649,"teal":9650,"personality":9651,"shelter":9652,"replacement":9653,"logist":9654,"powers":9655,"master":9656,"closer":9657,"burgun":9658,"pavil":9659,"kur":9660,"che":9661,"possibly":9662,"1942":9663,"lieutenant":9664,"shares":9665,"1965":9666,"operated":9667,"ferrari":9668,"musician":9669,"ultimately":9670,"affordable":9671,"flat":9672,"villas":9673,"bristol":9674,"mud":9675,"programs":9676,"pyramid":9677,"zomb":9678,"reuters":9679,"facebook":9680,"guitars":9681,"gad":9682,"virtual":9683,"gold":9684,"dots":9685,"insurance":9686,"amster":9687,"affected":9688,"leban":9689,"mber":9690,"attri":9691,"ements":9692,"coloured":9693,"lighted":9694,"decals":9695,"herbs":9696,"happen":9697,"enn":9698,"broadway":9699,"hul":9700,"actual":9701,"amsterdam":9702,"38":9703,"dirt":9704,"perh":9705,"surgery":9706,"tees":9707,"penn":9708,"pied":9709,"1975":9710,"sisters":9711,"creates":9712,"resorts":9713,"girl":9714,"offices":9715,"tanks":9716,"ji":9717,"dried":9718,"rap":9719,"increasing":9720,"vegetarian":9721,"spend":9722,"74":9723,"cedar":9724,"collap":9725,"manhattan":9726,"natur":9727,"pregnancy":9728,"fine":9729,"say":9730,"widely":9731,"rose":9732,"causing":9733,"montre":9734,"claims":9735,"modified":9736,"portland":9737,"spring":9738,"elim":9739,"transm":9740,"vocals":9741,"perhaps":9742,"inary":9743,"matte":9744,"haun":9745,"perio":9746,"approved":9747,"molecu":9748,"repl":9749,"spice":9750,"opp":9751,"c
ollections":9752,"zoo":9753,"burgundy":9754,"bunch":9755,"forum":9756,"tribute":9757,"individuals":9758,"jagu":9759,"1945":9760,"commented":9761,"nego":9762,"deluxe":9763,"lantern":9764,"confirmed":9765,"splash":9766,"search":9767,"cancel":9768,"poke":9769,"but":9770,"opposite":9771,"rements":9772,"mich":9773,"primarily":9774,"bees":9775,"toms":9776,"dirty":9777,"kinds":9778,"hybri":9779,"allied":9780,"hawk":9781,"mingham":9782,"wick":9783,"tail":9784,"flag":9785,"raid":9786,"midi":9787,"rivers":9788,"random":9789,"ulty":9790,"tens":9791,"pokemon":9792,"severe":9793,"focused":9794,"rage":9795,"perfectly":9796,"research":9797,"observed":9798,"advantage":9799,"aths":9800,"sharp":9801,"files":9802,"laughing":9803,"roller":9804,"bald":9805,"soundtrack":9806,"dee":9807,"clown":9808,"babies":9809,"kinder":9810,"1964":9811,"nc":9812,"heels":9813,"1950":9814,"entic":9815,"anchor":9816,"mandala":9817,"tine":9818,"ml":9819,"trousers":9820,"rable":9821,"authentic":9822,"happiness":9823,"73":9824,"ao":9825,"fers":9826,"1944":9827,"alber":9828,"13th":9829,"favourite":9830,"seafood":9831,"accur":9832,"conve":9833,"itely":9834,"seventh":9835,"kenne":9836,"ports":9837,"intersection":9838,"vania":9839,"beads":9840,"dron":9841,"icking":9842,"secondary":9843,"playo":9844,"carolina":9845,"speak":9846,"cruiser":9847,"thousand":9848,"mentation":9849,"resident":9850,"bin":9851,"dancer":9852,"labels":9853,"rif":9854,"swedish":9855,"similar":9856,"lightweight":9857,"costa":9858,"kitchenette":9859,"saints":9860,"92":9861,"posted":9862,"converted":9863,"root":9864,"gala":9865,"artillery":9866,"definition":9867,"saving":9868,"sal":9869,"ima":9870,"address":9871,"bever":9872,"sme":9873,"sliding":9874,"restored":9875,"zip":9876,"insects":9877,"ears":9878,"extensive":9879,"slow":9880,"rai":9881,"ditionally":9882,"demand":9883,"ours":9884,"bul":9885,"garten":9886,"cardboard":9887,"lyn":9888,"identified":9889,"sea":9890,"doub":9891,"ites":9892,"otto":9893,"swim":9894,"bungalo":9895,"ttle":9896,"lea
rned":9897,"michigan":9898,"negoti":9899,"developing":9900,"cker":9901,"cocktails":9902,"fluffy":9903,"successfully":9904,"controlled":9905,"hardware":9906,"cies":9907,"uss":9908,"mbled":9909,"1979":9910,"dav":9911,"shake":9912,"crisis":9913,"arian":9914,"gathering":9915,"dynasty":9916,"inju":9917,"heavily":9918,"yan":9919,"97":9920,"invasion":9921,"mrs":9922,"mathe":9923,"flavor":9924,"killing":9925,"brief":9926,"basin":9927,"packed":9928,"walker":9929,"survive":9930,"blend":9931,"nan":9932,"birmingham":9933,"kitten":9934,"statues":9935,"sweat":9936,"98":9937,"moon":9938,"nights":9939,"auction":9940,"xbox":9941,"mario":9942,"pittsburgh":9943,"diary":9944,"ione":9945,"antonio":9946,"acres":9947,"influenced":9948,"represents":9949,"austin":9950,"1941":9951,"sunlight":9952,"murals":9953,"depot":9954,"lipstick":9955,"marathon":9956,"defined":9957,"terri":9958,"franch":9959,"circul":9960,"cked":9961,"jama":9962,"collected":9963,"bomber":9964,"admiral":9965,"woodland":9966,"bris":9967,"gang":9968,"rasp":9969,"technique":9970,"mia":9971,"terminal":9972,"ston":9973,"ladder":9974,"plenty":9975,"sony":9976,"ress":9977,"thesis":9978,"regarding":9979,"iron":9980,"87":9981,"sought":9982,"butterflies":9983,"nova":9984,"wagon":9985,"acre":9986,"harvest":9987,"engaged":9988,"wonder":9989,"aria":9990,"benz":9991,"jose":9992,"charming":9993,"stating":9994,"emph":9995,"quarantine":9996,"laws":9997,"dancers":9998,"horn":9999,"appropri":10000,"nissan":10001,"principal":10002,"tests":10003,"itation":10004,"maternity":10005,"designers":10006,"viking":10007,"dip":10008,"arrive":10009,"montreal":10010,"luxur":10011,"volk":10012,"ilton":10013,"outstanding":10014,"costs":10015,"munici":10016,"experienced":10017,"glowing":10018,"copy":10019,"demic":10020,"instructions":10021,"brand":10022,"fix":10023,"kent":10024,"avocado":10025,"avatar":10026,"fashion":10027,"bree":10028,"fused":10029,"update":10030,"94":10031,"harley":10032,"alem":10033,"cils":10034,"caucasian":10035,"unlike":10036,"voted":
10037,"bridges":10038,"weigh":10039,"keyboard":10040,"tack":10041,"voy":10042,"jets":10043,"jerus":10044,"ik":10045,"exce":10046,"greg":10047,"thrones":10048,"decade":10049,"ensure":10050,"listening":10051,"prominent":10052,"nominated":10053,"hay":10054,"fu":10055,"has":10056,"inspiring":10057,"ms":10058,"causes":10059,"jerusalem":10060,"ian":10061,"health":10062,"rainy":10063,"suede":10064,"destri":10065,"commonly":10066,"sick":10067,"drum":10068,"hook":10069,"reaction":10070,"pixar":10071,"optim":10072,"leice":10073,"booth":10074,"1972":10075,"leicester":10076,"pedestri":10077,"tism":10078,"aya":10079,"landscapes":10080,"mare":10081,"td":10082,"employee":10083,"vogue":10084,"inser":10085,"cuis":10086,"eiffel":10087,"lock":10088,"360":10089,"scenic":10090,"pavilion":10091,"fas":10092,"planting":10093,"guides":10094,"caramel":10095,"hua":10096,"16th":10097,"sanct":10098,"81":10099,"enjoyed":10100,"secrets":10101,"poetry":10102,"transferred":10103,"convin":10104,"conflict":10105,"song":10106,"replace":10107,"violet":10108,"shift":10109,"almond":10110,"skies":10111,"cooked":10112,"quir":10113,"phe":10114,"sax":10115,"bowls":10116,"proud":10117,"offensive":10118,"abilities":10119,"curved":10120,"defensive":10121,"drug":10122,"entering":10123,"magnetic":10124,"push":10125,"titles":10126,"teachers":10127,"crane":10128,"exploring":10129,"bers":10130,"viewing":10131,"senate":10132,"handwriting":10133,"follows":10134,"column":10135,"insul":10136,"lifting":10137,"apping":10138,"bulldog":10139,"equipped":10140,"bullet":10141,"ugly":10142,"gas":10143,"heading":10144,"tide":10145,"carving":10146,"portable":10147,"dians":10148,"91":10149,"striking":10150,"pione":10151,"vor":10152,"arte":10153,"afgh":10154,"stam":10155,"hear":10156,"deman":10157,"pod":10158,"delivered":10159,"hamilton":10160,"knew":10161,"roughly":10162,"columns":10163,"fabul":10164,"volksw":10165,"burn":10166,"belgi":10167,"stripe":10168,"packs":10169,"smoothie":10170,"120":10171,"teach":10172,"swift":10173,"pol
lution":10174,"flour":10175,"passion":10176,"sessions":10177,"inton":10178,"foam":10179,"perfu":10180,"condo":10181,"steph":10182,"halo":10183,"volkswagen":10184,"finishing":10185,"mage":10186,"commemor":10187,"straw":10188,"cze":10189,"mt":10190,"telling":10191,"folding":10192,"barrel":10193,"dimensions":10194,"tomb":10195,"nautical":10196,"inspire":10197,"cone":10198,"shrimp":10199,"lucky":10200,"yel":10201,"liest":10202,"relax":10203,"bowling":10204,"dragons":10205,"kindergarten":10206,"expand":10207,"compos":10208,"scott":10209,"fonts":10210,"harrison":10211,"basis":10212,"chim":10213,"arely":10214,"carl":10215,"iously":10216,"salv":10217,"ortho":10218,"fireworks":10219,"improved":10220,"rubs":10221,"cardin":10222,"unve":10223,"sively":10224,"ounded":10225,"bbed":10226,"1973":10227,"metropolitan":10228,"rises":10229,"wid":10230,"meditation":10231,"wels":10232,"calif":10233,"acular":10234,"laugh":10235,"fold":10236,"1990s":10237,"resistance":10238,"pou":10239,"gucci":10240,"proved":10241,"alb":10242,"buffe":10243,"rooftop":10244,"86":10245,"dra":10246,"za":10247,"agriculture":10248,"grilled":10249,"1971":10250,"acou":10251,"cair":10252,"1982":10253,"cushion":10254,"treasure":10255,"tick":10256,"threat":10257,"stable":10258,"heights":10259,"whiskey":10260,"93":10261,"forget":10262,"subsequent":10263,"1983":10264,"distingu":10265,"cuba":10266,"fabulous":10267,"electric":10268,"crying":10269,"acqu":10270,"ckets":10271,"vi":10272,"orchestra":10273,"chase":10274,"gap":10275,"adopted":10276,"sustainable":10277,"bau":10278,"reserv":10279,"scoop":10280,"luxurious":10281,"mango":10282,"ferry":10283,"origin":10284,"mechanical":10285,"clinic":10286,"reverse":10287,"700":10288,"dd":10289,"throne":10290,"nou":10291,"pale":10292,"walnut":10293,"jim":10294,"coats":10295,"editor":10296,"toe":10297,"beaded":10298,"blind":10299,"garian":10300,"generated":10301,"bap":10302,"shadows":10303,"lam":10304,"vibrant":10305,"crosses":10306,"lifetime":10307,"charged":10308,"lated":10309,"re
tirement":10310,"ilian":10311,"promoted":10312,"1943":10313,"strugg":10314,"boarding":10315,"furnished":10316,"scat":10317,"yam":10318,"beam":10319,"torpe":10320,"promote":10321,"suggest":10322,"secure":10323,"bia":10324,"cents":10325,"camel":10326,"historian":10327,"athletes":10328,"guaran":10329,"leis":10330,"facial":10331,"belly":10332,"df":10333,"frequently":10334,"creator":10335,"gathered":10336,"select":10337,"failure":10338,"emir":10339,"wi":10340,"earth":10341,"swee":10342,"1976":10343,"flex":10344,"ministry":10345,"virgin":10346,"mushrooms":10347,"stances":10348,"ship":10349,"understanding":10350,"correct":10351,"upgra":10352,"participated":10353,"gene":10354,"valu":10355,"alps":10356,"drivers":10357,"trails":10358,"flame":10359,"hipster":10360,"clubs":10361,"yne":10362,"mixing":10363,"tires":10364,"gab":10365,"vis":10366,"altar":10367,"pots":10368,"belgium":10369,"deal":10370,"bane":10371,"popular":10372,"volley":10373,"1000":10374,"brilliant":10375,"hosts":10376,"emotional":10377,"pillows":10378,"suspen":10379,"ito":10380,"packing":10381,"glacier":10382,"loving":10383,"glamour":10384,"markets":10385,"volleyball":10386,"croatia":10387,"ield":10388,"steak":10389,"li":10390,"diplo":10391,"retriever":10392,"essenti":10393,"decre":10394,"hog":10395,"guardian":10396,"pup":10397,"beneath":10398,"chris":10399,"thusi":10400,"82":10401,"alexander":10402,"tumblr":10403,"injured":10404,"broch":10405,"scheduled":10406,"rendered":10407,"rever":10408,"greenhouse":10409,"ipad":10410,"minne":10411,"borne":10412,"stock":10413,"dna":10414,"enthusi":10415,"dress":10416,"timore":10417,"destru":10418,"decades":10419,"oriental":10420,"mason":10421,"engines":10422,"scooter":10423,"liner":10424,"contained":10425,"1974":10426,"peaked":10427,"norwe":10428,"relationships":10429,"belon":10430,"skyscra":10431,"treated":10432,"illuminated":10433,"1930s":10434,"promp":10435,"aluminum":10436,"closing":10437,"winners":10438,"gather":10439,"accident":10440,"inated":10441,"gates":10442,"bal
timore":10443,"guardians":10444,"scout":10445,"ov":10446,"oy":10447,"sr":10448,"baro":10449,"ebay":10450,"thur":10451,"maintained":10452,"crest":10453,"broad":10454,"collecti":10455,"open":10456,"tight":10457,"saurus":10458,"pulled":10459,"granite":10460,"sum":10461,"documentary":10462,"spicy":10463,"fingers":10464,"forests":10465,"blade":10466,"carries":10467,"shells":10468,"expressed":10469,"classy":10470,"casser":10471,"hike":10472,"pooh":10473,"ott":10474,"gow":10475,"dais":10476,"highlights":10477,"athens":10478,"1978":10479,"py":10480,"fund":10481,"contributed":10482,"byz":10483,"swan":10484,"microphone":10485,"tible":10486,"determined":10487,"vy":10488,"concluded":10489,"squirrel":10490,"arre":10491,"prey":10492,"appearances":10493,"imate":10494,"replica":10495,"hen":10496,"mah":10497,"lonely":10498,"never":10499,"cult":10500,"outbreak":10501,"tant":10502,"tape":10503,"dye":10504,"arriving":10505,"tiv":10506,"hearing":10507,"entirely":10508,"84":10509,"delta":10510,"heel":10511,"tsman":10512,"murder":10513,"mper":10514,"charcoal":10515,"stove":10516,"crab":10517,"school":10518,"fifa":10519,"ellite":10520,"blogger":10521,"apples":10522,"1977":10523,"limit":10524,"permanent":10525,"repu":10526,"maintain":10527,"showcas":10528,"tto":10529,"chairman":10530,"pope":10531,"conversation":10532,"musicians":10533,"wheelchair":10534,"thoughts":10535,"ological":10536,"extin":10537,"satellite":10538,"assass":10539,"buffet":10540,"beginners":10541,"pound":10542,"politics":10543,"swear":10544,"envelo":10545,"ras":10546,"gly":10547,"dual":10548,"eagles":10549,"benefit":10550,"practical":10551,"roles":10552,"iran":10553,"dispu":10554,"scientists":10555,"canopy":10556,"ottoman":10557,"norwegian":10558,"shining":10559,"bay":10560,"util":10561,"mored":10562,"parag":10563,"ati":10564,"hell":10565,"proper":10566,"consist":10567,"blush":10568,"vessel":10569,"legisl":10570,"evol":10571,"89":10572,"prise":10573,"iro":10574,"determine":10575,"casserole":10576,"bulb":10577,"accompanied
":10578,"rams":10579,"neighbour":10580,"psy":10581,"authorities":10582,"35":10583,"fiber":10584,"thirty":10585,"removal":10586,"useful":10587,"driven":10588,"buddha":10589,"astronaut":10590,"hallway":10591,"monitor":10592,"cloudy":10593,"10th":10594,"wilson":10595,"peac":10596,"register":10597,"cuisine":10598,"vised":10599,"americans":10600,"superior":10601,"monochrome":10602,"rainfall":10603,"1920s":10604,"centuries":10605,"chamber":10606,"jaguar":10607,"kam":10608,"shiny":10609,"queens":10610,"assault":10611,"ax":10612,"curry":10613,"elite":10614,"grab":10615,"bust":10616,"respect":10617,"miller":10618,"incident":10619,"denmark":10620,"republican":10621,"cycled":10622,"ggle":10623,"lebanon":10624,"oral":10625,"1939":10626,"mere":10627,"1930":10628,"lex":10629,"truly":10630,"maje":10631,"emo":10632,"lington":10633,"survey":10634,"towns":10635,"picked":10636,"holes":10637,"laho":10638,"terrain":10639,"aj":10640,"expanded":10641,"fixed":10642,"flix":10643,"rad":10644,"baker":10645,"1963":10646,"lining":10647,"tten":10648,"narr":10649,"ggy":10650,"chemi":10651,"mathemat":10652,"leisure":10653,"nhl":10654,"achieve":10655,"producers":10656,"ided":10657,"cha":10658,"cut":10659,"request":10660,"onion":10661,"candidate":10662,"recreation":10663,"tested":10664,"creepy":10665,"slices":10666,"notable":10667,"netflix":10668,"83":10669,"innings":10670,"park":10671,"appearing":10672,"brunch":10673,"trips":10674,"athlete":10675,"oppon":10676,"opposition":10677,"promise":10678,"lew":10679,"illustrator":10680,"oklaho":10681,"acoustic":10682,"bun":10683,"values":10684,"hound":10685,"acqui":10686,"preparation":10687,"spacious":10688,"nate":10689,"sylvania":10690,"woven":10691,"administr":10692,"hybrid":10693,"evacu":10694,"maybe":10695,"m2":10696,"concepts":10697,"meanwhile":10698,"thunder":10699,"cod":10700,"tedly":10701,"giraffe":10702,"glasgow":10703,"closely":10704,"estone":10705,"prince":10706,"homer":10707,"carriage":10708,"emirates":10709,"drums":10710,"ori":10711,"methods":10
712,"castles":10713,"labor":10714,"pursu":10715,"girlfriend":10716,"alive":10717,"snowman":10718,"request":10719,"longest":10720,"debuted":10721,"functional":10722,"buttons":10723,"ung":10724,"1981":10725,"somer":10726,"mtv":10727,"izes":10728,"constitution":10729,"twi":10730,"academic":10731,"fashionable":10732,"mike":10733,"exit":10734,"gans":10735,"farms":10736,"slip":10737,"hipp":10738,"reign":10739,"tim":10740,"kan":10741,"turt":10742,"interested":10743,"arthur":10744,"lan":10745,"revol":10746,"recycled":10747,"chin":10748,"knights":10749,"tapestry":10750,"atomic":10751,"gate":10752,"catching":10753,"dean":10754,"sail":10755,"biscu":10756,"drawers":10757,"household":10758,"duvet":10759,"alogue":10760,"appoint":10761,"memb":10762,"describes":10763,"trac":10764,"genus":10765,"residen":10766,"perfume":10767,"stopped":10768,"tags":10769,"breath":10770,"credit":10771,"sayings":10772,"nad":10773,"folio":10774,"nel":10775,"aisle":10776,"rush":10777,"garn":10778,"notice":10779,"adjustable":10780,"dice":10781,"privac":10782,"brook":10783,"literary":10784,"tribun":10785,"excited":10786,"privacy":10787,"nash":10788,"vig":10789,"will":10790,"alleg":10791,"hire":10792,"si":10793,"wig":10794,"labra":10795,"ction":10796,"revolu":10797,"iley":10798,"elle":10799,"101":10800,"eco":10801,"kr":10802,"iners":10803,"print":10804,"swa":10805,"alities":10806,"publication":10807,"references":10808,"llers":10809,"hawai":10810,"auro":10811,"inged":10812,"recon":10813,"sam":10814,"dollars":10815,"aired":10816,"mption":10817,"sville":10818,"blurred":10819,"expos":10820,"hulk":10821,"istanbul":10822,"aire":10823,"reveals":10824,"rant":10825,"consecu":10826,"climb":10827,"cigare":10828,"cyber":10829,"pakistan":10830,"ultra":10831,"stylized":10832,"killer":10833,"scu":10834,"shab":10835,"ffins":10836,"cub":10837,"valent":10838,"peace":10839,"fia":10840,"submarine":10841,"theless":10842,"sai":10843,"headphones":10844,"figurine":10845,"twil":10846,"easier":10847,"supermarket":10848,"debate":108
49,"weapon":10850,"businesses":10851,"waterfront":10852,"embar":10853,"mansions":10854,"attraction":10855,"apollo":10856,"occupied":10857,"spin":10858,"founder":10859,"twilight":10860,"magnific":10861,"pige":10862,"adds":10863,"knot":10864,"manic":10865,"eighth":10866,"definitely":10867,"heim":10868,"pakistani":10869,"chen":10870,"breeding":10871,"racer":10872,"tations":10873,"tol":10874,"tired":10875,"cheerful":10876,"ance":10877,"pper":10878,"trash":10879,"manual":10880,"celebrated":10881,"grocer":10882,"cord":10883,"wounded":10884,"eva":10885,"recognized":10886,"igu":10887,"bucks":10888,"apoc":10889,"escap":10890,"puts":10891,"alph":10892,"acid":10893,"ool":10894,"warner":10895,"punch":10896,"repeat":10897,"interactive":10898,"donald":10899,"sha":10900,"investment":10901,"windsor":10902,"behavior":10903,"mentioned":10904,"collector":10905,"communities":10906,"tasty":10907,"waterproof":10908,"masters":10909,"stitch":10910,"specim":10911,"pillar":10912,"explor":10913,"pedia":10914,"tongue":10915,"volunteers":10916,"abad":10917,"baham":10918,"rainforest":10919,"kissing":10920,"eat":10921,"toast":10922,"tened":10923,"breaks":10924,"colony":10925,"versity":10926,"touring":10927,"exposure":10928,"symp":10929,"colonel":10930,"pump":10931,"bean":10932,"set":10933,"sume":10934,"shock":10935,"eliz":10936,"nursing":10937,"cottages":10938,"crocod":10939,"guitar":10940,"arily":10941,"other":10942,"iceland":10943,"diameter":10944,"characteristics":10945,"opinion":10946,"consecutive":10947,"pitcher":10948,"filmed":10949,"landmark":10950,"acce":10951,"pantry":10952,"nashville":10953,"riders":10954,"rots":10955,"corri":10956,"hanna":10957,"browse":10958,"tia":10959,"cardi":10960,"bracelets":10961,"edit":10962,"frank":10963,"lava":10964,"desi":10965,"announcement":10966,"hello":10967,"bite":10968,"triu":10969,"borgh":10970,"glad":10971,"listen":10972,"wake":10973,"spices":10974,"builder":10975,"insi":10976,"apply":10977,"corporation":10978,"caval":10979,"mercur":10980,"supporters"
:10981,"ceme":10982,"azz":10983,"patients":10984,"lamborgh":10985,"breast":10986,"soci":10987,"ari":10988,"touchdown":10989,"labrador":10990,"sic":10991,"erie":10992,"starry":10993,"saurs":10994,"quarterback":10995,"popcorn":10996,"wiring":10997,"oper":10998,"consisted":10999,"lion":11000,"genu":11001,"sausage":11002,"ege":11003,"exist":11004,"factor":11005,"income":11006,"alberta":11007,"factors":11008,"ticket":11009,"rectangular":11010,"chemistry":11011,"spl":11012,"explorer":11013,"volunteer":11014,"seated":11015,"moore":11016,"expert":11017,"pony":11018,"surfing":11019,"walkway":11020,"hawks":11021,"elizabe":11022,"dits":11023,"gli":11024,"icated":11025,"had":11026,"lamborghini":11027,"sexual":11028,"producing":11029,"argentina":11030,"bubbles":11031,"hs":11032,"suspension":11033,"hub":11034,"oversized":11035,"delicate":11036,"franchise":11037,"hawaiian":11038,"armored":11039,"abled":11040,"pandemic":11041,"prohib":11042,"achieved":11043,"aft":11044,"instrument":11045,"versi":11046,"ennial":11047,"mark":11048,"midnight":11049,"rhyth":11050,"ero":11051,"drone":11052,"tough":11053,"participate":11054,"cove":11055,"actly":11056,"watched":11057,"welsh":11058,"signal":11059,"confeder":11060,"spectacular":11061,"mother":11062,"healing":11063,"prix":11064,"fically":11065,"consisting":11066,"yummy":11067,"lush":11068,"kol":11069,"listing":11070,"spher":11071,"client":11072,"1962":11073,"tap":11074,"symph":11075,"require":11076,"airline":11077,"21st":11078,"rolled":11079,"spending":11080,"opposed":11081,"apocaly":11082,"von":11083,"totally":11084,"enni":11085,"resol":11086,"stitu":11087,"fleece":11088,"vocal":11089,"vable":11090,"rence":11091,"agues":11092,"slowly":11093,"bloom":11094,"collaboration":11095,"peg":11096,"partner":11097,"cosmet":11098,"mugs":11099,"gymn":11100,"dos":11101,"husky":11102,"answ":11103,"carrot":11104,"tz":11105,"posts":11106,"laces":11107,"parrot":11108,"sc":11109,"jr":11110,"ateau":11111,"flowing":11112,"acquired":11113,"sliced":11114,"daughte
rs":11115,"verses":11116,"hms":11117,"sees":11118,"demon":11119,"bigger":11120,"none":11121,"gameplay":11122,"12th":11123,"organ":11124,"henna":11125,"midd":11126,"flames":11127,"correspon":11128,"dged":11129,"hut":11130,"stown":11131,"pixel":11132,"exactly":11133,"cosmetics":11134,"tos":11135,"mbo":11136,"derness":11137,"compact":11138,"peacock":11139,"starring":11140,"wau":11141,"indoors":11142,"cane":11143,"watermelon":11144,"regarded":11145,"thread":11146,"paw":11147,"advoc":11148,"sch":11149,"ging":11150,"zombie":11151,"kel":11152,"crust":11153,"tress":11154,"reflected":11155,"sby":11156,"suggests":11157,"naz":11158,"productions":11159,"participants":11160,"feels":11161,"trim":11162,"organs":11163,"mothers":11164,"relations":11165,"curve":11166,"degre":11167,"asking":11168,"skating":11169,"personalised":11170,"winnie":11171,"links":11172,"aro":11173,"knoc":11174,"deals":11175,"coul":11176,"compete":11177,"enders":11178,"pockets":11179,"exist":11180,"keeps":11181,"1957":11182,"ming":11183,"illusion":11184,"attend":11185,"1955":11186,"studying":11187,"hilton":11188,"sper":11189,"situated":11190,"lists":11191,"49":11192,"hull":11193,"blow":11194,"cler":11195,"ram":11196,"shrine":11197,"fought":11198,"nels":11199,"parent":11200,"1956":11201,"carb":11202,"expres":11203,"octopus":11204,"ador":11205,"atop":11206,"rates":11207,"nei":11208,"painter":11209,"sneaker":11210,"blowing":11211,"fisher":11212,"seem":11213,"ouring":11214,"center":11215,"freeway":11216,"citizens":11217,"mercury":11218,"addic":11219,"joining":11220,"phrase":11221,"tha":11222,"drawer":11223,"schedule":11224,"degrees":11225,"kenya":11226,"fred":11227,"robe":11228,"melo":11229,"plated":11230,"achie":11231,"cash":11232,"yn":11233,"worship":11234,"ela":11235,"articles":11236,"queensland":11237,"tum":11238,"continuous":11239,"false":11240,"joseph":11241,"diversity":11242,"mets":11243,"goose":11244,"tones":11245,"minster":11246,"viewer":11247,"onist":11248,"passage":11249,"agricultural":11250,"wwe":11251
,"scan":11252,"kunst":11253,"borders":11254,"certified":11255,"archive":11256,"manicure":11257,"nah":11258,"caps":11259,"differences":11260,"oul":11261,"brown":11262,"15th":11263,"bills":11264,"deon":11265,"shall":11266,"noodles":11267,"sman":11268,"routes":11269,"bled":11270,"mixture":11271,"blueberry":11272,"1900":11273,"immig":11274,"dolphin":11275,"ivy":11276,"phantom":11277,"hudson":11278,"kon":11279,"victoria":11280,"4x":11281,"signing":11282,"vietnam":11283,"cylinder":11284,"upho":11285,"stacked":11286,"hampton":11287,"occur":11288,"eving":11289,"lian":11290,"quer":11291,"depu":11292,"warhammer":11293,"1914":11294,"alog":11295,"engraved":11296,"convertible":11297,"alliance":11298,"bec":11299,"tribe":11300,"blosso":11301,"promotional":11302,"steering":11303,"violence":11304,"stroke":11305,"programme":11306,"brochure":11307,"orous":11308,"ilo":11309,"lemon":11310,"sanctuary":11311,"vietnamese":11312,"ende":11313,"rix":11314,"att":11315,"feel":11316,"sailor":11317,"sort":11318,"drives":11319,"promo":11320,"blin":11321,"geek":11322,"ashed":11323,"components":11324,"monast":11325,"editorial":11326,"oma":11327,"orchid":11328,"cel":11329,"width":11330,"newest":11331,"70s":11332,"ninj":11333,"martial":11334,"meat":11335,"sso":11336,"transit":11337,"prep":11338,"recognition":11339,"cardig":11340,"neath":11341,"farming":11342,"abil":11343,"hav":11344,"treaty":11345,"bungalow":11346,"surviv":11347,"disne":11348,"pharmac":11349,"affair":11350,"perth":11351,"popularity":11352,"bugs":11353,"bacter":11354,"leaf":11355,"flights":11356,"guine":11357,"accessi":11358,"cavalry":11359,"conversion":11360,"cr":11361,"cola":11362,"strawberries":11363,"barrier":11364,"cambo":11365,"persian":11366,"currency":11367,"900":11368,"pid":11369,"layered":11370,"bbq":11371,"styled":11372,"1959":11373,"tm":11374,"chains":11375,"stanley":11376,"aurora":11377,"mania":11378,"glen":11379,"compass":11380,"arcade":11381,"trench":11382,"stest":11383,"silver":11384,"macro":11385,"ranger":11386,"invest
igation":11387,"ancing":11388,"credits":11389,"compr":11390,"mach":11391,"wig":11392,"mirrors":11393,"functions":11394,"gour":11395,"anna":11396,"weed":11397,"crow":11398,"czech":11399,"microsoft":11400,"surve":11401,"chun":11402,"measuring":11403,"sat":11404,"heart":11405,"leaning":11406,"argued":11407,"explosion":11408,"gang":11409,"davis":11410,"lifted":11411,"compla":11412,"sham":11413,"gesture":11414,"haunted":11415,"sole":11416,"audit":11417,"ltd":11418,"soli":11419,"poems":11420,"capable":11421,"etsy":11422,"fined":11423,"silent":11424,"stephen":11425,"brazilian":11426,"gentleman":11427,"mc":11428,"standards":11429,"stim":11430,"critic":11431,"contour":11432,"designing":11433,"makers":11434,"chickens":11435,"throwing":11436,"experiences":11437,"sock":11438,"fastest":11439,"arrangements":11440,"baskets":11441,"buddhist":11442,"aco":11443,"logan":11444,"identify":11445,"employed":11446,"bang":11447,"tigers":11448,"coli":11449,"impossible":11450,"taiwan":11451,"haired":11452,"pounds":11453,"vol":11454,"awak":11455,"portfolio":11456,"1958":11457,"banners":11458,"attending":11459,"connecting":11460,"criticized":11461,"employment":11462,"aw":11463,"lone":11464,"settlement":11465,"chili":11466,"underneath":11467,"flooding":11468,"routine":11469,"turre":11470,"dow":11471,"hem":11472,"anian":11473,"keto":11474,"shabby":11475,"enhan":11476,"cemetery":11477,"pent":11478,"kayak":11479,"oxy":11480,"edo":11481,"enz":11482,"cube":11483,"granted":11484,"relaxed":11485,"releases":11486,"celebrations":11487,"affairs":11488,"trouble":11489,"ceo":11490,"surprised":11491,"lex":11492,"lando":11493,"vern":11494,"tickets":11495,"nick":11496,"xl":11497,"isher":11498,"install":11499,"davidson":11500,"flash":11501,"maids":11502,"1961":11503,"1918":11504,"writes":11505,"orted":11506,"lumin":11507,"anderson":11508,"rapid":11509,"elevation":11510,"saudi":11511,"travels":11512,"ibility":11513,"specifically":11514,"turb":11515,"slu":11516,"drive":11517,"sadd":11518,"falcon":11519,"chalk":11
520,"danger":11521,"nag":11522,"thered":11523,"ole":11524,"moth":11525,"buried":11526,"renew":11527,"cyclo":11528,"harve":11529,"anium":11530,"mel":11531,"zers":11532,"locks":11533,"ninja":11534,"sidewalk":11535,"updated":11536,"timeless":11537,"danish":11538,"separated":11539,"pointed":11540,"strong":11541,"locomotive":11542,"gm":11543,"appropriate":11544,"showcasing":11545,"host":11546,"14th":11547,"guitarist":11548,"impression":11549,"muffins":11550,"steve":11551,"haircut":11552,"vat":11553,"angered":11554,"wright":11555,"cliffs":11556,"wisdom":11557,"tulle":11558,"23":11559,"adjac":11560,"euro":11561,"spel":11562,"flowering":11563,"jamaica":11564,"pdf":11565,"bart":11566,"bure":11567,"olds":11568,"sas":11569,"lake":11570,"seoul":11571,"oe":11572,"dj":11573,"skiing":11574,"prair":11575,"bart":11576,"feelings":11577,"dame":11578,"highlands":11579,"wizard":11580,"loom":11581,"favor":11582,"challenges":11583,"lang":11584,"overcoat":11585,"vessels":11586,"pudding":11587,"adjacent":11588,"scape":11589,"storms":11590,"screening":11591,"kilometres":11592,"lewis":11593,"yarn":11594,"otherwise":11595,"bells":11596,"commission":11597,"uphol":11598,"sely":11599,"where":11600,"genre":11601,"rehear":11602,"origami":11603,"segment":11604,"caster":11605,"awa":11606,"android":11607,"madison":11608,"chalkboard":11609,"sleeved":11610,"subway":11611,"suffering":11612,"cooper":11613,"1920x":11614,"aspects":11615,"gram":11616,"prece":11617,"guards":11618,"polka":11619,"villages":11620,"saved":11621,"brisbane":11622,"disci":11623,"sibility":11624,"110":11625,"myself":11626,"entit":11627,"trig":11628,"absor":11629,"glory":11630,"jab":11631,"zebra":11632,"absolutely":11633,"tactical":11634,"och":11635,"giants":11636,"engineers":11637,"richmond":11638,"orlando":11639,"mining":11640,"famili":11641,"yd":11642,"genuine":11643,"mattress":11644,"ras":11645,"blu":11646,"symptoms":11647,"vc":11648,"mysterious":11649,"cos":11650,"oli":11651,"adapted":11652,"alpine":11653,"clark":11654,"biography
":11655,"hitting":11656,"concerns":11657,"fries":11658,"edible":11659,"filter":11660,"wasn":11661,"pallet":11662,"1948":11663,"rapidly":11664,"ono":11665,"oat":11666,"trials":11667,"arc":11668,"shaw":11669,"abc":11670,"skate":11671,"tricks":11672,"oph":11673,"manila":11674,"nj":11675,"pg":11676,"legendary":11677,"tribune":11678,"prove":11679,"aware":11680,"enchan":11681,"siding":11682,"boxer":11683,"twins":11684,"dox":11685,"trading":11686,"rejected":11687,"reven":11688,"shallow":11689,"frosting":11690,"invited":11691,"wilderness":11692,"90":11693,"hide":11694,"tah":11695,"wax":11696,"reinde":11697,"accents":11698,"artan":11699,"festivals":11700,"louis":11701,"multic":11702,"assistance":11703,"desire":11704,"credited":11705,"showcase":11706,"logies":11707,"patterned":11708,"recommended":11709,"appliances":11710,"dinosaurs":11711,"niger":11712,"trained":11713,"sophist":11714,"drill":11715,"tis":11716,"knows":11717,"intensity":11718,"communications":11719,"constant":11720,"comparison":11721,"rub":11722,"100":11723,"tane":11724,"electricity":11725,"refr":11726,"margar":11727,"handbag":11728,"encoun":11729,"cis":11730,"conveni":11731,"mete":11732,"hug":11733,"tissue":11734,"calgary":11735,"ady":11736,"sprink":11737,"quilting":11738,"chill":11739,"magazines":11740,"highland":11741,"vendor":11742,"1953":11743,"courses":11744,"amphibi":11745,"apa":11746,"rugs":11747,"snowfla":11748,"dorm":11749,"attire":11750,"briefly":11751,"cheer":11752,"foil":11753,"blooming":11754,"practices":11755,"disneyland":11756,"acles":11757,"charlie":11758,"civic":11759,"dial":11760,"sapph":11761,"revival":11762,"frag":11763,"distur":11764,"turbo":11765,"rv":11766,"rin":11767,"ambas":11768,"hugging":11769,"grin":11770,"pouch":11771,"interchange":11772,"camper":11773,"27":11774,"flip":11775,"hired":11776,"textures":11777,"destruction":11778,"ring":11779,"soft":11780,"fest":11781,"ono":11782,"peru":11783,"stro":11784,"bricks":11785,"ginger":11786,"knots":11787,"substan":11788,"tune":11789,"ilation
":11790,"crescent":11791,"illes":11792,"eleven":11793,"byzantine":11794,"elo":11795,"virtu":11796,"kang":11797,"oasis":11798,"folded":11799,"let":11800,"server":11801,"matt":11802,"archives":11803,"poly":11804,"sporting":11805,"finance":11806,"suspended":11807,"lagoon":11808,"partnership":11809,"mart":11810,"township":11811,"capitol":11812,"luck":11813,"discussion":11814,"vacations":11815,"craw":11816,"metr":11817,"min":11818,"hobbit":11819,"temperatures":11820,"geon":11821,"medies":11822,"oatmeal":11823,"existence":11824,"ano":11825,"lunar":11826,"turn":11827,"matters":11828,"ays":11829,"inate":11830,"cosmo":11831,"mills":11832,"churches":11833,"cabins":11834,"samurai":11835,"pheno":11836,"stuck":11837,"spag":11838,"hetti":11839,"taxi":11840,"traditions":11841,"devices":11842,"telephone":11843,"cas":11844,"info":11845,"seaside":11846,"buses":11847,"schemes":11848,"gingerbread":11849,"alpha":11850,"reindeer":11851,"foliage":11852,"criticism":11853,"raspberry":11854,"camou":11855,"twit":11856,"ark":11857,"broc":11858,"experiment":11859,"monte":11860,"chronicle":11861,"anne":11862,"occurs":11863,"sector":11864,"diverse":11865,"ambassad":11866,"diagno":11867,"camoufl":11868,"350":11869,"morrow":11870,"1946":11871,"chry":11872,"tes":11873,"trick":11874,"ik":11875,"reputation":11876,"polis":11877,"ssex":11878,"submitted":11879,"spaghetti":11880,"sedan":11881,"funds":11882,"merly":11883,"receives":11884,"homeless":11885,"hairstyle":11886,"spot":11887,"manufacturing":11888,"ral":11889,"divine":11890,"rank":11891,"platter":11892,"feminine":11893,"1917":11894,"flats":11895,"wire":11896,"guided":11897,"attor":11898,"naturally":11899,"plantation":11900,"tawa":11901,"myth":11902,"ross":11903,"describe":11904,"somerset":11905,"formerly":11906,"crowds":11907,"insect":11908,"fires":11909,"magnificent":11910,"edges":11911,"appointment":11912,"cooper":11913,"bly":11914,"armour":11915,"dive":11916,"conclusion":11917,"130":11918,"adoptable":11919,"dae":11920,"bahamas":11921,"paleo":11
922,"1954":11923,"loading":11924,"tony":11925,"guinea":11926,"abu":11927,"exhau":11928,"telescope":11929,"adam":11930,"discuss":11931,"ani":11932,"visits":11933,"migh":11934,"novels":11935,"flood":11936,"sak":11937,"lighter":11938,"broccoli":11939,"maur":11940,"photographers":11941,"dynamic":11942,"rim":11943,"entitled":11944,"leeds":11945,"pulling":11946,"yogur":11947,"26":11948,"edmon":11949,"table":11950,"fortress":11951,"westin":11952,"requires":11953,"scor":11954,"recovery":11955,"signage":11956,"bark":11957,"arta":11958,"jar":11959,"vod":11960,"experts":11961,"incorporated":11962,"vitam":11963,"pouring":11964,"fundra":11965,"violin":11966,"suggestions":11967,"beanie":11968,"float":11969,"bula":11970,"duchess":11971,"monogram":11972,"survived":11973,"praise":11974,"yogurt":11975,"hoop":11976,"tering":11977,"charges":11978,"distinct":11979,"thames":11980,"anne":11981,"alexand":11982,"cardigan":11983,"joe":11984,"relative":11985,"firefighters":11986,"queens":11987,"manner":11988,"mson":11989,"excav":11990,"turtles":11991,"attic":11992,"collect":11993,"happened":11994,"efa":11995,"organisms":11996,"oakland":11997,"squares":11998,"lamb":11999,"finest":12000,"pug":12001,"electronics":12002,"beams":12003,"gratu":12004,"innovative":12005,"samples":12006,"shim":12007,"loaf":12008,"imple":12009,"creativity":12010,"temples":12011,"portal":12012,"combine":12013,"wells":12014,"atever":12015,"crystals":12016,"uefa":12017,"whatever":12018,"apest":12019,"anas":12020,"souven":12021,"battles":12022,"kennedy":12023,"jeff":12024,"asingly":12025,"brave":12026,"deliver":12027,"isation":12028,"among":12029,"hypo":12030,"amongst":12031,"partners":12032,"sto":12033,"brew":12034,"improvement":12035,"backs":12036,"neither":12037,"injuries":12038,"gene":12039,"bathtub":12040,"weak":12041,"sharks":12042,"representative":12043,"ritz":12044,"copyright":12045,"scattered":12046,"doctors":12047,"innovation":12048,"polished":12049,"mus":12050,"liked":12051,"biker":12052,"seasonal":12053,"raven"
:12054,"caf":12055,"utility":12056,"tamp":12057,"increasingly":12058,"regularly":12059,"depicted":12060,"stors":12061,"pear":12062,"itions":12063,"rup":12064,"suspe":12065,"1938":12066,"knitted":12067,"11th":12068,"bearded":12069,"grocery":12070,"beautifully":12071,"alley":12072,"bangla":12073,"lac":12074,"notic":12075,"grapes":12076,"puer":12077,"physics":12078,"claus":12079,"pennsylvania":12080,"corridor":12081,"mari":12082,"trainer":12083,"languages":12084,"wellington":12085,"glam":12086,"ombre":12087,"onions":12088,"controls":12089,"boul":12090,"prague":12091,"composer":12092,"assy":12093,"plying":12094,"drag":12095,"dé":12096,"conduct":12097,"wag":12098,"petro":12099,"1937":12100,"panic":12101,"1936":12102,"pea":12103,"optical":12104,"kitty":12105,"kittens":12106,"pled":12107,"skateboard":12108,"travelling":12109,"translation":12110,"ottawa":12111,"wonders":12112,"departure":12113,"outing":12114,"guess":12115,"massage":12116,"freder":12117,"row":12118,"significantly":12119,"skill":12120,"guer":12121,"veteran":12122,"entist":12123,"franklin":12124,"brian":12125,"orthodox":12126,"happen":12127,"alan":12128,"verte":12129,"intense":12130,"swir":12131,"surviving":12132,"crews":12133,"twitter":12134,"morocco":12135,"podium":12136,"1947":12137,"depicts":12138,"stool":12139,"elephants":12140,"revenge":12141,"iest":12142,"completion":12143,"scientist":12144,"controversi":12145,"jim":12146,"criminal":12147,"auck":12148,"gravel":12149,"erson":12150,"basil":12151,"profession":12152,"prairie":12153,"earliest":12154,"finland":12155,"holders":12156,"modular":12157,"bathing":12158,"avo":12159,"nightmare":12160,"yo":12161,"hollow":12162,"transition":12163,"volcano":12164,"emotions":12165,"breath":12166,"inclusive":12167,"panoramic":12168,"eter":12169,"transit":12170,"congratu":12171,"penguins":12172,"auckland":12173,"mim":12174,"gypsy":12175,"tu":12176,"armen":12177,"pancakes":12178,"hungary":12179,"prehen":12180,"ye":12181,"aqua":12182,"selective":12183,"cited":12184,"containe
rs":12185,"tulip":12186,"deleg":12187,"torpedo":12188,"ffles":12189,"yoda":12190,"earl":12191,"propag":12192,"ruled":12193,"baroque":12194,"arrested":12195,"1915":12196,"slate":12197,"magno":12198,"nostal":12199,"ballroom":12200,"peanuts":12201,"assist":12202,"wolves":12203,"brighton":12204,"monastery":12205,"detective":12206,"cater":12207,"labour":12208,"carey":12209,"sweets":12210,"losses":12211,"skil":12212,"veterans":12213,"spell":12214,"derby":12215,"concerned":12216,"cornwall":12217,"mals":12218,"straps":12219,"ingredient":12220,"industries":12221,"petersburg":12222,"driveway":12223,"moss":12224,"ffield":12225,"shelving":12226,"yester":12227,"cameras":12228,"ee":12229,"ohi":12230,"quebec":12231,"dives":12232,"1910":12233,"accessible":12234,"mally":12235,"xmas":12236,"adaptation":12237,"geometry":12238,"bend":12239,"oxygen":12240,"sophy":12241,"editing":12242,"vent":12243,"crocodile":12244,"lowest":12245,"numb":12246,"talent":12247,"publishing":12248,"wer":12249,"purse":12250,"moda":12251,"jig":12252,"rounded":12253,"rust":12254,"teens":12255,"faster":12256,"stranger":12257,"burst":12258,"greens":12259,"norfolk":12260,"joy":12261,"hex":12262,"iner":12263,"ssil":12264,"erup":12265,"alarm":12266,"charle":12267,"courts":12268,"spinach":12269,"bn":12270,"metic":12271,"handles":12272,"clearly":12273,"brea":12274,"esco":12275,"eled":12276,"vienna":12277,"dresser":12278,"missile":12279,"icide":12280,"fossil":12281,"ctives":12282,"rajas":12283,"morgan":12284,"planted":12285,"calf":12286,"origins":12287,"vard":12288,"explain":12289,"preserve":12290,"attractions":12291,"mustard":12292,"isers":12293,"hali":12294,"apped":12295,"neighb":12296,"efficient":12297,"shoulders":12298,"disco":12299,"antiques":12300,"enterprise":12301,"flamingo":12302,"oro":12303,"bug":12304,"bonus":12305,"rescu":12306,"prepares":12307,"soda":12308,"smash":12309,"storey":12310,"indicated":12311,"opportunities":12312,"occasions":12313,"rectangle":12314,"cooler":12315,"ukra":12316,"pay":12317,"grammy
":12318,"matically":12319,"portugu":12320,"thumbnail":12321,"philosophy":12322,"sequences":12323,"flows":12324,"computers":12325,"paper":12326,"hungarian":12327,"steep":12328,"bonsai":12329,"tomorrow":12330,"picking":12331,"ported":12332,"jedi":12333,"caa":12334,"blackboard":12335,"accept":12336,"cyberpunk":12337,"portions":12338,"gib":12339,"ich":12340,"collective":12341,"childrens":12342,"pioneer":12343,"ecli":12344,"iff":12345,"destroy":12346,"aren":12347,"indians":12348,"torto":12349,"advertisement":12350,"ymouth":12351,"anic":12352,"roast":12353,"hung":12354,"rangers":12355,"divisions":12356,"representatives":12357,"stayed":12358,"hate":12359,"atly":12360,"mighty":12361,"chihu":12362,"external":12363,"nepal":12364,"siege":12365,"holland":12366,"siber":12367,"adams":12368,"elder":12369,"fax":12370,"portuguese":12371,"scratch":12372,"nati":12373,"circum":12374,"symphony":12375,"orb":12376,"sco":12377,"normally":12378,"barbe":12379,"textiles":12380,"thor":12381,"moose":12382,"pedestrian":12383,"eline":12384,"polyg":12385,"puerto":12386,"mock":12387,"compl":12388,"panama":12389,"accompl":12390,"cadil":12391,"aca":12392,"cucu":12393,"suitcase":12394,"achment":12395,"quilts":12396,"eli":12397,"complic":12398,"preview":12399,"hiding":12400,"1916":12401,"topic":12402,"belgian":12403,"phenomen":12404,"syrup":12405,"indigen":12406,"elizabeth":12407,"dwarf":12408,"cry":12409,"charter":12410,"distinctive":12411,"1200":12412,"graphical":12413,"pating":12414,"scores":12415,"posse":12416,"pilgri":12417,"taught":12418,"establish":12419,"ukraine":12420,"dove":12421,"watercolour":12422,"coon":12423,"tux":12424,"decline":12425,"00x":12426,"provincial":12427,"gent":12428,"rica":12429,"abun":12430,"casa":12431,"mammals":12432,"ig":12433,"tz":12434,"cue":12435,"preserved":12436,"embr":12437,"thri":12438,"uk":12439,"enters":12440,"1952":12441,"aero":12442,"lugg":12443,"stics":12444,"coffin":12445,"purposes":12446,"carav":12447,"tulips":12448,"cultiv":12449,"garbage":12450,"indigenous
":12451,"centers":12452,"esis":12453,"geographic":12454,"reader":12455,"washed":12456,"attitude":12457,"responsibility":12458,"deaths":12459,"imagine":12460,"renovated":12461,"milky":12462,"aber":12463,"campaig":12464,"ambi":12465,"bah":12466,"bruce":12467,"camouflage":12468,"ramp":12469,"wrapping":12470,"luggage":12471,"enem":12472,"threatened":12473,"trun":12474,"urs":12475,"commonwealth":12476,"ninth":12477,"sushi":12478,"partially":12479,"radar":12480,"westminster":12481,"lizard":12482,"deputy":12483,"willow":12484,"ramad":12485,"stain":12486,"fitting":12487,"gadgets":12488,"couldn":12489,"daisy":12490,"classification":12491,"lum":12492,"locked":12493,"remedies":12494,"endangered":12495,"tarot":12496,"ody":12497,"whisky":12498,"integrated":12499,"sapphire":12500,"greetings":12501,"juras":12502,"commons":12503,"jurassic":12504,"ieties":12505,"creations":12506,"miner":12507,"jay":12508,"evalu":12509,"sist":12510,"theastern":12511,"koi":12512,"coupe":12513,"deployed":12514,"ni":12515,"movements":12516,"responded":12517,"worthy":12518,"puzz":12519,"az":12520,"ducks":12521,"planner":12522,"searching":12523,"vodka":12524,"transmis":12525,"mack":12526,"75":12527,"unt":12528,"shrubs":12529,"casual":12530,"yesterday":12531,"hagen":12532,"poet":12533,"archy":12534,"hung":12535,"ylon":12536,"jon":12537,"authors":12538,"regency":12539,"param":12540,"mechanic":12541,"sheffield":12542,"rex":12543,"nd":12544,"thunder":12545,"vast":12546,"kerala":12547,"bengal":12548,"allies":12549,"tox":12550,"cé":12551,"dai":12552,"taco":12553,"classified":12554,"ohio":12555,"studied":12556,"continuing":12557,"hungry":12558,"cluster":12559,"boost":12560,"cincin":12561,"munich":12562,"georgia":12563,"fisher":12564,"helmets":12565,"zipper":12566,"arabia":12567,"exciting":12568,"casualties":12569,"catering":12570,"chihua":12571,"nelson":12572,"tonight":12573,"aler":12574,"sarees":12575,"grave":12576,"demonstration":12577,"kata":12578,"forbi":12579,"benjam":12580,"teenager":12581,"cigarette":1258
2,"gem":12583,"seeking":12584,"ratings":12585,"funding":12586,"sophisticated":12587,"pi":12588,"horns":12589,"obvi":12590,"processing":12591,"prest":12592,"lilac":12593,"substitu":12594,"conservative":12595,"pia":12596,"minimum":12597,"magnolia":12598,"squash":12599,"mess":12600,"camo":12601,"revolutionary":12602,"newcastle":12603,"elegance":12604,"hippie":12605,"galleries":12606,"marriott":12607,"160":12608,"coca":12609,"maz":12610,"certific":12611,"dolphins":12612,"gordon":12613,"speed":12614,"bella":12615,"checking":12616,"enemies":12617,"apers":12618,"peppers":12619,"communist":12620,"tampa":12621,"orium":12622,"hydr":12623,"pins":12624,"onym":12625,"fashioned":12626,"linked":12627,"cincinnati":12628,"builders":12629,"repairs":12630,"whil":12631,"dog":12632,"trinity":12633,"dunes":12634,"arrows":12635,"zz":12636,"francis":12637,"uploaded":12638,"sequel":12639,"clever":12640,"ellation":12641,"city":12642,"quant":12643,"pencils":12644,"involving":12645,"layed":12646,"convo":12647,"placement":12648,"1949":12649,"whilst":12650,"bach":12651,"divor":12652,"resource":12653,"refriger":12654,"trap":12655,"capsu":12656,"derman":12657,"marit":12658,"equivalent":12659,"explains":12660,"describing":12661,"pigs":12662,"thermal":12663,"pipes":12664,"struggle":12665,"atus":12666,"elections":12667,"fortune":12668,"landed":12669,"seriously":12670,"topics":12671,"reproduction":12672,"cambodia":12673,"kl":12674,"modern":12675,"envelope":12676,"corve":12677,"battleship":12678,"inning":12679,"bearing":12680,"cubs":12681,"automotive":12682,"boulev":12683,"intercep":12684,"boulevard":12685,"paras":12686,"gossi":12687,"teries":12688,"lanterns":12689,"stencil":12690,"panther":12691,"whist":12692,"raf":12693,"1919":12694,"agents":12695,"welding":12696,"hr":12697,"lob":12698,"transformed":12699,"1951":12700,"arabian":12701,"76":12702,"ros":12703,"encies":12704,"lanes":12705,"pigeon":12706,"stall":12707,"turing":12708,"grams":12709,"desc":12710,"obtained":12711,"nebula":12712,"22":12713,"sp
eakers":12714,"organizations":12715,"cracker":12716,"benjamin":12717,"mythical":12718,"transform":12719,"cows":12720,"grows":12721,"span":12722,"researchers":12723,"intensi":12724,"tooth":12725,"immun":12726,"confidence":12727,"found":12728,"kick":12729,"rob":12730,"bank":12731,"penalty":12732,"jars":12733,"astronomy":12734,"neo":12735,"comments":12736,"axis":12737,"venture":12738,"ripe":12739,"1929":12740,"rocking":12741,"rowing":12742,"dense":12743,"directions":12744,"majestic":12745,"nell":12746,"tamil":12747,"walkthrough":12748,"brides":12749,"attorney":12750,"typho":12751,"stomach":12752,"circulation":12753,"unesco":12754,"howard":12755,"destroyer":12756,"50th":12757,"unity":12758,"sustained":12759,"ava":12760,"hero":12761,"1935":12762,"mck":12763,"acknow":12764,"parish":12765,"physi":12766,"triumph":12767,"gymnast":12768,"familiar":12769,"pated":12770,"mod":12771,"hart":12772,"readers":12773,"versy":12774,"controversy":12775,"gradually":12776,"persu":12777,"aux":12778,"genesis":12779,"merchand":12780,"atti":12781,"warming":12782,"tably":12783,"efs":12784,"hottest":12785,"wii":12786,"thigh":12787,"doodles":12788,"disaster":12789,"tesla":12790,"commu":12791,"andrew":12792,"herb":12793,"cans":12794,"infant":12795,"ionally":12796,"inty":12797,"trailers":12798,"dat":12799,"skillet":12800,"constellation":12801,"thermo":12802,"dh":12803,"marking":12804,"resin":12805,"nsw":12806,"ornate":12807,"edmonton":12808,"thick":12809,"runners":12810,"kuala":12811,"illness":12812,"narrative":12813,"intimate":12814,"piter":12815,"settled":12816,"lished":12817,"decide":12818,"necklaces":12819,"vous":12820,"indian":12821,"comprehen":12822,"twood":12823,"bility":12824,"pler":12825,"tron":12826,"afghan":12827,"seek":12828,"carrots":12829,"strips":12830,"pilots":12831,"quis":12832,"bangladesh":12833,"entertaining":12834,"eyebro":12835,"afghanistan":12836,"1940s":12837,"45":12838,"anth":12839,"semin":12840,"whipped":12841,"renamed":12842,"trout":12843,"ambassador":12844,"holm":12845,"m
urray":12846,"vre":12847,"committed":12848,"forti":12849,"sufficient":12850,"racks":12851,"collecting":12852,"spur":12853,"oscar":12854,"emphas":12855,"requirements":12856,"pleasant":12857,"representation":12858,"col":12859,"prisoners":12860,"48":12861,"mosqu":12862,"bumper":12863,"hara":12864,"motors":12865,"duo":12866,"depending":12867,"zel":12868,"jewel":12869,"bases":12870,"fate":12871,"rav":12872,"compound":12873,"hou":12874,"veil":12875,"half":12876,"aker":12877,"etary":12878,"nerd":12879,"affect":12880,"eo":12881,"banquet":12882,"bare":12883,"peas":12884,"hobb":12885,"handed":12886,"approached":12887,"moral":12888,"federation":12889,"exploration":12890,"dozen":12891,"sheer":12892,"applications":12893,"film":12894,"adela":12895,"cooling":12896,"reservo":12897,"specul":12898,"remodeling":12899,"maya":12900,"simon":12901,"lays":12902,"angler":12903,"declined":12904,"chihuahua":12905,"sided":12906,"versat":12907,"greenery":12908,"settings":12909,"krish":12910,"remark":12911,"stops":12912,"comb":12913,"door":12914,"drome":12915,"indicate":12916,"onymous":12917,"sag":12918,"paro":12919,"xon":12920,"bid":12921,"finale":12922,"fordshire":12923,"cal":12924,"f1":12925,"fringe":12926,"pops":12927,"norman":12928,"tton":12929,"essentials":12930,"pumps":12931,"maze":12932,"flavors":12933,"adelaide":12934,"fails":12935,"monkeys":12936,"bulle":12937,"ncaa":12938,"beats":12939,"strikes":12940,"36":12941,"brack":12942,"1500":12943,"choices":12944,"rusty":12945,"jupiter":12946,"morris":12947,"plank":12948,"upgrade":12949,"ela":12950,"rober":12951,"medals":12952,"board":12953,"begun":12954,"parachu":12955,"spectrum":12956,"orbit":12957,"storyline":12958,"noble":12959,"planter":12960,"props":12961,"believes":12962,"rig":12963,"softball":12964,"malta":12965,"estimate":12966,"donated":12967,"bulls":12968,"sussex":12969,"cairo":12970,"distinguished":12971,"noise":12972,"cabbage":12973,"populations":12974,"operate":12975,"firefighter":12976,"paths":12977,"pressing":12978,"assembled":
12979,"pushing":12980,"sparkling":12981,"apparently":12982,"blossoms":12983,"sni":12984,"anx":12985,"glamorous":12986,"gossip":12987,"joke":12988,"apron":12989,"expect":12990,"kor":12991,"metre":12992,"precious":12993,"bred":12994,"spy":12995,"pepper":12996,"caves":12997,"fast":12998,"witne":12999,"geography":13000,"academia":13001,"motif":13002,"hp":13003,"profit":13004,"grapher":13005,"millenni":13006,"chile":13007,"antici":13008,"priest":13009,"highlight":13010,"senator":13011,"encing":13012,"sandwic":13013,"hooded":13014,"snoopy":13015,"portrayed":13016,"chat":13017,"interse":13018,"vivi":13019,"4x4":13020,"saga":13021,"ail":13022,"wart":13023,"nevertheless":13024,"reclaimed":13025,"cl":13026,"tallest":13027,"harmony":13028,"valuable":13029,"kon":13030,"sciences":13031,"expo":13032,"pumpkins":13033,"aul":13034,"accurate":13035,"braided":13036,"germans":13037,"craftsman":13038,"ashes":13039,"amid":13040,"walter":13041,"ann":13042,"eu":13043,"parka":13044,"athletics":13045,"oklahoman":13046,"sour":13047,"tric":13048,"dusk":13049,"surrey":13050,"transmission":13051,"duties":13052,"enham":13053,"retained":13054,"robin":13055,"staying":13056,"oi":13057,"unveiled":13058,"iris":13059,"nett":13060,"ula":13061,"drugs":13062,"indianapolis":13063,"hobby":13064,"journalist":13065,"iop":13066,"thly":13067,"biology":13068,"establishment":13069,"lig":13070,"abu":13071,"recycling":13072,"genetic":13073,"pike":13074,"certain":13075,"fed":13076,"reflect":13077,"bach":13078,"rants":13079,"filip":13080,"derived":13081,"wellness":13082,"tuxedo":13083,"lasted":13084,"elabor":13085,"phis":13086,"waukee":13087,"aside":13088,"permission":13089,"hoe":13090,"sq":13091,"trump":13092,"audio":13093,"4k":13094,"iro":13095,"atlas":13096,"conventional":13097,"possibility":13098,"gourmet":13099,"sensit":13100,"plum":13101,"sprou":13102,"assu":13103,"1932":13104,"snakes":13105,"plains":13106,"lon":13107,"copen":13108,"seemed":13109,"seg":13110,"warfare":13111,"attributed":13112,"180":13113,"peace
ful":13114,"newport":13115,"effectively":13116,"wireless":13117,"tribes":13118,"concern":13119,"similarly":13120,"vases":13121,"wires":13122,"downs":13123,"needle":13124,"philosoph":13125,"lef":13126,"trainers":13127,"pill":13128,"strength":13129,"embrac":13130,"amar":13131,"predator":13132,"crisp":13133,"eats":13134,"tals":13135,"placing":13136,"whel":13137,"votes":13138,"bundle":13139,"kentu":13140,"additionally":13141,"ethiop":13142,"ank":13143,"stor":13144,"herald":13145,"romania":13146,"throws":13147,"brew":13148,"rc":13149,"cosm":13150,"max":13151,"gotten":13152,"oy":13153,"rover":13154,"presenting":13155,"vas":13156,"amend":13157,"fox":13158,"shake":13159,"wander":13160,"topper":13161,"electro":13162,"carefully":13163,"ratio":13164,"whim":13165,"nine":13166,"printable":13167,"reaches":13168,"rooster":13169,"mpy":13170,"moderate":13171,"mbling":13172,"po":13173,"radio":13174,"claiming":13175,"yman":13176,"puff":13177,"devon":13178,"lasting":13179,"martin":13180,"clients":13181,"fifty":13182,"notre":13183,"donkey":13184,"moroccan":13185,"voyage":13186,"ros":13187,"gravity":13188,"budapest":13189,"gum":13190,"dness":13191,"parker":13192,"lty":13193,"distributed":13194,"bli":13195,"loads":13196,"birch":13197,"lumpur":13198,"screens":13199,"residences":13200,"copenhagen":13201,"seren":13202,"corvette":13203,"walked":13204,"choir":13205,"shut":13206,"metrical":13207,"whales":13208,"mn":13209,"monarch":13210,"competing":13211,"deadly":13212,"pace":13213,"foss":13214,"median":13215,"stretched":13216,"sored":13217,"bulgar":13218,"doubt":13219,"harvard":13220,"agas":13221,"incredibles":13222,"jumpsuit":13223,"mandal":13224,"rap":13225,"jean":13226,"georgian":13227,"sels":13228,"exact":13229,"bugatti":13230,"inkle":13231,"catcher":13232,"iden":13233,"fridge":13234,"1933":13235,"austrian":13236,"oncé":13237,"manuscript":13238,"resume":13239,"cushions":13240,"bud":13241,"titude":13242,"mation":13243,"erra":13244,"adju":13245,"ceil":13246,"poppy":13247,"limits":13248,"real
ism":13249,"orchard":13250,"kicks":13251,"cork":13252,"remn":13253,"sie":13254,"peaks":13255,"live":13256,"dil":13257,"ea":13258,"discipl":13259,"wu":13260,"liver":13261,"pastry":13262,"cruisers":13263,"approaching":13264,"healthcare":13265,"memphis":13266,"bibl":13267,"index":13268,"hel":13269,"strategic":13270,"prehistoric":13271,"sponge":13272,"chorus":13273,"exceptional":13274,"interface":13275,"removing":13276,"bespoke":13277,"bows":13278,"fabrics":13279,"controversial":13280,"anthro":13281,"paints":13282,"type":13283,"organisation":13284,"phic":13285,"stur":13286,"monthly":13287,"density":13288,"bulletin":13289,"upr":13290,"ensu":13291,"apocalypse":13292,"rabb":13293,"accounts":13294,"promised":13295,"inn":13296,"leon":13297,"madagas":13298,"stle":13299,"overview":13300,"plug":13301,"introducing":13302,"ebo":13303,"ecle":13304,"gymnastics":13305,"biblical":13306,"idi":13307,"kite":13308,"kara":13309,"palms":13310,"lettu":13311,"loan":13312,"imeter":13313,"jigsaw":13314,"attracted":13315,"imagination":13316,"barre":13317,"saddle":13318,"recovered":13319,"sional":13320,"displaying":13321,"special":13322,"transformation":13323,"sox":13324,"typhoon":13325,"surround":13326,"crops":13327,"technologies":13328,"aftermath":13329,"beyoncé":13330,"forcement":13331,"pei":13332,"aller":13333,"specialist":13334,"fairy":13335,"affili":13336,"turer":13337,"bead":13338,"foster":13339,"workplace":13340,"hag":13341,"year":13342,"celer":13343,"merchant":13344,"array":13345,"milwaukee":13346,"tom":13347,"stir":13348,"devo":13349,"loud":13350,"cor":13351,"backpacks":13352,"stel":13353,"principles":13354,"boundary":13355,"uncle":13356,"concerts":13357,"take":13358,"slopes":13359,"guild":13360,"resistant":13361,"gig":13362,"inje":13363,"spra":13364,"geneva":13365,"indies":13366,"sleek":13367,"shoots":13368,"smi":13369,"tasting":13370,"sequin":13371,"afterwards":13372,"tears":13373,"thomp":13374,"flew":13375,"filipino":13376,"praying":13377,"attacking":13378,"upgraded":13379,"forth":1
3380,"yellow":13381,"manufacturer":13382,"krishna":13383,"lah":13384,"glue":13385,"effici":13386,"homecoming":13387,"bureau":13388,"rival":13389,"vista":13390,"prop":13391,"prospe":13392,"cosmetic":13393,"liberal":13394,"ater":13395,"sher":13396,"diaries":13397,"spare":13398,"pvc":13399,"ais":13400,"considering":13401,"fascin":13402,"hay":13403,"wad":13404,"himal":13405,"honour":13406,"iraq":13407,"ishi":13408,"forearm":13409,"distant":13410,"honeymoon":13411,"triangles":13412,"widespread":13413,"bolt":13414,"1931":13415,"occasionally":13416,"crun":13417,"russe":13418,"cannon":13419,"handwritten":13420,"tubes":13421,"ornamental":13422,"succeeded":13423,"lettuce":13424,"bian":13425,"anywhere":13426,"nascar":13427,"manufactured":13428,"boom":13429,"cil":13430,"assess":13431,"dread":13432,"ake":13433,"females":13434,"more":13435,"cowboys":13436,"funeral":13437,"ico":13438,"refre":13439,"suggest":13440,"iana":13441,"sesame":13442,"plymouth":13443,"silence":13444,"glaze":13445,"timber":13446,"1080":13447,"robinson":13448,"inder":13449,"kentucky":13450,"chers":13451,"lazy":13452,"watering":13453,"defender":13454,"ipur":13455,"deo":13456,"1928":13457,"emerged":13458,"pered":13459,"dominated":13460,"bent":13461,"acceler":13462,"experimental":13463,"spiderman":13464,"waits":13465,"robots":13466,"mortal":13467,"flick":13468,"preferred":13469,"bu":13470,"charleston":13471,"sub":13472,"particles":13473,"myan":13474,"clutch":13475,"classics":13476,"cester":13477,"mats":13478,"setts":13479,"scroll":13480,"semi":13481,"1912":13482,"coverage":13483,"mongo":13484,"certainly":13485,"odd":13486,"hangs":13487,"millennium":13488,"admitted":13489,"pushed":13490,"thompson":13491,"batting":13492,"infra":13493,"subtle":13494,"his":13495,"psyche":13496,"kolkata":13497,"skulls":13498,"immediate":13499,"aggressive":13500,"unless":13501,"forbidden":13502,"rapper":13503,"flare":13504,"obl":13505,"suicide":13506,"merged":13507,"pis":13508,"accommodation":13509,"summary":13510,"hart":13511,"organi
sed":13512,"sloth":13513,"tackle":13514,"minneapolis":13515,"myanmar":13516,"tall":13517,"administrative":13518,"seash":13519,"requested":13520,"companion":13521,"race":13522,"commerce":13523,"inery":13524,"somewhat":13525,"admir":13526,"resso":13527,"weaving":13528,"gee":13529,"happens":13530,"beverage":13531,"aim":13532,"gus":13533,"puzzles":13534,"furious":13535,"massachu":13536,"ried":13537,"adobe":13538,"fixtures":13539,"ruler":13540,"shakespe":13541,"kyo":13542,"messenger":13543,"sings":13544,"bites":13545,"khan":13546,"fender":13547,"byr":13548,"accused":13549,"bern":13550,"hardy":13551,"reportedly":13552,"drummer":13553,"vary":13554,"asks":13555,"ceilings":13556,"eli":13557,"omega":13558,"oni":13559,"ww":13560,"reptiles":13561,"curious":13562,"registered":13563,"leh":13564,"recalled":13565,"confetti":13566,"massachusetts":13567,"nordic":13568,"harris":13569,"rivi":13570,"nets":13571,"torn":13572,"sectional":13573,"weakened":13574,"absolute":13575,"kid":13576,"madagascar":13577,"bav":13578,"kaz":13579,"mbles":13580,"convic":13581,"citrus":13582,"wy":13583,"disabled":13584,"defend":13585,"paste":13586,"corners":13587,"secured":13588,"fies":13589,"toler":13590,"gno":13591,"basics":13592,"ato":13593,"glazed":13594,"reform":13595,"formations":13596,"dak":13597,"heating":13598,"nomination":13599,"asser":13600,"brewery":13601,"ense":13602,"graham":13603,"herd":13604,"difficulty":13605,"surfer":13606,"influential":13607,"oct":13608,"rolex":13609,"persons":13610,"dissi":13611,"floyd":13612,"planters":13613,"taway":13614,"garland":13615,"fract":13616,"operational":13617,"dt":13618,"beaten":13619,"1927":13620,"aston":13621,"stair":13622,"partial":13623,"barber":13624,"velt":13625,"remainder":13626,"napole":13627,"else":13628,"sciss":13629,"maritime":13630,"cob":13631,"worm":13632,"charger":13633,"carlton":13634,"37":13635,"coated":13636,"diabe":13637,"mineral":13638,"mids":13639,"vineyard":13640,"airbus":13641,"impressed":13642,"elevated":13643,"hampshire":13644,"autis
m":13645,"charging":13646,"nbc":13647,"sud":13648,"chin":13649,"pling":13650,"stone":13651,"uals":13652,"wight":13653,"pest":13654,"connections":13655,"corona":13656,"hu":13657,"inois":13658,"illegal":13659,"daniel":13660,"crispy":13661,"swal":13662,"roose":13663,"911":13664,"wim":13665,"beetle":13666,"overnight":13667,"structural":13668,"nucle":13669,"draws":13670,"victim":13671,"delivers":13672,"replacing":13673,"philippine":13674,"percent":13675,"considerable":13676,"inal":13677,"ska":13678,"lawyer":13679,"eclectic":13680,"starbucks":13681,"limestone":13682,"arn":13683,"prototype":13684,"fig":13685,"wreck":13686,"cultures":13687,"lawrence":13688,"encouraged":13689,"ceram":13690,"highlighted":13691,"riverside":13692,"chat":13693,"pods":13694,"ua":13695,"pleasure":13696,"interstate":13697,"wali":13698,"starred":13699,"thy":13700,"dad":13701,"everywhere":13702,"cranberry":13703,"isan":13704,"sparkle":13705,"extend":13706,"component":13707,"eclipse":13708,"bast":13709,"marshall":13710,"slides":13711,"simpson":13712,"1934":13713,"expect":13714,"burns":13715,"moto":13716,"dora":13717,"urers":13718,"bd":13719,"fo":13720,"playoff":13721,"enamel":13722,"picks":13723,"havana":13724,"strongly":13725,"jun":13726,"flooded":13727,"suzu":13728,"nan":13729,"poker":13730,"entre":13731,"funni":13732,"confron":13733,"aroo":13734,"aspect":13735,"eria":13736,"slo":13737,"ido":13738,"even":13739,"aces":13740,"refers":13741,"shore":13742,"supports":13743,"sensory":13744,"advertisements":13745,"1913":13746,"magni":13747,"sew":13748,"logic":13749,"rhythm":13750,"diner":13751,"roosevelt":13752,"panorama":13753,"brake":13754,"gatsby":13755,"gay":13756,"lle":13757,"pois":13758,"nai":13759,"cuban":13760,"spoo":13761,"cockpit":13762,"grandfather":13763,"canter":13764,"cereal":13765,"counting":13766,"territories":13767,"terrac":13768,"appeti":13769,"dimensional":13770,"programming":13771,"bacteria":13772,"labeled":13773,"mickey":13774,"sandwiches":13775,"crush":13776,"ssey":13777,"glor":13778,
"top":13779,"solve":13780,"stir":13781,"delic":13782,"veness":13783,"mccar":13784,"venetian":13785,"buckle":13786,"cch":13787,"goats":13788,"scious":13789,"esta":13790,"cityscape":13791,"audiobook":13792,"michel":13793,"cardiff":13794,"noodle":13795,"summer":13796,"caravan":13797,"thrown":13798,"ev":13799,"destiny":13800,"ony":13801,"plaque":13802,"scales":13803,"rarely":13804,"saturn":13805,"ffer":13806,"citizen":13807,"canoe":13808,"supposed":13809,"hooks":13810,"psychedelic":13811,"primit":13812,"boro":13813,"rajasthan":13814,"lings":13815,"chopped":13816,"traveler":13817,"confused":13818,"complim":13819,"increases":13820,"approval":13821,"rangoli":13822,"versus":13823,"fin":13824,"fever":13825,"firing":13826,"reservoir":13827,"doct":13828,"terminus":13829,"puma":13830,"rot":13831,"coaches":13832,"39":13833,"historians":13834,"ansas":13835,"elsewhere":13836,"slot":13837,"rebe":13838,"periods":13839,"denied":13840,"ually":13841,"gears":13842,"continent":13843,"beginner":13844,"das":13845,"wicker":13846,"sending":13847,"25th":13848,"1926":13849,"trio":13850,"ark":13851,"rounds":13852,"auditorium":13853,"stronger":13854,"till":13855,"descen":13856,"ramadan":13857,"homo":13858,"airborne":13859,"rods":13860,"dod":13861,"delayed":13862,"nut":13863,"directors":13864,"pyramids":13865,"patent":13866,"estates":13867,"westward":13868,"tex":13869,"lockdown":13870,"noting":13871,"vu":13872,"lords":13873,"emmy":13874,"carter":13875,"abroad":13876,"ota":13877,"overwhel":13878,"melting":13879,"motherboard":13880,"ange":13881,"agers":13882,"glove":13883,"mum":13884,"greatly":13885,"inher":13886,"composite":13887,"dont":13888,"sheriff":13889,"candidates":13890,"altered":13891,"repeated":13892,"caption":13893,"monuments":13894,"aliens":13895,"earring":13896,"epo":13897,"surreal":13898,"attempting":13899,"wonderland":13900,"marshmal":13901,"cca":13902,"alam":13903,"juven":13904,"draw":13905,"maryland":13906,"gazebo":13907,"espresso":13908,"starter":13909,"russell":13910,"abs":13911,
"extraordinary":13912,"gateway":13913,"context":13914,"backed":13915,"victims":13916,"fry":13917,"completing":13918,"technic":13919,"extinct":13920,"47":13921,"dell":13922,"properly":13923,"evergreen":13924,"patriotic":13925,"sierra":13926,"equation":13927,"eden":13928,"flexible":13929,"cab":13930,"interpretation":13931,"hammock":13932,"combo":13933,"mite":13934,"condition":13935,"dley":13936,"gic":13937,"dency":13938,"influences":13939,"pandora":13940,"islam":13941,"measured":13942,"centerpiece":13943,"customized":13944,"wrought":13945,"diwali":13946,"lisbon":13947,"pu":13948,"coa":13949,"group":13950,"dialogue":13951,"ranking":13952,"belief":13953,"125":13954,"grape":13955,"ampton":13956,"interests":13957,"scrap":13958,"cleaner":13959,"brussels":13960,"aha":13961,"yn":13962,"micro":13963,"faction":13964,"polar":13965,"forcing":13966,"allion":13967,"ilit":13968,"ute":13969,"norse":13970,"approaches":13971,"landfall":13972,"twisted":13973,"chaos":13974,"playoffs":13975,"sega":13976,"ag":13977,"thie":13978,"strings":13979,"persi":13980,"faculty":13981,"vegetation":13982,"scal":13983,"souls":13984,"myth":13985,"yours":13986,"sant":13987,"pist":13988,"strategies":13989,"oxide":13990,"ignment":13991,"archaeological":13992,"1911":13993,"taj":13994,"coaching":13995,"asph":13996,"flagship":13997,"140":13998,"willing":13999,"amen":14000,"dover":14001,"serge":14002,"mulder":14003,"dominant":14004,"visitor":14005,"traveled":14006,"lady":14007,"dragon":14008,"quad":14009,"canterbury":14010,"lean":14011,"ography":14012,"tractors":14013,"screenshots":14014,"smell":14015,"bbing":14016,"frequency":14017,"barbecue":14018,"chand":14019,"receiver":14020,"nik":14021,"cub":14022,"quin":14023,"clash":14024,"flock":14025,"absence":14026,"wavy":14027,"inver":14028,"acknowle":14029,"glen":14030,"marsh":14031,"fairytale":14032,"forty":14033,"jane":14034,"breeze":14035,"collectible":14036,"gem":14037,"decks":14038,"fielder":14039,"mainland":14040,"attract":14041,"license":14042,"carp":14043,
"sei":14044,"llis":14045,"achi":14046,"certificate":14047,"fier":14048,"menswear":14049,"sponsored":14050,"regre":14051,"bot":14052,"ruffle":14053,"christian":14054,"competitive":14055,"minerals":14056,"subjects":14057,"sibil":14058,"experiments":14059,"kimono":14060,"quirky":14061,"time":14062,"ceramics":14063,"sler":14064,"enh":14065,"persona":14066,"confident":14067,"poles":14068,"1925":14069,"cauli":14070,"frontier":14071,"achievement":14072,"hopes":14073,"pot":14074,"enchanted":14075,"nick":14076,"assumed":14077,"feast":14078,"favors":14079,"websites":14080,"stewart":14081,"visual":14082,"operator":14083,"diagrams":14084,"plaster":14085,"printer":14086,"caterpillar":14087,"infrastructure":14088,"cones":14089,"riviera":14090,"jewels":14091,"thi":14092,"vines":14093,"dump":14094,"executed":14095,"rebell":14096,"endless":14097,"pens":14098,"smalle":14099,"slave":14100,"expand":14101,"mind":14102,"alcoholic":14103,"meetings":14104,"fist":14105,"theore":14106,"engage":14107,"springfield":14108,"refreshing":14109,"tart":14110,"quiz":14111,"armchair":14112,"funniest":14113,"mantle":14114,"billy":14115,"logists":14116,"sibilities":14117,"funky":14118,"esy":14119,"chunky":14120,"isles":14121,"intric":14122,"protag":14123,"sailed":14124,"genius":14125,"innoc":14126,"fundam":14127,"reporter":14128,"indeed":14129,"mni":14130,"devi":14131,"vag":14132,"witcher":14133,"petals":14134,"grey":14135,"unfortun":14136,"stolen":14137,"matrix":14138,"partly":14139,"siberian":14140,"bulk":14141,"rebellion":14142,"smallest":14143,"ux":14144,"upside":14145,"schu":14146,"newspapers":14147,"detached":14148,"ée":14149,"ambul":14150,"ecology":14151,"municipal":14152,"agara":14153,"stump":14154,"propel":14155,"fights":14156,"erina":14157,"basil":14158,"discussed":14159,"wickets":14160,"kangaroo":14161,"roe":14162,"burberry":14163,"consistent":14164,"frig":14165,"qualified":14166,"savan":14167,"diver":14168,"sponge":14169,"hammer":14170,"napkin":14171,"chronicles":14172,"edited":14173,"seller
":14174,"patriots":14175,"prede":14176,"overseas":14177,"cables":14178,"coaster":14179,"1921":14180,"oaks":14181,"bombar":14182,"infection":14183,"roger":14184,"connects":14185,"bye":14186,"cauliflower":14187,"dow":14188,"municipal":14189,"indicates":14190,"jon":14191,"cosmic":14192,"regim":14193,"numbered":14194,"woul":14195,"alex":14196,"toddlers":14197,"parachute":14198,"inage":14199,"juli":14200,"predecess":14201,"reta":14202,"asphalt":14203,"sport":14204,"ralph":14205,"philip":14206,"output":14207,"itive":14208,"atoms":14209,"removable":14210,"xy":14211,"apper":14212,"observation":14213,"oceans":14214,"enga":14215,"inflat":14216,"stical":14217,"steam":14218,"colleagues":14219,"barbar":14220,"grandmother":14221,"commerci":14222,"block":14223,"locals":14224,"enhance":14225,"reconstruction":14226,"cuff":14227,"backing":14228,"clinton":14229,"croatian":14230,"spongebob":14231,"wouldn":14232,"afraid":14233,"miniatures":14234,"reflecting":14235,"oslo":14236,"suburban":14237,"zhou":14238,"ranges":14239,"frequent":14240,"gae":14241,"vatican":14242,"congre":14243,"caric":14244,"hyun":14245,"primitive":14246,"recep":14247,"penny":14248,"patches":14249,"utc":14250,"lobster":14251,"walmart":14252,"entrepre":14253,"savannah":14254,"waterfalls":14255,"ayan":14256,"hyundai":14257,"46":14258,"scholars":14259,"venus":14260,"spooky":14261,"gross":14262,"shakespeare":14263,"street":14264,"quartz":14265,"heated":14266,"impression":14267,"apps":14268,"1922":14269,"attribu":14270,"rad":14271,"torn":14272,"exception":14273,"evolved":14274,"sbury":14275,"conqu":14276,"maldives":14277,"cadillac":14278,"consum":14279,"hydrogen":14280,"acker":14281,"formally":14282,"loch":14283,"touching":14284,"bridge":14285,"ppe":14286,"wraps":14287,"gel":14288,"gaga":14289,"quets":14290,"gauge":14291,"hindi":14292,"mics":14293,"goalkeeper":14294,"showers":14295,"pour":14296,"counts":14297,"produces":14298,"doo":14299,"artifacts":14300,"mandatory":14301,"generations":14302,"versatile":14303,"dandel":14
304,"alas":14305,"dele":14306,"wait":14307,"shav":14308,"spacecraft":14309,"pixie":14310,"tear":14311,"polic":14312,"sailors":14313,"solo":14314,"cheetah":14315,"usb":14316,"freshwater":14317,"navigation":14318,"freshly":14319,"alban":14320,"cardinal":14321,"unexpected":14322,"dear":14323,"muscular":14324,"encounter":14325,"otta":14326,"invent":14327,"checks":14328,"arest":14329,"agree":14330,"involvement":14331,"skyscraper":14332,"peel":14333,"dach":14334,"assisted":14335,"metals":14336,"burned":14337,"radiation":14338,"tortoise":14339,"lition":14340,"deemed":14341,"scouts":14342,"hunger":14343,"encyclo":14344,"deno":14345,"novelty":14346,"simultane":14347,"volcanic":14348,"variations":14349,"rescued":14350,"loo":14351,"spears":14352,"rell":14353,"din":14354,"clearance":14355,"cruci":14356,"fluid":14357,"925":14358,"dandelion":14359,"wayne":14360,"janeiro":14361,"ave":14362,"aton":14363,"cock":14364,"stockholm":14365,"reflections":14366,"apparent":14367,"convoy":14368,"gb":14369,"vii":14370,"touches":14371,"holly":14372,"columbus":14373,"spinning":14374,"capsule":14375,"sack":14376,"braid":14377,"scripture":14378,"puppet":14379,"foyer":14380,"remarkable":14381,"blast":14382,"jeff":14383,"dging":14384,"gamer":14385,"parkway":14386,"patrick":14387,"burnt":14388,"pearls":14389,"indie":14390,"legislation":14391,"gentle":14392,"marker":14393,"drove":14394,"percentage":14395,"tment":14396,"propaganda":14397,"spaceship":14398,"convinced":14399,"commanded":14400,"moonlight":14401,"amate":14402,"licensed":14403,"peek":14404,"trilo":14405,"glas":14406,"dentist":14407,"urus":14408,"topping":14409,"fier":14410,"initiative":14411,"stationery":14412,"paddle":14413,"revenue":14414,"op":14415,"domain":14416,"bats":14417,"vere":14418,"bris":14419,"stools":14420,"celest":14421,"scully":14422,"cinder":14423,"wool":14424,"naissance":14425,"violent":14426,"indonesian":14427,"spotlight":14428,"ez":14429,"hitler":14430,"challenging":14431,"insert":14432,"trans":14433,"nervous":14434,"aut
on":14435,"icul":14436,"amendment":14437,"eyel":14438,"fiat":14439,"israeli":14440,"attendance":14441,"ioning":14442,"donut":14443,"wider":14444,"sweaters":14445,"pron":14446,"frost":14447,"existed":14448,"oo":14449,"glossy":14450,"parenting":14451,"grande":14452,"ambulance":14453,"substantial":14454,"reserved":14455,"meadows":14456,"bulbs":14457,"statistics":14458,"yachts":14459,"alloy":14460,"answers":14461,"albert":14462,"scan":14463,"balanced":14464,"objective":14465,"owls":14466,"hercu":14467,"inexpensive":14468,"anglo":14469,"fairly":14470,"mahal":14471,"informed":14472,"courage":14473,"pairing":14474,"matched":14475,"bourbon":14476,"amal":14477,"sp":14478,"solit":14479,"hobbies":14480,"sofia":14481,"poured":14482,"isbn":14483,"sketch":14484,"decisions":14485,"stes":14486,"tavern":14487,"stretching":14488,"lithu":14489,"durable":14490,"glyph":14491,"asc":14492,"grumpy":14493,"waffle":14494,"asha":14495,"gro":14496,"occupation":14497,"nook":14498,"participating":14499,"cucumber":14500,"technician":14501,"rip":14502,"nationally":14503,"screw":14504,"installing":14505,"chuck":14506,"reconnaissance":14507,"layouts":14508,"hurt":14509,"shuttle":14510,"diseas":14511,"illo":14512,"equest":14513,"enjoys":14514,"ensemble":14515,"epi":14516,"disgu":14517,"essex":14518,"microwave":14519,"raiders":14520,"trilogy":14521,"oreg":14522,"vulner":14523,"dreamworks":14524,"adoption":14525,"compilation":14526,"filed":14527,"pt":14528,"án":14529,"indiana":14530,"likes":14531,"lp":14532,"imet":14533,"phu":14534,"surge":14535,"museums":14536,"count":14537,"pover":14538,"organizing":14539,"mention":14540,"appreciation":14541,"reviewers":14542,"collapse":14543,"jur":14544,"odeon":14545,"tobac":14546,"hospital":14547,"keychain":14548,"tox":14549,"coal":14550,"earn":14551,"embo":14552,"businesswoman":14553,"tec":14554,"surrender":14555,"rodeo":14556,"liv":14557,"nobody":14558,"tacos":14559,"vibe":14560,"tailed":14561,"kest":14562,"pulls":14563,"twenti":14564,"spirits":14565,"confederate
":14566,"convection":14567,"matthe":14568,"cchini":14569,"succul":14570,"psychology":14571,"ongoing":14572,"bizar":14573,"shores":14574,"marry":14575,"processes":14576,"neys":14577,"brewing":14578,"hyd":14579,"amounts":14580,"nile":14581,"slight":14582,"30th":14583,"gree":14584,"variable":14585,"inflatable":14586,"zucchini":14587,"regard":14588,"clad":14589,"hoped":14590,"bachel":14591,"anders":14592,"luminous":14593,"ju":14594,"blinds":14595,"inity":14596,"demolished":14597,"precis":14598,"cinderella":14599,"dash":14600,"1905":14601,"jaw":14602,"nostalgia":14603,"microscope":14604,"ante":14605,"conspir":14606,"suzuki":14607,"1909":14608,"policies":14609,"tram":14610,"skele":14611,"villa":14612,"showroom":14613,"junk":14614,"boardwalk":14615,"60s":14616,"discussing":14617,"whereas":14618,"inhabit":14619,"bananas":14620,"kate":14621,"naples":14622,"upholstery":14623,"ón":14624,"nies":14625,"ryan":14626,"bankno":14627,"perry":14628,"playful":14629,"argument":14630,"paws":14631,"citing":14632,"tram":14633,"mines":14634,"finishes":14635,"beach":14636,"blooms":14637,"flickr":14638,"moto":14639,"toll":14640,"obst":14641,"defeating":14642,"mainstream":14643,"variation":14644,"variant":14645,"kash":14646,"monaco":14647,"1908":14648,"sailboat":14649,"oils":14650,"identical":14651,"debris":14652,"counted":14653,"splas":14654,"cade":14655,"mous":14656,"mitch":14657,"pom":14658,"scotch":14659,"aver":14660,"nish":14661,"garrison":14662,"bikin":14663,"niagara":14664,"maurit":14665,"ulties":14666,"organize":14667,"ownership":14668,"slogan":14669,"scissors":14670,"deep":14671,"fragran":14672,"vitamin":14673,"biking":14674,"décor":14675,"cements":14676,"slov":14677,"cons":14678,"ousness":14679,"jelly":14680,"extent":14681,"spoken":14682,"error":14683,"blur":14684,"lecture":14685,"aquatic":14686,"neighbourhood":14687,"biscuits":14688,"rihanna":14689,"elm":14690,"rhe":14691,"teed":14692,"blessed":14693,"literally":14694,"arrest":14695,"publications":14696,"1920x1080":14697,"daries":14
698,"vacu":14699,"instructor":14700,"cancelled":14701,"chak":14702,"meg":14703,"nutri":14704,"fab":14705,"ranging":14706,"resolve":14707,"rebel":14708,"ago":14709,"baths":14710,"recordings":14711,"christianity":14712,"institution":14713,"dreaming":14714,"stud":14715,"blown":14716,"douglas":14717,"consumer":14718,"tobacco":14719,"intenti":14720,"grazing":14721,"illinois":14722,"fossils":14723,"valve":14724,"warren":14725,"ripped":14726,"1923":14727,"1924":14728,"sticky":14729,"subsi":14730,"dances":14731,"hare":14732,"refined":14733,"brushes":14734,"lexus":14735,"zo":14736,"mpet":14737,"spru":14738,"combines":14739,"atom":14740,"dorf":14741,"equestrian":14742,"enclosed":14743,"detailing":14744,"civilization":14745,"fisherman":14746,"bulgaria":14747,"stamp":14748,"afro":14749,"steal":14750,"descent":14751,"voting":14752,"baltic":14753,"relation":14754,"whimsical":14755,"sake":14756,"heli":14757,"stures":14758,"embrace":14759,"bren":14760,"ret":14761,"subar":14762,"hijab":14763,"trol":14764,"imagery":14765,"inland":14766,"emerging":14767,"elevator":14768,"khaki":14769,"recommend":14770,"ob":14771,"alignment":14772,"locally":14773,"fifteen":14774,"coastline":14775,"odyssey":14776,"stil":14777,"inaugu":14778,"artworks":14779,"curves":14780,"thereafter":14781,"manufacturers":14782,"singers":14783,"aimed":14784,"entation":14785,"monk":14786,"earthqu":14787,"inaugural":14788,"improvements":14789,"chimney":14790,"remix":14791,"assists":14792,"liquor":14793,"assessment":14794,"maneu":14795,"wedge":14796,"nichol":14797,"pok":14798,"goa":14799,"clocks":14800,"practicing":14801,"sacra":14802,"palest":14803,"veg":14804,"viral":14805,"bombs":14806,"muffin":14807,"competed":14808,"battleships":14809,"cobra":14810,"explan":14811,"endor":14812,"arches":14813,"songwriter":14814,"yp":14815,"mara":14816,"passport":14817,"amuse":14818,"mature":14819,"possession":14820,"roll":14821,"boring":14822,"gloss":14823,"celestial":14824,"kevin":14825,"opponents":14826,"distan":14827,"koala":14828,
"pradesh":14829,"jumper":14830,"ox":14831,"zan":14832,"paramount":14833,"herbal":14834,"strongest":14835,"supernatural":14836,"ray":14837,"sulli":14838,"gorge":14839,"haul":14840,"fencing":14841,"surre":14842,"point":14843,"gett":14844,"indicating":14845,"nurses":14846,"tona":14847,"pitched":14848,"33":14849,"arct":14850,"campbell":14851,"payment":14852,"rememb":14853,"jak":14854,"dropping":14855,"mberg":14856,"ique":14857,"tilla":14858,"cath":14859,"referee":14860,"glou":14861,"usable":14862,"competes":14863,"haus":14864,"vital":14865,"pictu":14866,"publicity":14867,"qualifying":14868,"capturing":14869,"yes":14870,"ssie":14871,"symboli":14872,"relev":14873,"stine":14874,"plywood":14875,"scholar":14876,"pursuit":14877,"closest":14878,"gosla":14879,"ribs":14880,"café":14881,"moured":14882,"relaxation":14883,"infinite":14884,"refrigerator":14885,"anthem":14886,"intention":14887,"lements":14888,"kia":14889,"guide":14890,"wiki":14891,"surfaces":14892,"izer":14893,"kane":14894,"memorable":14895,"bio":14896,"debt":14897,"gged":14898,"holo":14899,"faith":14900,"youngest":14901,"lesser":14902,"guilty":14903,"mery":14904,"volvo":14905,"packages":14906,"threw":14907,"boundaries":14908,"sunflowers":14909,"defended":14910,"framing":14911,"sentence":14912,"alert":14913,"dominican":14914,"rie":14915,"luke":14916,"poll":14917,"phs":14918,"symbolic":14919,"clipping":14920,"azzi":14921,"salsa":14922,"badges":14923,"kill":14924,"stag":14925,"ceremonies":14926,"flora":14927,"snail":14928,"fairies":14929,"armies":14930,"wemb":14931,"miracle":14932,"colombia":14933,"lisa":14934,"thief":14935,"thumbs":14936,"sovere":14937,"joins":14938,"rink":14939,"aser":14940,"tenth":14941,"stern":14942,"vincent":14943,"precipit":14944,"aval":14945,"cashmere":14946,"merchandise":14947,"leo":14948,"coup":14949,"78":14950,"bush":14951,"protesters":14952,"badass":14953,"fton":14954,"offshore":14955,"isha":14956,"tension":14957,"lad":14958,"promoting":14959,"grooming":14960,"cocoa":14961,"pel":14962,"shutt
ers":14963,"dachsh":14964,"wembley":14965,"jol":14966,"kout":14967,"paying":14968,"pra":14969,"supervis":14970,"monte":14971,"pills":14972,"hosting":14973,"lakers":14974,"help":14975,"eyed":14976,"theatrical":14977,"cot":14978,"dismissed":14979,"crack":14980,"sullivan":14981,"rebuilt":14982,"trumpet":14983,"lander":14984,"villain":14985,"humanity":14986,"hoping":14987,"honor":14988,"fee":14989,"bledon":14990,"rupted":14991,"elli":14992,"dubbed":14993,"sketchbook":14994,"meaning":14995,"hive":14996,"targets":14997,"colonies":14998,"daddy":14999,"dul":15000,"exclusively":15001,"frederick":15002,"gently":15003,"judic":15004,"105":15005,"yugosla":15006,"papar":15007,"uto":15008,"graduates":15009,"introduce":15010,"mama":15011,"restore":15012,"alab":15013,"beag":15014,"coop":15015,"hyper":15016,"fail":15017,"descend":15018,"refer":15019,"controller":15020,"organizer":15021,"jews":15022,"commanding":15023,"reduction":15024,"kap":15025,"graduated":15026,"instance":15027,"smoked":15028,"districts":15029,"observatory":15030,"vault":15031,"manage":15032,"pires":15033,"collins":15034,"lind":15035,"diseases":15036,"enne":15037,"define":15038,"auburn":15039,"wrangler":15040,"maroon":15041,"gorilla":15042,"realis":15043,"skyscrapers":15044,"mazda":15045,"sights":15046,"ranks":15047,"restricted":15048,"tage":15049,"silic":15050,"sealed":15051,"oregon":15052,"olo":15053,"rats":15054,"beating":15055,"pod":15056,"tott":15057,"winery":15058,"mala":15059,"aluminium":15060,"sweatshirt":15061,"surprising":15062,"borealis":15063,"hoods":15064,"hanger":15065,"mechanism":15066,"tow":15067,"tudor":15068,"chrysler":15069,"baza":15070,"handling":15071,"everybody":15072,"embassy":15073,"civilian":15074,"mold":15075,"hurst":15076,"parliam":15077,"chennai":15078,"bachelor":15079,"vian":15080,"swick":15081,"humming":15082,"amusement":15083,"strand":15084,"darwin":15085,"rapids":15086,"dure":15087,"kai":15088,"tents":15089,"raccoon":15090,"lehenga":15091,"pp":15092,"⁄4":15093,"resp":15094,"nab":150
95,"cod":15096,"electrons":15097,"livestock":15098,"witness":15099,"roofs":15100,"suburb":15101,"wimbledon":15102,"raaf":15103,"conscious":15104,"enforcement":15105,"psd":15106,"pines":15107,"mai":15108,"missis":15109,"ana":15110,"cumb":15111,"canon":15112,"difficulties":15113,"reviewer":15114,"pedest":15115,"customs":15116,"zoom":15117,"sweethe":15118,"awk":15119,"haz":15120,"colli":15121,"latte":15122,"comfy":15123,"curated":15124,"reliable":15125,"bt":15126,"lilies":15127,"how":15128,"messy":15129,"weave":15130,"rotat":15131,"voices":15132,"knowing":15133,"founding":15134,"albany":15135,"veggie":15136,"suv":15137,"organism":15138,"fabric":15139,"crashed":15140,"insan":15141,"legislative":15142,"yweight":15143,"thon":15144,"tactics":15145,"grass":15146,"axe":15147,"honors":15148,"elk":15149,"caes":15150,"coolest":15151,"55":15152,"fla":15153,"symbol":15154,"batter":15155,"carriers":15156,"meanings":15157,"guarantee":15158,"paparazzi":15159,"impe":15160,"dash":15161,"mund":15162,"retaining":15163,"crowded":15164,"opponent":15165,"mitchell":15166,"mankind":15167,"landmarks":15168,"fford":15169,"encourage":15170,"dum":15171,"mahog":15172,"extensions":15173,"delivering":15174,"penn":15175,"specimens":15176,"protecting":15177,"bizarre":15178,"mahogany":15179,"bombers":15180,"symmetry":15181,"sergeant":15182,"kath":15183,"uma":15184,"habits":15185,"potted":15186,"perce":15187,"clearing":15188,"frustr":15189,"targe":15190,"sorry":15191,"professionals":15192,"warts":15193,"varieties":15194,"riot":15195,"legion":15196,"syndrome":15197,"atlant":15198,"montgo":15199,"leys":15200,"asy":15201,"gibson":15202,"successor":15203,"wolver":15204,"ruption":15205,"shear":15206,"mystic":15207,"1890":15208,"predators":15209,"negotiations":15210,"hope":15211,"obtain":15212,"extending":15213,"mistakes":15214,"subaru":15215,"ls":15216,"bangs":15217,"wicket":15218,"ao":15219,"leigh":15220,"curb":15221,"casts":15222,"desired":15223,"magnet":15224,"louvre":15225,"sins":15226,"onally":15227,"d
ispos":15228,"usage":15229,"mustache":15230,"resque":15231,"measurements":15232,"xton":15233,"50s":15234,"drain":15235,"cigar":15236,"traditionally":15237,"shorter":15238,"arctica":15239,"tottenham":15240,"oyster":15241,"assign":15242,"prosecu":15243,"premiered":15244,"middle":15245,"cubes":15246,"reducing":15247,"shre":15248,"networks":15249,"karate":15250,"ankara":15251,"claw":15252,"dhab":15253,"olor":15254,"locker":15255,"dave":15256,"basilica":15257,"construct":15258,"eta":15259,"juni":15260,"lc":15261,"nz":15262,"upset":15263,"prayers":15264,"knock":15265,"1907":15266,"cosmos":15267,"honored":15268,"buddhism":15269,"itiner":15270,"categories":15271,"cardinals":15272,"tasse":15273,"tuna":15274,"kyoto":15275,"ek":15276,"osa":15277,"leash":15278,"elaborate":15279,"handcrafted":15280,"divorce":15281,"moist":15282,"sonville":15283,"barely":15284,"patag":15285,"machinery":15286,"winding":15287,"cousin":15288,"complex":15289,"picturesque":15290,"ritual":15291,"bits":15292,"managing":15293,"olk":15294,"pement":15295,"interaction":15296,"1904":15297,"kb":15298,"tney":15299,"perched":15300,"behalf":15301,"kisses":15302,"expressions":15303,"departed":15304,"adequ":15305,"1906":15306,"fascinating":15307,"obe":15308,"ground":15309,"blocked":15310,"fallout":15311,"ov":15312,"bosn":15313,"gotham":15314,"mahar":15315,"identification":15316,"amateur":15317,"comprehensive":15318,"yal":15319,"sham":15320,"renovations":15321,"catches":15322,"noticed":15323,"a3":15324,"batch":15325,"grizz":15326,"ponds":15327,"maintaining":15328,"plun":15329,"trapped":15330,"mercy":15331,"forgotten":15332,"mme":15333,"enor":15334,"furb":15335,"ante":15336,"phi":15337,"procedure":15338,"ui":15339,"uae":15340,"pant":15341,"quan":15342,"batteries":15343,"qatar":15344,"reunion":15345,"enda":15346,"isa":15347,"unsuccessful":15348,"ballerina":15349,"gc":15350,"sporty":15351,"reflects":15352,"tibet":15353,"mento":15354,"reason":15355,"foggy":15356,"burton":15357,"grinch":15358,"nickel":15359,"grammar":15
360,"notably":15361,"varsity":15362,"accommodate":15363,"montgomery":15364,"harness":15365,"adopt":15366,"marching":15367,"bok":15368,"scotia":15369,"mockup":15370,"toxic":15371,"refurb":15372,"2d":15373,"lg":15374,"shaft":15375,"hydrange":15376,"scribed":15377,"convert":15378,"wat":15379,"veloc":15380,"sures":15381,"favorable":15382,"advantages":15383,"sfield":15384,"armament":15385,"bridesmaids":15386,"rifle":15387,"pr":15388,"tastes":15389,"stag":15390,"catalog":15391,"shawl":15392,"burgers":15393,"highways":15394,"laurel":15395,"platforms":15396,"accumul":15397,"eternal":15398,"idol":15399,"zim":15400,"melan":15401,"counties":15402,"intelle":15403,"neuro":15404,"nigeria":15405,"inhabitants":15406,"winged":15407,"ento":15408,"overlay":15409,"slab":15410,"indigo":15411,"acon":15412,"aspen":15413,"fir":15414,"exquis":15415,"cutter":15416,"squee":15417,"explaining":15418,"silly":15419,"trace":15420,"versace":15421,"pillars":15422,"gio":15423,"calories":15424,"specimen":15425,"halls":15426,"fudge":15427,"tir":15428,"fewer":15429,"applying":15430,"partments":15431,"lu":15432,"clam":15433,"hyde":15434,"nig":15435,"pant":15436,"paraly":15437,"antarctica":15438,"chest":15439,"zzy":15440,"elopement":15441,"wikipedia":15442,"joc":15443,"bouquets":15444,"procedu":15445,"poverty":15446,"fli":15447,"ively":15448,"locate":15449,"berke":15450,"ibu":15451,"parody":15452,"lefto":15453,"evans":15454,"temporarily":15455,"realized":15456,"commissioner":15457,"nativity":15458,"compare":15459,"32":15460,"distancing":15461,"pray":15462,"william":15463,"kelly":15464,"jefferson":15465,"enclosure":15466,"checked":15467,"complicated":15468,"pac":15469,"sang":15470,"simpli":15471,"murphy":15472,"rev":15473,"sensitive":15474,"asi":15475,"travelled":15476,"postcards":15477,"dispute":15478,"sari":15479,"lessly":15480,"lineup":15481,"concentration":15482,"mcdonald":15483,"incredibly":15484,"nouve":15485,"llama":15486,"dine":15487,"reacts":15488,"peoples":15489,"mitsub":15490,"jerse":15491,"swor
th":15492,"uri":15493,"bushes":15494,"treatments":15495,"scripts":15496,"mascu":15497,"solved":15498,"intelligent":15499,"jimmy":15500,"3⁄4":15501,"biological":15502,"delight":15503,"pebble":15504,"flesh":15505,"mistake":15506,"mington":15507,"ror":15508,"spit":15509,"heavyweight":15510,"juicy":15511,"motifs":15512,"pom":15513,"inspection":15514,"eric":15515,"helm":15516,"hunt":15517,"lore":15518,"fiesta":15519,"airways":15520,"salt":15521,"fishes":15522,"wasp":15523,"encountered":15524,"phuket":15525,"orang":15526,"bro":15527,"developers":15528,"gems":15529,"magnifying":15530,"mitsubishi":15531,"oda":15532,"snor":15533,"poll":15534,"paved":15535,"tibetan":15536,"niel":15537,"roofing":15538,"speedway":15539,"geons":15540,"teammates":15541,"vacuum":15542,"headband":15543,"orly":15544,"dee":15545,"inian":15546,"frogs":15547,"smoky":15548,"behaviour":15549,"prosper":15550,"observations":15551,"altitude":15552,"benches":15553,"shaker":15554,"meatballs":15555,"instrumental":15556,"tn":15557,"18k":15558,"desper":15559,"ise":15560,"melody":15561,"omous":15562,"jerseys":15563,"judges":15564,"launches":15565,"volumes":15566,"exists":15567,"teammate":15568,"pedestal":15569,"biscuit":15570,"bazaar":15571,"rors":15572,"survivors":15573,"br":15574,"input":15575,"protests":15576,"cracked":15577,"badly":15578,"mu":15579,"fern":15580,"involves":15581,"2000s":15582,"melted":15583,"railways":15584,"audiences":15585,"corgi":15586,"warsaw":15587,"arched":15588,"intricate":15589,"conservatory":15590,"pathway":15591,"450":15592,"playroom":15593,"examination":15594,"finger":15595,"encyclopedia":15596,"bry":15597,"upstairs":15598,"cbs":15599,"intermedi":15600,"spel":15601,"aids":15602,"translated":15603,"constitutional":15604,"stupid":15605,"reflective":15606,"echo":15607,"ario":15608,"lucas":15609,"darker":15610,"crushed":15611,"vivid":15612,"turner":15613,"barn":15614,"hyatt":15615,"housed":15616,"escal":15617,"ilo":15618,"serbia":15619,"scream":15620,"prisoner":15621,"guate":15622,"thir
teen":15623,"stap":15624,"recognize":15625,"rehearsal":15626,"tigh":15627,"devil":15628,"yamaha":15629,"rest":15630,"rockets":15631,"jab":15632,"beir":15633,"seals":15634,"throat":15635,"oa":15636,"congreg":15637,"petite":15638,"itted":15639,"cott":15640,"sloven":15641,"authorized":15642,"trolley":15643,"tist":15644,"adri":15645,"gospel":15646,"dhabi":15647,"ome":15648,"pall":15649,"nazi":15650,"anthropo":15651,"sweetheart":15652,"rington":15653,"kees":15654,"sweep":15655,"clips":15656,"kills":15657,"titan":15658,"static":15659,"demons":15660,"depos":15661,"productive":15662,"usd":15663,"asparag":15664,"complement":15665,"tana":15666,"cookbook":15667,"dors":15668,"moul":15669,"purses":15670,"34":15671,"sar":15672,"sol":15673,"1903":15674,"armoured":15675,"besides":15676,"talented":15677,"holi":15678,"24x":15679,"hn":15680,"paragra":15681,"sofas":15682,"bicycles":15683,"spruce":15684,"glim":15685,"collectors":15686,"developer":15687,"tory":15688,"ett":15689,"reinforced":15690,"drift":15691,"bodybuilding":15692,"agan":15693,"characteristic":15694,"nouveau":15695,"trade":15696,"prevented":15697,"mathematics":15698,"dior":15699,"centered":15700,"dah":15701,"test":15702,"diaper":15703,"starfish":15704,"jane":15705,"lengths":15706,"eaten":15707,"stability":15708,"ader":15709,"beirut":15710,"ghe":15711,"daytona":15712,"rogers":15713,"ister":15714,"tasmania":15715,"legislature":15716,"hercules":15717,"venezu":15718,"anxiety":15719,"zilla":15720,"rests":15721,"nes":15722,"stole":15723,"voiced":15724,"bentley":15725,"oats":15726,"blender":15727,"mir":15728,"raz":15729,"ghi":15730,"worried":15731,"exam":15732,"favour":15733,"750":15734,"coco":15735,"unlimited":15736,"charms":15737,"480":15738,"scrap":15739,"ision":15740,"comment":15741,"kale":15742,"tights":15743,"preserv":15744,"royce":15745,"modeled":15746,"gage":15747,"denali":15748,"membership":15749,"launching":15750,"molecular":15751,"lacro":15752,"sheds":15753,"pome":15754,"function":15755,"mirrored":15756,"laughter":15
757,"atro":15758,"goth":15759,"calyp":15760,"asparagus":15761,"erosion":15762,"protagonist":15763,"muda":15764,"jelly":15765,"avon":15766,"engaging":15767,"buck":15768,"ignor":15769,"round":15770,"ctivity":15771,"demands":15772,"prompted":15773,"renow":15774,"maria":15775,"gilbert":15776,"fundraiser":15777,"invisible":15778,"chen":15779,"tracking":15780,"allig":15781,"masquer":15782,"bender":15783,"rt":15784,"demo":15785,"resigned":15786,"chateau":15787,"newton":15788,"renowned":15789,"patagonia":15790,"skater":15791,"battlefield":15792,"erected":15793,"suburbs":15794,"pomegran":15795,"militia":15796,"crib":15797,"travelers":15798,"dum":15799,"chalet":15800,"breathtaking":15801,"efficiency":15802,"swamp":15803,"pony":15804,"vp":15805,"earning":15806,"sprint":15807,"raptor":15808,"terrible":15809,"hygi":15810,"pharmacy":15811,"enhanced":15812,"dachshund":15813,"syria":15814,"demolition":15815,"habitats":15816,"sf":15817,"mons":15818,"zones":15819,"atoon":15820,"arium":15821,"beloved":15822,"elig":15823,"blings":15824,"bulary":15825,"retro":15826,"hose":15827,"malibu":15828,"conducting":15829,"shred":15830,"torch":15831,"zens":15832,"dotted":15833,"jumps":15834,"warm":15835,"progressive":15836,"poison":15837,"strat":15838,"mur":15839,"motives":15840,"guaranteed":15841,"remnants":15842,"fathers":15843,"atlantis":15844,"cms":15845,"clock":15846,"blaze":15847,"hack":15848,"builds":15849,"pies":15850,"rains":15851,"spy":15852,"metall":15853,"001":15854,"tornado":15855,"beagle":15856,"flank":15857,"frame":15858,"1901":15859,"diver":15860,"laughs":15861,"abuse":15862,"mys":15863,"rib":15864,"fee":15865,"southwestern":15866,"prefer":15867,"walled":15868,"deadpool":15869,"moor":15870,"siblings":15871,"villains":15872,"elastic":15873,"awkward":15874,"mg":15875,"va":15876,"foto":15877,"venom":15878,"mixer":15879,"asting":15880,"instruction":15881,"nielsen":15882,"hence":15883,"ingle":15884,"desks":15885,"quarant":15886,"institutions":15887,"characterized":15888,"whar":15889,"ho
mestay":15890,"molecules":15891,"designation":15892,"crate":15893,"flyers":15894,"cheesy":15895,"mbs":15896,"slam":15897,"alexandria":15898,"dispen":15899,"crossover":15900,"meaningful":15901,"cog":15902,"stems":15903,"figu":15904,"personally":15905,"chiang":15906,"westwood":15907,"rhin":15908,"excellence":15909,"wynd":15910,"lou":15911,"myr":15912,"butler":15913,"browns":15914,"clan":15915,"soy":15916,"steel":15917,"combinations":15918,"dyed":15919,"strokes":15920,"lennon":15921,"extends":15922,"music":15923,"haute":15924,"som":15925,"ultra":15926,"originated":15927,"wealthy":15928,"imposed":15929,"serial":15930,"hillside":15931,"wyndham":15932,"hardcover":15933,"widow":15934,"anes":15935,"rotation":15936,"isoto":15937,"radius":15938,"congo":15939,"angled":15940,"competitions":15941,"lancaster":15942,"1902":15943,"appetizer":15944,"roadster":15945,"isian":15946,"fear":15947,"email":15948,"mobility":15949,"declaration":15950,"dded":15951,"beth":15952,"camaro":15953,"deliber":15954,"hospit":15955,"lol":15956,"curls":15957,"segments":15958,"anonymous":15959,"woodstock":15960,"zig":15961,"girly":15962,"dealer":15963,"cleared":15964,"ya":15965,"troy":15966,"digit":15967,"velocity":15968,"par":15969,"stepping":15970,"neckline":15971,"bikini":15972,"tiled":15973,"gins":15974,"wald":15975,"tening":15976,"sixteen":15977,"embell":15978,"berkeley":15979,"220":15980,"rival":15981,"collectibles":15982,"obvious":15983,"overs":15984,"dune":15985,"sicily":15986,"rogue":15987,"fiji":15988,"relevant":15989,"kaw":15990,"40k":15991,"figurines":15992,"anthony":15993,"ounce":15994,"brooch":15995,"homestead":15996,"memo":15997,"associate":15998,"tasks":15999,"disorder":16000,"playhouse":16001,"hanoi":16002,"rupt":16003,"sandstone":16004,"crimes":16005,"census":16006,"sudden":16007,"simultaneously":16008,"lenses":16009,"ditch":16010,"borg":16011,"alization":16012,"serie":16013,"salem":16014,"johnny":16015,"opener":16016,"fici":16017,"pads":16018,"lette":16019,"sec":16020,"pulp":16021,"pri
nciple":16022,"harvey":16023,"defending":16024,"victories":16025,"safely":16026,"divide":16027,"skip":16028,"vertebr":16029,"atri":16030,"lacrosse":16031,"jade":16032,"mechanics":16033,"scuba":16034,"zen":16035,"seized":16036,"slaves":16037,"canber":16038,"ateful":16039,"bermuda":16040,"binary":16041,"jury":16042,"rentals":16043,"onian":16044,"plicity":16045,"significance":16046,"isolation":16047,"66":16048,"bison":16049,"scared":16050,"collapsed":16051,"alfred":16052,"nep":16053,"horri":16054,"jason":16055,"courthouse":16056,"arnold":16057,"suddenly":16058,"blueberries":16059,"masterpiece":16060,"tranqu":16061,"consumption":16062,"chasing":16063,"graphs":16064,"painters":16065,"triangular":16066,"sketching":16067,"periodic":16068,"coinci":16069,"alike":16070,"drake":16071,"furry":16072,"documentation":16073,"snowflakes":16074,"quarantined":16075,"glee":16076,"beaver":16077,"reno":16078,"unfinished":16079,"terra":16080,"flu":16081,"milk":16082,"brushed":16083,"accent":16084,"drau":16085,"wow":16086,"shocked":16087,"clone":16088,"turrets":16089,"exquisite":16090,"proph":16091,"ishings":16092,"kah":16093,"silicone":16094,"infer":16095,"gou":16096,"recipi":16097,"poké":16098,"recess":16099,"luci":16100,"owa":16101,"oliver":16102,"rand":16103,"camps":16104,"liers":16105,"operates":16106,"blessing":16107,"campaigns":16108,"79":16109,"asa":16110,"page":16111,"stew":16112,"charlo":16113,"contributions":16114,"word":16115,"polye":16116,"hunters":16117,"signals":16118,"addiction":16119,"reed":16120,"luct":16121,"960":16122,"elan":16123,"tina":16124,"reluct":16125,"nott":16126,"dorset":16127,"vings":16128,"bird":16129,"gaining":16130,"piggy":16131,"configur":16132,"verde":16133,"souvenir":16134,"love":16135,"stretches":16136,"compounds":16137,"fade":16138,"ethiopia":16139,"diabetes":16140,"sedim":16141,"doubles":16142,"gundam":16143,"rugged":16144,"psal":16145,"mantel":16146,"acne":16147,"lol":16148,"elds":16149,"buns":16150,"eliminated":16151,"carat":16152,"refuge":16153,"ma
nship":16154,"pulse":16155,"canberra":16156,"emotion":16157,"commentary":16158,"resale":16159,"avier":16160,"stitches":16161,"sm":16162,"plas":16163,"escaped":16164,"escort":16165,"turbine":16166,"bobby":16167,"inhab":16168,"forge":16169,"itinerary":16170,"hesive":16171,"electron":16172,"tartan":16173,"faye":16174,"rochester":16175,"chau":16176,"updates":16177,"cruz":16178,"honest":16179,"donuts":16180,"destroyers":16181,"expanding":16182,"eyebrows":16183,"pecan":16184,"ruling":16185,"believ":16186,"taipei":16187,"masquerade":16188,"lemonade":16189,"thriller":16190,"restrictions":16191,"echo":16192,"wicked":16193,"reminis":16194,"enormous":16195,"cole":16196,"breathing":16197,"ptic":16198,"guin":16199,"kirk":16200,"shy":16201,"kim":16202,"flir":16203,"hamburger":16204,"asym":16205,"winni":16206,"tyler":16207,"stamps":16208,"succession":16209,"otter":16210,"cubic":16211,"assorted":16212,"universities":16213,"late":16214,"spreading":16215,"telegraph":16216,"rhino":16217,"bangal":16218,"furnishings":16219,"quit":16220,"believing":16221,"sequins":16222,"stopping":16223,"melt":16224,"demonstrated":16225,"oney":16226,"chrono":16227,"fierce":16228,"mauritius":16229,"codes":16230,"tides":16231,"jellyfish":16232,"aba":16233,"peas":16234,"windmill":16235,"alleged":16236,"rico":16237,"durham":16238,"quantum":16239,"pokémon":16240,"freight":16241,"poorly":16242,"isley":16243,"advices":16244,"kor":16245,"moody":16246,"dently":16247,"toned":16248,"respir":16249,"grip":16250,"subl":16251,"locking":16252,"sington":16253,"ppi":16254,"gonna":16255,"hugs":16256,"emphasis":16257,"dus":16258,"pren":16259,"shiva":16260,"cameron":16261,"axles":16262,"positioned":16263,"focal":16264,"surgical":16265,"pete":16266,"mixes":16267,"devast":16268,"fragrance":16269,"recognised":16270,"statements":16271,"helicopters":16272,"inoa":16273,"tweed":16274,"relay":16275,"predicted":16276,"pam":16277,"jail":16278,"princesses":16279,"justi":16280,"welcoming":16281,"washer":16282,"addresses":16283,"contents
":16284,"hemis":16285,"yang":16286,"astrology":16287,"loyal":16288,"sweat":16289,"atmospher":16290,"noir":16291,"airplanes":16292,"fauna":16293,"ardo":16294,"fled":16295,"rowed":16296,"moist":16297,"norman":16298,"teamwork":16299,"litter":16300,"godzilla":16301,"teap":16302,"duffel":16303,"wines":16304,"dah":16305,"imported":16306,"lifts":16307,"compati":16308,"tte":16309,"reactions":16310,"pasture":16311,"belong":16312,"spine":16313,"radical":16314,"dealing":16315,"rabbits":16316,"dozens":16317,"poodle":16318,"rum":16319,"interviews":16320,"legit":16321,"jamas":16322,"inau":16323,"chiefs":16324,"abulary":16325,"ashi":16326,"captures":16327,"churchill":16328,"compatible":16329,"ern":16330,"enta":16331,"cupboard":16332,"munition":16333,"publicly":16334,"wand":16335,"branded":16336,"itz":16337,"agencies":16338,"formul":16339,"turret":16340,"iscus":16341,"fixture":16342,"oriented":16343,"hallmark":16344,"cun":16345,"awakens":16346,"mani":16347,"anton":16348,"deserted":16349,"botanic":16350,"meteoro":16351,"kre":16352,"labyr":16353,"distr":16354,"synthetic":16355,"eous":16356,"towels":16357,"1800":16358,"understood":16359,"thinks":16360,"wanting":16361,"bangalore":16362,"labyrinth":16363,"gill":16364,"moms":16365,"screws":16366,"varied":16367,"archbishop":16368,"specialized":16369,"criticised":16370,"rails":16371,"undated":16372,"grandma":16373,"circumstances":16374,"mod":16375,"trek":16376,"gladi":16377,"onry":16378,"rims":16379,"marl":16380,"stepped":16381,"darin":16382,"hogwarts":16383} \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..c168ea5ad9baefea626541e197b4d8ffc784b1cb --- /dev/null +++ b/app.py @@ -0,0 +1,353 @@ +import os, torch +import gradio as gr +import torchvision.utils as vutils +import torchvision.transforms as transforms +from dalle.models import StoryDalle +import argparse +from PIL import Image +import numpy as np +from torchvision.utils import save_image +import 
# ------------------------------------------------------------------------------------
# Gradio demo for StoryDALL-E story continuation on the Pororo dataset.
# Downloads the pretrained checkpoint, loads the StoryDalle model, and serves a
# web UI that turns up to four captions (plus a source frame) into a visual story.
# ------------------------------------------------------------------------------------

import argparse
import os

import gdown
import gradio as gr
import numpy as np
import tensorflow_hub as hub
import torch
import torchvision.transforms as transforms
import torchvision.utils as vutils
from PIL import Image
from torchvision.utils import save_image

from dalle.models import StoryDalle

# Source frames (initial scenes) for each recurring character.  The paths point
# to the Pororo-SV dataset location used during development; the demo itself
# loads per-character frames from ./demo/<name>.png instead (see predict()).
source_frame_paths = {
    'Pororo': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_2/Pororo_ENGLISH1_2_ep6/12.png',
    'Loopy': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/26.png',
    'Crong': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/10.png',
    'Poby': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep9/34.png',
    'Eddy': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/46.png',
    'Petty': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH2_1/Pororo_ENGLISH2_1_ep1/34.png',
    'Tongtong': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep7/8.png',
    'Rody': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep6/66.png',
    'Harry': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep7/39.png',
}


def inverse_normalize(tensor, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)):
    """Undo a torchvision ``Normalize`` transform IN PLACE.

    Args:
        tensor: image tensor of shape (..., C, H, W) previously normalized as
            ``(x - mean) / std``.  NOTE: mutated in place via ``mul_``/``add_``.
        mean: per-channel mean used by the forward normalization.
        std: per-channel std used by the forward normalization.

    Returns:
        The same tensor (de-normalized), returned for call chaining.
    """
    mean = torch.as_tensor(mean, dtype=tensor.dtype, device=tensor.device)
    std = torch.as_tensor(std, dtype=tensor.dtype, device=tensor.device)
    if mean.ndim == 1:
        mean = mean.view(-1, 1, 1)
    if std.ndim == 1:
        std = std.view(-1, 1, 1)
    tensor.mul_(std).add_(mean)
    return tensor


def save_story_results(images, video_len=4, n_candidates=1, mask=None):
    """Arrange generated story frames into a single grid image.

    Args:
        images: list of tensors, one per batch element, each indexed as
            ``images[i][j][k]`` for candidate ``j`` and frame ``k``
            (assumes frames are 256x256 — TODO confirm against the sampler).
        video_len: number of frames in a full story.
        n_candidates: number of candidate stories generated per input.
        mask: optional 0/1 list over the ``video_len`` caption slots marking
            which captions were actually provided; frames for masked-out
            captions are replaced with white padding on the right.

    Returns:
        A (C, H, W) tensor containing the assembled grid, with the outer
        padding border trimmed off.
    """
    if mask is None:
        # BUG FIX: ``video_len`` is an int; the original ``range(len(video_len))``
        # raised TypeError whenever ``mask`` was omitted.
        mask = [1 for _ in range(video_len)]

    all_images = []
    for i in range(len(images)):  # batch size = 1 in this demo
        for j in range(n_candidates):
            # Keep only frames whose captions were actually provided.
            story = [images[i][j][k] for k, m in enumerate(mask) if m == 1]
            all_images.append(vutils.make_grid(story, sum(mask), padding=0))
    all_images = vutils.make_grid(all_images, 1, padding=20)
    print(all_images)

    # Pad on the right so the canvas always corresponds to ``video_len`` frames.
    pad_len = video_len - sum(mask)
    if pad_len > 0:
        pad_height = 256 * n_candidates + 20 * (n_candidates + 1)
        pad_width = 256 * pad_len + 20 * (pad_len)
        pad_image = torch.ones(3, pad_height, pad_width)

        print(all_images.shape, pad_image.shape)
        all_images = torch.cat([all_images[:, :, :-15], pad_image], dim=-1)

    print(all_images.shape)
    return all_images[:, 15:-15, 15:-15]


def main(args):
    """Download model assets, load StoryDalle, and launch the Gradio demo.

    In ``args.debug`` mode no model is loaded and ``predict`` is a no-op that
    returns the previously generated image path.
    """
    device = 'cuda:0'

    model_url = 'https://drive.google.com/file/d/1lJ6zMZ6qTvFu6H35-VEdFlN13MMslivJ/view?usp=sharing'
    png_url = 'https://drive.google.com/file/d/1C33A1IzSHDPoQ4QBsgFWbF61QWaAxRo_/view?usp=sharing'

    # BUG FIX: gdown cannot resolve a Drive "view" share URL without fuzzy=True,
    # and the checkpoint directory must exist before downloading into it.
    os.makedirs('./ckpt', exist_ok=True)
    gdown.download(model_url, quiet=True, use_cookies=False, fuzzy=True, output="./ckpt/25.pth")
    gdown.download(png_url, quiet=True, use_cookies=False, fuzzy=True, output="demo_pororo_good.png")

    if args.debug:
        # Skip model loading entirely so the UI can be exercised offline.
        model, config, embed, valid_transform = None, None, None, None
    else:
        model, config = StoryDalle.from_pretrained(args)
        model.tokenizer.add_tokens(['pororo', 'loopy', 'eddy', 'harry', 'poby',
                                    'tongtong', 'crong', 'rody', 'petty'])
        model.eval()
        model.to(device=device)
        # Universal Sentence Encoder provides the per-caption conditioning vectors.
        embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-large/5")

        # BUG FIX: this block previously ran even in debug mode, where ``model``
        # is None, causing an AttributeError.
        if model.config.story.condition:
            for i in range(len(model.cross_attention_layers)):
                model.cross_attention_layers[i].to(device)
            print("Cross-attention layers are in cuda:",
                  next(model.cross_attention_layers[0].parameters()).is_cuda)

        # BUG FIX: ``config`` is undefined in debug mode; build the transform
        # only when the model (and hence config) was actually loaded.
        valid_transform = transforms.Compose(
            [transforms.Resize(config.dataset.image_resolution),
             transforms.CenterCrop(config.dataset.image_resolution),
             transforms.ToTensor(),
             transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])]
        )

    def predict(caption_1, caption_2, caption_3, caption_4, source='Pororo', top_k=32, top_p=0.2,
                n_candidates=4, supercondition=False):
        """Generate a visual story for up to four captions and save it as a PNG.

        ``supercondition`` is accepted from the UI but currently unused by the
        sampling call — TODO wire through when supported.
        """
        if not args.debug:
            captions = [caption_1, caption_2, caption_3, caption_4]
            mask = [1 if caption != '' else 0 for caption in captions]
            print(captions, mask, source, n_candidates)
            # Empty caption slots get a placeholder caption; their frames are
            # masked out of the final grid by ``save_story_results``.
            for i, caption in enumerate(captions):
                if caption == "":
                    captions[i] = "Pororo is reading a book."
            tokens = [model.tokenizer.encode(caption) for caption in captions]
            texts = torch.stack([torch.LongTensor(token.ids) for token in tokens]).unsqueeze(0)
            sent_embeds = torch.tensor(embed(captions).numpy())

            src_image = valid_transform(Image.open('./demo/%s.png' % source).convert('RGB'))

            stories = []
            with torch.no_grad():
                for i in range(texts.shape[0]):
                    pixels = model.sampling_batch(texts[i].to(device), src_image.unsqueeze(0).to(device),
                                                  sent_embeds.unsqueeze(0).to(device), top_k=top_k,
                                                  top_p=top_p, prompt=None,
                                                  n_candidates=n_candidates).cpu()
                    stories.append(pixels)

            img = save_story_results(stories, video_len=4, n_candidates=n_candidates, mask=mask)
            save_image(img, "gradio_demo_pororo.png", normalize=True)

        return "gradio_demo_pororo.png"

    with gr.Blocks(css='#output {width:750px; height:750px; float:left;}') as demo:
        gr.Markdown('''
        ## StoryDALL-E: Adapting Pretrained Text-to-Image Transformers for Story Continuation
        #### Adyasha Maharana, Darryl Hannan and Mohit Bansal (UNC Chapel Hill)
        #### Published at ECCV 2022

        StoryDALL-E \[1\] is a model trained for the task of Story Visualization \[2\].
        The model receives a sequence of captions as input and generates a corresponding sequence of images which form a visual story depicting the narrative in the captions.
        We modify this task to enable the model to receive an initial scene as input, which can be used as a cue for the setting of the story and also for generating unseen or low-resource visual elements. We refer to this task as Story Continuation \[1\].
        StoryDALL-E is based on the [mega-dalle](https://github.com/borisdayma/dalle-mini) model and is adapted from the corresponding [PyTorch codebase](https://github.com/kuprel/min-dalle).
        **This model has been developed for academic purposes only.**

        \[[Paper](http://arxiv.org/abs/2209.06192)\] \[[Code](https://github.com/adymaharana/storydalle)\] \[[Model Card](https://github.com/adymaharana/storydalle/blob/main/MODEL_CARD.MD)\]

        ### Dataset
        This model has been trained using the Pororo story visualization dataset \[1\].
        The data was adapted from the popular cartoon series *Pororo the Little Penguin* and originally released by \[2\].
        The Pororo dataset contains 9 recurring characters, as shown below, in the decreasing order of their frequency in the training data.

        The training dataset contains nearly 10,000 samples in the training set. Most of the scenes occur in a snowy village, surrounded by hills, trees and houses. A few episodes are located in gardens or water bodies. All the captions are in the English language and predominantly contain verbs in the present tense. Additionally, the training of this model starts from the pretrained checkpoint of mega-dalle, which is trained on the Conceptual Captions dataset.

        ### Intended Use
        This model is intended for generating visual stories containing the 9 characters in the Pororo dataset. This version of the StoryDALL-E model is reasonable at the following scenarios:
        * Frames containing a single character.
        * Overtly visual actions such as *making cookies*, *walking*, *reading a book*, *sitting*.
        * Scenes taking place in snowy settings, indoors and gardens.
        * Visual stories containing 1-3 characters across all frames.
        * Scene transitions e.g. from day to night.
        * Moderately capable of generating semantic concepts that do not appear in the story continuation dataset, such as *doughnut* and *lion*.

        Here are some examples of generated visual stories for the above-mentioned settings.

        Due to the small training dataset size for story visualization, the model has poor generalization to some unseen settings. The model struggles to generate coherent images in the following scenarios.
        * Multiple characters in a frame.
        * Non-visual actions such as *compliment*.
        * Characters that are infrequent in the training dataset e.g. Rody, Harry.
        * Background locations that are not found in the cartoon e.g. a busy city.
        * Color-based descriptions for objects.
        * Completely new characters based on textual descriptions.

        In the following demo, four or fewer captions can be entered in the `caption` text fields for the visual story.
        Select a `source` frame based on the character that is predominant in your visual story.
        `top_k` refers to the number of highest probability vocabulary tokens to keep for top-k-filtering.
        Only the most probable tokens with probabilities that add up to `top_p` or higher are kept for generation.
        Set `supercondition` to True to enable generation using a null hypothesis.
        Select between 1-4 `n_candidates` to generate a diverse set of stories for the given captions.

        Feel free to send feedback to adyasha@cs.unc.edu.
        ''')

        with gr.Row():
            with gr.Column():
                caption_1 = gr.Textbox(label="Caption 1", value='Pororo is reading a book.')
                caption_2 = gr.Textbox(label="Caption 2", value='Pororo is sleeping on the couch.')
                caption_3 = gr.Textbox(label="Caption 3", value='Pororo wakes up in the middle of the night in his bed.')
                caption_4 = gr.Textbox(label="Caption 4", value='Pororo is in his bedroom and looks terrified.')
                source = gr.Radio(["Pororo", "Loopy", "Crong", "Poby", "Eddy", "Petty", "Tongtong", "Rody", "Harry"],
                                  label="Source", value="Pororo")
                top_k = gr.Slider(16, 128, label="top_k", value=32)
                top_p = gr.Slider(0.01, 1.0, label="top_p", value=0.2)
                supercondition = gr.Checkbox(value=False, label='supercondition')
                n_candidates = gr.Dropdown([1, 2, 3, 4], value=4, label='n_candidates')

                with gr.Row():
                    submit_btn = gr.Button("Submit")

            with gr.Column():
                with gr.Row():
                    frame_1_label = gr.Button("Frame 1")
                    frame_2_label = gr.Button("Frame 2")
                    frame_3_label = gr.Button("Frame 3")
                    frame_4_label = gr.Button("Frame 4")
                output = gr.Image(label="", elem_id='output')

        submit_btn.click(fn=predict,
                         inputs=[caption_1, caption_2, caption_3, caption_4, source, top_k, top_p,
                                 n_candidates, supercondition],
                         outputs=output)

        gr.Markdown('''
        ### References

        \[1\] Maharana, Adyasha, et al. "StoryDALL-E: Adapting Pretrained Text-to-Image Transformers for Story Continuation." ECCV. 2022.

        \[2\] Li, Yitong, et al. "Storygan: A sequential conditional gan for story visualization." Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition. 2019.

        \[3\] Kim, Kyung-Min, et al. "DeepStory: video story QA by deep embedded memory networks." Proceedings of the 26th International Joint Conference on Artificial Intelligence. 2017.

        \[4\] Sharma, Piyush, et al. "Conceptual captions: A cleaned, hypernymed, image alt-text dataset for automatic image captioning." Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers). 2018.
        ''')

    demo.launch(share=True)


if __name__ == "__main__":
    # Hard-coded demo configuration, routed through the full argument parser so
    # that StoryDalle.from_pretrained sees every attribute it expects.
    args_list = ['--model_name_or_path', './ckpt/25.pth',
                 '--prefix_model_name_or_path', './1.3B/',
                 '--dataset_name', 'pororo',
                 '--tuning_mode', 'story',
                 '--preseqlen', '32',
                 '--condition',
                 '--story_len', '4',
                 '--sent_embed', '512',
                 '--prefix_dropout', '0.2',
                 '--data_dir', '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/',
                 '--dataloader_num_workers', '1',
                 '--do_eval',
                 '--per_gpu_eval_batch_size', '16',
                 '--mode', 'story']

    parser = argparse.ArgumentParser(description='arguments for training/evaluating prefix-tuning DALLE')

    # Model Arguments
    parser.add_argument('--model_name_or_path', type=str, default=None,
                        help='The model checkpoint for weights initialization.')
    parser.add_argument('--prefix_model_name_or_path', type=str, default=None,
                        help='The prefix model checkpoint for weights initialization.')
    parser.add_argument('--prefix_mode', type=str, default='activation', help='activation or embedding')
    parser.add_argument('--preseqlen', type=int, default=0, help='how many tokens of prefix should we include.')
    parser.add_argument('--optim_prefix', action="store_true",
                        help='set to True if optimizing prefix directly; no if through amortized function')
    parser.add_argument('--tuning_mode', type=str, default='prefixtune', help='prefixtune or finetune')
    parser.add_argument('--top_k_layers', type=int, default=2,
                        help='In finetuning setting, if we only tune the top k layers.')
    parser.add_argument('--parameterize_mode', type=str, default='mlp',
                        help="mlp or emb to parametrize when we optimize for the embeddings.")
    parser.add_argument('--prefix_dropout', type=float, default=0.0, help='dropout rate for the prefix tuning model.')
    parser.add_argument('--teacher_dropout', type=float, default=0.0, help='dropout rate for the teacher model.')
    parser.add_argument('--init_random', action="store_true", help="set True if initializing random embeddings")
    parser.add_argument('--init_shallow', action="store_true", help="set True if not using reparameterization")
    parser.add_argument('--init_shallow_word', type=bool, default=False,
                        help="set True if init_shallow and specify words")
    parser.add_argument('--replay_buffer', action="store_true", help="set True if using replay buffer in training")
    parser.add_argument('--gumbel', action="store_true", help="set True if using the gumbel softmax in training")
    # FIX: a hidden dimension is an integer count of units, not a float.
    parser.add_argument('--hidden_dim_prefix', type=int, default=512,
                        help="hidden dim of MLP for generating prefix")

    # Data Arguments
    parser.add_argument('--dataset_name', type=str, default='pororo', help="dataset name")
    parser.add_argument('--data_dir', type=str, default=None, help="Path to data directory")
    parser.add_argument('--lowdata_token', type=str, default='story',
                        help="The token to be prepended at initialization time.")
    parser.add_argument('--use_lowdata_token', type=bool, default=True,
                        help="Whether we should use the lowdata token for prefix-tuning")
    parser.add_argument('--train_embeddings', action="store_true", help="Whether to train word embeddings")
    parser.add_argument('--train_max_target_length', type=int, default=100,
                        help='the max target length for training data.')
    parser.add_argument('--val_max_target_length', type=int, default=100, help='the max target length for dev data.')
    parser.add_argument('--dataloader_num_workers', type=int, default=8, help='number of workers when loading data')

    # new arguments for story (copy-pasted help strings corrected)
    parser.add_argument('--prompt', action="store_true", help="set True if using prompts in StoryDALLE")
    parser.add_argument('--story_len', type=int, default=4, help='number of frames in a visual story.')
    parser.add_argument('--sent_embed', type=int, default=384, help='dimension of the caption sentence embeddings.')
    parser.add_argument('--condition', action="store_true",
                        help="set True if conditioning on the source frame in StoryDALLE")
    parser.add_argument('--clip_embed', action="store_true",
                        help="set True if using CLIP embeddings for conditioning in StoryDALLE")

    # Training Arguments
    parser.add_argument('--output_dir', type=str, default=None, help="Path to output directory")
    parser.add_argument("--do_train", action="store_true", help="Whether to run training.")
    parser.add_argument("--do_eval", action="store_true", help="Whether to run evaluation.")
    parser.add_argument("--do_test", action="store_true", help="Whether to run test.")
    parser.add_argument('--seed', type=int, default=42, help='seed for reproducibility')
    parser.add_argument("--overwrite_output_dir", action="store_true", help="Whether to overwrite output dir.")
    parser.add_argument("--per_gpu_train_batch_size", default=8, type=int, help="Batch size per GPU/CPU for training.")
    parser.add_argument(
        "--per_gpu_eval_batch_size", default=8, type=int, help="Batch size per GPU/CPU for evaluation."
    )
    parser.add_argument(
        "--gradient_accumulation_steps",
        type=int,
        default=1,
        help="Number of updates steps to accumulate before performing a backward/update pass.",
    )

    # FIX: help string previously read "mval or test."
    parser.add_argument('--mode', type=str, default='val', help="val or test.")

    parser.add_argument("--learning_rate", default=5e-5, type=float, help="The initial learning rate for Adam.")
    # FIX: help string previously read "Weight deay".
    parser.add_argument("--weight_decay", default=0.0, type=float, help="Weight decay if we apply some.")
    parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.")
    parser.add_argument("--max_grad_norm", default=1.0, type=float, help="Max gradient norm.")
    parser.add_argument(
        "--num_train_epochs", default=3, type=int, help="Total number of training epochs to perform."
    )
    parser.add_argument(
        "--max_steps",
        default=-1,
        type=int,
        help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
    )
    parser.add_argument("--warmup_steps", default=0, type=int, help="Linear warmup over warmup_steps.")
    parser.add_argument("--logging_steps", type=int, default=50, help="Log every X updates steps.")
    parser.add_argument("--save_steps", type=int, default=50, help="Save checkpoint every X updates steps.")
    parser.add_argument(
        "--eval_all_checkpoints",
        action="store_true",
        help="Evaluate all checkpoints starting with the same prefix as model_name ending and ending with step number",
    )
    parser.add_argument("--no_cuda", action="store_true", help="Avoid using CUDA when available")
    parser.add_argument("--local_rank", type=int, default=-1, help="For distributed training: local_rank")
    parser.add_argument(
        "--fp16",
        action="store_true",
        help="Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit",
    )

    parser.add_argument("--debug", action="store_true", help="Whether to debug the demo.")

    args = parser.parse_args(args_list)

    main(args)
b/dalle/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dalle/__pycache__/__init__.cpython-38.pyc b/dalle/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34bc3bf5d4235efc211df0cad50d4941d93a1d5d Binary files /dev/null and b/dalle/__pycache__/__init__.cpython-38.pyc differ diff --git a/dalle/__pycache__/trainer_prefix.cpython-38.pyc b/dalle/__pycache__/trainer_prefix.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9c94501d621c92f4a945009143c8ea29bd1cc5e Binary files /dev/null and b/dalle/__pycache__/trainer_prefix.cpython-38.pyc differ diff --git a/dalle/models/__init__.py b/dalle/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3dee465bf5b7ac96b055d00d8f1aa6918e86a24f --- /dev/null +++ b/dalle/models/__init__.py @@ -0,0 +1,1462 @@ +# ------------------------------------------------------------------------------------ +# Minimal DALL-E +# Copyright (c) 2021 KakaoBrain. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 [see LICENSE for details] +# ------------------------------------------------------------------------------------ + +import os +import torch +import torch.nn as nn +import pytorch_lightning as pl +from typing import Optional, Tuple, Union +from omegaconf import OmegaConf +from torch.cuda.amp import autocast +from torch.optim.lr_scheduler import CosineAnnealingLR, LambdaLR +from torch.nn import functional as F +from .stage1.vqgan import VQGAN +from .stage2.transformer import Transformer1d, iGPT +from .stage2.layers import Block +from .. 
import utils
from ..utils.config import get_base_config
from ..utils.sampling import sampling, sampling_igpt, get_positional_encoding, sampling_prefix, sampling_conditional
from ..utils.utils import save_image
from .tokenizer import build_tokenizer
import numpy as np
from .stage2.layers import CrossAttentionLayer

# Download URL for the released pretrained checkpoint, keyed by model name.
_MODELS = {
    'minDALL-E/1.3B': 'https://arena.kakaocdn.net/brainrepo/models/minDALL-E/57b008f02ceaa02b779c8b7463143315/1.3B.tar.gz'
}

class Dalle(pl.LightningModule):
    """Two-stage text-to-image model.

    Stage 1 is a VQGAN that maps 256x256 images to/from a 16x16 grid of
    discrete codes; stage 2 is an autoregressive Transformer over the
    concatenation of text tokens and image codes.  Training optimizes the
    sum of image-code and next-text-token cross-entropy losses.
    """

    def __init__(self,
                 config: OmegaConf) -> None:
        super().__init__()
        self.tokenizer = None  # populated later by from_pretrained()
        self.stage1 = VQGAN(n_embed=config.stage1.n_embed,
                            embed_dim=config.stage1.embed_dim,
                            hparams=config.stage1.hparams)
        self.stage2 = Transformer1d(vocab_size_txt=config.stage2.vocab_size_txt,
                                    vocab_size_img=config.stage2.vocab_size_img,
                                    hparams=config.stage2.hparams)
        self.config = config
        self.config_stage1 = config.stage1
        self.config_stage2 = config.stage2
        self.config_dataset = config.dataset

        # # make the parameters in stage 1 not trainable
        # self.stage1.eval()
        # for p in self.stage1.parameters():
        #     p.requires_grad = False

    @classmethod
    def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]:
        """Build a Dalle model plus its merged config from a checkpoint dir.

        `args.model_name_or_path` must contain config.yaml, a tokenizer
        directory, and stage1_last.ckpt / stage2_last.ckpt.  Returns
        (model, merged_config).
        """

        path = args.model_name_or_path
        config_new = OmegaConf.load(os.path.join(path, 'config.yaml'))
        if args.do_train:
            config_base = get_base_config('finetuning')
            config_update = OmegaConf.merge(config_base, config_new)
            # CLI flags override any matching optimizer/experiment config key.
            for key, val in vars(args).items():
                if key in config_update.optimizer.keys():
                    OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False)
                if key in config_update.experiment.keys():
                    OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False)
        else:
            config_base = get_base_config('default')
            config_update = OmegaConf.merge(config_base, config_new)

        model = cls(config_update)
        model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'),
                                          context_length=model.config_dataset.context_length,
                                          lowercase=True,
                                          dropout=None)

        print("Loading models from checkpoint %s" % path)

        if hasattr(args, 'dalle_path') and args.dalle_path and args.dalle_path.endswith('.pth'):
            # A single fine-tuned .pth containing the whole two-stage model.
            model.load_state_dict(torch.load(args.dalle_path)["model_state_dict"])
        else:
            # Original per-stage checkpoints.
            model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt'))
            model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt'))

        return model, config_update


    @torch.no_grad()
    def sampling(self,
                 prompt: Union[str, torch.LongTensor],
                 top_k: int = 256,
                 top_p: Optional[float] = None,
                 softmax_temperature: float = 1.0,
                 num_candidates: int = 96,
                 device: str = 'cuda:0',
                 use_fp16: bool = True) -> torch.FloatTensor:
        """Generate `num_candidates` images for a text prompt.

        `prompt` may be a raw string (tokenized here) or pre-tokenized ids.
        Returns pixels in [0, 1] of shape [num_candidates, 3, 256, 256].
        """
        self.stage1.eval()
        self.stage2.eval()

        if type(prompt) == str:
            tokens = self.tokenizer.encode(prompt)
            tokens = torch.LongTensor(tokens.ids)
        else:
            tokens = prompt
        # Repeat the single prompt so each candidate is sampled independently.
        tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0)

        # Check if the encoding works as intended
        # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0])

        tokens = tokens.to(device)
        codes = sampling(self.stage2,
                         tokens,
                         top_k=top_k,
                         top_p=top_p,
                         softmax_temperature=softmax_temperature,
                         use_fp16=use_fp16)
        codes = codes.view(num_candidates, 16, 16)  # [B, 16, 16]
        # VQGAN decoder outputs roughly [-1, 1]; rescale to [0, 1].
        pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1)  # [B, 256, 256]
        return pixels

    def forward(self,
                images: torch.FloatTensor,
                texts: Optional[torch.LongTensor],
                past=None
                ) -> tuple:
        """Teacher-forced pass; returns (logits_img, logits_txt, codes)."""
        B, C, H, W = images.shape
        with torch.no_grad():
            with autocast(enabled=False):
                # Stage-1 encoding is frozen: codes act as targets, not params.
                codes = self.stage1.get_codes(images).detach()
        pos_enc_tokens = get_positional_encoding(texts, mode='1d')
        codes = codes.clone().detach()
        pos_enc_code = get_positional_encoding(codes, mode='1d')
        # codes = codes.unsqueeze(-1)
        # pos_enc_code = pos_enc_code.unsqueeze(-1)
        logits_img, logits_txt = self.stage2(codes, texts, pos_enc_code, pos_enc_tokens, past)
        return logits_img, logits_txt, codes

    def training_step(self, batch, batch_idx):
        images, texts = batch
        logits_img, logits_txt, codes = self(images, texts)

        # Image loss targets the stage-1 codes; text loss targets the
        # next token (hence the texts[:, 1:] shift).
        loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1))
        loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1))
        self.log("train/loss_img", loss_img, on_step=True, on_epoch=True, prog_bar=False, logger=True)
        self.log("train/loss_txt", loss_txt, on_step=True, on_epoch=True, prog_bar=False, logger=True)
        return loss_img + loss_txt

    def validation_step(self, batch, batch_idx):
        images, texts = batch
        logits_img, logits_txt, codes = self(images, texts)
        # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape)

        loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1))
        loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1))
        self.log("val/loss_img", loss_img, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        self.log("val/loss_txt", loss_txt, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        return loss_img + loss_txt

    def configure_optimizers(self):
        """AdamW with a linear decay-to-zero LR schedule over max_steps."""
        assert self.config.optimizer.opt_type == 'adamW'
        # assert self.config.optimizer.sched_type == 'cosine'

        opt = torch.optim.AdamW(self.parameters(),
                                lr=self.config.optimizer.learning_rate,
                                betas=self.config.optimizer.betas,
                                weight_decay=self.config.optimizer.weight_decay)
        # sched = CosineAnnealingLR(opt,
        #                           T_max=self.config.optimizer.max_steps,
        #                           eta_min=self.config.optimizer.min_lr)

        def lr_lambda(current_step: int):
            # Linear decay from 1.0 at step 0 to 0.0 at max_steps.
            return max(
                0.0, float(self.config.optimizer.max_steps - current_step) / float(max(1, self.config.optimizer.max_steps))
            )

        sched = LambdaLR(opt, lr_lambda)
        sched = {
            'scheduler': sched,
            'name': 'linear'
        }
        return [opt], [sched]

    def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx, optimizer_closure,
                       on_tpu=False, using_native_amp=False, using_lbfgs=False):
        # Manually stepping the scheduler per-batch so LR decays each step.
        optimizer.step(closure=optimizer_closure)
        self.lr_schedulers().step()
        self.log("lr", self.lr_schedulers().get_last_lr()[0], on_step=True, on_epoch=False, prog_bar=True, logger=True)

    def on_epoch_start(self):
        # Keep the (frozen) VQGAN in eval mode for the whole epoch.
        self.stage1.eval()


class ImageGPT(pl.LightningModule):
    """Class-conditional (or unconditional) autoregressive image model.

    A frozen VQGAN encodes images to discrete codes; an iGPT transformer
    models the code sequence, optionally conditioned on a class label via
    a learned start-of-sequence embedding.
    """

    def __init__(self,
                 config: OmegaConf) -> None:
        super().__init__()
        self.stage1 = VQGAN(n_embed=config.stage1.n_embed,
                            embed_dim=config.stage1.embed_dim,
                            hparams=config.stage1.hparams)
        self.stage2 = iGPT(vocab_size_img=config.stage2.vocab_size_img,
                           use_cls_cond=config.stage2.use_cls_cond,
                           hparams=config.stage2.hparams)
        self.config = config
        self.use_cls_cond = config.stage2.use_cls_cond

        # make the parameters in stage 1 not trainable
        self.stage1.eval()
        for p in self.stage1.parameters():
            p.requires_grad = False

    @classmethod
    def from_pretrained(cls,
                        path_upstream: str,
                        path_downstream: str) -> Tuple[nn.Module, OmegaConf]:
        """Load stage-1/2 weights from `path_upstream`, config from `path_downstream`."""
        config_base = get_base_config(use_default=False)
        config_down = OmegaConf.load(path_downstream)
        config_down = OmegaConf.merge(config_base, config_down)

        model = cls(config_down)
        # stage2 is loaded non-strictly: downstream heads may differ.
        model.stage1.from_ckpt(os.path.join(path_upstream, 'stage1_last.ckpt'), strict=True)
        model.stage2.from_ckpt(os.path.join(path_upstream, 'stage2_last.ckpt'), strict=False)
        return model, config_down

    def sample(self,
               cls_idx: Optional[int] = None,
               top_k: int = 256,
               top_p: Optional[float] = None,
               softmax_temperature: float = 1.0,
               num_candidates: int = 16,
               device: str = 'cuda:0',
               use_fp16: bool = True,
               is_tqdm: bool = True) -> torch.FloatTensor:
        """Sample `num_candidates` images, optionally for class `cls_idx`.

        Returns pixels in [0, 1] of shape [num_candidates, 3, 256, 256].
        """
        self.stage1.eval()
        self.stage2.eval()

        if cls_idx is None:
            # Unconditional: learned SOS embedding, repeated per candidate.
            sos = self.stage2.sos.repeat(num_candidates, 1, 1)
        else:
            # Conditional: embed the class index as the SOS token.
            sos = torch.LongTensor([cls_idx]).to(device=device)
            sos = sos.repeat(num_candidates)
            sos = self.stage2.sos(sos).unsqueeze(1)

        codes = sampling_igpt(self.stage2,
                              sos=sos,
                              top_k=top_k,
                              top_p=top_p,
                              softmax_temperature=softmax_temperature,
                              use_fp16=use_fp16,
                              is_tqdm=is_tqdm)
        codes = codes.view(num_candidates, 16, 16)  # [B, 16, 16]
        pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1)  # [B, 256, 256]
        return pixels

    def forward(self,
                images: torch.FloatTensor,
                labels: Optional[torch.LongTensor] = None) -> torch.FloatTensor:
        """Teacher-forced pass; returns (logits, codes)."""
        B, C, H, W = images.shape
        with torch.no_grad():
            with autocast(enabled=False):
                codes = self.stage1.get_codes(images).detach()
        logits = self.stage2(codes, labels)
        return logits, codes

    def training_step(self, batch, batch_idx):
        images, labels = batch
        logits, codes = self(images, labels=labels if self.use_cls_cond else None)
        loss = F.cross_entropy(logits.view(-1, logits.shape[-1]), codes.view(-1))
        self.log("train/loss", loss, on_step=True, on_epoch=True, prog_bar=False, logger=True)
        return loss

    def validation_step(self, batch, batch_idx):
        images, labels = batch
        logits, codes = self(images, labels=labels if self.use_cls_cond else None)
        loss = F.cross_entropy(logits.view(-1, logits.shape[-1]), codes.view(-1))
        self.log("val/loss", loss, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        return loss

    def configure_optimizers(self):
        """AdamW + cosine annealing, as required by the config."""
        assert self.config.optimizer.opt_type == 'adamW'
        assert self.config.optimizer.sched_type == 'cosine'

        opt = torch.optim.AdamW(self.parameters(),
                                lr=self.config.optimizer.base_lr,
                                betas=self.config.optimizer.betas,
                                weight_decay=self.config.optimizer.weight_decay)
        sched = CosineAnnealingLR(opt,
                                  T_max=self.config.optimizer.max_steps,
                                  eta_min=self.config.optimizer.min_lr)
        sched = {
            'scheduler': sched,
            'name': 'cosine'
        }
        return [opt], [sched]

    def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx, optimizer_closure,
                       on_tpu=False, using_native_amp=False, using_lbfgs=False):
        # Step the scheduler per-batch and expose the current LR in the bar.
        optimizer.step(closure=optimizer_closure)
        self.lr_schedulers().step()
        self.log("lr",
                 self.lr_schedulers().get_last_lr()[0], on_step=True, on_epoch=False, prog_bar=True, logger=True)

    def on_epoch_start(self):
        # Keep the frozen VQGAN in eval mode for the whole epoch.
        self.stage1.eval()


class PromptDalle(Dalle):
    """Dalle with prompt tuning: a small learned embedding + MLP produce
    `preseqlen` soft prompt vectors that are prepended to the stage-2
    transformer input; the pretrained backbone is reused as-is."""
    def __init__(self, config):
        super().__init__(config)
        print('Initializing the PromptTuning model')

        self.config = config
        self.n_embd = config.stage2.hparams.embed_dim
        self.preseqlen = config.prompt.preseqlen
        self.prefix_dropout = config.prompt.prefix_dropout

        # DIFFERENT PARAMETRIZATION:

        print('[Full prompt-tuning Setting :) ]')
        # Fixed index sequence 0..preseqlen-1 that selects the prompt rows.
        self.input_tokens = torch.arange(self.preseqlen).long()
        self.wte = nn.Embedding(self.preseqlen, self.n_embd)
        # Reparametrization MLP: embedding -> tanh -> embedding.
        self.control_trans = nn.Sequential(
            nn.Linear(self.n_embd, self.n_embd),
            nn.Tanh(),
            nn.Linear(self.n_embd, self.n_embd))
        self.get_prompt = self.get_prompt_p5
        self.dropout = nn.Dropout(self.prefix_dropout)

        ###### NUM PARAMS #########
        total_param = 0
        for name, param in self.named_parameters():
            # print(param.shape)
            total_param += param.numel()
        print('Total parameters is {}'.format(total_param))


    @classmethod
    def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]:
        """Build a PromptDalle: backbone weights come from the pretrained
        minDALL-E checkpoint (downloaded if needed); optionally resume the
        full prompt-tuned model from `args.model_name_or_path`."""

        # if not args.model_name_or_path:
        #     args.model_name_or_path = args.prefix_model_name_or_path

        path = args.prefix_model_name_or_path
        path = _MODELS[path] if path in _MODELS else path
        path = utils.realpath_url_or_path(path, root=os.path.expanduser("~/.cache/minDALL-E"))

        config_base = get_base_config('prompt_tuning')
        config_new = OmegaConf.load(os.path.join(path, 'config.yaml'))
        config_update = OmegaConf.merge(config_base, config_new)

        # CLI flags override matching prompt/optimizer/experiment keys.
        for key, val in vars(args).items():
            if key in config_update.prompt.keys():
                OmegaConf.update(config_update, "prompt.%s" % key, val, merge=False)
            if key in config_update.optimizer.keys():
                OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False)
            if key in config_update.experiment.keys():
                OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False)

        model = cls(config_update)
        model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'),
                                          context_length=model.config_dataset.context_length,
                                          lowercase=True,
                                          dropout=None)

        if args.model_name_or_path:
            print("Loading model from pretrained checkpoint %s" % args.model_name_or_path)
            # model.from_ckpt(args.model_name_or_path)
            # Checkpoint key differs between Lightning ('state_dict') and
            # manual saves ('model_state_dict'); try both.
            try:
                model.load_state_dict(torch.load(args.model_name_or_path)['state_dict'])
            except KeyError:
                model.load_state_dict(torch.load(args.model_name_or_path)['model_state_dict'])

        else:
            print("Loading models from checkpoint %s" % path)
            model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt'))
            model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt'))

        return model, config_update

    def get_prompt_p5(self, bsz=None, eval=False):
        """Return the soft prompt, shape [bsz, preseqlen, n_embd]."""
        input_tokens = self.input_tokens.unsqueeze(0).expand(bsz, -1).to(self.device)
        temp_control = self.wte(input_tokens)
        past_key_values = self.control_trans(temp_control)  #bsz, seqlen, layer*emb
        if not eval:
            # Dropout only during training.
            past_key_values = self.dropout(past_key_values)
        return past_key_values

    def forward(self,
                images: torch.FloatTensor,
                texts: Optional[torch.LongTensor],
                **kwargs,
                ):
        """Teacher-forced pass with soft prompt; returns (logits_img, logits_txt, codes)."""

        #{"input_ids": batch, "labels": labels, 'src_attn': src_attn, 'tgt_attn':tgt_attn, 'src':src}

        B, C, H, W = images.shape
        prompt = self.get_prompt(bsz=B)
        pos_enc_prompt = get_positional_encoding(self.input_tokens.unsqueeze(0).expand(B, -1).to(self.device), mode='1d')

        # if self.mode_para == 2 and src_attn is not None and tgt_attn is not None:
        #     attention_mask = torch.cat([src_attn, tgt_attn], dim=1)


        with torch.no_grad():
            with autocast(enabled=False):
                codes = self.stage1.get_codes(images).detach()

        pos_enc_tokens = get_positional_encoding(texts, mode='1d')
        codes = codes.clone().detach()
        pos_enc_code = get_positional_encoding(codes, mode='1d')
        # codes = codes.unsqueeze(-1)
        # pos_enc_code = pos_enc_code.unsqueeze(-1)
        # print(images.shape, codes.shape, texts.shape)
        logits_img, logits_txt = self.stage2(codes, texts, pos_enc_code, pos_enc_tokens, prompt=prompt, pos_prompt=pos_enc_prompt)
        return logits_img, logits_txt, codes


    @torch.no_grad()
    def sampling(self,
                 tokens: torch.LongTensor,
                 prompt: torch.FloatTensor,
                 top_k: int = 256,
                 top_p: Optional[float] = None,
                 softmax_temperature: float = 1.0,
                 num_candidates: int = 96,
                 device: str = 'cuda:0',
                 use_fp16: bool = True,
                 labels = None) -> torch.FloatTensor:
        """Sample images conditioned on pre-tokenized text plus the soft prompt.

        NOTE(review): `labels` is accepted but unused here — presumably kept
        for signature parity with the prefix model; confirm before removing.
        """
        self.stage1.eval()
        self.stage2.eval()

        # tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0)

        tokens = tokens.to(device)
        pos_enc_prompt = get_positional_encoding(self.input_tokens.unsqueeze(0).expand(num_candidates, -1).to(self.device), mode='1d')

        codes = sampling(self.stage2,
                         tokens,
                         top_k=top_k,
                         top_p=top_p,
                         softmax_temperature=softmax_temperature,
                         use_fp16=use_fp16,
                         prompt=prompt,
                         pos_prompt=pos_enc_prompt)

        codes = codes.view(-1, 16, 16)  # [B, 16, 16]
        pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1)  # [B, 256, 256]
        return pixels


    @torch.no_grad()
    def predict_step(self, batch, batch_idx, return_images=False):
        """Reconstruct via argmax (sanity check) and sample 5 candidates per text."""
        orig_images, texts = batch

        # extra for checks: greedy argmax reconstruction from the logits.
        logits_img, logits_txt, codes = self(orig_images, texts)
        pred = torch.argmax(logits_img.view(-1, logits_img.shape[-1]), dim=-1)
        bs = orig_images.shape[0]
        pred = pred.view(bs, 16, 16)  # [B, 16, 16]
        pixels = torch.clamp(self.stage1.decode_code(pred) * 0.5 + 0.5, 0, 1).cpu().numpy()  # [B, 256, 256]
        pixels = np.transpose(pixels, (0, 2, 3, 1))

        # print(texts.shape, orig_images.shape)
        prompt = self.get_prompt(bsz=5, eval=True)

        images = []
        for i, t in enumerate(texts):
            pixels = self.sampling(t, prompt, top_k=16, num_candidates=5, labels=codes[i]).cpu().numpy()
            pixels = np.transpose(pixels, (0, 2, 3, 1))
            images.append(pixels)

        if return_images:
            return
            images
        else:
            # NOTE(review): `pixels` here is the last loop iteration's sample
            # batch, not the argmax reconstruction — confirm this is intended.
            save_image(orig_images, pixels, './out/images/pororo_prompt', batch_idx+10)
            save_image(orig_images, images, './out/images/pororo_prompt', batch_idx)


class PrefixTuningDalle(Dalle):
    """Dalle with prefix tuning: a small embedding + MLP generate per-layer
    key/value prefixes (`preseqlen` positions for each of the stage-2
    transformer's layers) while the pretrained backbone stays fixed."""
    def __init__(self, config):
        super().__init__(config)
        print('Initializing the PrefixTuning model')

        self.config = config

        # Shapes the generated prefix must match in stage 2.
        self.match_n_layer = config.stage2.hparams.n_layers
        self.match_n_head = config.stage2.hparams.n_heads
        self.match_n_embd = config.stage2.hparams.embed_dim // config.stage2.hparams.n_heads
        self.n_embd = config.stage2.hparams.embed_dim

        self.optim_prefix = config.prefix.optim_prefix
        self.preseqlen = config.prefix.preseqlen
        self.prefix_dropout = config.prefix.prefix_dropout
        self.init_random = config.prefix.init_random
        self.hidden_dim_prefix = config.prefix.hidden_dim_prefix

        self.lowdata_token = config.prefix.lowdata_token
        self.init_shallow = config.prefix.init_shallow
        self.init_shallow_word = config.prefix.init_shallow_word
        self.mode_para = 0

        print('PrefixTuning')
        print('preseqlen is {}, optimizing the prefix directly'.format(self.preseqlen))

        # DIFFERENT PARAMETRIZATION:

        print('[Full prefix-tuning Setting :) ]')
        self.input_tokens = torch.arange(self.preseqlen).long()
        self.wte = nn.Embedding(self.preseqlen, self.n_embd)
        # MLP maps each prefix embedding to one key+value pair per layer.
        self.control_trans = nn.Sequential(
            nn.Linear(self.n_embd, self.hidden_dim_prefix),
            nn.Tanh(),
            nn.Linear(self.hidden_dim_prefix, self.match_n_layer * 2 * self.n_embd))
        self.get_prompt = self.get_prompt_p5
        self.dropout = nn.Dropout(self.prefix_dropout)

        ###### NUM PARAMS #########
        total_param = 0
        for name, param in self.named_parameters():
            # print(param.shape)
            total_param += param.numel()
        print('Total parameters is {}'.format(total_param))


    @classmethod
    def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]:
        """Build a PrefixTuningDalle: backbone from the pretrained minDALL-E
        checkpoint; optionally resume the tuned model from
        `args.model_name_or_path`."""

        # if not args.model_name_or_path:
        #     args.model_name_or_path = args.prefix_model_name_or_path

        path = args.prefix_model_name_or_path
        path = _MODELS[path] if path in _MODELS else path
        path = utils.realpath_url_or_path(path, root=os.path.expanduser("~/.cache/minDALL-E"))

        config_base = get_base_config('prefixtuning')
        config_new = OmegaConf.load(os.path.join(path, 'config.yaml'))
        config_update = OmegaConf.merge(config_base, config_new)

        # CLI flags override matching prefix/optimizer/experiment keys.
        for key, val in vars(args).items():
            if key in config_update.prefix.keys():
                OmegaConf.update(config_update, "prefix.%s" % key, val, merge=False)
            if key in config_update.optimizer.keys():
                OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False)
            if key in config_update.experiment.keys():
                OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False)

        model = cls(config_update)
        model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'),
                                          context_length=model.config_dataset.context_length,
                                          lowercase=True,
                                          dropout=None)

        if args.model_name_or_path:
            print("Loading model from pretrained checkpoint %s" % args.model_name_or_path)
            # model.from_ckpt(args.model_name_or_path)
            # Lightning saves under 'state_dict'; manual saves use
            # 'model_state_dict' — accept either.
            try:
                model.load_state_dict(torch.load(args.model_name_or_path)['state_dict'])
            except KeyError:
                model.load_state_dict(torch.load(args.model_name_or_path)['model_state_dict'])

        else:
            print("Loading models from checkpoint %s" % path)
            model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt'))
            model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt'))

        return model, config_update

    def get_prompt_p5(self, bsz=None, eval=False):
        """Return per-layer prefix key/values, split into (key, value) pairs.

        Output: tuple of match_n_layer tensors, each
        [2, bsz, n_head, preseqlen, head_dim].
        """
        input_tokens = self.input_tokens.unsqueeze(0).expand(bsz, -1).to(self.device)
        temp_control = self.wte(input_tokens)
        past_key_values = self.control_trans(temp_control)  #bsz, seqlen, layer*emb
        bsz, seqlen, _ = past_key_values.shape
        past_key_values = past_key_values.view(bsz, seqlen, self.match_n_layer * 2, self.match_n_head,
                                               self.match_n_embd)
        if not eval:
            past_key_values = self.dropout(past_key_values)
        # past_key_values = past_key_values.permute([2, 0, 3, 1, 4]).split(2)
        past_key_values = past_key_values.permute([2, 0, 3, 1, 4])
        # print(past_key_values.shape)
        return past_key_values.split(2)

    def forward(self,
                images: torch.FloatTensor,
                texts: Optional[torch.LongTensor],
                **kwargs,
                ):
        """Teacher-forced pass with the learned prefix; returns
        (logits_img, logits_txt, codes)."""

        #{"input_ids": batch, "labels": labels, 'src_attn': src_attn, 'tgt_attn':tgt_attn, 'src':src}

        B, C, H, W = images.shape

        # NOTE(review): both branches are identical; mode_para is fixed to 0
        # in __init__, so the condition currently has no effect.
        if self.mode_para == 2:
            past_key_values_prompt = self.get_prompt(bsz=B)
        else:
            past_key_values_prompt = self.get_prompt(bsz=B)

        # if self.mode_para == 2 and src_attn is not None and tgt_attn is not None:
        #     attention_mask = torch.cat([src_attn, tgt_attn], dim=1)


        with torch.no_grad():
            with autocast(enabled=False):
                codes = self.stage1.get_codes(images).detach()

        pos_enc_tokens = get_positional_encoding(texts, mode='1d')
        codes = codes.clone().detach()
        pos_enc_code = get_positional_encoding(codes, mode='1d')
        # codes = codes.unsqueeze(-1)
        # pos_enc_code = pos_enc_code.unsqueeze(-1)
        # print(images.shape, codes.shape, texts.shape)
        logits_img, logits_txt = self.stage2(codes, texts, pos_enc_code, pos_enc_tokens, past_key_values_prompt)
        return logits_img, logits_txt, codes

    @torch.no_grad()
    def sampling(self,
                 tokens: torch.LongTensor,
                 past: torch.FloatTensor,
                 top_k: int = 256,
                 top_p: Optional[float] = None,
                 softmax_temperature: float = 1.0,
                 num_candidates: int = 96,
                 device: str = 'cuda:0',
                 use_fp16: bool = True,
                 labels = None) -> torch.FloatTensor:
        """Sample `num_candidates` images for one token sequence, conditioning
        stage 2 on the precomputed prefix `past`."""
        self.stage1.eval()
        self.stage2.eval()

        # Flatten batch and head dims as sampling_prefix expects.
        if len(past.shape) == 6:
            n_layers, temp, bs, n_heads, seq_len, n_dim = past.shape
            past = past.view(n_layers, temp, bs*n_heads, seq_len, n_dim)

        tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0)

        # Check if the encoding works as intended
        # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0])

        tokens = tokens.to(device)
        codes = sampling_prefix(self.stage2,
                                tokens,
                                past,
                                top_k=top_k,
                                top_p=top_p,
                                softmax_temperature=softmax_temperature,
                                use_fp16=use_fp16,
                                labels = None if labels is None else labels.view(-1))

        # codes = sampling(self.stage2,
        #                  tokens,
        #                  top_k=top_k,
        #                  top_p=top_p,
        #                  softmax_temperature=softmax_temperature,
        #                  use_fp16=use_fp16)

        codes = codes.view(num_candidates, 16, 16)  # [B, 16, 16]
        pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1)  # [B, 256, 256]
        return pixels

    def training_step(self, batch, batch_idx):
        images, texts = batch
        logits_img, logits_txt, codes = self(images, texts)

        loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1))
        loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1))
        self.log("train/loss_img", loss_img, on_step=True, on_epoch=True, prog_bar=False, logger=True)
        self.log("train/loss_txt", loss_txt, on_step=True, on_epoch=True, prog_bar=False, logger=True)
        return loss_img + loss_txt

    def validation_step(self, batch, batch_idx):
        images, texts = batch
        logits_img, logits_txt, codes = self(images, texts)
        # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape)

        loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1))
        loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1))
        self.log("val/loss_img", loss_img, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        self.log("val/loss_txt", loss_txt, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        return loss_img + loss_txt

    @torch.no_grad()
    def predict_step(self, batch, batch_idx, return_images=False):
        """Greedy argmax reconstruction plus 5 sampled candidates per text."""
        orig_images, texts = batch

        # extra for checks
        logits_img, logits_txt, codes = self(orig_images, texts)
        pred = torch.argmax(logits_img.view(-1, logits_img.shape[-1]), dim=-1)
        bs = orig_images.shape[0]
        pred =
pred.view(bs, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(pred) * 0.5 + 0.5, 0, 1).cpu().numpy() # [B, 256, 256] + pixels = np.transpose(pixels, (0, 2, 3, 1)) + + + # print(texts.shape, orig_images.shape) + # concatenate the list of prompts (split by n_head) for better downstream processing + past_key_values_prompt = self.get_prompt(bsz=5, eval=True) + # print(past_key_values_prompt[0].shape, past_key_values_prompt[1].shape, len(past_key_values_prompt)) + past_key_values_prompt = torch.cat([x.unsqueeze(0) for x in past_key_values_prompt], dim=0) + n_layers, temp, bs, n_heads, seq_len, n_dim = past_key_values_prompt.shape + past_key_values_prompt = past_key_values_prompt.view(n_layers, temp, bs*n_heads, seq_len, n_dim) + # print(past_key_values_prompt.shape) + images = [] + for i, t in enumerate(texts): + pixels = self.sampling(t, past_key_values_prompt, top_k=16, num_candidates=5, labels=codes[i]).cpu().numpy() + pixels = np.transpose(pixels, (0, 2, 3, 1)) + images.append(pixels) + # images.extend([p for p in pixels]) + # print([i.shape for i in images]) + + + if return_images: + return images + else: + save_image(orig_images, pixels, './out/images/pororo_prefix', batch_idx+10) + save_image(orig_images, images, './out/images/pororo_prefix', batch_idx) + + +class ConditionalDalle(Dalle): + """Classification Head for transformer encoders""" + def __init__(self, config): + super().__init__(config) + print('Initializing the Conditional Dalle model') + + self.config = config + + print('Setting up Cross-attention Layers') + self.init_cross_attention(list(range(2,42,3)), config.stage2.hparams) + + ###### NUM PARAMS ######### + total_param = 0 + for name, param in self.named_parameters(): + # print(param.shape) + total_param += param.numel() + print('Total parameters is {}'.format(total_param)) + + @classmethod + def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]: + + # if not args.model_name_or_path: + # args.model_name_or_path = 
args.prefix_model_name_or_path + + path = args.model_name_or_path + config_new = OmegaConf.load(os.path.join(path, 'config.yaml')) + if args.do_train: + config_base = get_base_config('finetuning') + config_update = OmegaConf.merge(config_base, config_new) + for key, val in vars(args).items(): + if key in config_update.optimizer.keys(): + OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False) + if key in config_update.experiment.keys(): + OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False) + else: + config_base = get_base_config('default') + config_update = OmegaConf.merge(config_base, config_new) + + model = cls(config_update) + model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'), + context_length=model.config_dataset.context_length, + lowercase=True, + dropout=None) + print(model.cross_attention_idxs) + # print(next(model.cross_attention_layers[0].parameters()).is_cuda) + + if args.dalle_path: + print("Loading model from pretrained checkpoint %s" % args.dalle_path) + # model.from_ckpt(args.model_name_or_path) + model.load_state_dict(torch.load(args.dalle_path)['model_state_dict']) + else: + print("Loading models from checkpoint %s" % path) + model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt')) + model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt')) + + return model, config_update + + + def init_cross_attention(self, cross_attention_layers, hparams): + self.cross_attention_idxs = cross_attention_layers + self.cross_attention_layers = [CrossAttentionLayer(ctx_len=hparams.ctx_len_img + hparams.ctx_len_txt, + embed_dim=hparams.embed_dim, + n_heads=hparams.n_heads, + attn_bias=hparams.attn_bias, + resid_pdrop=hparams.resid_pdrop, + attn_pdrop=hparams.attn_pdrop) for i in cross_attention_layers] + + + def forward(self, + images: torch.FloatTensor, + src_images: Optional[torch.FloatTensor], + texts: Optional[torch.LongTensor], + **kwargs, + ): + + #{"input_ids": batch, "labels": labels, 'src_attn': 
src_attn, 'tgt_attn':tgt_attn, 'src':src} + + # print(images.shape, src_images.shape, texts.shape) + with torch.no_grad(): + with autocast(enabled=False): + codes = self.stage1.get_codes(images).detach() + src_codes = self.stage1.get_codes(src_images).detach() + + pos_enc_tokens = get_positional_encoding(texts, mode='1d') + codes = codes.clone().detach() + pos_enc_code = get_positional_encoding(codes, mode='1d') + src_codes = src_codes.clone().detach() + src_pos_enc_code = get_positional_encoding(src_codes, mode='1d') + # codes = codes.unsqueeze(-1) + # pos_enc_code = pos_enc_code.unsqueeze(-1) + # print(images.shape, codes.shape, texts.shape) + logits_img, logits_txt = self.stage2.forward_with_context(codes, texts, + pos_enc_code, pos_enc_tokens, src_codes, src_pos_enc_code, + self.cross_attention_idxs, self.cross_attention_layers) + # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape) + return logits_img, logits_txt, codes + + @torch.no_grad() + def sampling(self, + prompt: torch.LongTensor, + source: torch.FloatTensor, + top_k: int = 256, + top_p: Optional[float] = None, + softmax_temperature: float = 1.0, + num_candidates: int = 96, + device: str = 'cuda:0', + use_fp16: bool = True) -> torch.FloatTensor: + self.stage1.eval() + self.stage2.eval() + + if type(prompt) == str: + tokens = self.tokenizer.encode(prompt) + tokens = torch.LongTensor(tokens.ids) + else: + tokens = prompt + + tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0) + + # Check if the encoding works as intended + # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0]) + + tokens = tokens.to(device) + source = source.to(device) + + with autocast(enabled=False): + src_codes = self.stage1.get_codes(source).detach() + src_codes = torch.repeat_interleave(src_codes, num_candidates, dim=0) + + codes = sampling_conditional(self.stage2, + self.cross_attention_idxs, + self.cross_attention_layers, + tokens, + src_codes, + top_k=top_k, + 
top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16) + codes = codes.view(num_candidates, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1) # [B, 256, 256] + return pixels + + def training_step(self, batch, batch_idx): + images, texts = batch + logits_img, logits_txt, codes = self(images, texts) + + loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1)) + loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1)) + self.log("train/loss_img", loss_img, on_step=True, on_epoch=True, prog_bar=False, logger=True) + self.log("train/loss_txt", loss_txt, on_step=True, on_epoch=True, prog_bar=False, logger=True) + return loss_img + loss_txt + + def validation_step(self, batch, batch_idx): + images, texts = batch + logits_img, logits_txt, codes = self(images, texts) + # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape) + + loss_img = F.cross_entropy(logits_img.view(-1, logits_img.shape[-1]), codes.view(-1)) + loss_txt = F.cross_entropy(logits_txt.view(-1, logits_txt.shape[-1]), texts[:, 1:].reshape(-1)) + self.log("val/loss_img", loss_img, on_step=False, on_epoch=True, prog_bar=False, logger=True) + self.log("val/loss_txt", loss_txt, on_step=False, on_epoch=True, prog_bar=False, logger=True) + return loss_img + loss_txt + + @torch.no_grad() + def predict_step(self, batch, batch_idx): + orig_images, texts = batch + # concatenate the list of prompts (split by n_head) for better downstream processing + past_key_values_prompt = self.get_prompt(bsz=5) + past_key_values_prompt = torch.cat([x.unsqueeze(0) for x in past_key_values_prompt], dim=0) + images = [] + for t in texts: + pixels = self.sampling(t, past_key_values_prompt, top_k=64, num_candidates=5).cpu().numpy() + pixels = np.transpose(pixels, (0, 2, 3, 1)) + images.append(pixels) + # images.extend([p for p in pixels]) + # print([i.shape for i in images]) + + 
save_image(orig_images, images, './out/images/', batch_idx) + + +class PromptConditionalDalle(Dalle): + """Classification Head for transformer encoders""" + def __init__(self, config): + super().__init__(config) + print('Initializing the Conditional Dalle model') + + self.config = config + + print('Setting up Cross-attention Layers') + self.init_cross_attention(list(range(2,42,3)), config.stage2.hparams) + + self.n_embd = config.stage2.hparams.embed_dim + self.preseqlen = config.story.preseqlen + self.prefix_dropout = config.story.prefix_dropout + + # DIFFERENT PARAMETRIZATION: + + print('[Full prompt-tuning Setting :) ]') + self.input_tokens = torch.arange(self.preseqlen).long() + self.wte = nn.Embedding(self.preseqlen, self.n_embd) + self.control_trans = nn.Sequential( + nn.Linear(self.n_embd, self.n_embd), + nn.Tanh(), + nn.Linear(self.n_embd, self.n_embd)) + self.get_prompt = self.get_prompt_p5 + self.dropout = nn.Dropout(self.prefix_dropout) + + ###### NUM PARAMS ######### + total_param = 0 + for name, param in self.named_parameters(): + # print(param.shape) + total_param += param.numel() + print('Total parameters is {}'.format(total_param)) + + @classmethod + def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]: + + # if not args.model_name_or_path: + # args.model_name_or_path = args.prefix_model_name_or_path + + path = args.prefix_model_name_or_path + path = _MODELS[path] if path in _MODELS else path + path = utils.realpath_url_or_path(path, root=os.path.expanduser("~/.cache/minDALL-E")) + + config_new = OmegaConf.load(os.path.join(path, 'config.yaml')) + if args.do_train: + config_base = get_base_config('story') + config_update = OmegaConf.merge(config_base, config_new) + for key, val in vars(args).items(): + if key in config_update.story.keys(): + OmegaConf.update(config_update, "story.%s" % key, val, merge=False) + if key in config_update.optimizer.keys(): + OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False) + if key in 
config_update.experiment.keys(): + OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False) + else: + config_base = get_base_config('default') + config_update = OmegaConf.merge(config_base, config_new) + + model = cls(config_update) + model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'), + context_length=model.config_dataset.context_length, + lowercase=True, + dropout=None) + print(model.cross_attention_idxs) + # print(next(model.cross_attention_layers[0].parameters()).is_cuda) + + if args.model_name_or_path: + print("Loading model from pretrained checkpoint %s" % args.model_name_or_path) + # model.from_ckpt(args.model_name_or_path) + try: + model.load_state_dict(torch.load(args.model_name_or_path)['state_dict']) + except KeyError: + model.load_state_dict(torch.load(args.model_name_or_path)['model_state_dict']) + + else: + print("Loading models from checkpoint %s" % path) + model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt')) + model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt')) + + return model, config_update + + + def init_cross_attention(self, cross_attention_layers, hparams): + self.cross_attention_idxs = cross_attention_layers + self.cross_attention_layers = [CrossAttentionLayer(ctx_len=hparams.ctx_len_img + hparams.ctx_len_txt, + embed_dim=hparams.embed_dim, + n_heads=hparams.n_heads, + attn_bias=hparams.attn_bias, + resid_pdrop=hparams.resid_pdrop, + attn_pdrop=hparams.attn_pdrop) for i in cross_attention_layers] + + def get_prompt_p5(self, bsz=None, eval=False): + input_tokens = self.input_tokens.unsqueeze(0).expand(bsz, -1).to(self.device) + temp_control = self.wte(input_tokens) + past_key_values = self.control_trans(temp_control) #bsz, seqlen, layer*emb + if not eval: + past_key_values = self.dropout(past_key_values) + return past_key_values + + def forward(self, + images: torch.FloatTensor, + src_images: Optional[torch.FloatTensor], + texts: Optional[torch.LongTensor], + **kwargs, + ): + + #{"input_ids": 
batch, "labels": labels, 'src_attn': src_attn, 'tgt_attn':tgt_attn, 'src':src} + + # print(images.shape, src_images.shape, texts.shape) + with torch.no_grad(): + with autocast(enabled=False): + codes = self.stage1.get_codes(images).detach() + src_codes = self.stage1.get_codes(src_images).detach() + + B, C, H, W = images.shape + prompt = self.get_prompt(bsz=B) + pos_enc_prompt = get_positional_encoding(self.input_tokens.unsqueeze(0).expand(B, -1).to(self.device), mode='1d') + + pos_enc_tokens = get_positional_encoding(texts, mode='1d') + codes = codes.clone().detach() + pos_enc_code = get_positional_encoding(codes, mode='1d') + src_codes = src_codes.clone().detach() + src_pos_enc_code = get_positional_encoding(src_codes, mode='1d') + # codes = codes.unsqueeze(-1) + # pos_enc_code = pos_enc_code.unsqueeze(-1) + # print(images.shape, codes.shape, texts.shape) + logits_img, logits_txt = self.stage2.forward_with_context(codes, texts, + pos_enc_code, pos_enc_tokens, src_codes, src_pos_enc_code, + self.cross_attention_idxs, self.cross_attention_layers, + prompt=prompt, pos_prompt=pos_enc_prompt) + # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape) + return logits_img, logits_txt, codes + + @torch.no_grad() + def sampling(self, + tokens: torch.LongTensor, + prompt: torch.LongTensor, + source: torch.FloatTensor, + top_k: int = 256, + top_p: Optional[float] = None, + softmax_temperature: float = 1.0, + num_candidates: int = 96, + device: str = 'cuda:0', + use_fp16: bool = True, + labels=None) -> torch.FloatTensor: + + self.stage1.eval() + self.stage2.eval() + + if type(tokens) == str: + tokens = self.tokenizer.encode(prompt) + tokens = torch.LongTensor(tokens.ids) + else: + pass + + tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0) + + # Check if the encoding works as intended + # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0]) + + tokens = tokens.to(device) + source = source.to(device) + + 
pos_enc_prompt = get_positional_encoding(self.input_tokens.unsqueeze(0).expand(num_candidates, -1).to(self.device), mode='1d') + + with autocast(enabled=False): + src_codes = self.stage1.get_codes(source).detach() + src_codes = torch.repeat_interleave(src_codes, num_candidates, dim=0) + + codes = sampling_conditional(self.stage2, + self.cross_attention_idxs, + self.cross_attention_layers, + tokens, + src_codes, + top_k=top_k, + top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16, + prompt=prompt, + pos_prompt=pos_enc_prompt) + + codes = codes.view(num_candidates, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1) # [B, 256, 256] + return pixels + + + @torch.no_grad() + def predict_step(self, batch, batch_idx, return_images=False): + orig_images, texts = batch + # concatenate the list of prompts (split by n_head) for better downstream processing + + # extra for checks + logits_img, logits_txt, codes = self(orig_images, texts) + pred = torch.argmax(logits_img.view(-1, logits_img.shape[-1]), dim=-1) + bs = orig_images.shape[0] + pred = pred.view(bs, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(pred) * 0.5 + 0.5, 0, 1).cpu().numpy() # [B, 256, 256] + pixels = np.transpose(pixels, (0, 2, 3, 1)) + + prompt = self.get_prompt(bsz=5, eval=True) + + images = [] + for t in texts: + pixels = self.sampling(t, prompt, top_k=64, num_candidates=5, labels=codes[i]).cpu().numpy() + pixels = np.transpose(pixels, (0, 2, 3, 1)) + images.append(pixels) + # images.extend([p for p in pixels]) + # print([i.shape for i in images]) + + if return_images: + return images + else: + save_image(orig_images, pixels, './out/images/pororo_story', batch_idx+10) + save_image(orig_images, images, './out/images/pororo_story', batch_idx) + + +class StoryDalle(Dalle): + """Base model with story block""" + def __init__(self, config): + super().__init__(config) + print('Initializing the Conditional Dalle model') + + 
self.config = config + + self.story_linear = nn.Linear(config.story.sent_embed, config.stage2.hparams.embed_dim) + self.story_block = Block(ctx_len=config.story.story_len, + embed_dim=config.stage2.hparams.embed_dim, + n_heads=config.stage2.hparams.n_heads, + mlp_bias=config.stage2.hparams.mlp_bias, + attn_bias=config.stage2.hparams.attn_bias, + resid_pdrop=config.stage2.hparams.resid_pdrop, + attn_pdrop=config.stage2.hparams.attn_pdrop, + gelu_use_approx=config.stage2.hparams.gelu_use_approx) + + if self.config.story.prompt: + self.n_embd = config.stage2.hparams.embed_dim + self.preseqlen = config.story.preseqlen + self.prefix_dropout = config.story.prefix_dropout + + # DIFFERENT PARAMETRIZATION: + + print('[Full prompt-tuning Setting :) ]') + self.input_tokens = torch.arange(self.preseqlen).long() + self.wte = nn.Embedding(self.preseqlen, self.n_embd) + self.control_trans = nn.Sequential( + nn.Linear(self.n_embd, self.n_embd), + nn.Tanh(), + nn.Linear(self.n_embd, self.n_embd)) + self.get_prompt = self.get_prompt_p5 + self.dropout = nn.Dropout(self.prefix_dropout) + + if self.config.story.condition: + print('Setting up Cross-attention Layers') + self.init_cross_attention(list(range(2,42,3)), config.stage2.hparams) + + ###### NUM PARAMS ######### + total_param = 0 + for name, param in self.named_parameters(): + # print(param.shape) + total_param += param.numel() + print('Total parameters is {}'.format(total_param)) + + @classmethod + def from_pretrained(cls, args) -> Tuple[nn.Module, OmegaConf]: + + # if not args.model_name_or_path: + # args.model_name_or_path = args.prefix_model_name_or_path + + path = args.prefix_model_name_or_path + path = _MODELS[path] if path in _MODELS else path + path = utils.realpath_url_or_path(path, root=os.path.expanduser("~/.cache/minDALL-E")) + + config_new = OmegaConf.load(os.path.join(path, 'config.yaml')) + # if args.do_train: + config_base = get_base_config('story') + config_update = OmegaConf.merge(config_base, config_new) + for 
key, val in vars(args).items(): + if key in config_update.story.keys(): + OmegaConf.update(config_update, "story.%s" % key, val, merge=False) + if key in config_update.optimizer.keys(): + OmegaConf.update(config_update, "optimizer.%s" % key, val, merge=False) + if key in config_update.experiment.keys(): + OmegaConf.update(config_update, "experiment.%s" % key, val, merge=False) + # else: + # config_base = get_base_config('story') + # config_update = OmegaConf.merge(config_base, config_new) + # print(next(model.cross_attention_layers[0].parameters()).is_cuda) + + if args.model_name_or_path: + if 'pororo' in args.model_name_or_path: + config_update.stage2.vocab_size_txt = config_update.stage2.vocab_size_txt + 9 + elif 'flintstones' in args.model_name_or_path: + config_update.stage2.vocab_size_txt = config_update.stage2.vocab_size_txt + 7 + model = cls(config_update) + model_dir = os.path.dirname(args.model_name_or_path) + print(model_dir) + model.tokenizer = build_tokenizer(model_dir, + context_length=model.config_dataset.context_length, + lowercase=True, + dropout=None) + print("Loaded tokenizer from finetuned checkpoint") + print(model.cross_attention_idxs) + print("Loading model from pretrained checkpoint %s" % args.model_name_or_path) + # model.from_ckpt(args.model_name_or_path) + try: + model.load_state_dict(torch.load(args.model_name_or_path)['state_dict']) + except KeyError: + model.load_state_dict(torch.load(args.model_name_or_path)['model_state_dict']) + else: + model = cls(config_update) + print(model.cross_attention_idxs) + print("Loading models from checkpoint %s" % path) + model.stage1.from_ckpt(os.path.join(path, 'stage1_last.ckpt')) + model.stage2.from_ckpt(os.path.join(path, 'stage2_last.ckpt')) + + model.tokenizer = build_tokenizer(os.path.join(path, 'tokenizer'), + context_length=model.config_dataset.context_length, + lowercase=True, + dropout=None) + + + return model, config_update + + + def init_cross_attention(self, cross_attention_layers, 
hparams): + self.cross_attention_idxs = cross_attention_layers + self.cross_attention_layers = [CrossAttentionLayer(ctx_len=hparams.ctx_len_img + hparams.ctx_len_txt, + embed_dim=hparams.embed_dim, + n_heads=hparams.n_heads, + attn_bias=hparams.attn_bias, + resid_pdrop=hparams.resid_pdrop, + attn_pdrop=hparams.attn_pdrop) for i in cross_attention_layers] + + def get_prompt_p5(self, bsz=None, eval=False): + input_tokens = self.input_tokens.unsqueeze(0).expand(bsz, -1).to(self.device) + temp_control = self.wte(input_tokens) + past_key_values = self.control_trans(temp_control) #bsz, seqlen, layer*emb + if not eval: + past_key_values = self.dropout(past_key_values) + return past_key_values + + def forward(self, + images: torch.FloatTensor, + src_images: Optional[torch.FloatTensor], + texts: Optional[torch.LongTensor], + sent_embeds: Optional[torch.FloatTensor], + **kwargs, + ): + + # print(images.shape, src_images.shape, texts.shape, sent_embeds.shape) + + B, L, C, H, W = images.shape + images = images.view(B*L, C, H, W) + src_images = src_images.unsqueeze(1).expand(-1, L, -1, -1, -1).reshape(B*L, C, H, W) + sent_embeds = self.story_block(self.story_linear(sent_embeds)).view(B * L, -1).unsqueeze(1) + texts = texts.view(B * L, -1) + + #{"input_ids": batch, "labels": labels, 'src_attn': src_attn, 'tgt_attn':tgt_attn, 'src':src} + + with torch.no_grad(): + with autocast(enabled=False): + codes = self.stage1.get_codes(images).detach() + src_codes = self.stage1.get_codes(src_images).detach() + + B, C, H, W = images.shape + + if self.config.story.prompt: + prompt = self.get_prompt(bsz=B) + prompt = torch.cat([prompt, sent_embeds], dim=1) + else: + prompt = sent_embeds + + # dim = 0 for full-model finetuning?? 
+ pos_enc_prompt = get_positional_encoding(torch.arange(prompt.shape[1]).long().unsqueeze(0).expand(B, -1).to(self.device), + mode='1d') + + pos_enc_tokens = get_positional_encoding(texts, mode='1d') + codes = codes.clone().detach() + pos_enc_code = get_positional_encoding(codes, mode='1d') + src_codes = src_codes.clone().detach() + src_pos_enc_code = get_positional_encoding(src_codes, mode='1d') + # codes = codes.unsqueeze(-1) + # pos_enc_code = pos_enc_code.unsqueeze(-1) + # print(images.shape, codes.shape, texts.shape) + if self.config.story.condition: + logits_img, logits_txt = self.stage2.forward_with_context(codes, texts, + pos_enc_code, pos_enc_tokens, src_codes, src_pos_enc_code, + self.cross_attention_idxs, self.cross_attention_layers, + prompt=prompt, pos_prompt=pos_enc_prompt) + else: + logits_img, logits_txt = self.stage2(codes, texts, pos_enc_code, pos_enc_tokens, prompt=prompt, + pos_prompt=pos_enc_prompt) + + # print(logits_img.shape, logits_txt.shape, codes.shape, texts.shape) + return logits_img, logits_txt, codes + + @torch.no_grad() + def sampling(self, + tokens: torch.LongTensor, + source: torch.FloatTensor, + sent_embeds: torch.FloatTensor, + top_k: int = 256, + top_p: Optional[float] = None, + softmax_temperature: float = 1.0, + num_candidates: int = 96, + device: str = 'cuda:0', + use_fp16: bool = True, + labels=None, + prompt = None) -> torch.FloatTensor: + + self.stage1.eval() + self.stage2.eval() + + if type(tokens) == str: + tokens = self.tokenizer.encode(tokens) + tokens = torch.LongTensor(tokens.ids) + + # tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0) + + # Check if the encoding works as intended + # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0]) + + tokens = tokens.to(device) + source = source.to(device) + + # print(tokens.shape, sent_embeds.shape, prompt.shape) + B, L, _ = sent_embeds.shape + sent_embeds = self.story_block(self.story_linear(sent_embeds)).view(B * L, 
-1).unsqueeze(1) + if prompt is not None: + prompt = torch.cat([prompt, sent_embeds], dim=1) + else: + prompt = sent_embeds + pos_enc_prompt = get_positional_encoding(torch.arange(prompt.shape[1]).long().unsqueeze(0).expand(B*L, -1).to(self.device), mode='1d') + + with autocast(enabled=False): + src_codes = self.stage1.get_codes(source).detach() + src_codes = torch.repeat_interleave(src_codes, self.config.story.story_len, dim=0) + print(tokens.shape, src_codes.shape, prompt.shape) + if self.config.story.condition: + codes = sampling_conditional(self.stage2, + self.cross_attention_idxs, + self.cross_attention_layers, + tokens, + src_codes, + top_k=top_k, + top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16, + prompt=prompt, + pos_prompt=pos_enc_prompt) + else: + codes = sampling(self.stage2, + tokens, + top_k=top_k, + top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16, + prompt=prompt, + pos_prompt=pos_enc_prompt) + + codes = codes.view(self.config.story.story_len, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1) # [B, 256, 256] + return pixels + + @torch.no_grad() + def sampling_batch(self, + tokens: torch.LongTensor, + source: torch.FloatTensor, + sent_embeds: torch.FloatTensor, + top_k: int = 256, + top_p: Optional[float] = None, + softmax_temperature: float = 1.0, + num_candidates: int = 96, + device: str = 'cuda:0', + use_fp16: bool = True, + labels=None, + prompt=None, n_candidates=1) -> torch.FloatTensor: + + self.stage1.eval() + self.stage2.eval() + + if type(tokens) == str: + tokens = self.tokenizer.encode(tokens) + tokens = torch.LongTensor(tokens.ids) + + # tokens = torch.repeat_interleave(tokens.unsqueeze(0), num_candidates, dim=0) + + # Check if the encoding works as intended + # print(self.tokenizer.decode_batch(tokens.tolist(), skip_special_tokens=True)[0]) + + tokens = tokens.to(device) + source = source.to(device) + + # print(tokens.shape, 
sent_embeds.shape, prompt.shape) + B, L, _ = sent_embeds.shape + sent_embeds = self.story_block(self.story_linear(sent_embeds)).view(B * L, -1).unsqueeze(1) + if prompt is not None: + prompt = torch.cat([prompt, sent_embeds], dim=1) + else: + prompt = sent_embeds + pos_enc_prompt = get_positional_encoding( + torch.arange(prompt.shape[1]).long().unsqueeze(0).expand(B * L, -1).to(self.device), mode='1d') + + with autocast(enabled=False): + src_codes = self.stage1.get_codes(source).detach() + + # repeat inputs to adjust to n_candidates and story length + src_codes = torch.repeat_interleave(src_codes, self.config.story.story_len * n_candidates, dim=0) + prompt = prompt.repeat(n_candidates, 1, 1) + pos_enc_prompt = pos_enc_prompt.repeat(n_candidates, 1) + tokens = tokens.repeat(n_candidates, 1) + print(tokens.shape, src_codes.shape, prompt.shape, pos_enc_prompt.shape) + if self.config.story.condition: + codes = sampling_conditional(self.stage2, + self.cross_attention_idxs, + self.cross_attention_layers, + tokens, + src_codes, + top_k=top_k, + top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16, + prompt=prompt, + pos_prompt=pos_enc_prompt) + else: + codes = sampling(self.stage2, + tokens, + top_k=top_k, + top_p=top_p, + softmax_temperature=softmax_temperature, + use_fp16=use_fp16, + prompt=prompt, + pos_prompt=pos_enc_prompt) + + codes = codes.view(self.config.story.story_len * n_candidates, 16, 16) # [B, 16, 16] + print(codes.shape) + pixels = torch.clamp(self.stage1.decode_code(codes) * 0.5 + 0.5, 0, 1) # [B, 3, 256, 256] + print(pixels.shape) + return pixels.view(n_candidates, self.config.story.story_len, pixels.shape[-3], pixels.shape[-2], pixels.shape[-1]) + + + @torch.no_grad() + def predict_step(self, batch, batch_idx, return_images=False): + orig_images, texts = batch + # concatenate the list of prompts (split by n_head) for better downstream processing + + # extra for checks + logits_img, logits_txt, codes = self(orig_images, texts) + 
pred = torch.argmax(logits_img.view(-1, logits_img.shape[-1]), dim=-1) + bs = orig_images.shape[0] + pred = pred.view(bs, 16, 16) # [B, 16, 16] + pixels = torch.clamp(self.stage1.decode_code(pred) * 0.5 + 0.5, 0, 1).cpu().numpy() # [B, 256, 256] + pixels = np.transpose(pixels, (0, 2, 3, 1)) + + prompt = self.get_prompt(bsz=5, eval=True) + + images = [] + for t in texts: + pixels = self.sampling(t, prompt, top_k=64, num_candidates=5, labels=codes[i]).cpu().numpy() + pixels = np.transpose(pixels, (0, 2, 3, 1)) + images.append(pixels) + # images.extend([p for p in pixels]) + # print([i.shape for i in images]) + + if return_images: + return images + else: + save_image(orig_images, pixels, './out/images/pororo_story', batch_idx+10) + save_image(orig_images, images, './out/images/pororo_story', batch_idx) diff --git a/dalle/models/__pycache__/__init__.cpython-38.pyc b/dalle/models/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7083a13daaa652d9124c5f444783d5d4280c39a Binary files /dev/null and b/dalle/models/__pycache__/__init__.cpython-38.pyc differ diff --git a/dalle/models/__pycache__/prefix_tuning_model.cpython-38.pyc b/dalle/models/__pycache__/prefix_tuning_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83d91dbaec217977eb4cbed6b5b0b459bd5eaf06 Binary files /dev/null and b/dalle/models/__pycache__/prefix_tuning_model.cpython-38.pyc differ diff --git a/dalle/models/__pycache__/tokenizer.cpython-38.pyc b/dalle/models/__pycache__/tokenizer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd38e7b40c43a16cd12116f72a67f604886451ee Binary files /dev/null and b/dalle/models/__pycache__/tokenizer.cpython-38.pyc differ diff --git a/dalle/models/stage1/__pycache__/layers.cpython-38.pyc b/dalle/models/stage1/__pycache__/layers.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..d1b9811eb9e47d731a39a71c634e7cbf4c0ee8b2 Binary files /dev/null and b/dalle/models/stage1/__pycache__/layers.cpython-38.pyc differ diff --git a/dalle/models/stage1/__pycache__/vqgan.cpython-38.pyc b/dalle/models/stage1/__pycache__/vqgan.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..99209169533db6f7aff22406bee02dbfe757df77 Binary files /dev/null and b/dalle/models/stage1/__pycache__/vqgan.cpython-38.pyc differ diff --git a/dalle/models/stage1/layers.py b/dalle/models/stage1/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..16c758c98089b6278190b7b52479df0eed941d9f --- /dev/null +++ b/dalle/models/stage1/layers.py @@ -0,0 +1,373 @@ +# ------------------------------------------------------------------------------------ +# Modified from VQGAN (https://github.com/CompVis/taming-transformers) +# Copyright (c) 2020 Patrick Esser and Robin Rombach and Björn Ommer. All Rights Reserved. +# ------------------------------------------------------------------------------------ + +import torch +import torch.nn as nn +from typing import Tuple, Optional + + +def nonlinearity(x): + # swish + return x*torch.sigmoid(x) + + +def Normalize(in_channels): + return torch.nn.GroupNorm(num_groups=32, + num_channels=in_channels, + eps=1e-6, + affine=True) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = 
torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512): + assert temb_channels == 0 + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb=None): + assert temb is None + + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + return x+h + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + 
in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, c, h*w) + q = q.permute(0, 2, 1) # b,hw,c + k = k.reshape(b, c, h*w) # b,c,hw + w_ = torch.bmm(q, k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b, c, h*w) + w_ = w_.permute(0, 2, 1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v, w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b, c, h, w) + + h_ = self.proj_out(h_) + return x+h_ + + +class Encoder(nn.Module): + def __init__(self, + *, # forced to use named arguments + ch: int, + out_ch: int, + ch_mult: Tuple[int] = (1, 2, 4, 8), + num_res_blocks: int, + attn_resolutions: Tuple[int], + pdrop: float = 0.0, + resamp_with_conv: bool = True, + in_channels: int, + resolution: int, + z_channels: int, + double_z: Optional[bool] = None) -> None: + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + 
temb_channels=self.temb_ch, + dropout=pdrop)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(AttnBlock(block_in)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=pdrop) + self.mid.attn_1 = AttnBlock(block_in) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=pdrop) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + assert x.shape[2] == x.shape[3] == self.resolution, \ + "{}, {}".format(x.shape, self.resolution) + + # downsampling + h = self.conv_in(x) + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](h) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + if i_level != self.num_resolutions-1: + h = self.down[i_level].downsample(h) + + # middle + h = self.mid.block_1(h) + h = self.mid.attn_1(h) + h = self.mid.block_2(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, + *, # forced to use named arguments + ch: int, + out_ch: int, + ch_mult: Tuple[int] = (1, 2, 4, 8), + num_res_blocks: int, + attn_resolutions: Tuple[int], + pdrop: float = 0.0, + resamp_with_conv: bool = True, + in_channels: int, + resolution: int, + z_channels: int, + double_z: bool) -> None: + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution 
= resolution + self.in_channels = in_channels + + # compute in_ch_mult, block_in and curr_res at lowest res + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1, z_channels, curr_res, curr_res) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=pdrop) + self.mid.attn_1 = AttnBlock(block_in) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=pdrop) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=pdrop)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(AttnBlock(block_in)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z): + assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h) + h = self.mid.attn_1(h) + h = self.mid.block_2(h) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level 
!= 0: + h = self.up[i_level].upsample(h) + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h diff --git a/dalle/models/stage1/vqgan.py b/dalle/models/stage1/vqgan.py new file mode 100644 index 0000000000000000000000000000000000000000..7f03a4d02aa579275d58290bc4f3714fd58bfe00 --- /dev/null +++ b/dalle/models/stage1/vqgan.py @@ -0,0 +1,93 @@ +# ------------------------------------------------------------------------------------ +# Modified from VQGAN (https://github.com/CompVis/taming-transformers) +# Copyright (c) 2020 Patrick Esser and Robin Rombach and Björn Ommer. All Rights Reserved. +# ------------------------------------------------------------------------------------ + +import torch +import torch.nn as nn +from typing import List, Tuple, Optional +from einops import rearrange +from omegaconf import OmegaConf +from .layers import Encoder, Decoder + + +class VectorQuantizer(nn.Module): + """ + Simplified VectorQuantizer in the original VQGAN repository + by removing unncessary modules for sampling + """ + def __init__(self, dim: int, n_embed: int, beta: float) -> None: + super().__init__() + self.n_embed = n_embed + self.dim = dim + self.beta = beta + + self.embedding = nn.Embedding(self.n_embed, self.dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_embed, 1.0 / self.n_embed) + + def forward(self, + z: torch.FloatTensor) -> Tuple[torch.FloatTensor, torch.LongTensor]: + z = rearrange(z, 'b c h w -> b h w c').contiguous() # [B,C,H,W] -> [B,H,W,C] + z_flattened = z.view(-1, self.dim) + + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.einsum('bd,dn->bn', z_flattened, rearrange(self.embedding.weight, 'n d -> d n')) + + min_encoding_indices = torch.argmin(d, dim=1) + z_q = self.embedding(min_encoding_indices).view(z.shape) + return z_q, min_encoding_indices + + def get_codebook_entry(self, + indices: torch.LongTensor, + shape: Optional[List[int]] = None) -> 
torch.FloatTensor: + z_q = self.embedding(indices) + if shape is not None: + z_q = z_q.view(shape) + z_q = z_q.permute(0, 3, 1, 2).contiguous() + return z_q + + +class VQGAN(nn.Module): + def __init__(self, n_embed: int, embed_dim: int, hparams: OmegaConf) -> None: + super().__init__() + self.encoder = Encoder(**hparams) + self.decoder = Decoder(**hparams) + self.quantize = VectorQuantizer(dim=embed_dim, n_embed=n_embed, beta=0.25) + self.quant_conv = torch.nn.Conv2d(hparams.z_channels, embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, hparams.z_channels, 1) + self.latent_dim = hparams.attn_resolutions[0] + + def forward(self, x: torch.FloatTensor) -> torch.FloatTensor: + quant = self.encode(x) + dec = self.decode(quant) + return dec + + def encode(self, x: torch.FloatTensor) -> torch.FloatTensor: + h = self.encoder(x) + h = self.quant_conv(h) + quant = self.quantize(h)[0] + quant = rearrange(quant, 'b h w c -> b c h w').contiguous() + return quant + + def decode(self, quant: torch.FloatTensor) -> torch.FloatTensor: + quant = self.post_quant_conv(quant) + dec = self.decoder(quant) + return dec + + def decode_code(self, code: torch.LongTensor) -> torch.FloatTensor: + quant = self.quantize.get_codebook_entry(code) + quant = quant.permute(0, 3, 1, 2) + dec = self.decode(quant) + return dec + + def get_codes(self, x: torch.FloatTensor) -> torch.LongTensor: + h = self.encoder(x) + h = self.quant_conv(h) + codes = self.quantize(h)[1].view(x.shape[0], self.latent_dim ** 2) + return codes + + def from_ckpt(self, path: str, strict: bool = True) -> None: + ckpt = torch.load(path, map_location='cpu')['state_dict'] + self.load_state_dict(ckpt, strict=strict) + print(f'{path} successfully restored..') diff --git a/dalle/models/stage2/__pycache__/layers.cpython-38.pyc b/dalle/models/stage2/__pycache__/layers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..475d1f6a8b3b21e66242e55980197e1e6269275c Binary files /dev/null and 
b/dalle/models/stage2/__pycache__/layers.cpython-38.pyc differ diff --git a/dalle/models/stage2/__pycache__/transformer.cpython-38.pyc b/dalle/models/stage2/__pycache__/transformer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f096414ba66635c44b4c1ef2637c2270e4e1406a Binary files /dev/null and b/dalle/models/stage2/__pycache__/transformer.cpython-38.pyc differ diff --git a/dalle/models/stage2/layers.py b/dalle/models/stage2/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..d0a60c297cbf94a0c7ac1946ac70f6e862f912e6 --- /dev/null +++ b/dalle/models/stage2/layers.py @@ -0,0 +1,216 @@ +# ------------------------------------------------------------------------------------ +# Minimal DALL-E +# Copyright (c) 2021 KakaoBrain. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 [see LICENSE for details] +# ------------------------------------------------------------------------------------ +# Modified from minGPT (https://github.com/karpathy/minGPT) +# Copyright (c) 2020 Andrej Karpathy. All Rights Reserved. 
class GELU(nn.Module):
    """GELU activation.

    With `use_approx=True`, uses the fast sigmoid approximation
    ``x * sigmoid(1.702 * x)``; otherwise delegates to ``F.gelu``.
    """

    def __init__(self, use_approx=False):
        super().__init__()
        self.use_approx = use_approx

    def forward(self, x):
        return x * torch.sigmoid(1.702 * x) if self.use_approx else F.gelu(x)
class MultiHeadSelfAttention(nn.Module):
    """Multi-head (optionally causal) self-attention with a KV cache for
    incremental decoding.

    Heads are folded into the batch dimension internally:
    (B, T, C) -> (B*n_heads, T, C//n_heads).
    """

    def __init__(self,
                 ctx_len: int,
                 embed_dim: int,
                 n_heads: int,
                 resid_pdrop: float,
                 attn_pdrop: float,
                 attn_bias: bool,
                 use_mask: bool = True):
        super().__init__()
        assert embed_dim % n_heads == 0

        # key, query, value projections for all heads
        self.key = nn.Linear(embed_dim, embed_dim, bias=attn_bias)
        self.query = nn.Linear(embed_dim, embed_dim, bias=attn_bias)
        self.value = nn.Linear(embed_dim, embed_dim, bias=attn_bias)

        # regularization
        self.attn_drop = nn.Dropout(attn_pdrop)
        self.resid_drop = nn.Dropout(resid_pdrop)

        # output projection (attn_bias is passed positionally as `bias`)
        self.proj = nn.Linear(embed_dim, embed_dim, attn_bias)

        self.n_heads = n_heads
        self.ctx_len = ctx_len
        self.use_mask = use_mask
        if self.use_mask:
            # Registered as a non-persistent buffer, then immediately replaced by
            # its lower-triangular (causal) version of shape (1, ctx_len, ctx_len).
            self.register_buffer("mask", torch.ones(ctx_len, ctx_len), persistent=False)
            self.mask = torch.tril(self.mask).view(1, ctx_len, ctx_len)

    def forward(self, x, use_cache=False, layer_past=None):
        """Self-attention over x of shape (B, T, C).

        Returns (B, T, C); when use_cache=True also returns `present`, the
        stacked (k, v) tensors for this step.
        """
        B, T, C = x.shape
        x = x.transpose(0, 1).contiguous()  # (B, T, C) -> (T, B, C)

        # calculate query, key, values for all heads in batch and move head forward to be the batch dim
        k = self.key(x).view(T, B*self.n_heads, C//self.n_heads).transpose(0, 1)  # (B*nh, T, hs)
        q = self.query(x).view(T, B*self.n_heads, C//self.n_heads).transpose(0, 1)  # (B*nh, T, hs)
        v = self.value(x).view(T, B*self.n_heads, C//self.n_heads).transpose(0, 1)  # (B*nh, T, hs)

        if use_cache:
            present = torch.stack([k, v])

        if layer_past is not None:
            past_key, past_value = layer_past

            # Cached k/v may arrive stacked with an extra leading dim (4-D) or
            # already flat (3-D); either way they are concatenated on the time axis.
            if len(past_key.shape) == 4:
                _, _, seq_len, dim = past_key.shape
                k = torch.cat([past_key.reshape(-1, seq_len, dim), k], dim=-2)
                v = torch.cat([past_value.reshape(-1, seq_len, dim), v], dim=-2)
            elif len(past_key.shape) == 3:
                past_key, past_value = layer_past
                k = torch.cat([past_key, k], dim=-2)
                v = torch.cat([past_value, v], dim=-2)
            else:
                raise ValueError

        if use_cache and layer_past is not None:
            # Single-token decode step: the causal mask is unnecessary because
            # the query attends only to past + current positions by construction.
            # (B * nh, 1, hs) X (B * nh, hs, K) -> (B * nh, 1, K)
            att = torch.bmm(q, (k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1))))
            att = F.softmax(att, dim=-1)
            att = self.attn_drop(att)
            y = torch.bmm(att, v)  # (B*nh, 1, K) X (B*nh, K, hs) -> (B*nh, 1, hs)
        else:
            # Full-sequence attention with causal masking.
            # (B * nh, T, hs) X (B * nh, hs, T) -> (B * nh, T, T)
            att = torch.bmm(q, (k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1))))
            if self.use_mask:
                # TODO : Flip when not prompt tunign
                if T == self.ctx_len:
                    mask = self.mask
                else:
                    # Shorter sequences (e.g. prompt-tuned inputs) get a mask built on the fly.
                    mask = torch.tril(torch.ones(T, T)).view(1, T, T).to(att.device)
                att = att.masked_fill(mask == 0, float('-inf'))
            att = F.softmax(att, dim=-1)
            att = self.attn_drop(att)
            y = torch.bmm(att, v)  # (B*nh, T, T) X (B*nh, T, hs) -> (B*nh, T, hs)
        y = y.transpose(0, 1).contiguous().view(T, B, C)  # re-assemble all head outputs side by side

        # output projection
        y = self.resid_drop(self.proj(y))
        if use_cache:
            return y.transpose(0, 1).contiguous(), present  # (T, B, C) -> (B, T, C)
        else:
            return y.transpose(0, 1).contiguous()  # (T, B, C) -> (B, T, C)

    def forward_with_context(self, x, context, mask=None):
        """Cross-attention: queries from x (B, T, C); keys/values from context (B, T_c, C).

        Output positions where mask == 0 are zeroed.
        NOTE(review): unlike x, `context` is NOT transposed to (T_c, B, C) before
        the .view() below, so for B > 1 the reshape interleaves batch and time;
        verify intended behaviour against callers.
        """
        B, T, C = x.shape
        x = x.transpose(0, 1).contiguous()  # (B, T, C) -> (T, B, C)

        # calculate query, key, values for all heads in batch and move head forward to be the batch dim
        q = self.query(x).view(T, B*self.n_heads, C//self.n_heads).transpose(0, 1)  # (B*nh, T, hs)

        B, T_c, C = context.shape
        k = self.key(context).view(T_c, B * self.n_heads, C // self.n_heads).transpose(0, 1)  # (B*nh, Tc, hs)
        v = self.value(context).view(T_c, B*self.n_heads, C//self.n_heads).transpose(0, 1)  # (B*nh, Tc, hs)

        # (B * nh, T, hs) X (B * nh, hs, Tc) -> (B * nh, T, Tc)
        att = torch.bmm(q, (k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1))))
        att = F.softmax(att, dim=-1)
        att = self.attn_drop(att)
        y = torch.bmm(att, v)  # (B*nh, T, Tc) X (B*nh, Tc, hs) -> (B*nh, T, hs)
        y = y.transpose(0, 1).contiguous().view(T, B, C)  # re-assemble all head outputs side by side

        # output projection
        y = self.resid_drop(self.proj(y)).transpose(0, 1).contiguous()
        if mask is not None:
            y = y.masked_fill(mask == 0, float('0.0'))
        return y  # (B, T, C)
class Block(nn.Module):
    """Pre-norm transformer block: LayerNorm -> causal self-attention -> residual,
    then LayerNorm -> 4x-expansion MLP -> residual."""

    def __init__(self,
                 ctx_len: int,
                 embed_dim: int,
                 n_heads: int,
                 mlp_bias: bool,
                 attn_bias: bool,
                 resid_pdrop: float,  # fixed: was annotated `bool`; dropout probability
                 attn_pdrop: float,   # fixed: was annotated `bool`; dropout probability
                 gelu_use_approx: bool):
        super().__init__()
        self.ln1 = nn.LayerNorm(embed_dim)
        self.ln2 = nn.LayerNorm(embed_dim)

        self.attn = MultiHeadSelfAttention(ctx_len=ctx_len,
                                           embed_dim=embed_dim,
                                           n_heads=n_heads,
                                           attn_pdrop=attn_pdrop,
                                           resid_pdrop=resid_pdrop,
                                           attn_bias=attn_bias,
                                           use_mask=True)
        # Feed-forward with the conventional 4x hidden expansion.
        self.mlp = nn.Sequential(
            nn.Linear(embed_dim, 4 * embed_dim, bias=mlp_bias),
            GELU(gelu_use_approx),
            nn.Linear(4 * embed_dim, embed_dim, bias=mlp_bias),
            nn.Dropout(resid_pdrop),
        )

    def forward(self, x, layer_past=None):
        """Plain forward pass (no KV cache returned)."""
        x = x + self.attn(self.ln1(x), layer_past=layer_past)
        x = x + self.mlp(self.ln2(x))
        return x

    def sample(self, x, layer_past=None):
        """Incremental-decoding pass; returns (output, present k/v for caching)."""
        attn, present = self.attn(self.ln1(x), use_cache=True, layer_past=layer_past)
        x = x + attn
        x = x + self.mlp(self.ln2(x))
        return x, present

    def sample_with_context(self, x, context, context_mask, cross_attn_layer, layer_past=None):
        """Like `sample`, but additionally applies the caller-supplied cross-attention
        layer over `context` (e.g. source-image tokens) between self-attention and MLP."""
        attn, present = self.attn(self.ln1(x), use_cache=True, layer_past=layer_past)
        x = x + attn
        c_attn = cross_attn_layer(x, context, context_mask)
        x = x + c_attn
        x = x + self.mlp(self.ln2(x))
        return x, present
class CrossAttentionLayer(nn.Module):
    """Cross-attention wrapper: queries from `x`, keys/values from `context`.

    Returns only the attention output — the residual connection is added by the
    caller (see `Block.sample_with_context`).
    """

    def __init__(self,
                 ctx_len: int,
                 embed_dim: int,
                 n_heads: int,
                 attn_bias: bool,
                 resid_pdrop: float,  # fixed: was annotated `bool`; dropout probability
                 attn_pdrop: float):  # fixed: was annotated `bool`; dropout probability
        super().__init__()

        self.ln1 = nn.LayerNorm(embed_dim)
        self.ln2 = nn.LayerNorm(embed_dim)
        self.attn = MultiHeadSelfAttention(ctx_len=ctx_len,
                                           embed_dim=embed_dim,
                                           n_heads=n_heads,
                                           attn_pdrop=attn_pdrop,
                                           resid_pdrop=resid_pdrop,
                                           attn_bias=attn_bias,
                                           use_mask=False)

    def forward(self, x, context, context_mask=None):
        # Residual intentionally NOT added here; the caller does `x = x + attn`.
        return self.attn.forward_with_context(self.ln1(x), self.ln2(context), context_mask)
class Transformer1d(nn.Module):
    """GPT-style decoder over a concatenated [text tokens | image tokens] sequence,
    with optional prepended soft-prompt embeddings and optional cross-attention
    to source-image tokens (story continuation)."""

    def __init__(self,
                 vocab_size_txt: int,
                 vocab_size_img: int,
                 hparams: OmegaConf) -> None:
        super().__init__()
        assert hparams.n_layers == hparams.n_dense_layers

        # input embedding for image and text
        self.tok_emb_img = nn.Embedding(vocab_size_img, hparams.embed_dim)
        self.tok_emb_txt = nn.Embedding(vocab_size_txt, hparams.embed_dim)

        self.pos_emb_img = nn.Embedding(hparams.ctx_len_img, hparams.embed_dim)
        self.pos_emb_txt = nn.Embedding(hparams.ctx_len_txt, hparams.embed_dim)

        self.drop = nn.Dropout(hparams.embd_pdrop)

        # transformer blocks (built as a list, then wrapped for iteration)
        self.blocks = [Block(ctx_len=hparams.ctx_len_img + hparams.ctx_len_txt,
                             embed_dim=hparams.embed_dim,
                             n_heads=hparams.n_heads,
                             mlp_bias=hparams.mlp_bias,
                             attn_bias=hparams.attn_bias,
                             resid_pdrop=hparams.resid_pdrop,
                             attn_pdrop=hparams.attn_pdrop,
                             gelu_use_approx=hparams.gelu_use_approx) for i in range(1, hparams.n_layers+1)]
        self.blocks = nn.Sequential(*self.blocks)

        # heads for image and text
        self.ln_f = nn.LayerNorm(hparams.embed_dim)
        self.head_img = nn.Linear(hparams.embed_dim, vocab_size_img, bias=False)
        self.head_txt = nn.Linear(hparams.embed_dim, vocab_size_txt, bias=False)

        self.ctx_len_img = hparams.ctx_len_img
        self.ctx_len_txt = hparams.ctx_len_txt
        self.n_layers = hparams.n_layers

        self.apply(self._init_weights)

    def _init_weights(self, module: nn.Module) -> None:
        # GPT-style init: N(0, 0.02) weights, zero biases, unit LayerNorm scale.
        if isinstance(module, (nn.Linear, nn.Embedding)):
            module.weight.data.normal_(mean=0.0, std=0.02)
            if isinstance(module, nn.Linear) and module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def resize_token_embeddings(self, new_num_tokens):
        """Grow/shrink the text token embedding (HF-style), copying overlapping
        rows and re-initialising the rest; also resizes the text LM head."""
        old_num_tokens, old_embedding_dim = self.tok_emb_txt.weight.size()
        new_embeddings = nn.Embedding(new_num_tokens, old_embedding_dim)
        new_embeddings.to(self.tok_emb_txt.weight.device, dtype=self.tok_emb_txt.weight.dtype)
        self._init_weights(new_embeddings)
        # numbers of tokens to copy
        n = min(old_num_tokens, new_num_tokens)
        new_embeddings.weight.data[:n, :] = self.tok_emb_txt.weight.data[:n, :]
        self.tok_emb_txt = new_embeddings

        self.resize_lm_head(new_num_tokens)
        # TODO: also change config to reflect new vocab size

        return new_embeddings

    def resize_lm_head(
            self, new_num_tokens: Optional[int] = None, transposed: Optional[bool] = False) -> nn.Linear:
        """Resize the text output head to `new_num_tokens`, copying overlapping
        weights (and bias, if present). `transposed` selects weight orientation."""
        old_num_tokens, old_lm_head_dim = (
            self.head_txt.weight.size() if not transposed else self.head_txt.weight.t().size()
        )
        # Build new lm head
        new_lm_head_shape = (old_lm_head_dim, new_num_tokens) if not transposed else (new_num_tokens, old_lm_head_dim)
        has_new_lm_head_bias = self.head_txt.bias is not None
        new_lm_head = nn.Linear(*new_lm_head_shape, bias=has_new_lm_head_bias)
        new_lm_head = new_lm_head.to(self.head_txt.weight.device, dtype=self.head_txt.weight.dtype)

        # initialize new lm head (in particular added tokens)
        self._init_weights(new_lm_head)
        num_tokens_to_copy = min(old_num_tokens, new_num_tokens)
        # Copy old lm head weights to new lm head
        if not transposed:
            new_lm_head.weight.data[:num_tokens_to_copy, :] = self.head_txt.weight.data[:num_tokens_to_copy, :]
        else:
            new_lm_head.weight.data[:, :num_tokens_to_copy] = self.head_txt.weight.data[:, :num_tokens_to_copy]

        # Copy bias weights to new lm head
        if has_new_lm_head_bias:
            new_lm_head.bias.data[:num_tokens_to_copy] = self.head_txt.bias.data[:num_tokens_to_copy]

        self.head_txt = new_lm_head

        return new_lm_head

    def forward(self,
                images: torch.LongTensor,
                texts: torch.LongTensor,
                pos_images: torch.LongTensor,
                pos_texts: torch.LongTensor,
                past: Optional[List[torch.Tensor]] = None,
                prompt: Optional[List[torch.Tensor]] = None,
                pos_prompt: Optional[List[torch.Tensor]] = None) -> Tuple[torch.FloatTensor, torch.FloatTensor]:
        """Teacher-forcing pass. Returns (image logits, text logits), each shifted
        by one position for next-token prediction."""
        B, T = images.shape
        _, N = texts.shape

        assert T <= self.ctx_len_img, "Already reached the maximum context length (image)."
        assert N == self.ctx_len_txt, "Already reached the maximum context length (text)."

        texts = self.tok_emb_txt(texts)
        images = self.tok_emb_img(images)

        texts = texts + self.pos_emb_txt(pos_texts)
        images = images + self.pos_emb_img(pos_images)

        if prompt is not None:
            # Soft prompt embeddings are prepended to the text segment.
            prompt = prompt + self.pos_emb_txt(pos_prompt)
            texts = torch.cat([prompt, texts], dim=1).contiguous()
            P = prompt.shape[1]

        x = torch.cat([texts, images], dim=1).contiguous()
        x = self.drop(x)

        # x = self.blocks(x)
        for i, block in enumerate(self.blocks):
            x, _ = block.sample(x, layer_past=None if past is None else past[i])

        x = self.ln_f(x)

        # Shift by one (and past the prompt, if any) so position t predicts t+1.
        if prompt is not None:
            texts = x[:, P:N+P-1].contiguous()
            images = x[:, N+P-1:-1].contiguous()
        else:
            texts = x[:, :N-1].contiguous()
            images = x[:, N-1:-1].contiguous()

        logits_txt = self.head_txt(texts)
        logits_img = self.head_img(images)
        return logits_img, logits_txt

    def forward_with_context(self,
                             images: torch.LongTensor,
                             texts: torch.LongTensor,
                             pos_images: torch.LongTensor,
                             pos_texts: torch.LongTensor,
                             src_images: torch.LongTensor,
                             src_pos_images: torch.LongTensor,
                             cross_attention_idxs: List,
                             cross_attention_layers,
                             past: Optional[List[torch.Tensor]] = None,
                             prompt: Optional[List[torch.Tensor]] = None,
                             pos_prompt: Optional[List[torch.Tensor]] = None) -> Tuple[torch.FloatTensor, torch.FloatTensor]:
        """Like `forward`, but blocks listed in `cross_attention_idxs` additionally
        cross-attend to the embedded source-image tokens (`src_images`)."""
        B, T = images.shape
        _, N = texts.shape

        assert T <= self.ctx_len_img, "Already reached the maximum context length (image)."
        assert N == self.ctx_len_txt, "Already reached the maximum context length (text)."

        texts = self.tok_emb_txt(texts)
        images = self.tok_emb_img(images)
        src_images = self.tok_emb_img(src_images)

        texts = texts + self.pos_emb_txt(pos_texts)
        images = images + self.pos_emb_img(pos_images)
        src_images = src_images + self.pos_emb_img(src_pos_images)

        if prompt is not None:
            prompt = prompt + self.pos_emb_txt(pos_prompt)
            texts = torch.cat([prompt, texts], dim=1).contiguous()
            P = prompt.shape[1]
        else:
            P = 0

        x = torch.cat([texts, images], axis=1).contiguous()
        x = self.drop(x)

        # prepare mask: cross-attention output is kept only for image positions.
        mask = torch.zeros_like(x[0])
        mask[self.ctx_len_txt+P-1:, :].fill_(1.0)
        mask = mask.unsqueeze(0)

        # x = self.blocks(x)
        for i, block in enumerate(self.blocks):
            if i in cross_attention_idxs:
                # Index arithmetic assumes cross-attn layers sit at every 3rd block.
                x, _ = block.sample_with_context(x, src_images, mask, cross_attention_layers[int(((i+1)/3)-1)], layer_past=None if past is None else past[i])
            else:
                x, _ = block.sample(x, layer_past=None if past is None else past[i])

        x = self.ln_f(x)

        if prompt is not None:
            texts = x[:, P:N+P-1].contiguous()
            images = x[:, N+P-1:-1].contiguous()
        else:
            texts = x[:, :N-1].contiguous()
            images = x[:, N-1:-1].contiguous()

        logits_txt = self.head_txt(texts)
        logits_img = self.head_img(images)
        return logits_img, logits_txt

    @torch.no_grad()
    def sampling(self,
                 images: torch.LongTensor,
                 texts: torch.LongTensor,
                 pos_images: torch.LongTensor,
                 pos_texts: torch.LongTensor,
                 use_fp16: bool = True,
                 past: Optional[List[torch.Tensor]] = None,
                 prompt: Optional[List[torch.Tensor]] = None,
                 pos_prompt: Optional[List[torch.Tensor]] = None) -> Tuple[torch.FloatTensor, List[torch.FloatTensor]]:
        """One autoregressive step: returns next-image-token logits and the
        per-layer present k/v tensors for caching."""
        _, N = texts.shape
        assert N == self.ctx_len_txt, "Already reached the maximum context length (text)."

        with autocast(enabled=use_fp16):
            if images is None:
                # assert past is None

                texts = self.tok_emb_txt(texts)
                x = texts + self.pos_emb_txt(pos_texts)

                if prompt is not None:
                    # NOTE(review): the concatenated `texts` below is never used —
                    # `x` was already computed above, so the prompt is effectively
                    # ignored in this branch (contrast with `forward`). Verify.
                    prompt = prompt + self.pos_emb_txt(pos_prompt)
                    texts = torch.cat([prompt, texts], dim=1).contiguous()

                x = self.drop(x)

                if past is not None:
                    past = torch.cat(past, dim=-2)

                presents = []
                for i, block in enumerate(self.blocks):
                    x, present = block.sample(x, layer_past=None if past is None else past[i])
                    presents.append(present)
                x = self.ln_f(x)
                x = x[:, N-1].contiguous()  # hidden state at the last text position
                logits = self.head_img(x)
            else:
                if past is None:
                    # First image step: embed the whole [text | image-so-far] sequence.
                    texts = self.tok_emb_txt(texts)
                    images = self.tok_emb_img(images)
                    texts = texts + self.pos_emb_txt(pos_texts)
                    images = images + self.pos_emb_img(pos_images)

                    if prompt is not None:
                        prompt = prompt + self.pos_emb_txt(pos_prompt)
                        texts = torch.cat([prompt, texts], dim=1).contiguous()

                    x = torch.cat([texts, images], axis=1).contiguous()
                else:
                    # Subsequent steps: only the newest image token(s) are fed.
                    images = self.tok_emb_img(images)
                    x = images + self.pos_emb_img(pos_images)
                x = self.drop(x)

                # if past is not None and len(past) > 1:
                if past is not None:
                    past = torch.cat(past, dim=-2)
                presents = []
                for i, block in enumerate(self.blocks):
                    x, present = block.sample(x, layer_past=None if past is None else past[i])
                    presents.append(present)
                x = self.ln_f(x)
                x = x[:, -1].contiguous()  # last position only
                logits = self.head_img(x)
            return logits, presents

    @torch.no_grad()
    def sampling_with_context(self,
                              images: torch.LongTensor,
                              cross_attention_idxs,
                              cross_attention_layers,
                              texts: torch.LongTensor,
                              pos_images: torch.LongTensor,
                              pos_texts: torch.LongTensor,
                              source_image: torch.LongTensor,
                              use_fp16: bool = True,
                              past: Optional[List[torch.Tensor]] = None,
                              prompt: Optional[List[torch.Tensor]] = None,
                              pos_prompt: Optional[List[torch.Tensor]] = None
                              ) -> Tuple[torch.FloatTensor, List[torch.FloatTensor]]:
        """`sampling` with cross-attention to pre-embedded source-image tokens
        at the blocks listed in `cross_attention_idxs`."""
        _, N = texts.shape
        assert N == self.ctx_len_txt, "Already reached the maximum context length (text)."

        if prompt is not None:
            P = prompt.shape[1]
        else:
            P = 0

        with autocast(enabled=use_fp16):
            if images is None:
                # assert past is None

                texts = self.tok_emb_txt(texts)
                texts = texts + self.pos_emb_txt(pos_texts)

                if prompt is not None:
                    prompt = prompt + self.pos_emb_txt(pos_prompt)
                    texts = torch.cat([prompt, texts], dim=1).contiguous()

                x = self.drop(texts)

                if past is not None:
                    past = torch.cat(past, dim=-2)

                # prepare mask: keep cross-attention output for image positions only.
                mask = torch.zeros_like(x[0])
                mask[self.ctx_len_txt+P - 1:, :].fill_(1.0)
                mask = mask.unsqueeze(0)

                presents = []
                for i, block in enumerate(self.blocks):
                    if i in cross_attention_idxs:
                        x, present = block.sample_with_context(x, source_image, mask,
                                                               cross_attention_layers[int(((i + 1) / 3) - 1)],
                                                               layer_past=None if past is None else past[i])
                    else:
                        x, present = block.sample(x, layer_past=None if past is None else past[i])
                    presents.append(present)
                x = self.ln_f(x)
                x = x[:, N-1].contiguous()
                logits = self.head_img(x)
            else:
                if past is None:
                    texts = self.tok_emb_txt(texts)
                    images = self.tok_emb_img(images)
                    texts = texts + self.pos_emb_txt(pos_texts)
                    images = images + self.pos_emb_img(pos_images)

                    if prompt is not None:
                        prompt = prompt + self.pos_emb_txt(pos_prompt)
                        texts = torch.cat([prompt, texts], dim=1).contiguous()

                    x = torch.cat([texts, images], axis=1).contiguous()
                else:
                    images = self.tok_emb_img(images)
                    x = images + self.pos_emb_img(pos_images)
                x = self.drop(x)

                # if past is not None and len(past) > 1:
                if past is not None:
                    past = torch.cat(past, dim=-2)
                presents = []

                # prepare mask
                mask = torch.zeros_like(x[0])
                mask[self.ctx_len_txt+P - 1:, :].fill_(1.0)
                mask = mask.unsqueeze(0)

                for i, block in enumerate(self.blocks):
                    if i in cross_attention_idxs:
                        x, present = block.sample_with_context(x, source_image, mask,
                                                               cross_attention_layers[int(((i + 1) / 3) - 1)],
                                                               layer_past=None if past is None else past[i])
                    else:
                        x, present = block.sample(x, layer_past=None if past is None else past[i])
                    presents.append(present)
                x = self.ln_f(x)
                x = x[:, -1].contiguous()
                logits = self.head_img(x)
            return logits, presents

    def from_ckpt(self, path: str) -> None:
        """Load weights from a Lightning-style checkpoint (expects 'state_dict')."""
        ckpt = torch.load(path, map_location='cpu')['state_dict']
        self.load_state_dict(ckpt, strict=True)
        print(f'{path} succesfully restored..')
class iGPT(nn.Module):
    """Image-only GPT over VQ code sequences, optionally class-conditional via
    a learned per-class SOS embedding."""

    def __init__(self,
                 vocab_size_img: int,
                 use_cls_cond: bool,
                 hparams: OmegaConf) -> None:
        super().__init__()
        self.use_cls_cond = use_cls_cond

        # sos token embedding: per-class embedding when conditional, otherwise a
        # single learned vector shared by all samples.
        if self.use_cls_cond:
            self.sos = nn.Embedding(hparams.n_classes, hparams.embed_dim)
        else:
            self.sos = nn.Parameter(torch.randn(1, 1, hparams.embed_dim))

        # input embedding
        self.tok_emb_img = nn.Embedding(vocab_size_img, hparams.embed_dim)
        self.pos_emb_img = nn.Embedding(hparams.ctx_len_img, hparams.embed_dim)

        self.drop = nn.Dropout(hparams.embd_pdrop)

        # transformer blocks (ctx_len + 1 leaves room for the SOS position)
        self.blocks = [Block(ctx_len=hparams.ctx_len_img + 1,
                             embed_dim=hparams.embed_dim,
                             n_heads=hparams.n_heads,
                             mlp_bias=hparams.mlp_bias,
                             attn_bias=hparams.attn_bias,
                             resid_pdrop=hparams.resid_pdrop,
                             attn_pdrop=hparams.attn_pdrop,
                             gelu_use_approx=hparams.gelu_use_approx) for i in range(1, hparams.n_layers+1)]
        self.blocks = nn.Sequential(*self.blocks)

        # head
        self.ln_f = nn.LayerNorm(hparams.embed_dim)
        self.head = nn.Linear(hparams.embed_dim, vocab_size_img, bias=False)

        self.ctx_len_img = hparams.ctx_len_img
        self.n_layers = hparams.n_layers

        self.apply(self._init_weights)

    def _init_weights(self, module: nn.Module) -> None:
        # GPT-style init: N(0, 0.02) weights, zero biases, unit LayerNorm scale.
        if isinstance(module, (nn.Linear, nn.Embedding)):
            module.weight.data.normal_(mean=0.0, std=0.02)
            if isinstance(module, nn.Linear) and module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    @torch.no_grad()
    def sampling(self,
                 sos: torch.FloatTensor,
                 codes: torch.LongTensor,
                 pos_codes: torch.LongTensor,
                 n_samples: int = 16,
                 use_fp16: bool = True,
                 past: Optional[torch.Tensor] = None) -> Tuple[torch.FloatTensor, List[torch.FloatTensor]]:
        """One autoregressive step; returns last-position logits and per-layer
        present k/v tensors for caching."""
        with autocast(enabled=use_fp16):
            if codes is None:
                # First step: only the SOS embedding is fed.
                assert past is None
                xs = self.drop(sos)
                presents = []
                for i, block in enumerate(self.blocks):
                    xs, present = block.sample(xs, layer_past=None)
                    presents.append(present)
                xs = self.ln_f(xs)
                logits = self.head(xs)[:, -1]
            else:
                if past is None:
                    # No cache yet: feed [SOS | all codes so far].
                    xs = self.tok_emb_img(codes) + self.pos_emb_img(pos_codes)
                    xs = torch.cat([sos, xs], dim=1)
                else:
                    # Cached: feed only the newest code token(s).
                    xs = self.tok_emb_img(codes) + self.pos_emb_img(pos_codes)
                xs = self.drop(xs)

                past = torch.cat(past, dim=-2) if past is not None else past
                presents = []
                for i, block in enumerate(self.blocks):
                    xs, present = block.sample(xs, layer_past=None if past is None else past[i])
                    presents.append(present)

                xs = self.ln_f(xs)
                logits = self.head(xs)[:, -1]
            return logits, presents

    def forward(self,
                codes: torch.LongTensor,
                labels: Optional[torch.LongTensor] = None) -> torch.FloatTensor:
        """Teacher-forcing pass: prepend SOS, drop the last code, predict each
        next code. Returns logits of shape (B, T, vocab_size_img)."""
        B, T = codes.shape
        xps = torch.arange(T, device=codes.device).repeat((B, 1))
        sos = self.sos.repeat((B, 1, 1)) if labels is None else self.sos(labels).unsqueeze(1)

        h = self.tok_emb_img(codes) + self.pos_emb_img(xps)
        h = torch.cat([sos, h[:, :-1]], dim=1).contiguous()

        h = self.drop(h)
        h = self.blocks(h)
        h = self.ln_f(h)
        logits = self.head(h)
        return logits

    def from_ckpt(self, path: str, strict: bool = True) -> None:
        """Load weights from a Lightning-style checkpoint (expects 'state_dict')."""
        ckpt = torch.load(path, map_location='cpu')['state_dict']
        self.load_state_dict(ckpt, strict=strict)
        print(f'{path} successfully restored..')
def build_tokenizer(path: str,
                    context_length: int = 64,
                    *args,
                    **kwargs):
    """Load a CharBPE tokenizer from `path` and configure fixed-length padding
    and truncation.

    Tries the KakaoBrain file layout (`bpe-16k-vocab.json` / `bpe-16k-merges.txt`)
    first, then falls back to the generic `vocab.json` / `merges.txt` pair.

    Args:
        path: directory containing the vocab/merges files.
        context_length: fixed sequence length for both padding and truncation.
        *args, **kwargs: forwarded to `CharBPETokenizer.from_file`.

    Returns:
        A configured `CharBPETokenizer` with a `[PAD]` special token added.
    """
    try:
        from_file = partial(CharBPETokenizer.from_file,
                            vocab_filename=os.path.join(path, 'bpe-16k-vocab.json'),
                            merges_filename=os.path.join(path, 'bpe-16k-merges.txt'),
                            unk_token='[UNK]')
        tokenizer = from_file(*args, **kwargs)
    except Exception:
        # Fall back to the generic file names. (Narrowed from a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        from_file = partial(CharBPETokenizer.from_file,
                            vocab_filename=os.path.join(path, 'vocab.json'),
                            merges_filename=os.path.join(path, 'merges.txt'),
                            unk_token='[UNK]')
        tokenizer = from_file(*args, **kwargs)

    tokenizer.add_special_tokens(['[PAD]'])
    tokenizer.enable_padding(length=context_length,
                             pad_id=tokenizer.token_to_id('[PAD]'))
    tokenizer.enable_truncation(max_length=context_length)
    print(f'{path} successfully restored..')
    return tokenizer
+from torch.utils.data.dataloader import DataLoader +from torch.utils.data.dataset import Dataset +from torch.utils.data.distributed import DistributedSampler +from torch.utils.data.sampler import RandomSampler, Sampler, SequentialSampler +from tqdm.auto import tqdm, trange +from torch.nn.utils.rnn import pad_sequence +import random + +from transformers.data.data_collator import DataCollator, DataCollatorWithPadding, default_data_collator +from transformers.file_utils import is_datasets_available, is_torch_tpu_available +from transformers.integrations import ( + default_hp_search_backend, + is_comet_available, + is_optuna_available, + is_ray_available, + is_tensorboard_available, + is_wandb_available, + run_hp_search_optuna, + run_hp_search_ray, +) + +from transformers.modeling_utils import PreTrainedModel +from transformers.optimization import AdamW, get_linear_schedule_with_warmup, get_constant_schedule_with_warmup +from transformers.tokenization_utils_base import PreTrainedTokenizerBase +from transformers.trainer_utils import ( + PREFIX_CHECKPOINT_DIR, + BestRun, + EvalPrediction, + EvaluationStrategy, + HPSearchBackend, + PredictionOutput, + TrainOutput, + default_compute_objective, + default_hp_space, + set_seed, +) +from transformers.training_args import TrainingArguments +from transformers.utils import logging + + +_use_native_amp = False +_use_apex = False +EPS = 1e-12 +INIT_GUMBEL_TEMP = 5.0 + +control_lst = ['positive', 'negative', 'neutral'] +Control_Temp = {'positive': 3967, 'negative':4633, 'neutral':8500} +control_Map = [torch.LongTensor([3967]), torch.LongTensor([4633]), torch.LongTensor([8500])] +sst_lst = [(0, 2), (1, 3), (4,)] +sst_standard = ["positive", "negative", "very positive", "very negative", "neutral"] +# Control_?Map = {j:i for i, j in enumerate(control_lst)} + +# Check if Pytorch version >= 1.6 to switch between Native AMP and Apex +if version.parse(torch.__version__) < version.parse("1.6"): + from transformers.file_utils import 
def helper_token2bpe(offsets):
    """Map BPE-piece character offsets back to word-token indices, per example.

    For each example (a list of (start, end) offsets, one per BPE piece),
    builds two parallel structures:
      * piece->word: the word index for each piece (None for empty pieces),
      * word->pieces: for each word, the list of piece indices composing it.
    A piece whose offset starts at 0 opens a new word; a non-zero start
    continues the current word.
    """
    results = []
    for piece_offsets in offsets:
        piece_to_word = []
        word_to_pieces = []
        current_word = -1
        for piece_idx, (start, end) in enumerate(piece_offsets):
            if end <= start:
                # Empty span: piece belongs to no word.
                piece_to_word.append(None)
                continue
            if start == 0:
                # New word begins.
                current_word += 1
                word_to_pieces.append([piece_idx])
            else:
                # Continuation of the current word.
                word_to_pieces[-1].append(piece_idx)
            piece_to_word.append(current_word)
        results.append((piece_to_word, word_to_pieces))
    return results
class SequentialDistributedSampler(Sampler):
    """Distributed sampler that hands each rank a *contiguous* slice of indices.

    Intended for eval/predict only: sequential shards make it trivial to
    concatenate per-rank outputs back into dataset order. As in
    `DistributedSampler`, the index list is padded by wrapping around so every
    rank receives exactly `num_samples` items, which keeps `gather`/`reduce`
    shapes identical across ranks.
    """

    def __init__(self, dataset, num_replicas=None, rank=None):
        # Fall back to the active process group when replica/rank not supplied.
        if num_replicas is None or rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            if num_replicas is None:
                num_replicas = torch.distributed.get_world_size()
            if rank is None:
                rank = torch.distributed.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.num_samples = int(math.ceil(len(self.dataset) / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas

    def __iter__(self):
        padded = list(range(len(self.dataset)))
        # Wrap around so the index list divides evenly across replicas.
        padded += padded[: self.total_size - len(padded)]
        assert len(padded) == self.total_size, (
            f"Indices length {len(padded)} and total size {self.total_size} mismatched")

        # Contiguous shard for this rank.
        start = self.rank * self.num_samples
        shard = padded[start: start + self.num_samples]
        assert len(shard) == self.num_samples, (
            f"Indices length {len(shard)} and sample number {self.num_samples} mismatched")

        return iter(shard)

    def __len__(self):
        return self.num_samples
Transformers.

    Args:
        model (:class:`~transformers.PreTrainedModel`, `optional`):
            The model to train, evaluate or use for predictions. If not provided, a ``model_init`` must be passed.
        args (:class:`~transformers.TrainingArguments`, `optional`):
            The arguments to tweak for training. Will default to a basic instance of :class:`~transformers.TrainingArguments`
            with the ``output_dir`` set to a directory named `tmp_trainer` in the current directory if not provided.
        data_collator (:obj:`DataCollator`, `optional`):
            The function to use to form a batch from a list of elements of :obj:`train_dataset` or
            :obj:`eval_dataset`. Will default to :func:`~transformers.default_data_collator` if no ``tokenizer`` is
            provided, an instance of :func:`~transformers.DataCollatorWithPadding` otherwise.
        train_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
            The dataset to use for training. If it is an :obj:`datasets.Dataset`, columns not accepted by the
            ``model.forward()`` method are automatically removed.
        eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
            The dataset to use for evaluation. If it is an :obj:`datasets.Dataset`, columns not accepted by the
            ``model.forward()`` method are automatically removed.
        tokenizer (:class:`PreTrainedTokenizerBase`, `optional`):
            The tokenizer used to preprocess the data. If provided, will be used to automatically pad the inputs the
            maximum length when batching inputs, and it will be saved along the model to make it easier to rerun an
            interrupted training or reuse the fine-tuned model.
        model_init (:obj:`Callable[[], PreTrainedModel]`, `optional`):
            A function that instantiates the model to be used. If provided, each call to
            :meth:`~transformers.Trainer.train` will start from a new instance of the model as given by this function.
        compute_metrics (:obj:`Callable[[EvalPrediction], Dict]`, `optional`):
            The function that will be used to compute metrics at evaluation. Must take a
            :class:`~transformers.EvalPrediction` and return a dictionary string to metric values.
        tb_writer (:obj:`SummaryWriter`, `optional`):
            Object to write to TensorBoard.
        optimizers (:obj:`Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR`, `optional`):
            A tuple containing the optimizer and the scheduler to use. Will default to an instance of
            :class:`~transformers.AdamW` on your model and a scheduler given by
            :func:`~transformers.get_linear_schedule_with_warmup` controlled by :obj:`args`.
        kwargs:
            Deprecated keyword arguments.
    """

    def __init__(
        self,
        model: Optional[PreTrainedModel] = None,
        model_gpt2 : Optional[PreTrainedModel] = None,
        args: TrainingArguments = None,
        data_collator: Optional[DataCollator] = None,
        train_dataset: Optional[Dataset] = None,
        eval_dataset: Optional[Dataset] = None,
        tokenizer: Optional["PreTrainedTokenizerBase"] = None,
        model_init: Callable[[], PreTrainedModel] = None,
        compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None,
        tb_writer: Optional["SummaryWriter"] = None,
        optimizers: Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR] = (None, None),
        task_mode: Optional[str] = None,
        use_dropout: Optional[bool] = False,
        distill: Optional[bool] = False,
        matching_objective:Optional[str]= None,
        finetuned_gpt2: Optional[PreTrainedModel] = None,
        **kwargs,
    ):
        if args is None:
            logger.info("No `TrainingArguments` passed, using the current path as `output_dir`.")
            args = TrainingArguments("tmp_trainer")
        self.args = args
        # Seed must be set before instantiating the model when using model
        set_seed(self.args.seed)
        assert (
            model is not None or model_init is not None
        ), "You must provide a model to use `Trainer`, either by using the `model` argument or the `model_init` argument."
        # NOTE(review): this unconditionally forbids `model_init`, contradicting the
        # assert just above and the docstring — presumably a deliberate restriction
        # of this prefix-tuning fork; confirm before relying on `model_init`.
        assert model_init is None
        self.model = model.to(args.device) if model is not None else None
        # Frozen base GPT-2 used alongside the prefix model.
        self.gpt2 = model_gpt2.to(args.device) if model_gpt2 is not None else None
        default_collator = default_data_collator if tokenizer is None else DataCollatorWithPadding(tokenizer)
        self.data_collator = data_collator if data_collator is not None else default_collator
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.tokenizer = tokenizer
        self.model_init = model_init
        self.compute_metrics = compute_metrics
        self.optimizer, self.lr_scheduler = optimizers
        self.task_mode = task_mode
        self.use_dropout = use_dropout

        # Best eval loss seen so far; used by the low-data early-stopping
        # checkpointing path inside `train`.
        self.curr_best_eval = 10000000.

        self.distill = distill
        if self.distill:
            # Distillation target: a fine-tuned GPT-2 whose states/logits the
            # prefix model is matched against (see `matching_objective`).
            self.matching_objective = matching_objective
            self.finetuned_gpt2 = finetuned_gpt2

        if model_init is not None and (self.optimizer is not None or self.lr_scheduler is not None):
            raise RuntimeError(
                "Passing a `model_init` is incompatible with providing the `optimizers` argument."
                "You should subclass `Trainer` and override the `create_optimizer_and_scheduler` method."
            )
        self.tb_writer = tb_writer
        self.log_history = []
        if "prediction_loss_only" in kwargs:
            warnings.warn(
                "Passing `prediction_loss_only` as a keyword argument is deprecated and won't be possible in a future version. Use `args.prediction_loss_only` instead.",
                FutureWarning,
            )
            self.args.prediction_loss_only = kwargs.pop("prediction_loss_only")
        assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."

        if tb_writer is None and is_tensorboard_available() and self.is_world_process_zero():
            self.tb_writer = SummaryWriter(log_dir=self.args.logging_dir)
        if not is_tensorboard_available():
            logger.warning(
                "You are instantiating a Trainer but Tensorboard is not installed. You should consider installing it."
            )

        # Will be set to True by `self._setup_loggers()` on first call to `self.log()`.
        self._loggers_initialized = False

        # Create output directory if needed
        if self.is_world_process_zero():
            os.makedirs(self.args.output_dir, exist_ok=True)
        if is_torch_tpu_available():
            # Set an xla_device flag on the model's config.
            # We'll find a more elegant and not need to do this in the future.
            self.model.config.xla_device = True
        if not callable(self.data_collator) and callable(getattr(self.data_collator, "collate_batch", None)):
            self.data_collator = self.data_collator.collate_batch
            warnings.warn(
                (
                    "The `data_collator` should now be a simple callable (function, class with `__call__`), classes "
                    + "with a `collate_batch` are deprecated and won't be supported in a future version."
                ),
                FutureWarning,
            )

        if is_datasets_available():
            if isinstance(train_dataset, datasets.Dataset):
                self._remove_unused_columns(self.train_dataset, description="training")
            if isinstance(eval_dataset, datasets.Dataset):
                self._remove_unused_columns(self.eval_dataset, description="evaluation")

        self.global_step = None
        self.epoch = None
        self.total_flos = None
        if self.args.fp16 and _use_native_amp:
            self.scaler = torch.cuda.amp.GradScaler()
        self.hp_search_backend = None
        self.use_tune_checkpoints = False
        if self.args.label_names is None:
            self.args.label_names = (["labels"]
            )

    def _remove_unused_columns(self, dataset: "datasets.Dataset", description: Optional[str] = None):
        # Drop dataset columns that `self.model.forward` cannot accept (in place).
        if not self.args.remove_unused_columns:
            return
        # Inspect model forward signature to keep only the arguments it accepts.
        signature = inspect.signature(self.model.forward)
        signature_columns = list(signature.parameters.keys())
        # Labels may be named label or label_ids, the default data collator handles that.
+ signature_columns += ["label", "label_ids"] + columns = [k for k in signature_columns if k in dataset.column_names] + ignored_columns = list(set(dataset.column_names) - set(signature_columns)) + dset_description = "" if description is None else f"in the {description} set " + logger.info( + f"The following columns {dset_description}don't have a corresponding argument in `{self.model.__class__.__name__}.forward` and have been ignored: {', '.join(ignored_columns)}." + ) + dataset.set_format(type=dataset.format["type"], columns=columns) + + def _get_train_sampler(self) -> Optional[torch.utils.data.sampler.Sampler]: + if isinstance(self.train_dataset, torch.utils.data.IterableDataset): + return None + elif is_torch_tpu_available(): + return get_tpu_sampler(self.train_dataset) + else: + return ( + RandomSampler(self.train_dataset) + if self.args.local_rank == -1 + else DistributedSampler(self.train_dataset) + ) + + def get_train_dataloader(self) -> DataLoader: + """ + Returns the training :class:`~torch.utils.data.DataLoader`. + + Will use no sampler if :obj:`self.train_dataset` is a :obj:`torch.utils.data.IterableDataset`, a random sampler + (adapted to distributed training if necessary) otherwise. + + Subclass and override this method if you want to inject some custom behavior. 
+ """ + if self.train_dataset is None: + raise ValueError("Trainer: training requires a train_dataset.") + train_sampler = self._get_train_sampler() + + return DataLoader( + self.train_dataset, + batch_size=self.args.train_batch_size, + sampler=train_sampler, + collate_fn=self.data_collator, + drop_last=self.args.dataloader_drop_last, + num_workers=self.args.dataloader_num_workers, + worker_init_fn=np.random.seed(self.args.seed) + ) + + def _get_eval_sampler(self, eval_dataset: Dataset) -> Optional[torch.utils.data.sampler.Sampler]: + if isinstance(eval_dataset, torch.utils.data.IterableDataset): + return None + elif is_torch_tpu_available(): + return SequentialDistributedSampler(eval_dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal()) + elif self.args.local_rank != -1: + return SequentialDistributedSampler(eval_dataset) + else: + return SequentialSampler(eval_dataset) + + def get_eval_dataloader(self, eval_dataset: Optional[Dataset] = None) -> DataLoader: + """ + Returns the evaluation :class:`~torch.utils.data.DataLoader`. + + Will use no sampler if :obj:`self.eval_dataset` is a :obj:`torch.utils.data.IterableDataset`, a sequential + sampler (adapted to distributed training if necessary) otherwise. + + Subclass and override this method if you want to inject some custom behavior. + + Args: + eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`): + If provided, will override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`, columns not + accepted by the ``model.forward()`` method are automatically removed. 
+ """ + if eval_dataset is None and self.eval_dataset is None: + raise ValueError("Trainer: evaluation requires an eval_dataset.") + elif eval_dataset is not None and is_datasets_available() and isinstance(eval_dataset, datasets.Dataset): + self._remove_unused_columns(eval_dataset, description="evaluation") + eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset + eval_sampler = self._get_eval_sampler(eval_dataset) + + return DataLoader( + eval_dataset, + sampler=eval_sampler, + batch_size=self.args.eval_batch_size, + collate_fn=self.data_collator, + drop_last=self.args.dataloader_drop_last, + num_workers=self.args.dataloader_num_workers, + worker_init_fn=np.random.seed(self.args.seed) + ) + + def get_test_dataloader(self, test_dataset: Dataset) -> DataLoader: + """ + Returns the test :class:`~torch.utils.data.DataLoader`. + + Will use no sampler if :obj:`test_dataset` is a :obj:`torch.utils.data.IterableDataset`, a sequential + sampler (adapted to distributed training if necessary) otherwise. + + Subclass and override this method if you want to inject some custom behavior. + + Args: + eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`): + The test dataset to use. If it is an :obj:`datasets.Dataset`, columns not accepted by the + ``model.forward()`` method are automatically removed. + """ + if is_datasets_available() and isinstance(test_dataset, datasets.Dataset): + self._remove_unused_columns(test_dataset, description="test") + test_sampler = self._get_eval_sampler(test_dataset) + + # We use the same batch_size as for eval. + return DataLoader( + test_dataset, + sampler=test_sampler, + batch_size=self.args.eval_batch_size, + collate_fn=self.data_collator, + drop_last=self.args.dataloader_drop_last, + worker_init_fn=np.random.seed(self.args.seed) + ) + + def create_optimizer_and_scheduler(self, num_training_steps: int): + """ + Setup the optimizer and the learning rate scheduler. 
+ + We provide a reasonable default that works well. If you want to use something else, you can pass a tuple in the + Trainer's init through :obj:`optimizers`, or subclass and override this method in a subclass. + """ + if self.optimizer is None: + no_decay = ["bias", "LayerNorm.weight"] + optimizer_grouped_parameters = [ + { + "params": [p for n, p in self.model.named_parameters() if (not any(nd in n for nd in no_decay)) and p.requires_grad], + "weight_decay": self.args.weight_decay, + }, + { + "params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay) and p.requires_grad], + "weight_decay": 0.0, + }, + ] + + self.optimizer = AdamW( + optimizer_grouped_parameters, + lr=self.args.learning_rate, + betas=(self.args.adam_beta1, self.args.adam_beta2), + eps=self.args.adam_epsilon, + ) + + + # for n, p in self.model.named_parameters(): + # print(n,p.requires_grad) + print(self.optimizer.state_dict()) + if self.lr_scheduler is None: + self.lr_scheduler = get_linear_schedule_with_warmup( + self.optimizer, num_warmup_steps=self.args.warmup_steps, num_training_steps=num_training_steps + ) + + + def setup_wandb(self): + """ + Setup the optional Weights & Biases (`wandb`) integration. + + One can subclass and override this method to customize the setup if needed. Find more information + `here `__. 
You can also override the following environment variables:

        Environment:
            WANDB_WATCH:
                (Optional, ["gradients", "all", "false"]) "gradients" by default, set to "false" to disable gradient logging
                or "all" to log gradients and parameters
            WANDB_PROJECT:
                (Optional): str - "huggingface" by default, set this to a custom string to store results in a different project
            WANDB_DISABLED:
                (Optional): boolean - defaults to false, set to "true" to disable wandb entirely
        """
        # Back-compat shim: defer to a subclass's deprecated `_setup_wandb` if present.
        if hasattr(self, "_setup_wandb"):
            warnings.warn(
                "The `_setup_wandb` method is deprecated and won't be called in a future version, define `setup_wandb` in your subclass.",
                FutureWarning,
            )
            return self._setup_wandb()

        # Only the main process talks to the wandb service.
        if self.is_world_process_zero():
            logger.info(
                'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
            )
            try:
                combined_dict = {**self.model.config.to_dict(), **self.args.to_sanitized_dict()}
            except AttributeError:
                # in case the model has no config
                combined_dict = {**self.args.to_sanitized_dict()}
            wandb.init(
                project=os.getenv("WANDB_PROJECT", "huggingface"), config=combined_dict, name=self.args.run_name
            )
            # keep track of model topology and gradients, unsupported on TPU
            if not is_torch_tpu_available() and os.getenv("WANDB_WATCH") != "false":
                wandb.watch(
                    self.model, log=os.getenv("WANDB_WATCH", "gradients"), log_freq=max(100, self.args.logging_steps)
                )

    def setup_comet(self):
        """
        Setup the optional Comet.ml integration.

        Environment:
            COMET_MODE:
                (Optional): str - "OFFLINE", "ONLINE", or "DISABLED"
            COMET_PROJECT_NAME:
                (Optional): str - Comet.ml project name for experiments
            COMET_OFFLINE_DIRECTORY:
                (Optional): str - folder to use for saving offline experiments when `COMET_MODE` is "OFFLINE"

        For a number of configurable items in the environment,
        see `here `__
        """
        # Only the master process creates a Comet experiment.
        if self.is_world_master():
            comet_mode = os.getenv("COMET_MODE", "ONLINE").upper()
            args = {"project_name": os.getenv("COMET_PROJECT_NAME", "huggingface")}
            experiment = None
            if comet_mode == "ONLINE":
                experiment = comet_ml.Experiment(**args)
                logger.info("Automatic Comet.ml online logging enabled")
            elif comet_mode == "OFFLINE":
                args["offline_directory"] = os.getenv("COMET_OFFLINE_DIRECTORY", "./")
                experiment = comet_ml.OfflineExperiment(**args)
                logger.info("Automatic Comet.ml offline logging enabled; use `comet upload` when finished")
            if experiment is not None:
                experiment._set_model_graph(self.model, framework="transformers")
                experiment._log_parameters(self.args, prefix="args/", framework="transformers")
                experiment._log_parameters(self.model.config, prefix="config/", framework="transformers")

    def num_examples(self, dataloader: DataLoader) -> int:
        """
        Helper to get number of samples in a :class:`~torch.utils.data.DataLoader` by accessing its dataset.
        """
        return len(dataloader.dataset)

    def _setup_loggers(self):
        # Lazily initialize wandb/comet integrations exactly once (first `log()` call).
        if self._loggers_initialized:
            return
        if is_wandb_available():
            self.setup_wandb()
        elif os.environ.get("WANDB_DISABLED") != "true":
            logger.info(
                "You are instantiating a Trainer but W&B is not installed. To use wandb logging, "
                "run `pip install wandb; wandb login` see https://docs.wandb.com/huggingface."
+ ) + if is_comet_available(): + self.setup_comet() + elif os.environ.get("COMET_MODE") != "DISABLED": + logger.info( + "To use comet_ml logging, run `pip/conda install comet_ml` " + "see https://www.comet.ml/docs/python-sdk/huggingface/" + ) + self._loggers_initialized = True + + def _hp_search_setup(self, trial: Union["optuna.Trial", Dict[str, Any]]): + """ HP search setup code """ + if self.hp_search_backend is None or trial is None: + return + params = self.hp_space(trial) if self.hp_search_backend == HPSearchBackend.OPTUNA else trial + for key, value in params.items(): + if not hasattr(self.args, key): + raise AttributeError( + f"Trying to set {key} in the hyperparameter search but there is no corresponding field in `TrainingArguments`." + ) + old_attr = getattr(self.args, key, None) + # Casting value to the proper type + if old_attr is not None: + value = type(old_attr)(value) + setattr(self.args, key, value) + if self.hp_search_backend == HPSearchBackend.OPTUNA: + logger.info("Trial:", trial.params) + + def _report_to_hp_search( + self, trial: Union["optuna.Trial", Dict[str, Any]], epoch: int, metrics: Dict[str, float] + ): + if self.hp_search_backend is None or trial is None: + return + self.objective = self.compute_objective(metrics) + if self.hp_search_backend == HPSearchBackend.OPTUNA: + trial.report(self.objective, epoch) + if trial.should_prune(): + raise optuna.TrialPruned() + elif self.hp_search_backend == HPSearchBackend.RAY: + if self.global_step % self.args.save_steps == 0: + self._tune_save_checkpoint() + tune.report(objective=self.objective, **metrics) + + def _tune_save_checkpoint(self): + if not self.use_tune_checkpoints: + return + with tune.checkpoint_dir(step=self.global_step) as checkpoint_dir: + self.args.output_dir = checkpoint_dir + output_dir = os.path.join(self.args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{self.global_step}") + self.save_model(output_dir) + if self.is_world_master(): + torch.save(self.optimizer.state_dict(), 
                           os.path.join(output_dir, "optimizer.pt"))
                torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))


    def train(self, model_path: Optional[str] = None, trial: Union["optuna.Trial", Dict[str, Any]] = None):
        """
        Main training entry point.

        Args:
            model_path (:obj:`str`, `optional`):
                Local path to the model if the model to train has been instantiated from a local path. If present,
                training will resume from the optimizer/scheduler states loaded here.
            trial (:obj:`optuna.Trial` or :obj:`Dict[str, Any]`, `optional`):
                The trial run or the hyperparameter dictionary for hyperparameter search.
        """
        # This might change the seed so needs to run first.
        self._hp_search_setup(trial)

        # Model re-init
        if self.model_init is not None:
            # Seed must be set before instantiating the model when using model_init.
            set_seed(self.args.seed)
            model = self.model_init()
            self.model = model.to(self.args.device)

            # Reinitializes optimizer and scheduler
            self.optimizer, self.lr_scheduler = None, None

        # Data loader and number of training steps
        train_dataloader = self.get_train_dataloader()
        num_update_steps_per_epoch = len(train_dataloader) // self.args.gradient_accumulation_steps
        num_update_steps_per_epoch = max(num_update_steps_per_epoch, 1)
        # max_steps, when set, caps training and defines the epoch count;
        # otherwise derive total steps from num_train_epochs.
        if self.args.max_steps > 0:
            t_total = self.args.max_steps
            num_train_epochs = self.args.max_steps // num_update_steps_per_epoch + int(
                self.args.max_steps % num_update_steps_per_epoch > 0
            )
        else:
            t_total = int(num_update_steps_per_epoch * self.args.num_train_epochs)
            num_train_epochs = self.args.num_train_epochs
            self.args.max_steps = t_total

        self.create_optimizer_and_scheduler(num_training_steps=t_total)

        # Check if saved optimizer or scheduler states exist
        if (
            model_path is not None
            and os.path.isfile(os.path.join(model_path, "optimizer.pt"))
            and os.path.isfile(os.path.join(model_path, "scheduler.pt"))
        ):
            # Load in optimizer and scheduler states
            self.optimizer.load_state_dict(
                torch.load(os.path.join(model_path, "optimizer.pt"), map_location=self.args.device)
            )
            self.lr_scheduler.load_state_dict(torch.load(os.path.join(model_path, "scheduler.pt")))

        model = self.model
        if self.args.fp16 and _use_apex:
            if not is_apex_available():
                raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
            model, self.optimizer = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)

        # multi-gpu training (should be after apex fp16 initialization)
        if self.args.n_gpu > 1:
            model = torch.nn.DataParallel(model)

        # Distributed training (should be after apex fp16 initialization)
        if self.args.local_rank != -1:
            model = torch.nn.parallel.DistributedDataParallel(
                model,
                device_ids=[self.args.local_rank],
                output_device=self.args.local_rank,
                find_unused_parameters=True,
            )

        if self.tb_writer is not None:
            self.tb_writer.add_text("args", self.args.to_json_string())
            self.tb_writer.add_hparams(self.args.to_sanitized_dict(), metric_dict={})

        # Train!
        if is_torch_tpu_available():
            total_train_batch_size = self.args.train_batch_size * xm.xrt_world_size()
        else:
            total_train_batch_size = (
                self.args.train_batch_size
                * self.args.gradient_accumulation_steps
                * (torch.distributed.get_world_size() if self.args.local_rank != -1 else 1)
            )
        logger.info("***** Running training *****")
        logger.info(" Num examples = %d", self.num_examples(train_dataloader))
        logger.info(" Num Epochs = %d", num_train_epochs)
        logger.info(" Instantaneous batch size per device = %d", self.args.per_device_train_batch_size)
        logger.info(" Total train batch size (w. parallel, distributed & accumulation) = %d", total_train_batch_size)
        logger.info(" Gradient Accumulation steps = %d", self.args.gradient_accumulation_steps)
        logger.info(" Total optimization steps = %d", t_total)

        self.global_step = 0
        self.epoch = 0
        self.total_flos = 0
        epochs_trained = 0
        steps_trained_in_current_epoch = 0
        # Check if continuing training from a checkpoint
        if model_path is not None:
            # set global_step to global_step of last saved checkpoint from model path
            try:
                self.global_step = int(model_path.split("-")[-1].split(os.path.sep)[0])
                # print(model, model.module)
                if self.args.n_gpu > 1:
                    self.total_flos = getattr(model.module.config, "total_flos", 0)
                else:
                    self.total_flos = getattr(model.config, "total_flos", 0)

                epochs_trained = self.global_step // num_update_steps_per_epoch
                steps_trained_in_current_epoch = self.global_step % (num_update_steps_per_epoch)

                logger.info(" Continuing training from checkpoint, will skip to saved global_step")
                logger.info(" Continuing training from epoch %d", epochs_trained)
                logger.info(" Continuing training from global step %d", self.global_step)
                logger.info(" Continuing training from %d non-embedding floating-point operations", self.total_flos)
                logger.info(" Will skip the first %d steps in the first epoch", steps_trained_in_current_epoch)
            except ValueError:
                # The path did not end in a step number: start from scratch.
                self.global_step = 0
                self.total_flos = 0
                logger.info(" Starting fine-tuning.")

        tr_loss = torch.tensor(0.0).to(self.args.device)
        logging_loss_scalar = 0.0
        model.zero_grad()
        disable_tqdm = self.args.disable_tqdm or not self.is_local_process_zero()
        train_pbar = trange(epochs_trained, int(np.ceil(num_train_epochs)), desc="Epoch", disable=disable_tqdm)
        for epoch in range(epochs_trained, int(np.ceil(num_train_epochs))):
            if isinstance(train_dataloader, DataLoader) and isinstance(train_dataloader.sampler, DistributedSampler):
                train_dataloader.sampler.set_epoch(epoch)

            if is_torch_tpu_available():
                parallel_loader = pl.ParallelLoader(train_dataloader, [self.args.device]).per_device_loader(
                    self.args.device
                )
                epoch_iterator = parallel_loader
            else:
                epoch_iterator = train_dataloader

            # Reset the past mems state at the beginning of each epoch if necessary.
            if self.args.past_index >= 0:
                self._past = None

            epoch_pbar = tqdm(epoch_iterator, desc="Iteration", disable=disable_tqdm)
            for step, inputs in enumerate(epoch_iterator):

                # Skip past any already trained steps if resuming training
                if steps_trained_in_current_epoch > 0:
                    steps_trained_in_current_epoch -= 1
                    epoch_pbar.update(1)
                    continue

                tr_loss += self.training_step(model, inputs)

                self.total_flos += self.floating_point_ops(inputs)

                # Optimizer step: either at an accumulation boundary, or at the
                # final (short) step of an epoch shorter than one accumulation cycle.
                if (step + 1) % self.args.gradient_accumulation_steps == 0 or (
                    # last step in epoch but step is always smaller than gradient_accumulation_steps
                    len(epoch_iterator) <= self.args.gradient_accumulation_steps
                    and (step + 1) == len(epoch_iterator)
                ):
                    if self.args.fp16 and _use_native_amp:
                        self.scaler.unscale_(self.optimizer)
                        torch.nn.utils.clip_grad_norm_(model.parameters(), self.args.max_grad_norm)
                    elif self.args.fp16 and _use_apex:
                        torch.nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), self.args.max_grad_norm)
                    else:
                        torch.nn.utils.clip_grad_norm_(model.parameters(), self.args.max_grad_norm)

                    if is_torch_tpu_available():
                        xm.optimizer_step(self.optimizer)
                    elif self.args.fp16 and _use_native_amp:
                        self.scaler.step(self.optimizer)
                        self.scaler.update()
                    else:
                        self.optimizer.step()

                    # URGENT
                    # Scheduler stepped once per optimizer update (not per batch).
                    self.lr_scheduler.step()
                    model.zero_grad()
                    self.global_step += 1
                    self.epoch = epoch + (step + 1) / len(epoch_iterator)


                    if (self.args.logging_steps > 0 and self.global_step % self.args.logging_steps == 0) or (
                        self.global_step == 1 and self.args.logging_first_step
                    ):
                        logs: Dict[str, float] = {}
                        tr_loss_scalar = tr_loss.item()
                        # Mean loss over the last logging window.
                        logs["loss"] = (tr_loss_scalar - logging_loss_scalar) / self.args.logging_steps
                        # backward compatibility for pytorch schedulers
                        logs["learning_rate"] = (
                            self.lr_scheduler.get_last_lr()[0]
                            if version.parse(torch.__version__) >= version.parse("1.4")
                            else self.lr_scheduler.get_lr()[0]
                        )
                        logging_loss_scalar = tr_loss_scalar

                        self.log(logs)

                    # print(self.args.evaluation_strategy == EvaluationStrategy.STEPS )
                    # print(self.global_step % self.args.eval_steps == 0)
                    # print()

                    if (
                        self.args.evaluation_strategy == EvaluationStrategy.STEPS
                        and self.global_step % self.args.eval_steps == 0
                    ):
                        metrics = self.evaluate()
                        self._report_to_hp_search(trial, epoch, metrics)

                        #############################EARLY STOPPING########################
                        # Best-eval checkpointing, enabled by naming the output dir
                        # with 'lowdata' or 'earlystop'.
                        if 'lowdata' in self.args.output_dir or 'earlystop' in self.args.output_dir:
                            self.save_based_on_eval = True
                        else:
                            self.save_based_on_eval = False
                        print('if not see a line lowdata: below, then did not go into low data. ')
                        if self.save_based_on_eval and metrics["eval_loss"] < self.curr_best_eval:
                            print('lowdata:', self.global_step, self.curr_best_eval, metrics["eval_loss"],
                                  'perplexity={}'.format(math.exp(metrics["eval_loss"])))
                            self.curr_best_eval = metrics["eval_loss"]
                            if hasattr(model, "module"):
                                assert (
                                    model.module is self.model
                                ), f"Module {model.module} should be a reference to self.model"
                            else:
                                assert model is self.model, f"Model {model} should be a reference to self.model"
                            # Save model checkpoint
                            output_dir_name = os.path.basename(self.args.output_dir)
                            checkpoint_folder = f"{output_dir_name}-{PREFIX_CHECKPOINT_DIR}-{self.global_step}"
                            if self.hp_search_backend is not None and trial is not None:
                                run_id = (
                                    trial.number
                                    if self.hp_search_backend == HPSearchBackend.OPTUNA
                                    else tune.get_trial_id()
                                )
                                checkpoint_folder += f"-run-{run_id}"
                            output_dir = os.path.join(self.args.output_dir, checkpoint_folder)

                            self.store_flos()
                            print('saving to output_dir', output_dir)
                            self.save_model(output_dir)

                            if self.is_world_process_zero():
                                self._rotate_checkpoints(use_mtime=True)
                        #####################################################

                    if self.args.save_steps > 0 and self.global_step % self.args.save_steps == 0:
                        print('saving model at a checkpoint!!')
                        # In all cases (even distributed/parallel), self.model is always a reference
                        # to the model we want to save.
                        if hasattr(model, "module"):
                            assert (
                                model.module is self.model
                            ), f"Module {model.module} should be a reference to self.model"
                        else:
                            assert model is self.model, f"Model {model} should be a reference to self.model"
                        # Save model checkpoint
                        checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.global_step}"
                        if self.hp_search_backend is not None and trial is not None:
                            run_id = (
                                trial.number
                                if self.hp_search_backend == HPSearchBackend.OPTUNA
                                else tune.get_trial_id()
                            )
                            checkpoint_folder += f"-run-{run_id}"
                        output_dir = os.path.join(self.args.output_dir, checkpoint_folder)

                        self.store_flos()

                        self.save_model(output_dir)

                        if self.is_world_process_zero():
                            self._rotate_checkpoints(use_mtime=True)

                        if is_torch_tpu_available():
                            xm.rendezvous("saving_optimizer_states")
                            xm.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
                            xm.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
                        elif self.is_world_process_zero():
                            torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
                            torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))

                epoch_pbar.update(1)
                if self.args.max_steps > 0 and self.global_step >= self.args.max_steps:
                    break
            epoch_pbar.close()
            train_pbar.update(1)

            if self.args.evaluation_strategy == EvaluationStrategy.EPOCH:
                metrics = self.evaluate()
                self._report_to_hp_search(trial, epoch, metrics)

            if self.args.tpu_metrics_debug or self.args.debug:
                if is_torch_tpu_available():
                    # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
                    xm.master_print(met.metrics_report())
                else:
                    logger.warning(
                        "You enabled PyTorch/XLA debug metrics but you don't have a TPU "
                        "configured. Check your training configuration if this is unexpected."
                    )
            if self.args.max_steps > 0 and self.global_step >= self.args.max_steps:
                break

        train_pbar.close()
        if self.tb_writer:
            self.tb_writer.close()
        if self.args.past_index and hasattr(self, "_past"):
            # Clean the state at the end of training
            delattr(self, "_past")

        logger.info("\n\nTraining completed. Do not forget to share your model on huggingface.co/models =)\n\n")
        return TrainOutput(self.global_step, tr_loss.item() / self.global_step)

    def hyperparameter_search(
        self,
        hp_space: Optional[Callable[["optuna.Trial"], Dict[str, float]]] = None,
        compute_objective: Optional[Callable[[Dict[str, float]], float]] = None,
        n_trials: int = 20,
        direction: str = "minimize",
        backend: Optional[Union["str", HPSearchBackend]] = None,
        **kwargs
    ) -> BestRun:
        """
        Launch an hyperparameter search using ``optuna`` or ``Ray Tune``. The optimized quantity is determined by
        :obj:`compute_objectie`, which defaults to a function returning the evaluation loss when no metric is provided,
        the sum of all metrics otherwise.

        .. warning::

            To use this method, you need to have provided a ``model_init`` when initializing your
            :class:`~transformers.Trainer`: we need to reinitialize the model at each new run. This is incompatible
            with the ``optimizers`` argument, so you need to subclass :class:`~transformers.Trainer` and override the
            method :meth:`~transformers.Trainer.create_optimizer_and_scheduler` for custom optimizer/scheduler.

        Args:
            hp_space (:obj:`Callable[["optuna.Trial"], Dict[str, float]]`, `optional`):
                A function that defines the hyperparameter search space.
Will default to + :func:`~transformers.trainer_utils.default_hp_space_optuna` or + :func:`~transformers.trainer_utils.default_hp_space_ray` depending on your backend. + compute_objective (:obj:`Callable[[Dict[str, float]], float]`, `optional`): + A function computing the objective to minimize or maximize from the metrics returned by the + :obj:`evaluate` method. Will default to :func:`~transformers.trainer_utils.default_compute_objective`. + n_trials (:obj:`int`, `optional`, defaults to 100): + The number of trial runs to test. + direction(:obj:`str`, `optional`, defaults to :obj:`"minimize"`): + Whether to optimize greater or lower objects. Can be :obj:`"minimize"` or :obj:`"maximize"`, you should + pick :obj:`"minimize"` when optimizing the validation loss, :obj:`"maximize"` when optimizing one or + several metrics. + backend(:obj:`str` or :class:`~transformers.training_utils.HPSearchBackend`, `optional`): + The backend to use for hyperparameter search. Will default to optuna or Ray Tune, depending on which + one is installed. If both are installed, will default to optuna. + kwargs: + Additional keyword arguments passed along to :obj:`optuna.create_study` or :obj:`ray.tune.run`. For + more information see: + + - the documentation of `optuna.create_study `__ + - the documentation of `tune.run `__ + + Returns: + :class:`transformers.trainer_utils.BestRun`: All the informations about the best run. + """ + if backend is None: + backend = default_hp_search_backend() + if backend is None: + raise RuntimeError( + "At least one of optuna or ray should be installed. " + "To install optuna run `pip install optuna`." + "To install ray run `pip install ray[tune]`." + ) + backend = HPSearchBackend(backend) + if backend == HPSearchBackend.OPTUNA and not is_optuna_available(): + raise RuntimeError("You picked the optuna backend, but it is not installed. 
def log(self, logs: Dict[str, float], iterator: Optional[tqdm] = None) -> None:
    """
    Log :obj:`logs` on the various objects watching training.

    Subclass and override this method to inject custom behavior.

    Args:
        logs (:obj:`Dict[str, float]`):
            The values to log.
        iterator (:obj:`tqdm`, `optional`):
            A potential tqdm progress bar to write the logs on.
    """
    # Set up loggers like W&B or Comet ML
    self._setup_loggers()

    # Backward compatibility: defer to a deprecated `_log` override if one exists.
    if hasattr(self, "_log"):
        warnings.warn(
            "The `_log` method is deprecated and won't be called in a future version, define `log` in your subclass.",
            FutureWarning,
        )
        return self._log(logs, iterator=iterator)

    if self.epoch is not None:
        logs["epoch"] = self.epoch
    if self.total_flos is not None:
        if self.args.local_rank != -1:
            # Distributed run: aggregate FLOs across processes before reporting.
            total_flos = distributed_broadcast_scalars([self.total_flos]).sum().item()
        else:
            total_flos = self.total_flos
        if total_flos > 0:
            # FIX: log the reduced `total_flos` computed above, not the raw
            # per-process attribute — otherwise distributed runs under-report.
            logs["total_flos"] = total_flos
    if self.global_step is None:
        # when logging evaluation metrics without training
        self.global_step = 0
    if self.tb_writer:
        for k, v in logs.items():
            if isinstance(v, (int, float)):
                self.tb_writer.add_scalar(k, v, self.global_step)
            else:
                logger.warning(
                    "Trainer is attempting to log a value of "
                    '"%s" of type %s for key "%s" as a scalar. '
                    "This invocation of Tensorboard's writer.add_scalar() "
                    "is incorrect so we dropped this attribute.",
                    v,
                    type(v),
                    k,
                )
        self.tb_writer.flush()
    if is_wandb_available():
        if self.is_world_process_zero():
            wandb.log(logs, step=self.global_step)
    if is_comet_available():
        if self.is_world_process_zero():
            experiment = comet_ml.config.get_global_experiment()
            if experiment is not None:
                experiment._log_metrics(logs, step=self.global_step, epoch=self.epoch, framework="transformers")
    output = {**logs, **{"step": self.global_step}}
    if self.is_world_process_zero():
        self.log_history.append(output)
    if iterator is not None:
        iterator.write(output)
    else:
        print(output)
+ """ + for k, v in inputs.items(): + if isinstance(v, torch.Tensor): + inputs[k] = v.to(self.args.device) + + if self.args.past_index >= 0 and self._past is not None: + assert False + inputs["mems"] = self._past + + return inputs + + def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor: + """ + Perform a training step on a batch of inputs. + + Subclass and override to inject custom behavior. + + Args: + model (:obj:`nn.Module`): + The model to train. + inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`): + The inputs and targets of the model. + + The dictionary will be unpacked before being fed to the model. Most models expect the targets under the + argument :obj:`labels`. Check your model's documentation for all accepted arguments. + + Return: + :obj:`torch.Tensor`: The tensor with training loss on this batch. + """ + if hasattr(self, "_training_step"): + warnings.warn( + "The `_training_step` method is deprecated and won't be called in a future version, define `training_step` in your subclass.", + FutureWarning, + ) + return self._training_step(model, inputs, self.optimizer) + + model.train() + if self.use_dropout: + if self.gpt2 is not None: + self.gpt2.train() + inputs = self._prepare_inputs(inputs) + + if self.args.fp16 and _use_native_amp: + with autocast(): + if self.distill: + loss = self.compute_loss_distill(model, inputs, gpt2_model=self.gpt2, ) + else: + loss = self.compute_loss(model, inputs, gpt2_model=self.gpt2) + else: + if self.distill: + loss = self.compute_loss_distill(model, inputs, gpt2_model=self.gpt2) + else: + loss = self.compute_loss(model, inputs, gpt2_model=self.gpt2) + + if self.args.n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu parallel training + + if self.args.gradient_accumulation_steps > 1: + loss = loss / self.args.gradient_accumulation_steps + + if self.args.fp16 and _use_native_amp: + self.scaler.scale(loss).backward() + elif self.args.fp16 and _use_apex: + 
def compute_loss_distill(self, model, inputs, gpt2_model=None):
    """
    Distillation loss between the frozen fine-tuned GPT-2 teacher and the prefix model.

    The teacher (``self.finetuned_gpt2``) runs under ``no_grad``; the student
    (``model``) is matched against it according to ``self.matching_objective``:

    - ``'kl'``: token-level KL divergence between output distributions.
    - ``'logits'``: L2 norm of the raw logit difference per position.
    - ``'last_layer'``: L2 norm of the last hidden-state difference per position.

    Returns:
        A scalar tensor: per-position losses summed over the sequence, then
        averaged over the batch.

    Raises:
        ValueError: if ``self.matching_objective`` is not one of the above.
    """
    # Teacher forward pass needs no gradients — it is frozen during distillation.
    with torch.no_grad():
        output_finetuned = self.finetuned_gpt2(**inputs)

    outputs = model(**inputs, gpt2_model=gpt2_model)
    # Save past state if it exists
    if self.args.past_index >= 0:
        self._past = outputs[self.args.past_index]

    if self.matching_objective == 'kl':
        distrib_finetuned = torch.log_softmax(output_finetuned.logits, dim=-1)  # bsz, seqlen, vocab
        distrib_prefix = torch.log_softmax(outputs.logits, dim=-1)  # bsz, seqlen, vocab
        # KL(teacher || student) computed from log-probabilities.
        loss = torch.sum(distrib_finetuned.exp() * (distrib_finetuned - distrib_prefix), dim=-1)  # bsz, seqlen

    elif self.matching_objective == 'logits':
        loss = torch.norm(output_finetuned.logits - outputs.logits, dim=-1)  # bsz, seqlen

    elif self.matching_objective == 'last_layer':
        activation_diff = output_finetuned.last_hidden_state - outputs.last_hidden_state
        loss = torch.norm(activation_diff, dim=-1)  # bsz, seqlen

    else:
        # FIX: was `assert False, ...` — assertions are stripped under `python -O`,
        # which would silently fall through with `loss` unbound. Raise explicitly.
        raise ValueError(f"invalid matching_objective: {self.matching_objective!r}")

    return loss.sum(dim=-1).mean()
+ """ + if is_torch_tpu_available(): + return xm.is_master_ordinal(local=True) + else: + return self.args.local_rank in [-1, 0] + + def is_world_master(self) -> bool: + """ + Whether or not this process is the global main process (when training in a distributed fashion on + several machines, this is only going to be :obj:`True` for one process). + + .. warning:: + + This method is deprecated, use :meth:`~transformers.Trainer.is_world_process_zero` instead. + """ + warnings.warn("This method is deprecated, use `Trainer.is_world_process_zero()` instead.", FutureWarning) + return self.is_world_process_zero() + + def is_world_process_zero(self) -> bool: + """ + Whether or not this process is the global main process (when training in a distributed fashion on + several machines, this is only going to be :obj:`True` for one process). + """ + if is_torch_tpu_available(): + return xm.is_master_ordinal(local=False) + else: + return self.args.local_rank == -1 or torch.distributed.get_rank() == 0 + + def save_model(self, output_dir: Optional[str] = None): + """ + Will save the model, so you can reload it using :obj:`from_pretrained()`. + + Will only save from the world_master process (unless in TPUs). + """ + + if is_torch_tpu_available(): + self._save_tpu(output_dir) + elif self.is_world_process_zero(): + self._save(output_dir) + + def _save_tpu(self, output_dir: Optional[str] = None): + output_dir = output_dir if output_dir is not None else self.args.output_dir + logger.info("Saving model checkpoint to %s", output_dir) + + if xm.is_master_ordinal(): + os.makedirs(output_dir, exist_ok=True) + torch.save(self.args, os.path.join(output_dir, "training_args.bin")) + json.dump( + self.log_history, open(os.path.join(output_dir, "log_history.json"), "w"), indent=2, ensure_ascii=False + ) + + # Save a trained model and configuration using `save_pretrained()`. 
def _save(self, output_dir: Optional[str] = None):
    """
    Persist the model, tokenizer, training args and log history to ``output_dir``.

    Args:
        output_dir (:obj:`str`, `optional`):
            Target directory; defaults to ``self.args.output_dir``. Created if missing.

    Raises:
        ValueError: if ``self.model`` is not a :class:`~transformers.PreTrainedModel`.
    """
    output_dir = output_dir if output_dir is not None else self.args.output_dir
    os.makedirs(output_dir, exist_ok=True)
    logger.info("Saving model checkpoint to %s", output_dir)
    # Save a trained model and configuration using `save_pretrained()`.
    # They can then be reloaded using `from_pretrained()`
    if not isinstance(self.model, PreTrainedModel):
        raise ValueError("Trainer.model appears to not be a PreTrainedModel")
    self.model.save_pretrained(output_dir)
    if self.tokenizer is not None:
        self.tokenizer.save_pretrained(output_dir)

    # Good practice: save your training arguments together with the trained model
    torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
    # FIX: the original `json.dump(..., open(...))` never closed the file handle;
    # a context manager guarantees the file is closed even if json.dump raises.
    with open(os.path.join(output_dir, "log_history.json"), "w") as f:
        json.dump(self.log_history, f, indent=2, ensure_ascii=False)
ordering_and_checkpoint_path.append((os.path.getmtime(path), path)) + else: + regex_match = re.match(f".*{checkpoint_prefix}-([0-9]+)", path) + if regex_match and regex_match.groups(): + ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path)) + + checkpoints_sorted = sorted(ordering_and_checkpoint_path) + checkpoints_sorted = [checkpoint[1] for checkpoint in checkpoints_sorted] + return checkpoints_sorted + + def _rotate_checkpoints(self, use_mtime=False) -> None: + if self.args.save_total_limit is None or self.args.save_total_limit <= 0: + return + + # Check if we should delete older checkpoint(s) + checkpoints_sorted = self._sorted_checkpoints(use_mtime=use_mtime) + if len(checkpoints_sorted) <= self.args.save_total_limit: + return + + number_of_checkpoints_to_delete = max(0, len(checkpoints_sorted) - self.args.save_total_limit) + checkpoints_to_be_deleted = checkpoints_sorted[:number_of_checkpoints_to_delete] + for checkpoint in checkpoints_to_be_deleted: + logger.info("Deleting older checkpoint [{}] due to args.save_total_limit".format(checkpoint)) + shutil.rmtree(checkpoint) + + def evaluate(self, eval_dataset: Optional[Dataset] = None) -> Dict[str, float]: + """ + Run evaluation and returns metrics. + + The calling script will be responsible for providing a method to compute metrics, as they are + task-dependent (pass it to the init :obj:`compute_metrics` argument). + + You can also subclass and override this method to inject custom behavior. + + Args: + eval_dataset (:obj:`Dataset`, `optional`): + Pass a dataset if you wish to override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`, + columns not accepted by the ``model.forward()`` method are automatically removed. + + Returns: + A dictionary containing the evaluation loss and the potential metrics computed from the predictions. 
+ """ + eval_dataloader = self.get_eval_dataloader(eval_dataset) + + output = self.prediction_loop(eval_dataloader, description="Evaluation") + + self.log(output.metrics) + + if self.args.tpu_metrics_debug or self.args.debug: + # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.) + xm.master_print(met.metrics_report()) + + return output.metrics + + + + def predict(self, test_dataset: Dataset) -> PredictionOutput: + """ + Run prediction and returns predictions and potential metrics. + + Depending on the dataset and your use case, your test dataset may contain labels. + In that case, this method will also return metrics, like in :obj:`evaluate()`. + + Args: + test_dataset (:obj:`Dataset`): + Dataset to run the predictions on. If it is an :obj:`datasets.Dataset`, columns not accepted by the + ``model.forward()`` method are automatically removed. + + Returns: + `NamedTuple`: + predictions (:obj:`np.ndarray`): + The predictions on :obj:`test_dataset`. + label_ids (:obj:`np.ndarray`, `optional`): + The labels (if the dataset contained some). + metrics (:obj:`Dict[str, float]`, `optional`): + The potential dictionary of metrics (if the dataset contained labels). + """ + test_dataloader = self.get_test_dataloader(test_dataset) + + return self.prediction_loop(test_dataloader, description="Prediction") + + def prediction_loop( + self, dataloader: DataLoader, description: str, prediction_loss_only: Optional[bool] = None + ) -> PredictionOutput: + """ + Prediction/evaluation loop, shared by :obj:`Trainer.evaluate()` and :obj:`Trainer.predict()`. + + Works both with or without labels. 
+ """ + if hasattr(self, "_prediction_loop"): + warnings.warn( + "The `_prediction_loop` method is deprecated and won't be called in a future version, define `prediction_loop` in your subclass.", + FutureWarning, + ) + return self._prediction_loop(dataloader, description, prediction_loss_only=prediction_loss_only) + + prediction_loss_only = ( + prediction_loss_only if prediction_loss_only is not None else self.args.prediction_loss_only + ) + + assert not getattr( + self.model.config, "output_attentions", False + ), "The prediction loop does not work with `output_attentions=True`." + assert not getattr( + self.model.config, "output_hidden_states", False + ), "The prediction loop does not work with `output_hidden_states=True`." + + model = self.model + # multi-gpu eval + if self.args.n_gpu > 1: + model = torch.nn.DataParallel(model) + else: + model = self.model + # Note: in torch.distributed mode, there's no point in wrapping the model + # inside a DistributedDataParallel as we'll be under `no_grad` anyways. 
+ + batch_size = dataloader.batch_size + logger.info("***** Running %s *****", description) + logger.info(" Num examples = %d", self.num_examples(dataloader)) + logger.info(" Batch size = %d", batch_size) + eval_losses: List[float] = [] + preds: torch.Tensor = None + label_ids: torch.Tensor = None + entropy_losses: List[float] = [] + model.eval() + if self.gpt2 is not None: + self.gpt2.eval() + + print(model.training) + print(self.gpt2.training) + + if is_torch_tpu_available(): + dataloader = pl.ParallelLoader(dataloader, [self.args.device]).per_device_loader(self.args.device) + + if self.args.past_index >= 0: + self._past = None + + disable_tqdm = not self.is_local_process_zero() or self.args.disable_tqdm + for inputs in tqdm(dataloader, desc=description, disable=disable_tqdm): + loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only) + batch_size = inputs[list(inputs.keys())[0]].shape[0] + if loss is not None: + eval_losses.extend([loss] * batch_size) + if logits is not None: + preds = logits if preds is None else nested_concat(preds, logits, dim=0) + temp_logits = [torch.log_softmax(x) for x in logits] + entropy_losses.extend([(x.exp() * x).sum() for x in temp_logits]) + if labels is not None: + label_ids = labels if label_ids is None else nested_concat(label_ids, labels, dim=0) + + if self.args.past_index and hasattr(self, "_past"): + # Clean the state at the end of the evaluation loop + delattr(self, "_past") + + + + if self.compute_metrics is not None and preds is not None and label_ids is not None: + metrics = self.compute_metrics(EvalPrediction(predictions=preds, label_ids=label_ids)) + else: + metrics = {} + + # Prefix all keys with eval_ + for key in list(metrics.keys()): + if not key.startswith("eval_"): + metrics[f"eval_{key}"] = metrics.pop(key) + if len(entropy_losses) > 0: + metrics['entropy'] = np.mean(entropy_losses) + print('entropy', metrics['entropy'] ) + + return PredictionOutput(predictions=preds, label_ids=label_ids, 
metrics=metrics) + + def prediction_step( + self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]], prediction_loss_only: bool + ) -> Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]: + """ + Perform an evaluation step on :obj:`model` using obj:`inputs`. + + Subclass and override to inject custom behavior. + + Args: + model (:obj:`nn.Module`): + The model to evaluate. + inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`): + The inputs and targets of the model. + + The dictionary will be unpacked before being fed to the model. Most models expect the targets under the + argument :obj:`labels`. Check your model's documentation for all accepted arguments. + prediction_loss_only (:obj:`bool`): + Whether or not to return the loss only. + + Return: + Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]: + A tuple with the loss, logits and labels (each being optional). + """ + has_labels = all(inputs.get(k) is not None for k in self.args.label_names) + inputs = self._prepare_inputs(inputs) + + # At eval time, set the weights to 1/bsz. and see the results.. + + # if 'weights' in inputs: + # weights = inputs['weights'] + # bsz = weights.view(-1).shape[0] + # weights = (torch.ones(weights.shape)/bsz).to(weights.device) + # inputs['weights'] = weights + + with torch.no_grad(): + # outputs = model.forward_weighted(**inputs) + outputs = model(**inputs, gpt2_model=self.gpt2) + if has_labels: + # The .mean() is to reduce in case of distributed training + loss = outputs[0].mean().item() + logits = outputs[1:] + else: + loss = None + # Slicing so we get a tuple even if `outputs` is a `ModelOutput`. 
def floating_point_ops(self, inputs: Dict[str, Union[torch.Tensor, Any]]):
    """
    For models that inherit from :class:`~transformers.PretrainedModel`, uses
    that method to compute the number of floating point operations for every backward + forward pass. If using
    another model, either implement such a method in the model or subclass and override this method.

    Args:
        inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
            The inputs and targets of the model.

    Returns:
        :obj:`int`: The number of floating-point operations.
    """
    # Unwrap (Distributed)DataParallel so we query the underlying module.
    model = self.model
    if isinstance(model, (torch.nn.DataParallel, torch.nn.parallel.DistributedDataParallel)):
        model = model.module

    # Delegate when the model can count its own FLOs; otherwise report zero.
    if hasattr(model, "floating_point_ops"):
        return model.floating_point_ops(inputs)
    return 0
0000000000000000000000000000000000000000..9dfd95eda19d4c852b1c9a1865919f6b6f140482 --- /dev/null +++ b/dalle/utils/config.py @@ -0,0 +1,209 @@ +# ------------------------------------------------------------------------------------ +# Minimal DALL-E +# Copyright (c) 2021 KakaoBrain. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 [see LICENSE for details] +# ------------------------------------------------------------------------------------ + +from typing import Optional, List +from dataclasses import dataclass, field +from omegaconf import OmegaConf + + +@dataclass +class DataConfig: + dataset: Optional[str] = None + tokenizer_type: str = 'CharBPE' + context_length: int = 64 + image_resolution: int = 256 + transforms: str = 'dalle-vqvae' + bpe_pdrop: Optional[float] = None + + +@dataclass +class Stage1Hparams: + double_z: bool = False + z_channels: int = 256 + resolution: int = 256 + in_channels: int = 3 + out_ch: int = 3 + ch: int = 128 + ch_mult: List[int] = field(default_factory=lambda: [1, 1, 2, 2, 4]) + num_res_blocks: int = 2 + attn_resolutions: List[int] = field(default_factory=lambda: [16]) + pdrop: float = 0.0 + + +@dataclass +class Stage2Hparams: + embed_dim: int = 1536 + n_layers: int = 42 + n_heads: int = 24 + n_dense_layers: int = 42 + ctx_len_img: int = 256 + ctx_len_txt: int = 64 + embd_pdrop: float = 0.0 + resid_pdrop: float = 0.0 + attn_pdrop: float = 0.0 + mlp_bias: bool = True + attn_bias: bool = True + gelu_use_approx: bool = False + use_head_txt: bool = True + n_classes: Optional[int] = None + + +@dataclass +class Stage1Config: + type: str = 'vqgan' + embed_dim: int = 256 + n_embed: int = 16384 + hparams: Stage1Hparams = Stage1Hparams() + + +@dataclass +class Stage2Config: + type: str = 'transformer1d' + vocab_size_txt: int = 16384 + vocab_size_img: int = 16384 + use_cls_cond: Optional[bool] = None + hparams: Stage2Hparams = Stage2Hparams() + + +@dataclass +class WarmupConfig: + epoch: int = 1 + multiplier: int = 1 + 
@dataclass
class OptConfig:
    """Optimizer and LR-schedule hyperparameters (consumed as an OmegaConf structured config)."""
    # --- optimizer ---
    opt_type: str = 'adamW'
    learning_rate: float = 5e-5
    weight_decay: float = 1e-4
    # Adam betas; default_factory avoids sharing one list across instances.
    betas: List[float] = field(default_factory=lambda: [0.9, 0.99])
    grad_clip_norm: float = 1.0

    # --- LR schedule ---
    sched_type: str = 'cosine'
    # 0 presumably means "derive from epochs/dataset length" — TODO confirm against the trainer.
    max_steps: int = 0
    min_lr: float = 1e-6
def get_base_config(mode):
    """
    Return the OmegaConf structured base config for the given training mode.

    Args:
        mode (str): one of 'default', 'finetuning', 'prefixtuning',
            'prompt_tuning' or 'story'.

    Returns:
        The OmegaConf structured config built from the matching dataclass.

    Raises:
        ValueError: if ``mode`` is not a supported mode.
    """
    # A dispatch table keeps the supported-mode list in one place and feeds
    # the error message below.
    configs = {
        'default': DefaultConfig,
        'finetuning': FineTuningConfig,
        'prefixtuning': PrefixTuningConfig,
        'prompt_tuning': PromptTuningConfig,
        'story': StoryConfig,
    }
    if mode not in configs:
        # FIX: was a bare `raise ValueError` — include the offending value and
        # the valid options so misconfiguration is debuggable.
        raise ValueError(f"unknown config mode: {mode!r}; expected one of {sorted(configs)}")
    return OmegaConf.structured(configs[mode])
def get_positional_encoding(inputs: torch.LongTensor, mode: str = '1d') -> torch.LongTensor:
    """
    Build integer position indices matching the layout of ``inputs``.

    '1d': for a (B, N) batch, returns a (B, N) tensor whose every row is 0..N-1.
    '2d': for a (B, H, W) batch, returns a pair ``(row_idx, col_idx)`` of
    tensors, each of shape (B, H, W).

    Raises:
        ValueError: if ``mode`` is neither '1d' nor '2d'.
    """
    dev = inputs.device
    if mode == '1d':
        batch, seq_len = inputs.shape
        return torch.arange(seq_len, device=dev).repeat((batch, 1))
    if mode == '2d':
        batch, height, width = inputs.shape
        rows = torch.arange(height, device=dev).repeat(batch, width, 1).transpose(1, 2)
        cols = torch.arange(width, device=dev).repeat(batch, height, 1)
        return (rows, cols)
    raise ValueError('%s positional encoding invalid' % mode)
Optional[torch.tensor] = None, + pos_prompt: Optional[torch.Tensor] = None) -> torch.LongTensor: + + code = None + past = None + + pbar = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len) + pos_enc_tokens = get_positional_encoding(tokens, mode='1d') + + for cnt, h in enumerate(pbar): + if code is None: + code_ = None + pos_enc_code_ = None + else: + code_ = code.clone().detach() + pos_enc_code_ = get_positional_encoding(code_, mode='1d') + code_ = code_[:, cnt-1].unsqueeze(-1) + pos_enc_code_ = pos_enc_code_[:, cnt-1].unsqueeze(-1) + + logits, present = model.sampling(images=code_, + texts=tokens, + pos_images=pos_enc_code_, + pos_texts=pos_enc_tokens, + use_fp16=use_fp16, + past=past, + prompt=prompt, + pos_prompt=pos_prompt) + + logits = logits.to(dtype=torch.float32) + logits = logits / softmax_temperature + + # print(len(present), present[0].shape) + present = torch.stack(present).clone().detach() + if past is None: + past = [present] + else: + past.append(present) + + logits = cutoff_topk_logits(logits, top_k) + probs = F.softmax(logits, dim=-1) + probs = cutoff_topp_probs(probs, top_p) + # print(probs[0]) + + idx = torch.multinomial(probs, num_samples=1).clone().detach() + # print(idx) + code = idx if code is None else torch.cat([code, idx], axis=1) + + del past + return code + + +@torch.no_grad() +def sampling_prefix(model: torch.nn.Module, + tokens: torch.LongTensor, + past: torch.FloatTensor, + top_k: Optional[float] = None, + top_p: Optional[float] = None, + softmax_temperature: float = 1.0, + is_tqdm: bool = True, + use_fp16: bool = True, + max_seq_len: int = 256, + labels = None) -> torch.LongTensor: + code = None + + pbar = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len) + pos_enc_tokens = get_positional_encoding(tokens, mode='1d') + + # print("Entering sampling_prefix; ", past.shape) + if past is not None: + past = [past] + + for cnt, h in enumerate(pbar): + if code is None: + code_ = None + 
@torch.no_grad()
def sampling_prefix(model: torch.nn.Module,
                    tokens: torch.LongTensor,
                    past: torch.FloatTensor,
                    top_k: Optional[int] = None,
                    top_p: Optional[float] = None,
                    softmax_temperature: float = 1.0,
                    is_tqdm: bool = True,
                    use_fp16: bool = True,
                    max_seq_len: int = 256,
                    labels=None) -> torch.LongTensor:
    """Sample image tokens starting from a precomputed prefix key/value cache.

    Args:
        model: model exposing a `.sampling(...)` step returning (logits, present).
        tokens: (B, T) text token ids used as conditioning.
        past: precomputed prefix cache, or None to build the cache from scratch.
        top_k / top_p: optional top-k / nucleus filtering before sampling.
        softmax_temperature: divides logits before the softmax.
        is_tqdm: show a progress bar.
        use_fp16: forwarded to the model's sampling step.
        max_seq_len: number of image tokens to generate.
        labels: optional per-step reference tokens; when given, each step's
            label is printed as a debugging aid.

    Returns:
        (B, max_seq_len) sampled image token ids.

    Note:
        The unconditional per-step `print(torch.topk(probs, 5))` debug output
        from the original was removed; only the labels-guarded print remains.
    """
    code = None

    pbar = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len)
    pos_enc_tokens = get_positional_encoding(tokens, mode='1d')

    # Wrap the prefix cache in a list so per-step presents can be appended.
    if past is not None:
        past = [past]

    for cnt, _ in enumerate(pbar):
        if code is None:
            code_ = None
            pos_enc_code_ = None
        else:
            code_ = code.clone().detach()
            pos_enc_code_ = get_positional_encoding(code_, mode='1d')
            # Feed only the newest token; earlier steps live in `past`.
            code_ = code_[:, cnt - 1].unsqueeze(-1)
            pos_enc_code_ = pos_enc_code_[:, cnt - 1].unsqueeze(-1)

        logits, present = model.sampling(images=code_,
                                         texts=tokens,
                                         pos_images=pos_enc_code_,
                                         pos_texts=pos_enc_tokens,
                                         use_fp16=use_fp16,
                                         past=past)
        logits = logits.to(dtype=torch.float32)
        logits = logits / softmax_temperature

        present = torch.stack(present).clone().detach()
        if past is None:
            past = [present]
        else:
            past.append(present)

        logits = cutoff_topk_logits(logits, top_k)
        probs = F.softmax(logits, dim=-1)
        probs = cutoff_topp_probs(probs, top_p)

        if labels is not None:
            # Debug aid: show the reference token for this step.
            print(labels[cnt])
        idx = torch.multinomial(probs, num_samples=1).clone().detach()
        code = idx if code is None else torch.cat([code, idx], axis=1)

    del past
    return code
@torch.no_grad()
def sampling_prefix_new(model: torch.nn.Module,
                        tokens: torch.LongTensor,
                        past: torch.FloatTensor,
                        top_k: Optional[float] = None,
                        top_p: Optional[float] = None,
                        softmax_temperature: float = 1.0,
                        is_tqdm: bool = True,
                        use_fp16: bool = True,
                        max_seq_len: int = 256) -> torch.LongTensor:
    """Experimental variant of sampling_prefix.

    NOTE(review): only the first iteration (step 0) calls the model and
    samples a token; every later iteration is a deliberate no-op, so at most
    one token is ever produced. This looks unfinished — confirm intent
    before relying on it.
    """
    code = None

    progress = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len)
    pos_enc_tokens = get_positional_encoding(tokens, mode='1d')

    # Box the prefix cache so later presents could be appended.
    if past is not None:
        past = [past]

    for step, _ in enumerate(progress):
        if code is None:
            code_, pos_enc_code_ = None, None
        else:
            code_ = code.clone().detach()
            pos_enc_code_ = get_positional_encoding(code_, mode='1d')

        if step != 0:
            # No-op for all subsequent steps (see NOTE in docstring).
            continue

        logits, present = model.sampling(images=code_,
                                         texts=tokens,
                                         pos_images=pos_enc_code_,
                                         pos_texts=pos_enc_tokens,
                                         use_fp16=use_fp16,
                                         past=past)
        logits = logits.to(dtype=torch.float32)
        logits = logits / softmax_temperature

        present = torch.stack(present).clone().detach()
        if past is None:
            past = [present]

        filtered = cutoff_topk_logits(logits, top_k)
        probs = cutoff_topp_probs(F.softmax(filtered, dim=-1), top_p)
        idx = torch.multinomial(probs, num_samples=1).clone().detach()
        code = idx if code is None else torch.cat([code, idx], axis=1)

    del past
    return code
@torch.no_grad()
def sampling_conditional(model: torch.nn.Module,
                         cross_attention_idxs,
                         cross_attention_layers,
                         tokens: torch.LongTensor,
                         src_codes: torch.FloatTensor,
                         top_k: Optional[int] = None,
                         top_p: Optional[float] = None,
                         softmax_temperature: float = 1.0,
                         is_tqdm: bool = True,
                         use_fp16: bool = True,
                         max_seq_len: int = 256,
                         prompt: Optional[torch.Tensor] = None,
                         pos_prompt: Optional[torch.Tensor] = None) -> torch.LongTensor:
    """Sample image tokens conditioned on a source image via cross-attention.

    Args:
        model: model exposing `.sampling_with_context(...)` plus the
            `tok_emb_img` / `pos_emb_img` embedding tables used to embed
            the source-image codes.
        cross_attention_idxs / cross_attention_layers: which layers attend
            to the source image (semantics defined by the model).
        tokens: (B, T) text token ids.
        src_codes: source-image token ids to condition on.
        top_k / top_p: optional top-k / nucleus filtering before sampling.
        softmax_temperature: divides logits before the softmax.
        prompt / pos_prompt: optional prompt embeddings forwarded to the
            model (annotation fixed: torch.Tensor, not torch.tensor).

    Returns:
        (B, max_seq_len) sampled image token ids.
    """
    code = None
    past = None

    pbar = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len)
    pos_enc_tokens = get_positional_encoding(tokens, mode='1d')

    # Embed the source-image codes once; they are constant across steps.
    src_pos_tokens = get_positional_encoding(src_codes, mode='1d')
    src_tokens = model.tok_emb_img(src_codes)
    src_tokens = src_tokens + model.pos_emb_img(src_pos_tokens)

    for cnt, _ in enumerate(pbar):
        if code is None:
            code_ = None
            pos_enc_code_ = None
        else:
            code_ = code.clone().detach()
            pos_enc_code_ = get_positional_encoding(code_, mode='1d')
            # Feed only the newest token; earlier steps live in `past`.
            code_ = code_[:, cnt - 1].unsqueeze(-1)
            pos_enc_code_ = pos_enc_code_[:, cnt - 1].unsqueeze(-1)

        logits, present = model.sampling_with_context(images=code_,
                                                      cross_attention_idxs=cross_attention_idxs,
                                                      cross_attention_layers=cross_attention_layers,
                                                      texts=tokens,
                                                      pos_images=pos_enc_code_,
                                                      pos_texts=pos_enc_tokens,
                                                      source_image=src_tokens,
                                                      use_fp16=use_fp16,
                                                      past=past,
                                                      prompt=prompt,
                                                      pos_prompt=pos_prompt)
        logits = logits.to(dtype=torch.float32)
        logits = logits / softmax_temperature

        present = torch.stack(present).clone().detach()
        if past is None:
            past = [present]
        else:
            past.append(present)

        logits = cutoff_topk_logits(logits, top_k)
        probs = F.softmax(logits, dim=-1)
        probs = cutoff_topp_probs(probs, top_p)

        idx = torch.multinomial(probs, num_samples=1).clone().detach()
        code = idx if code is None else torch.cat([code, idx], axis=1)

    del past
    return code


@torch.no_grad()
def sampling_igpt(model: torch.nn.Module,
                  sos: torch.FloatTensor,
                  top_k: Optional[int] = None,
                  top_p: Optional[float] = None,
                  softmax_temperature: float = 1.0,
                  is_tqdm: bool = True,
                  use_fp16: bool = True,
                  max_seq_len: int = 256) -> torch.LongTensor:
    """Unconditional (image-GPT style) sampling from a start-of-sequence embedding.

    Args:
        model: model exposing a `.sampling(sos=..., codes=..., ...)` step
            returning (logits, present).
        sos: start-of-sequence embedding that seeds generation.
        top_k / top_p: optional top-k / nucleus filtering before sampling.
        softmax_temperature: divides logits before the softmax.

    Returns:
        (B, max_seq_len) sampled image token ids.
    """
    code = None
    past = None
    pbar = tqdm(range(max_seq_len), total=max_seq_len) if is_tqdm else range(max_seq_len)

    for cnt, _ in enumerate(pbar):
        if code is None:
            code_ = None
            pos_enc_code_ = None
        else:
            code_ = code.clone().detach()
            pos_enc_code_ = get_positional_encoding(code_, mode='1d')
            # Feed only the newest token; earlier steps live in `past`.
            code_ = code_[:, cnt - 1].unsqueeze(-1)
            pos_enc_code_ = pos_enc_code_[:, cnt - 1].unsqueeze(-1)

        logits, present = model.sampling(sos=sos,
                                         codes=code_,
                                         pos_codes=pos_enc_code_,
                                         use_fp16=use_fp16,
                                         past=past)
        logits = logits.to(dtype=torch.float32)
        logits = logits / softmax_temperature

        present = torch.stack(present).clone().detach()
        if past is None:
            past = [present]
        else:
            past.append(present)

        logits = cutoff_topk_logits(logits, top_k)
        probs = F.softmax(logits, dim=-1)
        probs = cutoff_topp_probs(probs, top_p)

        idx = torch.multinomial(probs, num_samples=1).clone().detach()
        code = idx if code is None else torch.cat([code, idx], axis=1)

    del past
    return code
def set_seed(seed: int):
    """Seed the Python, NumPy and torch (CPU + all CUDA) RNGs for reproducibility."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)


@torch.no_grad()
def clip_score(prompt: str,
               images: np.ndarray,
               model_clip: torch.nn.Module,
               preprocess_clip,
               device: str) -> np.ndarray:
    """Rank generated images by CLIP similarity to `prompt`, best match first.

    Returns the argsort (descending cosine similarity) as a NumPy index array.
    """
    prepped = [preprocess_clip(Image.fromarray((im * 255).astype(np.uint8))) for im in images]
    batch = torch.stack(prepped, dim=0).to(device=device)

    text_tokens = clip.tokenize(prompt).to(device=device)
    text_tokens = torch.repeat_interleave(text_tokens, batch.shape[0], dim=0)

    image_features = model_clip.encode_image(batch)
    text_features = model_clip.encode_text(text_tokens)

    sims = F.cosine_similarity(image_features, text_features).squeeze()
    return torch.argsort(sims, descending=True).cpu().numpy()
def download(url: str, root: str) -> str:
    """Download `url` into `root`, verify its md5, and extract the tarball.

    The URL is expected to end in .../<md5>/<name>.tar.gz; the path of the
    extracted directory is returned.

    Raises:
        RuntimeError: if the downloaded file's md5 does not match the URL's
            embedded checksum.
    """
    # The file header only does `import urllib`, which does not expose the
    # `request` submodule — import it explicitly here.
    import urllib.request

    os.makedirs(root, exist_ok=True)
    filename = os.path.basename(url)
    pathname = filename[:-len('.tar.gz')]

    expected_md5 = url.split("/")[-2]
    download_target = os.path.join(root, filename)
    result_path = os.path.join(root, pathname)

    # Reuse when the archive exists AND the result exists as a directory
    # (extraction produces a directory, not a file).
    if os.path.isfile(download_target) and (os.path.exists(result_path) and not os.path.isfile(result_path)):
        return result_path

    with urllib.request.urlopen(url) as source, open(download_target, 'wb') as output:
        with tqdm(total=int(source.info().get('Content-Length')), ncols=80, unit='iB', unit_scale=True,
                  unit_divisor=1024) as loop:
            while True:
                buffer = source.read(8192)
                if not buffer:
                    break

                output.write(buffer)
                loop.update(len(buffer))

    # Context manager closes the handle (original leaked a bare open()).
    with open(download_target, 'rb') as fh:
        digest = hashlib.md5(fh.read()).hexdigest()
    if digest != expected_md5:
        # (fixed "does not not match" typo in the original message)
        raise RuntimeError(f'Model has been downloaded but the md5 checksum does not match')

    # NOTE(review): extracting untrusted archives is vulnerable to path
    # traversal; consider tarfile's `filter="data"` (Python 3.12+).
    with tarfile.open(download_target, 'r:gz') as f:
        pbar = tqdm(f.getmembers(), total=len(f.getmembers()))
        for member in pbar:
            pbar.set_description(f'extracting: {member.name} (size:{member.size // (1024 * 1024)}MB)')
            f.extract(member=member, path=root)

    return result_path


def realpath_url_or_path(url_or_path: str, root: str = None) -> str:
    """Return a local path for `url_or_path`: download http(s) URLs, pass paths through."""
    # See note in download(): `import urllib` alone does not expose `parse`.
    import urllib.parse

    if urllib.parse.urlparse(url_or_path).scheme in ('http', 'https'):
        return download(url_or_path, root)
    return url_or_path


def images_to_numpy(tensor):
    """Convert a (C, H, W) tensor with values in [-1, 1] to an (H, W, C) uint8 array."""
    generated = tensor.data.cpu().numpy().transpose(1, 2, 0)
    # Clamp to [-1, 1] then rescale to [0, 255].
    generated = np.clip(generated, -1, 1)
    generated = (generated + 1) / 2 * 255
    return generated.astype('uint8')


def save_image(ground_truth, images, out_dir, batch_idx):
    """Save generated images as PNGs under `out_dir`.

    Each entry of `images` is either a single (H, W, C) array or a 4-D batch;
    batches are saved frame-by-frame. `ground_truth` is currently unused and
    kept only for interface compatibility with callers.
    """
    for i, im in enumerate(images):
        if len(im.shape) == 3:
            plt.imsave(os.path.join(out_dir, 'test_%s_%s.png' % (batch_idx, i)), im)
        else:
            bs = im.shape[0]
            for j in range(bs):
                plt.imsave(os.path.join(out_dir, 'test_%s_%s_%s.png' % (batch_idx, i, j)), im[j])
file mode 100644 index 0000000000000000000000000000000000000000..38679f5e8661d51409ef88619547faaea2b2cf55 Binary files /dev/null and b/demo/Fred.png differ diff --git a/demo/Harry.png b/demo/Harry.png new file mode 100644 index 0000000000000000000000000000000000000000..df502a7ce46aebb55aa3be87248ab6a78147681f Binary files /dev/null and b/demo/Harry.png differ diff --git a/demo/Loopy.png b/demo/Loopy.png new file mode 100644 index 0000000000000000000000000000000000000000..fed0c25405b4eab38dd19c4bdf2df5976a712da2 Binary files /dev/null and b/demo/Loopy.png differ diff --git a/demo/MrSlate.png b/demo/MrSlate.png new file mode 100644 index 0000000000000000000000000000000000000000..51cc2b4205d213d68427c5604038615b583af8a3 Binary files /dev/null and b/demo/MrSlate.png differ diff --git a/demo/Pebbles.png b/demo/Pebbles.png new file mode 100644 index 0000000000000000000000000000000000000000..755ebbc4bc7e9e17a21cfda23c1bf69a76be4d67 Binary files /dev/null and b/demo/Pebbles.png differ diff --git a/demo/Petty.png b/demo/Petty.png new file mode 100644 index 0000000000000000000000000000000000000000..2ea5323684375f1de5e4a5615d6894c2f6d04c92 Binary files /dev/null and b/demo/Petty.png differ diff --git a/demo/Poby.png b/demo/Poby.png new file mode 100644 index 0000000000000000000000000000000000000000..a54dc035b28bc37afc8c157adb4f9dc16ad05ddb Binary files /dev/null and b/demo/Poby.png differ diff --git a/demo/Pororo.png b/demo/Pororo.png new file mode 100644 index 0000000000000000000000000000000000000000..02505f97a53b8b1ae96a8cfe455d0cb5fd99a9bf Binary files /dev/null and b/demo/Pororo.png differ diff --git a/demo/Rody.png b/demo/Rody.png new file mode 100644 index 0000000000000000000000000000000000000000..cba5e3550759a1b85fc27b6b323ca360b69b9d2a Binary files /dev/null and b/demo/Rody.png differ diff --git a/demo/Tongtong.png b/demo/Tongtong.png new file mode 100644 index 0000000000000000000000000000000000000000..db6676a88380ec8fbbf7aa4195c24829e5680584 Binary files /dev/null 
# Hand-picked source frame for each Pororo character (absolute dataset paths).
pororo_source_frame_paths = {
    'Pororo': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_2/Pororo_ENGLISH1_2_ep6/12.png',
    'Loopy': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/26.png',
    'Crong': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/10.png',
    'Poby': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep9/34.png',
    'Eddy': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH1_1/Pororo_ENGLISH1_1_ep12/46.png',
    'Petty': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH2_1/Pororo_ENGLISH2_1_ep1/34.png',
    'Tongtong': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep7/8.png',
    'Rody': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep6/66.png',
    'Harry': '/playpen-ssd/adyasha/projects/StoryGAN/pororo_png/Pororo_ENGLISH3_1/Pororo_ENGLISH3_1_ep7/39.png',
}


# Flintstones characters are sampled from the annotation file instead of a
# fixed frame, so the paths are intentionally left blank.
flintstones_source_frame_paths = {
    "Wilma": '',
    "Fred": '',
    "Betty": '',
    "Barney": '',
    "Dino": '',
    "Pebbles": '',
    "Mr Slate": ''
}


def sample_image(im):
    """Crop one random square frame out of a strip of stacked frames.

    The strip length is the long side divided by the short side; one square
    of side `short` is cropped at a random offset along the long axis.
    NOTE(review): the crop box assumes frames are stacked vertically
    (taller-than-wide input) — confirm for landscape strips.
    """
    side = min(im.size[0], im.size[1])
    strip = max(im.size[0], im.size[1])
    n_frames = int(strip / side)
    # Same RNG call shape as the original so random draws match exactly.
    pick = np.random.randint(0, n_frames, 1)[0]
    return im.crop((0, pick * side, side, (pick + 1) * side))
def get_pororo_source_frames():
    """Save one sampled source frame per Pororo character as <name>.png."""
    for name, frame_path in pororo_source_frame_paths.items():
        img = sample_image(Image.open(frame_path).convert('RGB'))
        img.save(name + '.png')


def get_flintstones_source_frames():
    """Save one random frame per Flintstones character as <Name>.png.

    NOTE(review): only frames whose *first* annotated character matches are
    considered, and every key except 'Barney' is currently skipped — this
    looks like a temporary filter left in; confirm before reuse.
    """
    dir_path = '../../StoryGAN/flintstones'
    # Context manager closes the handle (original leaked a bare open()).
    with open('../../StoryGAN/flintstones/flintstones_annotations_v1-0.json', 'r') as fh:
        annotations = json.load(fh)

    for k in flintstones_source_frame_paths.keys():
        if k != "Barney":
            continue

        character_frames = []
        for sample in annotations:
            sample_characters = [c["entityLabel"].strip().lower() for c in sample["characters"]]
            if sample_characters[0] == k.lower():
                character_frames.append(sample["globalID"])

        globalID = random.choice(character_frames)
        arr = np.load(os.path.join(dir_path, 'video_frames_sampled', globalID + '.npy'))
        n_frames = arr.shape[0]
        im = Image.fromarray(arr[random.randrange(n_frames)])
        # "Mr Slate" -> "MrSlate.png" (spaces stripped from filenames).
        im.save(k.replace(' ', '') + '.png')


if __name__ == '__main__':
    # Guarded so importing this module no longer triggers the dataset I/O.
    get_flintstones_source_frames()
def get_captions_by_split():
    """Write per-split caption CSVs (image id, first description) for the Pororo set.

    Reads `descriptions.npy`, `following_cache4.npy` and
    `train_seen_unseen_ids.npy` from `img_folder` and writes
    descriptions_{train,val,test}.csv next to them.
    """
    video_len = 4
    descriptions_original = np.load(os.path.join(img_folder, 'descriptions.npy'), allow_pickle=True,
                                    encoding='latin1').item()
    followings = np.load(os.path.join(img_folder, 'following_cache4.npy'))

    train_ids, val_ids, test_ids = np.load(os.path.join(img_folder, 'train_seen_unseen_ids.npy'), allow_pickle=True)
    filenames = ['descriptions_train.csv', 'descriptions_val.csv', 'descriptions_test.csv']

    for ids, filename in zip([train_ids, val_ids, test_ids], filenames):
        im_ids = []
        for src_img_id in ids:
            # `followings` stores byte-string reprs; [2:-1] strips the b'...' wrapper.
            tgt_img_paths = [str(followings[src_img_id][i])[2:-1] for i in range(video_len)]
            tgt_img_ids = [str(tgt_img_path).replace(img_folder, '').replace('.png', '') for tgt_img_path in
                           tgt_img_paths]
            im_ids.extend(tgt_img_ids)

        # De-duplicate and write in a stable order.
        im_ids = sorted(set(im_ids))

        # newline='' is required by the csv module to avoid blank rows on Windows.
        with open(os.path.join(img_folder, filename), 'w', newline='') as csvfile:
            csvwriter = csv.writer(csvfile)
            for i in im_ids:
                csvwriter.writerow([i, descriptions_original[i][0]])


if __name__ == '__main__':
    # Guarded so importing this module does not rewrite the CSV files.
    get_captions_by_split()