<|file_name|>LSIdent.cpp<|end_file_name|><|fim▁begin|>#include "VCL.h" #include "Unit1.h" #include "GridMap.h" #include "defines.h" #include "matrix_utils.h" #include "bat_run.h" #include "LoadData.h" #include <cstdio> #include <ctime> #include <cstring> #include <cmath> // special LSM mode with "-1 distance" = no aggregation of spots, just statistics per unit bool LSM_minus1_mode = false; float top_percent, min_percent, max_dist, min_simil; String LSIRfn, LSIDfn, LSImask; int spot_cnt, nwc, ccnt; int **cm=0, **nwm, nwns_cnt; std::vector<struct spot> spots; std::vector<int> s_in_nw; std::vector<bool> nw_in_min; std::vector<float> Ebd; class GridMap LSI_maskmap; void get_candidate_cells() { float val_th; int x, y; val_th = 1.0f-top_percent; ccnt=0; for(y=0; y<yd; y++) { for(x=0; x<xd; x++) { if (sol[y][x]>=val_th) { cm[y][x]=0; ccnt++; } else cm[y][x]=-1; } } Form1->Memo1->Lines->Add("Potential cells count = "+IntToStr(ccnt)); } bool nb_in_s(int x, int y, int s) { int minx, miny, maxx, maxy, gx,gy; minx=max(0, x-1); miny=max(0, y-1); maxx=min(xd,x+1); maxy=min(yd,y+1); for(gy=miny; gy<=maxy; gy++) { for(gx=minx; gx<=maxx; gx++) if (cm[gy][gx]==s) return true; } return false; } bool add_to_spot(int s) { int minx, maxx, miny, maxy, x, y, loop; bool added; float val, *rowp; minx=max(0, spots[s].min_gx-1); miny=max(0, spots[s].min_gy-1); maxx=min(xd,spots[s].max_gx+1); maxy=min(yd,spots[s].max_gy+1); added=false; for(y=miny; y<=maxy; y++) { for(x=minx; x<=maxx; x++) { // if (cm[y][x]==0) if ((cm[y][x]!=s) && (cm[y][x]!=-1)) { // Form1->Memo1->Lines->Add("HERE"); if (nb_in_s(x,y,s)) { // Form1->Memo1->Lines->Add("YES"); cm[y][x]=s; spots[s].area++; spots[s].rank += sol[y][x]; spots[s].mean_gx += x; spots[s].mean_gy += y; spots[s].min_gx=min(spots[s].min_gx, x); spots[s].min_gy=min(spots[s].min_gy, y); spots[s].max_gx=max(spots[s].max_gx, x); spots[s].max_gy=max(spots[s].max_gy, y); if (sol[y][x]>(1.0f-min_percent)) spots[s].in_min_percent=true; // rowp=&vmat[y][x][0]; // COMPACT_VMAT Biodiv_Features_Occur_Container& rowp = vmat[y][x]; for(loop=0; loop<map_cnt; loop++) { //std::cerr << "rowp: " << rowp.size() << std::endl; val = rowp[loop]; if (val!=-1) spots[s].bdv[loop] += val; // else // spots[s].bdv[loop] = 0.0f; } added=true; } } } } return added; } void expand_spot(int x, int y) { bool added; int loop; spots[spot_cnt].bdv = 0; spots[spot_cnt].min_gx = x; spots[spot_cnt].min_gy = y; spots[spot_cnt].max_gx = x; spots[spot_cnt].max_gy = y; spots[spot_cnt].mean_gx = static_cast<float>(x); spots[spot_cnt].mean_gy = static_cast<float>(y); spots[spot_cnt].area = 1; spots[spot_cnt].rank = sol[y][x]; if (sol[y][x]>=(1.0f-min_percent)) spots[spot_cnt].in_min_percent=true; else spots[spot_cnt].in_min_percent=false; spots[spot_cnt].bdv = new float[map_cnt]; for(loop=0; loop<map_cnt; loop++) spots[spot_cnt].bdv[loop] =0.0f; cm[y][x]=spot_cnt; do { added = add_to_spot(spot_cnt); Application->ProcessMessages(); } while(added); #if 0 char txt[255]; sprintf(txt,"Spot %i A=%i Xmin=%i xmax=%i ymin=%i ymax=%i mean-x=%0.3f mean-y=%0.3f", spot_cnt, spots[spot_cnt].area, spots[spot_cnt].min_gx, spots[spot_cnt].max_gx, spots[spot_cnt].min_gy, spots[spot_cnt].max_gy, spots[spot_cnt].mean_gx, spots[spot_cnt].mean_gy); // if ((spot_cnt%10)==0) Form1->Memo1->Lines->Add(txt); #endif } void get_spots() { float val_th; int x, y, in_cnt; spot_cnt=1; const size_t DEF_MAX_SPOTS = 2048; spots.reserve(DEF_MAX_SPOTS); try { spots.resize(spot_cnt+1); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in 
landscape identification: "+String(ex.what())); } in_cnt =0; for(y=0; y<yd; y++) { for(x=0; x<xd; x++) { if (cm[y][x]==0) { Application->ProcessMessages(); expand_spot(x,y); if (spots[spot_cnt].in_min_percent) in_cnt++; // Form1->Memo1->Lines->Add("New spot, area = " // +IntToStr(spots[spot_cnt].area)); spot_cnt++; spots.resize(spots.size()+1); if ((spot_cnt%1000)==0) Form1->Memo1->Lines->Add("Spot count = "+IntToStr(spot_cnt-1)); } } } Form1->Memo1->Lines->Add("Spot count = "+IntToStr(spot_cnt-1)); Form1->Memo1->Lines->Add("Spots including best areas count = "+IntToStr(in_cnt)); } float calc_dist(int s1, int s2) { float dij, dx, dy, dm2; float minx1, minx2, maxx1, maxx2, miny1, miny2, maxy1, maxy2; int x1, x2, y1, y2; dm2 = max_dist*max_dist; if (dm2==0) return (max_dist+1.0f); // with zero dist, separate spots cannot be joined minx1=static_cast<float>(spots[s1].min_gx); maxx1=static_cast<float>(spots[s1].max_gx); miny1=static_cast<float>(spots[s1].min_gy); maxy1=static_cast<float>(spots[s1].max_gy); minx2=static_cast<float>(spots[s2].min_gx); maxx2=static_cast<float>(spots[s2].max_gx); miny2=static_cast<float>(spots[s2].min_gy); maxy2=static_cast<float>(spots[s2].max_gy); // Form1->Memo1->Lines->Add("corners"); // sprintf(txt, "minx1=%f maxx1=%f miny1=%f maxy1=%f", minx1, maxx1, miny1, maxy1); // Form1->Memo1->Lines->Add(txt); // sprintf(txt, "minx2=%f maxx2=%f miny2=%f maxy2=%f", minx2, maxx2, miny2, maxy2); // Form1->Memo1->Lines->Add(txt); // Form1->Memo1->Lines->Add("yxxxvc"); if (minx1>(maxx2+max_dist)) return (max_dist+1.0f); if (minx2>(maxx1+max_dist)) return (max_dist+1.0f); if (miny1>(maxy2+max_dist)) return (max_dist+1.0f); if (miny2>(maxy1+max_dist)) return (max_dist+1.0f); // Form1->Memo1->Lines->Add("here"); for(y1=static_cast<int>(miny1); y1<=maxy1;y1++) for(x1=static_cast<int>(minx1); x1<=maxx1;x1++) { // Form1->Memo1->Lines->Add("y1loop"+IntToStr(y1)); if (cm[y1][x1]==s1) { for(y2=static_cast<int>(miny2); y2<=maxy2;y2++) for(x2=static_cast<int>(minx2); x2<=maxx2;x2++) // xxx stuck in this loop. 
{ if (cm[y2][x2]==s2) { dij = z_pow(x1-x2,2)+z_pow(y1-y2,2); if (dij<=dm2) return 0.0f; } } } } return (max_dist+1.0f); } float calc_sim(int s1, int s2) { int loop, s, lvl1, lvl2; float diff, val; diff=0.0f; for(loop=0; loop<map_cnt; loop++) { val = spots[s1].bdv[loop]; if (val<0.01f*Ebd[loop]) lvl1=0; else if (val<0.1f*Ebd[loop]) lvl1=1; else if (val<Ebd[loop]) lvl1=2; else if (val<10*Ebd[loop]) lvl1=3; else if (val<100*Ebd[loop]) lvl1=4; else lvl1=5; val = spots[s2].bdv[loop]; if (val<0.01f*Ebd[loop]) lvl2=0; else if (val<0.1f*Ebd[loop]) lvl2=1; else if (val<Ebd[loop]) lvl2=2; else if (val<10*Ebd[loop]) lvl2=3; else if (val<100*Ebd[loop]) lvl2=4; else lvl2=5; diff += fabs(lvl1-lvl2); } diff/=map_cnt; return diff; } void add_nb_to_nw(int spot) { int loop; float dist, diff; for(loop=1; loop<spot_cnt; loop++) { // Form1->Memo1->Lines->Add("spot = "+IntToStr(loop)); if (spots[loop].nwn==-1) { // Form1->Memo1->Lines->Add("dist next"); dist = calc_dist(spot, loop); // Form1->Memo1->Lines->Add("sim next"); diff = calc_sim(spot, loop); if ((dist<=max_dist) && (diff<min_simil)) { spots[loop].nwn = nwc; nw_in_min[nwc] = (nw_in_min[nwc] || spots[loop].in_min_percent); s_in_nw[nwns_cnt] = loop; nwns_cnt++; // Form1->Memo1->Lines->Add("joined"); } } } } void expand_network(int s) { int pos; spots[s].nwn = nwc; nwns_cnt = 1; s_in_nw[0] = s; nw_in_min[nwc] = (nw_in_min[nwc] || spots[s].in_min_percent); pos=0; while(pos<nwns_cnt) { // Form1->Memo1->Lines->Add("pos="+IntToStr(pos)); add_nb_to_nw(s_in_nw[pos]); pos++; } } void Fix_bd_values() { int loop, s; for(loop=0; loop<map_cnt; loop++) Ebd[loop]=0.0f; for(s=1;s<spot_cnt;s++) { for(loop=0; loop<map_cnt; loop++) { Ebd[loop] += spots[s].bdv[loop]; spots[s].bdv[loop] /= spots[s].area; } } for(loop=0; loop<map_cnt; loop++) Ebd[loop] /= ccnt; // Ebd[loop]=average over cells in cut } void get_networks() { int loop, x, y, cilcnt; /* const size_t DEF_MAX_NW = 2048; nw_in_min.reserve(DEF_MAX_NW); nw_in_min.assign(2, false); */ // get_networks always called after get_spots() -> spot_cnt known try { s_in_nw.resize(spot_cnt, 0); Ebd.resize(map_cnt, 0); nw_in_min.resize(spot_cnt+1, false); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } nwc = 1; // xxx for(loop=0; loop<spot_cnt; loop++) { spots[loop].nwn=-1; } // uses Ebd[] Fix_bd_values(); for(loop=1; loop<spot_cnt; loop++) { if ((spots[loop].nwn==-1) && (spots[loop].in_min_percent)) { // Form1->Memo1->Lines->Add(IntToStr(loop)); expand_network(loop); nwc++; nw_in_min.push_back(false); } } for(y=0; y<yd; y++) { for(x=0; x<xd; x++) { if (false && cm[y][x] >0 && spots[cm[y][x]].nwn >= nw_in_min.size()) { Form1->Memo1->Lines->Add("SEGFAUUUUUUUUUUUUUUUUUULT, cm: "+ IntToStr(cm[y][x])); Form1->Memo1->Lines->Add("SEGFAUUUUUUUUUUUUUUUUUULT, cm: "+ IntToStr(cm[y][x]) + ", spots:"+IntToStr(spots[cm[y][x]].nwn)); } //if (Rmax[y][x]==-1) if (-1 == status[y][x]) nwm[y][x]=-1; else if (cm[y][x]==0) nwm[y][x]=-2; else if (cm[y][x]==-1) nwm[y][x]=-2; // the -1== check is critical, or likely segfault! 
else if ((cm[y][x]>0) && (-1==spots[cm[y][x]].nwn || (!nw_in_min[spots[cm[y][x]].nwn]))) nwm[y][x]=-2; else nwm[y][x]=spots[cm[y][x]].nwn; // xxx error } } Form1->Memo1->Lines->Add("Found networks count = "+IntToStr(nwc-1)); std::vector<float> spdat; try { spdat.resize(map_cnt, 0.0); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } cilcnt=0; for(y=0; y<yd; y++) { for(x=0; x<xd; x++) { //if (Rmax[y][x]==-1) if (-1 == status[y][x]) continue; if (nwm[y][x]>0) { // rowp=&vmat[y][x][0]; // COMPACT_VMAT Biodiv_Features_Occur_Container& rowp = vmat[y][x]; //for(loop=0;loop<map_cnt;loop++) for(loop=rowp.first(); loop!=rowp.overflow(); loop=rowp.next(loop)) { if (rowp[loop]>0.0f) spdat[loop] += rowp[loop]; } cilcnt++; } } } Form1->Memo1->Lines->Add("Cells in classified landscapes = "+IntToStr(cilcnt)); const size_t MAX_STR_LEN = 512; char txt[MAX_STR_LEN]; for(loop=0;loop<map_cnt;loop++) { sprintf(txt, "%-6.3f %-5.3f %s\n", spp[loop].weight, spdat[loop], spp[loop].fname.toUtf8().constData()); Form1->Memo1->Lines->Add(txt); } } // Gets statistics for units specified in the PPA mask, // so networks will actually be the units // all the nwarea, nwx, nwy, nwrank are aggregated (not normalized) and will be divided by nwarea[] later on void get_fake_networks_from_mask(std::vector<int>& nwarea, std::vector<float>& nwx, std::vector<float>& nwy, std::vector<float>& nwrank, float**& mat) { // max_val pre-calculated in load_from_file... spot_cnt = (int)LSI_maskmap.max_val+1; try { spots.resize(spot_cnt); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } /* // Init to 0. units have numbers >=1 for (size_t i=0; i<spot_cnt; i++) spots[i].num = 0; // Find used unit numbers. Use the array spots[].num // for the unit numbers for (size_t y=0; y<yd; y++) { for (size_t x=0; x<xd; x++) { // make sure the mask doesn't include "missing" cells if (sol[y][x] < 0.0f) continue; int unit_idx = LSI_maskmap.m[y][x]; if (unit_idx > 0 && unit_idx <= spots.size()) if (0==spots[unit_idx].num) spots[unit_idx].num++; } } // unit numbers actually used in the LSI analysis mask std::vector<int> unit_nums; nwc = 0; for (size_t i=0; i<spot_cnt; i++) { if (0 < spots[i].num) { nwc++; // nwc is global unit_nums.push_back(i); } } nwc++; // yes, it is number of networks/units +1 */ // bypass the 2 loops above. Use as many networks as the biggest number in the planning // units layer/mask. This avoids crashes if non-consecutive numbers are used. 
nwc = spot_cnt; try { nwarea.resize(nwc+1, 0); nwrank.resize(nwc+1, 0); nwrank.resize(nwc+1, 0); nwx.resize(nwc+1, 0); nwy.resize(nwc+1, 0); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } mat = matrix(0, nwc+1, 0, map_cnt); if (!mat) { ShowMessage("Out of memory when doing LSIdent"); return; } for(int nw=0; nw<=nwc; nw++) { nwarea[nw]=0; nwrank[nw]=0.0f; nwx[nw]=0.0f; nwy[nw]=0.0f; for(int sp=0; sp<map_cnt; sp++) mat[nw][sp]=0.0f; } for (size_t y=0; y<yd; y++) { for (size_t x=0; x<xd; x++) { // make sure the mask doesn't include "missing" cells if (sol[y][x] < 0.0f) continue; int unit_idx = LSI_maskmap.m[y][x]; if (unit_idx <= 0 || unit_idx >= spot_cnt) continue; nwarea[unit_idx]++; nwx[unit_idx] += x; nwy[unit_idx] += y; nwrank[unit_idx] += sol[y][x]; // float* rowp = &vmat[y][x][0]; // COMPACT_VMAT Biodiv_Features_Occur_Container& rowp = vmat[y][x]; if (rowp) //for(size_t spp_idx=0; spp_idx<map_cnt; spp_idx++) for(size_t spp_idx=rowp.first(); spp_idx!=rowp.overflow(); spp_idx=rowp.next(spp_idx)) if (rowp[spp_idx] > .0f) mat[unit_idx][spp_idx] += rowp[spp_idx]; } } // And the nwout raster (variable nwm) is not generated (it's = LSImask) } void print_network_data() { int loop, nw, sp, num, c10, c1, c01, c001, c0001, sp_at_zero; float **mat, nwtot; FILE *f; f=fopen(LSIDfn.toUtf8().constData(), "w+t"); if (!f) { ShowMessage("Could not open output file " + LSIDfn); return; } std::vector<int> nwarea; std::vector<float> nwrank, nwx, nwy; if (LSM_minus1_mode) { // All params by ref./output get_fake_networks_from_mask(nwarea, nwx, nwy, nwrank, mat); } else { try { nwarea.resize(nwc+1, 0); nwrank.resize(nwc+1, 0); nwrank.resize(nwc+1, 0); nwx.resize(nwc+1, 0); nwy.resize(nwc+1, 0); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } // sprintf(txt, "nwc=%i spots=%i",nwc, spot_cnt); // ShowMessage(txt); mat = matrix(0, nwc+1, 0, map_cnt); if (!mat) { ShowMessage("Out of memory when doing LSIdent"); fclose(f); return; } for(nw=0; nw<=nwc; nw++) { nwarea[nw]=0; nwrank[nw]=0.0f; nwx[nw]=0.0f; nwy[nw]=0.0f; for(sp=0; sp<map_cnt; sp++) mat[nw][sp]=0.0f; } for(loop=1; loop<spot_cnt; loop++) { num = spots[loop].nwn; if (num == -1) continue; for(sp=0; sp<map_cnt; sp++) mat[num][sp] += spots[loop].bdv[sp]*spots[loop].area; nwarea[num] += spots[loop].area; nwrank[num] += spots[loop].rank; nwx[num] += spots[loop].mean_gx; nwy[num] += spots[loop].mean_gy; } } std::string nets_or_units; if (LSM_minus1_mode) nets_or_units = "units"; else nets_or_units = "networks"; fprintf(f, "Most important biodiversity features (e.g. 
species) in %s; those occurring at a 1%%+ level\n", nets_or_units.c_str()); fprintf(f, "of original distribution\n"); std::string net_or_unit; if (LSM_minus1_mode) net_or_unit = "Unit"; else net_or_unit = "Network"; fprintf(f, "%s Area Mean-Rank X Y Spp_distribution_sum spp occurring at >10%% >1%% >0.1%% >0.01%% >0.001%%\n", net_or_unit.c_str()); std::vector<float> sptot; try { sptot.resize(map_cnt, 0); } catch(std::bad_alloc const& ex) { Form1->Memo1->Lines->Add("Out of memory in landscape identification: "+String(ex.what())); } float tottot=0.0f; for(nw=1; nw<nwc; nw++) { // do not calc/output results for empty/missing unit numbers if (LSM_minus1_mode && nwarea[nw]<=0) continue; c10 = c1 = c01 = c001 = c0001 = 0; nwtot=0.0f; for(sp=0; sp<map_cnt; sp++) { nwtot += mat[nw][sp]; sptot[sp] += mat[nw][sp]; if (mat[nw][sp]>0.1f) { c10++; c1++; c01++; c001++; c0001++; } else if (mat[nw][sp]>0.01f) { c1++; c01++; c001++; c0001++; } else if (mat[nw][sp]>0.001f) { c01++; c001++; c0001++; } else if (mat[nw][sp]>0.0001f) { c001++; c0001++; } else if (mat[nw][sp]>0.00001f) { c0001++; } } tottot += nwtot; // nw area rnk x y tot fprintf(f, "%-5i %-6i %-6.3f %-6.3f %-6.3f %-6.3f %-5i %-5i %-5i %-5i %-5i\n", nw, nwarea[nw], nwrank[nw]/nwarea[nw], nwx[nw]/nwarea[nw], nwy[nw]/nwarea[nw], nwtot, c10, c1, c01, c001, c0001); for(sp=0; sp<map_cnt; sp++) { if (mat[nw][sp]>0.01f) fprintf(f, " Feature %s, %-5.2f%% of full distribution\n", spp[sp].fname.toUtf8().constData(),100*mat[nw][sp]); } // fprintf(f, "\n"); } fprintf(f, "Repeat without spp info for easy import\n"); fprintf(f,"%s Area Mean-Rank X Y Spp_distribution_sum spp occurring at >10%% >1%% >0.1%% >0.01%% >0.001%%\n", net_or_unit.c_str()); //tottot=0.0f; for(nw=1; nw<nwc; nw++) { // do not cal/output results for empty/missing unit numbers if (LSM_minus1_mode && nwarea[nw]<=0) continue; c10 = c1 = c01 = c001 = c0001 = 0; nwtot=0.0f; for(sp=0; sp<map_cnt; sp++) { nwtot += mat[nw][sp]; // would be the second time! //sptot[sp] += mat[nw][sp]; if (mat[nw][sp]>0.1f) { c10++; c1++; c01++; c001++; c0001++; } else if (mat[nw][sp]>0.01f) { c1++; c01++; c001++; c0001++; } else if (mat[nw][sp]>0.001f) { c01++; c001++; c0001++; } else if (mat[nw][sp]>0.0001f) { c001++; c0001++; } else if (mat[nw][sp]>0.00001f) { c0001++; } } // this would make tottot 2x the true totot //tottot += nwtot; // nw area rnk x y tot fprintf(f, "%-5i %-6i %-6.3f %-6.3f %-6.3f %-6.3f %-5i %-5i %-5i %-5i %-5i\n", nw, nwarea[nw], nwrank[nw]/nwarea[nw], nwx[nw]/nwarea[nw], nwy[nw]/nwarea[nw], nwtot, c10, c1, c01, c001, c0001); } fprintf(f, "\n\nAverage proportion remaining over all spp in %s = %f\n",nets_or_units.c_str(), tottot/map_cnt); sp_at_zero=0; for(sp=0; sp<map_cnt; sp++) { if (sptot[sp]<=0.0f) sp_at_zero++; } fprintf(f, "Count of biodiversity features (e.g. 
species) with nothing remaining in the network = %i\n",sp_at_zero); fprintf(f, "Total proportion and sum remaining for biodiversity features\n"); for(sp=0; sp<map_cnt; sp++) { fprintf(f, "%s %-5.4f %0.4f\n", spp[sp].fname.toUtf8().constData(), sptot[sp], sptot[sp]*spp[sp].prob_sum); } if (LSM_minus1_mode) fprintf(f, "\n\nBiological data of %i %s.\n",nwc-1, nets_or_units.c_str()); else fprintf(f, "\n\nBiological data of %i %s (spots=%i).\n",nwc-1, nets_or_units.c_str(), spot_cnt-1); fprintf(f, "%s x biodiversity features matrix\n", nets_or_units.c_str()); if (LSM_minus1_mode) fprintf(f, "Unit_number area[cells] sp_data .....\n"); else fprintf(f, "Nw_number area[cells] sp_data .....\n"); for(nw=1; nw<nwc; nw++) { // do not calc/output results for empty/missing unit numbers if (LSM_minus1_mode && nwarea[nw]<=0) continue; fprintf(f, "%-5i %-6i ", nw, nwarea[nw]); for(sp=0; sp<map_cnt; sp++) fprintf(f,"%-6.4f ", mat[nw][sp]); fprintf(f, "\n"); } fclose(f); free_matrix(mat, 0, nwc+1, 0, map_cnt); } bool read_LSI_mask(int top_fraction_mode) { int x, y; LSI_maskmap.normalize=false; if (!LSI_maskmap.load_from_file(LSImask, mask_data, area_mask.m)) { Form1->Memo1->Lines->Add("************** ERROR ***************"); Form1->Memo1->Lines->Add(" FAILURE attempting LSI mask map load."); return false; } Form1->Memo1->Lines->Add("LSI mask map loaded."); val_th = 1.0f-top_percent; ccnt = 0; for(y=0; y<yd; y++) { for(x=0; x<xd; x++) { if (LSI_maskmap.m[y][x]>=1) { if (top_fraction_mode) { if (sol[y][x]>=val_th) { cm[y][x]=0; ccnt++; } else cm[y][x]=-1; } else { cm[y][x]=0; ccnt++; } } else cm[y][x]=-1; } } Form1->Memo1->Lines->Add("Potential cells count = "+IntToStr(ccnt)); return true; } int LSIdent(int LSI_mode) { const size_t MAX_STRLEN = 2048; char txt[MAX_STRLEN]; Form1->Memo1->Lines->Add(""); Form1->Memo1->Lines->Add(""); Form1->Memo1->Lines->Add("NEW LANDSCAPE IDENTIFICATION ANALYSIS"); // This is done now in the visitor class in post_process.cpp //top_percent = StrToFloat(Form1->Edit4->Text)/100.0f; //min_percent = StrToFloat(Form1->Edit5->Text)/100.0f; //max_dist = StrToFloat(Form1->Edit6->Text); //min_simil = StrToFloat(Form1->Edit7->Text); //LSIRfn = Form1->Edit8->Text; bool lsi_mask_ok = true; if (LSI_mode==0) // "LSB" { sprintf(txt, "Running LSIdent with top%%=%0.3f min%%=%0.3f max-d=%0.3f min-s=%0.3f", top_percent*100, min_percent*100, max_dist, min_simil); Form1->Memo1->Lines->Add(txt); Form1->Memo1->Lines->Add("1. Getting candidate cells."); get_candidate_cells(); } else if (LSI_mode==1) // "LSM" { if (0.0f > max_dist) { LSM_minus1_mode = true; sprintf(txt, "Running LSIdent with mask file (%s). Note: LSM special case with max. distance -1, ignoring top%%=%0.3f and using whole landscape", LSImask.toStdString().c_str(), top_percent*100); Form1->Memo1->Lines->Add(txt); top_percent = 1.0f; } else { sprintf(txt, "Running LSIdent with mask file (%s) max-d=%0.3f min-s=%0.3f", LSImask.toStdString().c_str(), max_dist, min_simil); Form1->Memo1->Lines->Add(txt); Form1->Memo1->Lines->Add("1. Reading relevant areas from mask file "+Form1->Edit42->Text); } lsi_mask_ok = read_LSI_mask(0); if (!lsi_mask_ok) { Form1->Memo1->Lines->Add("ERROR! failed to read LSM areas from mask file: " + LSImask); } } else // 2==LSI_mode "LSB" { sprintf(txt, "Running LSIdent for top fraction within masked area, mask file %s fract=%0.4f max-d=%0.3f min-s=%0.3f", LSImask.toStdString().c_str(), top_percent, max_dist, min_simil); Form1->Memo1->Lines->Add(txt); Form1->Memo1->Lines->Add("1. 
Reading relevant areas from mask file "+Form1->Edit42->Text); lsi_mask_ok = read_LSI_mask(1); if (!lsi_mask_ok) { Form1->Memo1->Lines->Add("ERROR! failed to read LSB areas from mask file: " + LSImask);<|fim▁hole|> } if (!lsi_mask_ok) { Form1->Memo1->Lines->Add("Please fix the mask file name. No results will be generated for this post-processing analysis."); return false; } if (!LSM_minus1_mode) { // free only if traditional modes LSI_maskmap.free_matrix_m(); Form1->Memo1->Lines->Add("2. Identifying spots."); get_spots(); // spots[s].bdv[sp] = 0.0f; sisaltaa prop of sp spotissa Form1->Memo1->Lines->Add("3. Finding networks."); get_networks(); } print_network_data(); if (LSM_minus1_mode) { LSI_maskmap.free_matrix_m(); } else { #if 0 // obsmap[0].show_spots(Form1->Image1->Picture->Bitmap, cm); obsmap[0].show_spots(Form1->Image1->Picture->Bitmap, nwm, 0); #endif obsmap[0].export_GIS_INT(nwm, LSIRfn); // the spots[].bdv are not allocated in LSM "-1 distance" mode for(int loop=0; loop<spots.size(); loop++) { if (spots[loop].bdv) delete[] spots[loop].bdv; spots[loop].bdv=0; } } Screen->Cursor=crDefault; return true; } void LSCAnalysis(float f1, float f2, const String& cfn, const String& comp_outfn) { //float f1, f2; //String cfn, comp_outfn; float f1cells, f2cells, bothcells, rodiff; class GridMap cmpmap; int x, y, z1, z2, rcnt; bool f1ok, f2ok; DecimalSeparator='.'; //cfn = Edit23->Text; cmpmap.set_no_normalize(); if (!cmpmap.load_from_file(cfn, mask_data, area_mask.m)) { ShowMessage("Could not load given comparison solution"); return; } Form1->Memo1->Lines->Add(""); Form1->Memo1->Lines->Add("Solution comparison stats"); //f1 = StrToFloat(Edit22->Text); //f2 = StrToFloat(Edit24->Text); Form1->Memo1->Lines->Add("S1 cut level = "+FloatToStrF(f1, ffFixed, 7, 4)); Form1->Memo1->Lines->Add("S2 cut level = "+FloatToStrF(f2, ffFixed, 7, 4)); f1cells=f2cells=bothcells=rodiff=0.0f; z1=z2=rcnt=0; for(y=0; y<yd; y++) for(x=0; x<xd; x++) { f1ok=f2ok=false; if (f1>0.0f) { if ((sol[y][x]!=-1) && (sol[y][x]>=(1.0f-f1))) f1ok=true; } else { if ((sol[y][x]!=-1) && (sol[y][x]<=(-f1))) f1ok=true; } if (f2>0.0f) { if ((cmpmap.m[y][x]!=-1) && (cmpmap.m[y][x]>=(1.0f-f2))) f2ok=true; } else { if ((cmpmap.m[y][x]>0.0f) && (cmpmap.m[y][x]<=(-f2))) f2ok=true; } if (f1ok) f1cells++; if (f2ok) f2cells++; if (f1ok && f2ok) bothcells++; if (sol[y][x]==0.0f) z1++; if (cmpmap.m[y][x]==0.0f) z2++; if ((sol[y][x]!=-1) && (cmpmap.m[y][x]!=0.0f)) { ++rcnt; rodiff+= fabs(sol[y][x]-cmpmap.m[y][x]); } nwm[y][x] = 0; //if (Rmax[y][x]==-1) if (-1 == status[y][x]) nwm[y][x] = -1; else if (f1ok && f2ok) nwm[y][x] = 1; else if (f1ok) nwm[y][x] = 2; else if (f2ok) nwm[y][x] = 3; } Form1->Memo1->Lines->Add("Cells in present solution fraction = " +IntToStr((int)f1cells)); Form1->Memo1->Lines->Add("Cells in comparison solution fraction = " +IntToStr((int)f2cells)); Form1->Memo1->Lines->Add("Cells included in both solutions = " +IntToStr((int)bothcells)); Form1->Memo1->Lines->Add("Initially removed in present solution = " +IntToStr(z1)); Form1->Memo1->Lines->Add("Initially removed in comparison solution = "+IntToStr(z2)); Form1->Memo1->Lines->Add("Similarity f1 = "+FloatToStrF(bothcells/f1cells, ffFixed, 7, 4)); Form1->Memo1->Lines->Add("Similarity f2 = "+FloatToStrF(bothcells/f2cells, ffFixed, 7, 4)); Form1->Memo1->Lines->Add("Average difference in removal order = "+FloatToStrF(rodiff/rcnt, ffFixed, 7, 4)); const size_t MAX_STR_LEN = 512; char txt[MAX_STR_LEN]; sprintf(txt, "Overlap f1 = %0.4f, f1 = %0.4f. Average order diff=%0.4f. 
See also memo.", bothcells/f1cells,bothcells/f2cells, rodiff/rcnt); Form1->Memo1->Lines->Add(txt); if (!bat_mode) ShowMessage(txt); if (Form1->CheckBox7->Checked) { #if 0 //comp_outfn = Edit29->Text; obsmap[0].show_spots(Form1->Image1->Picture->Bitmap, nwm, 1); #endif obsmap[0].export_GIS_INT(nwm, comp_outfn); } }<|fim▁end|>
}
<|file_name|>mock_engine.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.

use super::Result;
use crate::storage::kv::{Callback, ExtCallback, Modify, SnapContext, WriteData};
use crate::storage::{Engine, RocksEngine};
use kvproto::kvrpcpb::Context;
use std::collections::LinkedList;
use std::sync::{Arc, Mutex};

/// A mock engine is a simple wrapper around RocksEngine
/// but with the ability to assert the modifies,<|fim▁hole|>
#[derive(Clone)]
pub struct MockEngine {
    base: RocksEngine,
    expected_modifies: Arc<ExpectedWriteList>,
}

#[derive(Debug, Clone, Default, PartialEq)]
pub struct ExpectedWrite {
    // if the following `Option`s are None, it means we just don't care
    modify: Option<Modify>,
    use_proposed_cb: Option<bool>,
    use_committed_cb: Option<bool>,
}

impl ExpectedWrite {
    pub fn new() -> Self {
        Default::default()
    }
    pub fn expect_modify(self, modify: Modify) -> Self {
        Self {
            modify: Some(modify),
            use_proposed_cb: self.use_proposed_cb,
            use_committed_cb: self.use_committed_cb,
        }
    }
    pub fn expect_proposed_cb(self) -> Self {
        Self {
            modify: self.modify,
            use_proposed_cb: Some(true),
            use_committed_cb: self.use_committed_cb,
        }
    }
    pub fn expect_no_proposed_cb(self) -> Self {
        Self {
            modify: self.modify,
            use_proposed_cb: Some(false),
            use_committed_cb: self.use_committed_cb,
        }
    }
    pub fn expect_committed_cb(self) -> Self {
        Self {
            modify: self.modify,
            use_proposed_cb: self.use_proposed_cb,
            use_committed_cb: Some(true),
        }
    }
    pub fn expect_no_committed_cb(self) -> Self {
        Self {
            modify: self.modify,
            use_proposed_cb: self.use_proposed_cb,
            use_committed_cb: Some(false),
        }
    }
}

/// `ExpectedWriteList` represents a list of writes expected to write to the engine
struct ExpectedWriteList(Mutex<LinkedList<ExpectedWrite>>);

// We implement drop here instead of on MockEngine
// because `MockEngine` can be cloned and dropped everywhere
// and we just want to assert every write
impl Drop for ExpectedWriteList {
    fn drop(&mut self) {
        let expected_modifies = &mut *self.0.lock().unwrap();
        assert_eq!(
            expected_modifies.len(),
            0,
            "not all expected modifies have been written to the engine, {} rest",
            expected_modifies.len()
        )
    }
}

impl Engine for MockEngine {
    type Snap = <RocksEngine as Engine>::Snap;
    type Local = <RocksEngine as Engine>::Local;

    fn kv_engine(&self) -> Self::Local {
        self.base.kv_engine()
    }

    fn snapshot_on_kv_engine(&self, start_key: &[u8], end_key: &[u8]) -> Result<Self::Snap> {
        self.base.snapshot_on_kv_engine(start_key, end_key)
    }

    fn modify_on_kv_engine(&self, modifies: Vec<Modify>) -> Result<()> {
        self.base.modify_on_kv_engine(modifies)
    }

    fn async_snapshot(&self, ctx: SnapContext<'_>, cb: Callback<Self::Snap>) -> Result<()> {
        self.base.async_snapshot(ctx, cb)
    }

    fn async_write(&self, ctx: &Context, batch: WriteData, write_cb: Callback<()>) -> Result<()> {
        self.async_write_ext(ctx, batch, write_cb, None, None)
    }

    fn async_write_ext(
        &self,
        ctx: &Context,
        batch: WriteData,
        write_cb: Callback<()>,
        proposed_cb: Option<ExtCallback>,
        committed_cb: Option<ExtCallback>,
    ) -> Result<()> {
        let mut expected_writes = self.expected_modifies.0.lock().unwrap();
        for modify in batch.modifies.iter() {
            if let Some(expected_write) = expected_writes.pop_front() {
                // check whether the modify is expected
                if let Some(expected_modify) = expected_write.modify {
                    assert_eq!(
                        modify, &expected_modify,
                        "modify writing to Engine not match with expected"
                    )
                }
                // check whether use right callback
                match expected_write.use_proposed_cb {
                    Some(true) => assert!(
                        proposed_cb.is_some(),
                        "this write is supposed to return during the propose stage"
                    ),
                    Some(false) => assert!(
                        proposed_cb.is_none(),
                        "this write is not supposed to return during the propose stage"
                    ),
                    None => {}
                }
                match expected_write.use_committed_cb {
                    Some(true) => assert!(
                        committed_cb.is_some(),
                        "this write is supposed to return during the commit stage"
                    ),
                    Some(false) => assert!(
                        committed_cb.is_none(),
                        "this write is not supposed to return during the commit stage"
                    ),
                    None => {}
                }
            } else {
                panic!("unexpected modify {:?} wrote to the Engine", modify)
            }
        }
        drop(expected_writes);
        self.base
            .async_write_ext(ctx, batch, write_cb, proposed_cb, committed_cb)
    }
}

pub struct MockEngineBuilder {
    base: RocksEngine,
    expected_modifies: LinkedList<ExpectedWrite>,
}

impl MockEngineBuilder {
    pub fn from_rocks_engine(rocks_engine: RocksEngine) -> Self {
        Self {
            base: rocks_engine,
            expected_modifies: LinkedList::new(),
        }
    }

    pub fn add_expected_write(mut self, write: ExpectedWrite) -> Self {
        self.expected_modifies.push_back(write);
        self
    }

    pub fn build(self) -> MockEngine {
        MockEngine {
            base: self.base,
            expected_modifies: Arc::new(ExpectedWriteList(Mutex::new(self.expected_modifies))),
        }
    }
}<|fim▁end|>
/// the callback used, and other aspects during interaction with the engine
<|file_name|>htmltableelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::{Attr, AttrHelpers, AttrValue}; use dom::bindings::codegen::Bindings::HTMLTableElementBinding::HTMLTableElementMethods; use dom::bindings::codegen::Bindings::HTMLTableElementBinding; use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableCaptionElementCast}; use dom::bindings::codegen::InheritTypes::{HTMLTableElementDerived, NodeCast}; use dom::bindings::js::{JSRef, Rootable, Temporary}; use dom::document::Document; use dom::eventtarget::{EventTarget, EventTargetTypeId}; use dom::element::ElementTypeId; use dom::htmlelement::{HTMLElement, HTMLElementTypeId}; use dom::htmltablecaptionelement::HTMLTableCaptionElement; use dom::node::{Node, NodeHelpers, NodeTypeId}; use dom::virtualmethods::VirtualMethods; use util::str::{self, DOMString, LengthOrPercentageOrAuto}; use cssparser::RGBA; use string_cache::Atom; use std::cell::Cell; #[dom_struct] pub struct HTMLTableElement { htmlelement: HTMLElement, background_color: Cell<Option<RGBA>>, border: Cell<Option<u32>>, cellspacing: Cell<Option<u32>>, width: Cell<LengthOrPercentageOrAuto>, } impl HTMLTableElementDerived for EventTarget { fn is_htmltableelement(&self) -> bool { *self.type_id() == EventTargetTypeId::Node( NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableElement))) } } impl HTMLTableElement { fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLTableElement { HTMLTableElement { htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableElement, localName, prefix, document), background_color: Cell::new(None), border: Cell::new(None), cellspacing: Cell::new(None), width: Cell::new(LengthOrPercentageOrAuto::Auto), } } #[allow(unrooted_must_root)] pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLTableElement> { let element = HTMLTableElement::new_inherited(localName, prefix, document); Node::reflect_node(box element, document, HTMLTableElementBinding::Wrap) } } impl<'a> HTMLTableElementMethods for JSRef<'a, HTMLTableElement> { // https://www.whatwg.org/html/#dom-table-caption fn GetCaption(self) -> Option<Temporary<HTMLTableCaptionElement>> { let node: JSRef<Node> = NodeCast::from_ref(self); node.children() .map(|c| c.root()) .filter_map(|c| { HTMLTableCaptionElementCast::to_ref(c.r()).map(Temporary::from_rooted) }) .next() } // https://www.whatwg.org/html/#dom-table-caption fn SetCaption(self, new_caption: Option<JSRef<HTMLTableCaptionElement>>) { let node: JSRef<Node> = NodeCast::from_ref(self); let old_caption = self.GetCaption(); match old_caption { Some(htmlelem) => { let htmlelem_root = htmlelem.root(); let old_caption_node: JSRef<Node> = NodeCast::from_ref(htmlelem_root.r()); assert!(node.RemoveChild(old_caption_node).is_ok()); } None => () } new_caption.map(|caption| { let new_caption_node: JSRef<Node> = NodeCast::from_ref(caption); assert!(node.AppendChild(new_caption_node).is_ok()); }); } } pub trait HTMLTableElementHelpers {<|fim▁hole|> fn get_cellspacing(&self) -> Option<u32>; fn get_width(&self) -> LengthOrPercentageOrAuto; } impl HTMLTableElementHelpers for HTMLTableElement { fn get_background_color(&self) -> Option<RGBA> { 
self.background_color.get() } fn get_border(&self) -> Option<u32> { self.border.get() } fn get_cellspacing(&self) -> Option<u32> { self.cellspacing.get() } fn get_width(&self) -> LengthOrPercentageOrAuto { self.width.get() } } impl<'a> VirtualMethods for JSRef<'a, HTMLTableElement> { fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> { let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self); Some(htmlelement as &VirtualMethods) } fn after_set_attr(&self, attr: JSRef<Attr>) { if let Some(ref s) = self.super_type() { s.after_set_attr(attr); } match attr.local_name() { &atom!("bgcolor") => { self.background_color.set(str::parse_legacy_color(&attr.value()).ok()) } &atom!("border") => { // According to HTML5 § 14.3.9, invalid values map to 1px. self.border.set(Some(str::parse_unsigned_integer(attr.value() .chars()).unwrap_or(1))) } &atom!("cellspacing") => { self.cellspacing.set(str::parse_unsigned_integer(attr.value().chars())) } &atom!("width") => self.width.set(str::parse_length(&attr.value())), _ => () } } fn before_remove_attr(&self, attr: JSRef<Attr>) { if let Some(ref s) = self.super_type() { s.before_remove_attr(attr); } match attr.local_name() { &atom!("bgcolor") => self.background_color.set(None), &atom!("border") => self.border.set(None), &atom!("cellspacing") => self.cellspacing.set(None), &atom!("width") => self.width.set(LengthOrPercentageOrAuto::Auto), _ => () } } fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue { match local_name { &atom!("border") => AttrValue::from_u32(value, 1), _ => self.super_type().unwrap().parse_plain_attribute(local_name, value), } } }<|fim▁end|>
fn get_background_color(&self) -> Option<RGBA>; fn get_border(&self) -> Option<u32>;
<|file_name|>test_multi.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
# $Id: test_multi.py,v 1.10 2005/03/11 13:24:45 kjetilja Exp $

import pycurl

m = pycurl.CurlMulti()
m.handles = []
c1 = pycurl.Curl()
c2 = pycurl.Curl()
c1.setopt(c1.URL, 'http://curl.haxx.se')
c2.setopt(c2.URL, 'http://cnn.com')
c2.setopt(c2.FOLLOWLOCATION, 1)
m.add_handle(c1)
m.add_handle(c2)
m.handles.append(c1)
m.handles.append(c2)
<|fim▁hole|>while num_handles:
    while 1:
        ret, num_handles = m.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break
    m.select(1.0)

m.remove_handle(c2)
m.remove_handle(c1)
del m.handles
m.close()
c1.close()
c2.close()<|fim▁end|>
num_handles = len(m.handles)
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Category' db.create_table(u'core_category', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(max_length=75)), )) db.send_create_signal(u'core', ['Category']) # Adding model 'Source' db.create_table(u'core_source', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('type', self.gf('django.db.models.fields.CharField')(max_length=20)), ('title', self.gf('django.db.models.fields.CharField')(max_length=75)), ('author', self.gf('django.db.models.fields.CharField')(max_length=75)), ('year_published', self.gf('django.db.models.fields.PositiveIntegerField')()), ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)), ('series_season', self.gf('django.db.models.fields.PositiveIntegerField')()), ('series_episode', self.gf('django.db.models.fields.PositiveIntegerField')()), ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)), ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)), ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)), ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)), )) db.send_create_signal(u'core', ['Source']) # Adding model 'Prediction' db.create_table(u'core_prediction', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Source'])), ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Category'])), ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)), ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)), ('year_predicted', self.gf('django.db.models.fields.PositiveIntegerField')()), ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)), ('headline_E', self.gf('django.db.models.fields.TextField')(max_length=300)), ('headline_D', self.gf('django.db.models.fields.TextField')(max_length=300)), ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)), ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)), ('username', self.gf('django.db.models.fields.CharField')(max_length=75)), ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('edition_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), ('published', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal(u'core', ['Prediction']) # Adding model 'Realisation' db.create_table(u'core_realisation', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('prediction', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Prediction'])), ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)), ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)), ('year_introduced', self.gf('django.db.models.fields.PositiveIntegerField')()), ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, 
blank=True)), ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)), ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)), ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('edition_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), ('published', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal(u'core', ['Realisation']) def backwards(self, orm): # Deleting model 'Category' db.delete_table(u'core_category') # Deleting model 'Source' db.delete_table(u'core_source') # Deleting model 'Prediction' db.delete_table(u'core_prediction') # Deleting model 'Realisation' db.delete_table(u'core_realisation') models = { u'core.category': { 'Meta': {'object_name': 'Category'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '75'}) }, u'core.prediction': { 'Meta': {'object_name': 'Prediction'}, 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Category']"}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'description_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'edition_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'headline_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'headline_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'image_credit': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}), 'more_info': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}), 'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Source']"}), 'username': ('django.db.models.fields.CharField', [], {'max_length': '75'}), 'year_predicted': ('django.db.models.fields.PositiveIntegerField', [], {}) }, u'core.realisation': { 'Meta': {'object_name': 'Realisation'}, 'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'description_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'edition_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'image_credit': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}), 'more_info': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}), 'prediction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Prediction']"}), 'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'year_introduced': ('django.db.models.fields.PositiveIntegerField', [], {}) }, u'core.source': { 'Meta': {'object_name': 'Source'}, 'author': 
('django.db.models.fields.CharField', [], {'max_length': '75'}), 'description_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}), 'description_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'image_credit': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}), 'more_info': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}), 'series_episode': ('django.db.models.fields.PositiveIntegerField', [], {}), 'series_season': ('django.db.models.fields.PositiveIntegerField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '75'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}), 'year_published': ('django.db.models.fields.PositiveIntegerField', [], {}) } } <|fim▁hole|> complete_apps = ['core']<|fim▁end|>
<|file_name|>main.js<|end_file_name|><|fim▁begin|>window.nomer=sl(1,4);<|fim▁hole|><|fim▁end|>
window.comment='Элементарные бытовые задачи.';
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages import os import cms <|fim▁hole|> 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django', 'Framework :: Django :: 1.8', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.10', ] INSTALL_REQUIREMENTS = [ 'Django>=1.8,<1.11', 'django-classy-tags>=0.7.2', 'django-formtools>=1.0', 'django-treebeard>=4.0.1', 'django-sekizai>=0.7', 'djangocms-admin-style>=1.0', ] setup( author='Patrick Lauber', author_email='[email protected]', name='django-cms', version=cms.__version__+"+nimbis.1", description='An Advanced Django CMS', long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), url='https://www.django-cms.org/', license='BSD License', platforms=['OS Independent'], classifiers=CLASSIFIERS, install_requires=INSTALL_REQUIREMENTS, packages=find_packages(exclude=['project', 'project.*']), include_package_data=True, zip_safe=False, test_suite='runtests.main', )<|fim▁end|>
CLASSIFIERS = [
<|file_name|>Sc-code-points.js<|end_file_name|><|fim▁begin|>// All code points in the `Sc` category as per Unicode v6.3.0: [ 0x24, 0xA2, 0xA3, 0xA4, 0xA5, 0x58F, 0x60B, 0x9F2, 0x9F3, 0x9FB, 0xAF1, 0xBF9, 0xE3F, 0x17DB, 0x20A0, 0x20A1, 0x20A2, 0x20A3, 0x20A4, 0x20A5, 0x20A6, 0x20A7, 0x20A8, 0x20A9, 0x20AA, 0x20AB, 0x20AC, 0x20AD, 0x20AE, 0x20AF, 0x20B0, 0x20B1, 0x20B2, 0x20B3, 0x20B4, 0x20B5, 0x20B6, 0x20B7, 0x20B8, 0x20B9, 0x20BA, 0xA838, 0xFDFC, 0xFE69, 0xFF04, 0xFFE0, 0xFFE1, 0xFFE5,<|fim▁hole|><|fim▁end|>
0xFFE6 ];
<|file_name|>power_set_for_set_of_characters.go<|end_file_name|><|fim▁begin|>/* #Date of creation : 9 Jan 2016. #Aim of program : To print power set of a set of characters. #Coded by : Rishikesh Agrawani. */ package main import "fmt" func main() { var n, r, i, j uint fmt.Print("Enter the number of binary variables(for which you want the binary combinations): ") fmt.Scanf("%d", &n) fmt.Print("\nEnter ", n, " binary variables name( name should be only 1 character long) separated by space: ") a := make([]string, n) r = 1 for i = 0; i < n; i++ { fmt.Scanf("%s", &a[i]) r *= 2 } fmt.Println("\nColumns => ", n, "\nRows => ", r) for i = 0; i < r; i++ { for j = 0; j < n; j++ { if (i>>j)&1 == 1 { fmt.Print(a[j], " ") } else { fmt.Print("- ") } } fmt.Println() } } /*1st RUN: Enter the number of binary variables(for which you want the binary combinations): 4 Enter 4 binary variables name( name should be only 1 character long) separated by space: a b c d Columns => 4 Rows => 16 - - - - a - - - - b - - a b - - - - c - a - c - - b c - a b c - - - - d a - - d - b - d a b - d - - c d a - c d - b c d a b c d */ /*2nd RUN: Enter the number of binary variables(for which you want the binary combinations): Enter 5 binary variables name( name should be only 1 character long) separated by space: Columns => 5 Rows => 32 - - - - - p - - - - - q - - - p q - - - - - r - - p - r - - - q r - - p q r - - - - - s - p - - s - - q - s - p q - s -<|fim▁hole|>p - r s - - q r s - p q r s - - - - - t p - - - t - q - - t p q - - t - - r - t p - r - t - q r - t p q r - t - - - s t p - - s t - q - s t p q - s t - - r s t p - r s t - q r s t p q r s t */<|fim▁end|>
- - r s -
<|file_name|>Equality.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ode.ql.tree.nodes; public class Equality extends IdentifierToValueCMP { private static final long serialVersionUID = 8151616227509392901L; /** * @param identifier * @param value */ public Equality(Identifier identifier, Value value) { super(identifier, value); } }<|fim▁end|>
<|file_name|>socket_event.rs<|end_file_name|><|fim▁begin|>use td_rp::Buffer; #[derive(Debug)] pub struct SocketEvent { socket_fd: i32, cookie: u32, client_ip: String, server_port: u16, buffer: Buffer, out_cache: Buffer, online: bool, websocket: bool, } impl SocketEvent { pub fn new(socket_fd: i32, client_ip: String, server_port: u16) -> SocketEvent { SocketEvent { socket_fd: socket_fd, cookie: 0, client_ip: client_ip, server_port: server_port, buffer: Buffer::new(), out_cache: Buffer::new(), online: true, websocket: false, } } pub fn get_socket_fd(&self) -> i32 { self.socket_fd<|fim▁hole|> pub fn get_client_ip(&self) -> String { self.client_ip.clone() } pub fn get_server_port(&self) -> u16 { self.server_port } pub fn get_cookie(&self) -> u32 { self.cookie } pub fn set_cookie(&mut self, cookie: u32) { self.cookie = cookie; } pub fn get_buffer(&mut self) -> &mut Buffer { &mut self.buffer } pub fn get_out_cache(&mut self) -> &mut Buffer { &mut self.out_cache } pub fn set_online(&mut self, online: bool) { self.online = online; } pub fn is_online(&self) -> bool { self.online } pub fn set_websocket(&mut self, websocket: bool) { self.websocket = websocket; } pub fn is_websocket(&self) -> bool { self.websocket } }<|fim▁end|>
}
<|file_name|>authoring_spec.js<|end_file_name|><|fim▁begin|>/* eslint-disable newline-per-chained-call */ var monitoring = require('./helpers/monitoring'), authoring = require('./helpers/authoring'), ctrlKey = require('./helpers/utils').ctrlKey, commandKey = require('./helpers/utils').commandKey, ctrlShiftKey = require('./helpers/utils').ctrlShiftKey, assertToastMsg = require('./helpers/utils').assertToastMsg, nav = require('./helpers/utils').nav, dictionaries = require('./helpers/dictionaries'), workspace = require('./helpers/workspace'); describe('authoring', () => { beforeEach(() => { monitoring.openMonitoring(); }); it('add an embed and respect the order', () => { // try with same block content monitoring.actionOnItem('Edit', 2, 0); authoring.cleanBodyHtmlElement(); authoring.writeText('line\n'); authoring.addEmbed('embed'); var thirdBlockContext = element(by.model('item.body_html')).all(by.repeater('block in vm.blocks')).get(2); thirdBlockContext.element(by.css('.editor-type-html')).sendKeys('line\n'); authoring.addEmbed('embed', thirdBlockContext); authoring.blockContains(0, 'line'); authoring.blockContains(1, 'embed'); authoring.blockContains(2, 'line'); authoring.blockContains(3, 'embed'); authoring.close(); authoring.ignore(); // with different block content monitoring.actionOnItem('Edit', 2, 0); authoring.cleanBodyHtmlElement(); function generateLines(from, to) { var lines = ''; for (var i = from; i < to; i++) { lines += 'line ' + i + '\n'; } return lines; } var body1 = generateLines(0, 8); var body2 = generateLines(8, 15); var body3 = generateLines(15, 20); authoring.writeText(body1 + body2 + body3); for (var i = 0; i < 5; i++) { authoring.writeText(protractor.Key.UP); } authoring.writeText(protractor.Key.ENTER); authoring.writeText(protractor.Key.UP); authoring.addEmbed('Embed at position 15'); authoring.blockContains(0, (body1 + body2).replace(/\n$/, '')); authoring.blockContains(2, body3.replace(/\n$/, '')); element(by.model('item.body_html')).all(by.css('.editor-type-html')).get(0).click(); authoring.writeText(protractor.Key.ENTER); authoring.addEmbed('Embed at position 8'); authoring.blockContains(0, body1.replace(/\n$/, '')); authoring.blockContains(2, body2.replace(/\n$/, '')); authoring.blockContains(4, body3.replace(/\n$/, '')); }); it('authoring operations', () => { // undo and redo operations by using CTRL+Z and CTRL+y ... // ... from a new item authoring.createTextItem(); browser.sleep(1000); authoring.writeText('to be undone'); expect(authoring.getBodyText()).toBe('to be undone'); browser.sleep(1000); ctrlKey('z'); expect(authoring.getBodyText()).toBe(''); ctrlKey('y'); expect(authoring.getBodyText()).toBe('to be undone'); authoring.writeText(protractor.Key.ENTER); authoring.writeText(protractor.Key.UP); authoring.addEmbed('Embed'); authoring.blockContains(1, 'Embed'); authoring.blockContains(2, 'to be undone'); commandKey('z'); authoring.blockContains(0, 'to be undone'); commandKey('y'); authoring.blockContains(1, 'Embed'); authoring.blockContains(2, 'to be undone'); authoring.cutBlock(1); authoring.blockContains(0, 'to be undone'); ctrlKey('z'); authoring.blockContains(1, 'Embed'); authoring.blockContains(2, 'to be undone'); authoring.close(); authoring.ignore(); // ... 
from an existing item expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); expect(authoring.getBodyText()).toBe('item5 text'); authoring.writeText('Two'); expect(authoring.getBodyText()).toBe('Twoitem5 text'); authoring.writeText('Words'); expect(authoring.getBodyText()).toBe('TwoWordsitem5 text'); ctrlKey('z'); expect(authoring.getBodyText()).toBe('Twoitem5 text'); ctrlKey('y'); expect(authoring.getBodyText()).toBe('TwoWordsitem5 text'); authoring.save(); authoring.close(); // allows to create a new empty package monitoring.createItemAction('create_package'); expect(element(by.className('packaging-screen')).isDisplayed()).toBe(true); authoring.close(); // can edit packages in which the item was linked expect(monitoring.getTextItem(2, 1)).toBe('item9'); monitoring.actionOnItem('Edit', 2, 1); authoring.showPackages(); expect(authoring.getPackages().count()).toBe(1); expect(authoring.getPackage(0).getText()).toMatch('PACKAGE2'); authoring.getPackage(0).element(by.tagName('a')).click(); authoring.showInfo(); expect(authoring.getGUID().getText()).toMatch('package2'); authoring.close(); // can change normal theme expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.changeNormalTheme('dark-theme'); expect(monitoring.hasClass(element(by.className('main-article')), 'dark-theme')).toBe(true); authoring.close(); // can change proofread theme expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.changeProofreadTheme('dark-theme-mono'); expect(monitoring.hasClass(element(by.className('main-article')), 'dark-theme-mono')).toBe(true); authoring.close(); // publish & kill item expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.publish(); browser.sleep(300); monitoring.filterAction('text'); monitoring.actionOnItem('Kill item', 5, 0); expect(authoring.send_kill_button.isDisplayed()).toBeTruthy(); authoring.cancel(); browser.sleep(300); // publish & correct item // reset filters monitoring.filterAction('all'); expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.publish(); browser.sleep(300); monitoring.filterAction('text'); monitoring.actionOnItem('Correct item', 5, 0); expect(authoring.send_correction_button.isDisplayed()).toBeTruthy(); authoring.cancel(); expect(monitoring.getTextItem(5, 0)).toBe('item6'); monitoring.actionOnItem('Open', 5, 0); expect(authoring.edit_correct_button.isDisplayed()).toBe(true); expect(authoring.edit_kill_button.isDisplayed()).toBe(true); authoring.close(); browser.sleep(300); monitoring.filterAction('all'); // reset filter // update(rewrite) item monitoring.openMonitoring(); // reset filters monitoring.filterAction('all'); expect(monitoring.getTextItem(2, 1)).toBe('item7'); monitoring.actionOnItem('Edit', 2, 1); authoring.publish(); browser.sleep(300); monitoring.filterAction('text'); expect(monitoring.getTextItem(5, 0)).toBe('item7'); monitoring.actionOnItem('Open', 5, 0); expect(authoring.update_button.isDisplayed()).toBe(true); authoring.update_button.click(); monitoring.compactActionDropdown().click(); monitoring.filterAction('all'); expect(monitoring.getTextItem(0, 0)).toBe('item7'); expect(monitoring.getTextItem(5, 0)).toBe('item7'); }); it('authoring history', () => { // view item history create-fetch operation expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.showHistory(); 
expect(authoring.getHistoryItems().count()).toBe(1); expect(authoring.getHistoryItem(0).getText()).toMatch(/Fetched by first name last name Today/); authoring.close(); // view item history move operation expect(monitoring.getTextItem(2, 3)).toBe('item8'); monitoring.actionOnItem('Edit', 2, 3); authoring.writeText('Two'); authoring.save(); expect(authoring.sendToButton.isDisplayed()).toBe(true); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(2); authoring.sendTo('Politic Desk', 'two'); authoring.confirmSendTo(); expect(monitoring.getTextItem(3, 0)).toBe('item8'); monitoring.actionOnItem('Edit', 3, 0); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(3); expect(authoring.getHistoryItem(2).getText()).toMatch(/Moved by first name last name Today/); authoring.close(); // view item history editable for newly created unsaved item authoring.createTextItem(); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(1); expect(authoring.getHistoryItem(0).getText()).toMatch(/Created by first name last name Today/); expect(authoring.save_button.isDisplayed()).toBe(true); authoring.getHistoryItem(0).click(); expect(authoring.save_button.isDisplayed()).toBe(true); // expect save button still available authoring.close(); // view item history create-update operations authoring.createTextItem(); authoring.writeTextToHeadline('new item'); authoring.writeText('some text'); authoring.save(); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(2); expect(authoring.getHistoryItem(0).getText()).toMatch(/Created by first name last name Today/); expect(authoring.getHistoryItem(1).getText()).toMatch(/Updated by.*/); authoring.save(); authoring.close(); // view item history publish operation expect(monitoring.getTextItem(3, 3)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 3); authoring.addHelpline('Children'); expect(authoring.getBodyFooter()).toMatch(/Kids Helpline*/); expect(authoring.save_button.getAttribute('disabled')).toBe(null); authoring.save(); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Open', 5, 0); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(3); expect(authoring.getHistoryItem(0).getText()).toMatch(/Fetched by.*/); expect(authoring.getHistoryItem(1).getText()).toMatch(/Updated by.*/); expect(authoring.getHistoryItem(2).getText()).toMatch(/Published by.*/); var transmissionDetails = authoring.showTransmissionDetails(2); expect(transmissionDetails.count()).toBe(1); transmissionDetails.get(0).click(); expect(element(by.className('modal__body')).getText()).toMatch(/Kids Helpline*/); element(by.css('[ng-click="hideFormattedItem()"]')).click(); monitoring.compactActionDropdown().click(); monitoring.filterAction('text'); authoring.close(); // view item history spike-unspike operations browser.sleep(5000); monitoring.showMonitoring(); expect(monitoring.getTextItem(2, 2)).toBe('item7'); monitoring.actionOnItem('Spike', 2, 2); monitoring.showSpiked(); expect(monitoring.getSpikedTextItem(0)).toBe('item7'); monitoring.unspikeItem(0, 'Politic desk', 'Incoming Stage'); monitoring.showMonitoring(); expect(monitoring.getTextItem(1, 0)).toBe('item7'); monitoring.actionOnItem('Edit', 1, 0); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(3); expect(authoring.getHistoryItem(1).getText()).toMatch(/Spiked by first name last name Today/); expect(authoring.getHistoryItem(2).getText()).toMatch(/Unspiked by first name last name Today/); 
authoring.close(); // view item history duplicate operation expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItemSubmenu('Duplicate', 'Duplicate in place', 2, 0, true); expect(monitoring.getTextItem(0, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 0, 0); authoring.showHistory(); expect(authoring.getHistoryItems().count()).toBe(2); expect(authoring.getHistoryItem(1).getText()).toMatch(/Duplicated by/); authoring.close(); }); it('keyboard shortcuts', () => { monitoring.actionOnItem('Edit', 2, 0); authoring.writeText('z'); element(by.cssContainingText('label', 'Dateline')).click(); ctrlShiftKey('s'); browser.wait(() => element(by.buttonText('Save')).getAttribute('disabled'), 500); authoring.close(); monitoring.actionOnItem('Edit', 2, 0); browser.sleep(300); expect(authoring.getBodyText()).toBe('zitem5 text'); element(by.cssContainingText('label', 'Headline')).click(); ctrlShiftKey('e'); browser.sleep(300); expect(element.all(by.css('.authoring-embedded .embedded-auth-view')).count()).toBe(0); }); it('can display monitoring after publishing an item using full view of authoring', () => { monitoring.actionOnItem('Edit', 3, 2); monitoring.showHideList(); authoring.publish(); expect(monitoring.getGroups().count()).toBe(6); }); it('broadcast operation', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Create Broadcast', 5, 0); expect(authoring.getHeaderSluglineText()).toContain('item5'); }); it('can calculate word counts', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.cleanBodyHtmlElement(); authoring.writeText('There are seven words in this sentence.\n'); authoring.writeText('There are eight words in this new sentence.'); authoring.writeText(protractor.Key.ENTER); authoring.writeText(' '); authoring.writeText(protractor.Key.ENTER); authoring.writeText('There are nine words, in this final last sentence.\n'); expect(authoring.getEditorWordCount()).toBe('24 words'); authoring.save(); authoring.close(); expect(monitoring.getMonitoringWordCount('item5')).toBe('24'); monitoring.actionOnItem('Edit', 2, 0); authoring.cleanBodyHtmlElement(); expect(authoring.getEditorWordCount()).toBe('0 words'); authoring.save(); authoring.close(); expect(monitoring.getMonitoringWordCount('item5')).toBe('0'); }); it('can update sign off manually', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); expect(authoring.getSignoffText()).toBe('fl'); authoring.writeSignoffText('ABC'); authoring.save(); authoring.close(); expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); expect(authoring.getSignoffText()).toBe('ABC'); authoring.writeText('z'); authoring.save(); expect(authoring.getSignoffText()).toBe('ABC/fl'); }); it('toggle auto spellcheck and hold changes', () => { monitoring.actionOnItem('Edit', 2, 1); expect(element(by.model('spellcheckMenu.isAuto')).getAttribute('checked')).toBeTruthy(); authoring.toggleAutoSpellCheck(); expect(element(by.model('spellcheckMenu.isAuto')).getAttribute('checked')).toBeFalsy(); authoring.close(); monitoring.actionOnItem('Edit', 2, 2); expect(element(by.model('spellcheckMenu.isAuto')).getAttribute('checked')).toBeFalsy(); }); it('spellcheck hilite sentence word for capitalization and ignore the word after abbreviations', () => { nav('/settings/dictionaries'); dictionaries.edit('Test 1'); 
expect(dictionaries.getWordsCount()).toBe(0); dictionaries.search('abbrev.'); dictionaries.saveWord(); dictionaries.search('abbrev'); dictionaries.saveWord(); expect(dictionaries.getWordsCount()).toBe(2); dictionaries.save(); browser.sleep(200); monitoring.openMonitoring(); authoring.createTextItem();<|fim▁hole|> expect(authoring.getBodyInnerHtml()).toContain('<span class="sderror sdhilite sdCapitalize" data-word="some" ' + 'data-index="0" data-sentence-word="true">some</span>'); expect(authoring.getBodyInnerHtml()).not.toContain('<span class="sderror sdhilite sdCapitalize" ' + 'data-word="few" data-index="57">few</span>'); expect(authoring.getBodyInnerHtml()).toContain('<span class="sderror sdhilite" data-word="few" ' + 'data-index="57">few</span>'); }); it('related item widget', () => { monitoring.actionOnItem('Edit', 2, 1); authoring.writeText('something'); authoring.save(); authoring.close(); authoring.createTextItem(); browser.sleep(1000); authoring.writeText('something'); authoring.setHeaderSluglineText('item test'); authoring.save(); authoring.close(); authoring.createTextItem(); browser.sleep(1000); authoring.writeText('something'); authoring.setHeaderSluglineText('item test'); authoring.save(); authoring.openRelatedItem(); authoring.searchRelatedItems(); expect(authoring.getRelatedItems().count()).toBe(1); authoring.searchRelatedItems('slugline'); expect(authoring.getRelatedItems().count()).toBe(0); authoring.openRelatedItemConfiguration(); authoring.setRelatedItemConfigurationSlugline('ANY'); authoring.setRelatedItemConfigurationLastUpdate('now-48h'); authoring.saveRelatedItemConfiguration(); browser.sleep(1000); authoring.searchRelatedItems(); expect(authoring.getRelatedItems().count()).toBe(1); }); it('related item widget can open published item', () => { expect(monitoring.getGroups().count()).toBe(6); expect(monitoring.getTextItem(2, 1)).toBe('item9'); expect(monitoring.getTextItemBySlugline(2, 1)).toContain('ITEM9 SLUGLINE'); monitoring.actionOnItem('Edit', 2, 1); authoring.publish(); // item9 published monitoring.filterAction('text'); monitoring.actionOnItem('Update', 5, 0); // duplicate item9 text published item expect(monitoring.getGroupItems(0).count()).toBe(1); monitoring.actionOnItem('Edit', 0, 0); authoring.openRelatedItem(); // opens related item widget browser.sleep(10000); expect(authoring.getRelatedItemBySlugline(0).getText()).toContain('item9 slugline'); authoring.actionOpenRelatedItem(0); // Open item expect(authoring.getHeaderSluglineText()).toContain('item9 slugline'); }); it('Kill Template apply', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Kill item', 5, 0); browser.sleep(500); expect(authoring.getBodyText()).toBe('This is kill template. 
Slugged item5 slugline one/two.'); expect(authoring.getHeadlineText()).toBe('KILL NOTICE'); expect(authoring.getHeadlineText()).toBe('KILL NOTICE'); expect(authoring.send_kill_button.isDisplayed()).toBeTruthy(); }); it('Emptied body text fails to validate', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.writeText(''); ctrlShiftKey(protractor.Key.END); ctrlKey('x'); authoring.save(); authoring.publish(true); assertToastMsg('error', 'BODY_HTML empty values not allowed'); }); it('keyboard navigation operations on subject dropdown', () => { // Open any item in Edit mode monitoring.actionOnItem('Edit', 2, 1); // Open subject metadata dropdown field authoring.getSubjectMetadataDropdownOpened(); browser.sleep(500); // wait a bit // Perform down arrow would focus/active next element in list browser.actions().sendKeys(protractor.Key.DOWN).perform(); browser.sleep(200); expect(browser.driver.switchTo().activeElement().getText()).toEqual('arts, culture and entertainment'); // Perform right arrow would navigate to next level of focused category and selected as input term browser.actions().sendKeys(protractor.Key.RIGHT).perform(); var selectedTerm = authoring.getNextLevelSelectedCategory(); expect(selectedTerm.get(0).getText()).toBe('arts, culture and entertainment'); // Perform Left arrow key would back to one level up in tree and should be focused/active browser.actions().sendKeys(protractor.Key.LEFT).perform(); browser.sleep(200); expect(browser.driver.switchTo().activeElement().getText()).toEqual('arts, culture and entertainment'); // now type some search term an check if down arrow navigates the search list browser.actions().sendKeys('cri').perform(); browser.sleep(200); browser.actions().sendKeys(protractor.Key.DOWN).perform(); expect(browser.driver.switchTo().activeElement().getText()).toEqual('crime, law and justice'); }); it('hide multi-edit option when action is kill', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.moreActionsButton.click(); expect(authoring.multieditButton.isDisplayed()).toBe(true); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Kill item', 5, 0); authoring.moreActionsButton.click(); expect(authoring.multieditButton.isDisplayed()).toBe(false); }); it('Compare versions operations of an opened article via Compare versions menu option', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); // Provide another version on save authoring.writeTextToHeadline('updated '); authoring.save(); expect(authoring.getHeadlineText()).toBe('updated item5'); // Open selected versions in compare-versions screen boards authoring.openCompareVersionsScreen(); expect(authoring.getCompareVersionsBoards().count()).toBe(2); expect(authoring.getArticleHeadlineOfBoard(0)).toEqual('updated item5'); expect(authoring.getInnerDropdownItemVersions(1).count()).toBe(1); authoring.closeCompareVersionsScreen(); // expect the article should be open on closing compare-versions screen expect(authoring.headline.isDisplayed()).toBe(true); expect(authoring.getHeadlineText()).toBe('updated item5'); // Update article headline again to get third version authoring.writeTextToHeadline('newly '); authoring.save(); expect(authoring.getHeadlineText()).toBe('newly updated item5'); authoring.openCompareVersionsScreen(); expect(authoring.getArticleHeadlineOfBoard(0)).toEqual('newly updated item5'); 
expect(authoring.getInnerDropdownItemVersions(1).count()).toBe(2); authoring.openItemVersionInBoard(1, 0); expect(authoring.getInnerDropdownItemVersions(0).count()).toBe(1); expect(authoring.getHtmlArticleHeadlineOfBoard(0)).toContain( '<ins style="background:#e6ffe6;">newly </ins><span>updated item5</span>' ); expect(authoring.getArticleHeadlineOfBoard(1)).toEqual('updated item5'); }); it('open publish item with footer text without <br> tag', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.addHelpline('Suicide'); expect(authoring.getBodyFooter()).toMatch(/Readers seeking support and information about suicide*/); expect(authoring.save_button.isEnabled()).toBe(true); authoring.save(); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Open', 5, 0); expect(authoring.getBodyFooterPreview()).not.toContain('<br>'); }); it('maintains helpline first option always selected', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); authoring.addHelpline('Suicide'); expect(authoring.getBodyFooter()).toMatch(/Readers seeking support and information about suicide*/); expect(authoring.save_button.isEnabled()).toBe(true); expect(authoring.getHelplineSelectedOption(0)).toBe('true'); // first option remained selected expect(authoring.getHelplineSelectedOption(1)).toBe(null); // Suicide not remained selected // select another helpline authoring.addHelpline('Children'); expect(authoring.getHelplineSelectedOption(0)).toBe('true'); // first option remained selected expect(authoring.getHelplineSelectedOption(2)).toBe(null); // Children not remained selected }); it('Not be able to Ctrl-z to the original, actionable text when killing an item', () => { expect(monitoring.getTextItem(2, 0)).toBe('item5'); monitoring.actionOnItem('Edit', 2, 0); expect(authoring.getHeadlineText()).toBe('item5'); // original, actionable headline text expect(authoring.getBodyText()).toBe('item5 text'); // original, actionable body text authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Kill item', 5, 0); // Body: // undo without editing body text ctrlKey('z'); expect(authoring.getBodyText()).toBe('This is kill template. Slugged item5 slugline one/two.'); // now edit body text authoring.writeText('Edit kill notice body text:'); expect(authoring.getBodyText()) .toBe('Edit kill notice body text:This is kill template. Slugged item5 slugline one/two.'); // undo edited body text ctrlKey('z'); expect(authoring.getBodyText()).toBe('This is kill template. Slugged item5 slugline one/two.'); // undo one more time and expect body text not to be the original body text. ctrlKey('z'); expect(authoring.getBodyText()).not.toBe('item5 text'); expect(authoring.getBodyText()).toBe('This is kill template. Slugged item5 slugline one/two.'); // Headline: // undo without editing headline text ctrlKey('z'); expect(authoring.getHeadlineText()).toBe('KILL NOTICE'); // now edit headline text authoring.writeTextToHeadline('Edit kill headline:'); expect(authoring.getHeadlineText()).toBe('Edit kill headline:KILL NOTICE'); // undo edited headline text ctrlKey('z'); expect(authoring.getHeadlineText()).toBe('KILL NOTICE'); // undo one more time and expect headline text not to be the original headline text. 
ctrlKey('z'); expect(authoring.getHeadlineText()).not.toBe('item5'); expect(authoring.getHeadlineText()).toBe('KILL NOTICE'); expect(authoring.send_kill_button.isDisplayed()).toBeTruthy(); }); it('after undo/redo save last version', () => { monitoring.actionOnItem('Edit', 2, 0); authoring.cleanBodyHtmlElement(); browser.sleep(2000); authoring.writeText('one\ntwo\nthree'); browser.sleep(2000); // wait for autosave authoring.backspaceBodyHtml(5); browser.sleep(2000); ctrlKey('z'); browser.sleep(1000); authoring.save(); authoring.close(); monitoring.actionOnItem('Edit', 2, 0); expect(authoring.getBodyText()).toBe('one\ntwo\nthree'); }); it('can send and publish', () => { workspace.selectDesk('Sports Desk'); expect(monitoring.getGroupItems(0).count()).toBe(0); expect(monitoring.getGroupItems(1).count()).toBe(0); expect(monitoring.getGroupItems(2).count()).toBe(1); expect(monitoring.getGroupItems(3).count()).toBe(0); expect(monitoring.getGroupItems(4).count()).toBe(1); expect(monitoring.getGroupItems(5).count()).toBe(0); // no published content. workspace.selectDesk('Politic Desk'); expect(monitoring.getGroupItems(5).count()).toBe(0); // desk output expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.writeTextToHeadline('testing send and publish'); authoring.save(); authoring.writeText(''); ctrlShiftKey(protractor.Key.END); ctrlKey('x'); authoring.sendAndpublish('Sports Desk'); authoring.confirmSendTo(); // confirm unsaved changes authoring.publishFrom('Sports Desk'); assertToastMsg('error', 'BODY_HTML empty values not allowed'); // validation takes place authoring.writeText('Testing'); authoring.save(); authoring.publishFrom('Sports Desk'); // desk output count zero as content publish from sport desk expect(monitoring.getGroupItems(5).count()).toBe(0); workspace.selectDesk('Sports Desk'); expect(monitoring.getGroupItems(5).count()).toBe(1); }, 600000); it('can minimize story while a correction and kill is being written', () => { workspace.selectDesk('Politic Desk'); expect(monitoring.getTextItem(3, 2)).toBe('item6'); monitoring.actionOnItem('Edit', 3, 2); authoring.publish(); monitoring.filterAction('text'); monitoring.actionOnItem('Correct item', 5, 0); // Edit for correction authoring.minimize(); // minimize before publishing the correction expect(monitoring.getTextItem(2, 1)).toBe('item9'); monitoring.actionOnItem('Edit', 2, 1); authoring.publish(); monitoring.actionOnItem('Kill item', 5, 0); // Edit for kill authoring.minimize(); // minimize before publishing the kill authoring.maximize('item6'); expect(authoring.send_correction_button.isDisplayed()).toBeTruthy(); authoring.maximize('item9'); expect(authoring.send_kill_button.isDisplayed()).toBeTruthy(); }); });<|fim▁end|>
authoring.writeText('some is a sentence word, but words come after an abbrev. few are not'); browser.sleep(200);
<|file_name|>calc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2014, GEM Foundation # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from __future__ import division import collections import itertools import operator import numpy from openquake.hazardlib.imt import from_string from openquake.hazardlib.calc import gmf, filters from openquake.hazardlib.site import SiteCollection from openquake.commonlib.readinput import \ get_gsims, get_rupture, get_correl_model, get_imts MAX_INT = 2 ** 31 - 1 # this is used in the random number generator # in this way even on 32 bit machines Python will not have to convert # the generated seed into a long integer # ############## utilities for the classical calculator ############### # SourceRuptureSites = collections.namedtuple( 'SourceRuptureSites', 'source rupture sites') def gen_ruptures(sources, site_coll, maximum_distance, monitor): """ Yield (source, rupture, affected_sites) for each rupture generated by the given sources. :param sources: a sequence of sources :param site_coll: a SiteCollection instance :param maximum_distance: the maximum distance :param monitor: a Monitor object """ filtsources_mon = monitor('filtering sources') genruptures_mon = monitor('generating ruptures') filtruptures_mon = monitor('filtering ruptures') for src in sources: with filtsources_mon: s_sites = src.filter_sites_by_distance_to_source( maximum_distance, site_coll) if s_sites is None: continue with genruptures_mon: ruptures = list(src.iter_ruptures()) if not ruptures: continue for rupture in ruptures: with filtruptures_mon: r_sites = filters.filter_sites_by_distance_to_rupture( rupture, maximum_distance, s_sites) if r_sites is None: continue yield SourceRuptureSites(src, rupture, r_sites) filtsources_mon.flush() genruptures_mon.flush() filtruptures_mon.flush() def gen_ruptures_for_site(site, sources, maximum_distance, monitor): """ Yield source, <ruptures close to site> :param site: a Site object :param sources: a sequence of sources :param monitor: a Monitor object """ source_rupture_sites = gen_ruptures( sources, SiteCollection([site]), maximum_distance, monitor) for src, rows in itertools.groupby( source_rupture_sites, key=operator.attrgetter('source')): yield src, [row.rupture for row in rows] # ############## utilities for the scenario calculators ############### # def calc_gmfs_fast(oqparam, sitecol): """ Build all the ground motion fields for the whole site collection in a single step. 
""" max_dist = oqparam.maximum_distance correl_model = get_correl_model(oqparam) seed = oqparam.random_seed imts = get_imts(oqparam) [gsim] = get_gsims(oqparam) trunc_level = oqparam.truncation_level n_gmfs = oqparam.number_of_ground_motion_fields rupture = get_rupture(oqparam) res = gmf.ground_motion_fields( rupture, sitecol, imts, gsim, trunc_level, n_gmfs, correl_model, filters.rupture_site_distance_filter(max_dist), seed) return {str(imt): matrix for imt, matrix in res.items()} # ######################### hazard maps ################################### # # cutoff value for the poe EPSILON = 1E-30 def compute_hazard_maps(curves, imls, poes): """ Given a set of hazard curve poes, interpolate a hazard map at the specified ``poe``. :param curves: 2D array of floats. Each row represents a curve, where the values in the row are the PoEs (Probabilities of Exceedance) corresponding to ``imls``. Each curve corresponds to a geographical location. :param imls: Intensity Measure Levels associated with these hazard ``curves``. Type<|fim▁hole|> :returns: An array of shape P x N, where N is the number of curves and P the number of poes. """ curves = numpy.array(curves) poes = numpy.array(poes) if len(poes.shape) == 0: # `poes` was passed in as a scalar; # convert it to 1D array of 1 element poes = poes.reshape(1) if len(curves.shape) == 1: # `curves` was passed as 1 dimensional array, there is a single site curves = curves.reshape((1,) + curves.shape) # 1 x L result = [] imls = numpy.log(numpy.array(imls[::-1])) for curve in curves: # the hazard curve, having replaced the too small poes with EPSILON curve_cutoff = [max(poe, EPSILON) for poe in curve[::-1]] hmap_val = [] for poe in poes: # special case when the interpolation poe is bigger than the # maximum, i.e the iml must be smaller than the minumum if poe > curve_cutoff[-1]: # the greatest poes in the curve # extrapolate the iml to zero as per # https://bugs.launchpad.net/oq-engine/+bug/1292093 # a consequence is that if all poes are zero any poe > 0 # is big and the hmap goes automatically to zero hmap_val.append(0) else: # exp-log interpolation, to reduce numerical errors # see https://bugs.launchpad.net/oq-engine/+bug/1252770 val = numpy.exp( numpy.interp( numpy.log(poe), numpy.log(curve_cutoff), imls)) hmap_val.append(val) result.append(hmap_val) return numpy.array(result) # ######################### GMF->curves #################################### # # NB (MS): the approach used here will not work for non-poissonian models def gmvs_to_haz_curve(gmvs, imls, invest_time, duration): """ Given a set of ground motion values (``gmvs``) and intensity measure levels (``imls``), compute hazard curve probabilities of exceedance. :param gmvs: A list of ground motion values, as floats. :param imls: A list of intensity measure levels, as floats. :param float invest_time: Investigation time, in years. It is with this time span that we compute probabilities of exceedance. Another way to put it is the following. When computing a hazard curve, we want to answer the question: What is the probability of ground motion meeting or exceeding the specified levels (``imls``) in a given time span (``invest_time``). :param float duration: Time window during which GMFs occur. Another was to say it is, the period of time over which we simulate ground motion occurrences. NOTE: Duration is computed as the calculation investigation time multiplied by the number of stochastic event sets. :returns: Numpy array of PoEs (probabilities of exceedance). 
""" # convert to numpy array and redimension so that it can be broadcast with # the gmvs for computing PoE values; there is a gmv for each rupture # here is an example: imls = [0.03, 0.04, 0.05], gmvs=[0.04750576] # => num_exceeding = [1, 1, 0] coming from 0.04750576 > [0.03, 0.04, 0.05] imls = numpy.array(imls).reshape((len(imls), 1)) num_exceeding = numpy.sum(numpy.array(gmvs) >= imls, axis=1) poes = 1 - numpy.exp(- (invest_time / duration) * num_exceeding) return poes # ################## utilities for classical calculators ################ # def make_uhs(maps): """ Make Uniform Hazard Spectra curves for each location. It is assumed that the `lons` and `lats` for each of the ``maps`` are uniform. :param maps: A composite array with shape N x P, where N is the number of sites and P is the number of poes in the hazard maps :returns: an array N x I x P where I the number of intensity measure types of kind SA (with PGA = SA(0)), containing the hazard maps """ sorted_imts = list(map(str, sorted( from_string(imt) for imt in maps.dtype.fields if imt.startswith('SA') or imt == 'PGA'))) hmaps = numpy.array([maps[imt] for imt in sorted_imts]) # I * N * P return hmaps.transpose(1, 0, 2) # N * I * P def build_dict(shape, factory): """ Build a dictionary key -> factory(), where the key is a multi-index obtained from indices of the given shape. For instance >>> sorted(build_dict((2, 2), list).items()) [((0, 0), []), ((0, 1), []), ((1, 0), []), ((1, 1), [])] """ return {k: factory() for k in itertools.product(*map(range, shape))}<|fim▁end|>
should be an array-like of floats. :param poes: Value(s) on which to interpolate a hazard map from the input ``curves``. Can be an array-like or scalar value (for a single PoE).
<|file_name|>cfg-attr-syntax-validation.rs<|end_file_name|><|fim▁begin|>#[cfg] //~ ERROR `cfg` is not followed by parentheses<|fim▁hole|>struct S1; #[cfg = 10] //~ ERROR `cfg` is not followed by parentheses struct S2; #[cfg()] //~ ERROR `cfg` predicate is not specified struct S3; #[cfg(a, b)] //~ ERROR multiple `cfg` predicates are specified struct S4; #[cfg("str")] //~ ERROR `cfg` predicate key cannot be a literal struct S5; #[cfg(a::b)] //~ ERROR `cfg` predicate key must be an identifier struct S6; #[cfg(a())] //~ ERROR invalid predicate `a` struct S7; #[cfg(a = 10)] //~ ERROR literal in `cfg` predicate value must be a string struct S8; #[cfg(a = b"hi")] //~ ERROR literal in `cfg` predicate value must be a string struct S9; macro_rules! generate_s10 { ($expr: expr) => { #[cfg(feature = $expr)] //~ ERROR `cfg` is not a well-formed meta-item struct S10; } } generate_s10!(concat!("nonexistent"));<|fim▁end|>
<|file_name|>api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import from datetime import datetime, timedelta from flask import current_app from typing import Iterator, List, Tuple from werkzeug.exceptions import Conflict, NotFound from .models import Message, Policy from .channels import send_notifications from requests import get from json import JSONDecodeError from cli_common import log from backend_common.auth import auth import os import mohawk logger = log.get_logger(__name__) AUTHENTICATION_SCOPE_PREFIX = 'project:releng:services/releng_notification_policy/permission/' def get_policies_in_json_serializable_form(notification_policies: List[Policy]) -> List[dict]: return [ policy.to_dict() for policy in notification_policies ] @auth.require_scopes([AUTHENTICATION_SCOPE_PREFIX + 'get_message']) def get_message_by_uid(uid: str) -> dict: session = current_app.db.session message = session.query(Message).filter(Message.uid == uid).first() if message: notification_policies = session.query(Policy).filter(Policy.uid == message.uid).all() policies_dicts = get_policies_in_json_serializable_form(notification_policies) logger.info('Serving {message}'.format(message=message)) return { 'shortMessage': message.shortMessage, 'message': message.message, 'deadline': message.deadline, 'policies': policies_dicts, } else: err_str = 'Message with uid {} not found.'.format(uid) logger.info(err_str) raise NotFound(err_str) def get_policies_as_dict_for_message(message: Message) -> dict: session = current_app.db.session policies = session.query(Policy).filter(Policy.uid == message.uid).all() serialized_policies = get_policies_in_json_serializable_form(policies) return { 'policies': serialized_policies, } def get_active_policies_for_identity(identity_name: str) -> dict: session = current_app.db.session now = datetime.now() active_policies = session.query(Policy).filter(Policy.identity == identity_name)\ .filter(Policy.start_timestamp < now)\ .filter(Policy.stop_timestamp > now)\ .all() if active_policies: return { 'policies': get_policies_in_json_serializable_form(active_policies), } else: raise NotFound('No active policies found for {}.'.format(identity_name)) def get_pending_messages() -> dict: session = current_app.db.session current_time = datetime.now() messages = session.query(Message).filter(Message.deadline > current_time).all() if messages: return { 'messages': [ {**message.to_dict(), **get_policies_as_dict_for_message(message)} for message in messages ], } else: raise NotFound('No pending messages found.') @auth.require_scopes([AUTHENTICATION_SCOPE_PREFIX + 'put_message']) def put_message(uid: str, body: dict) -> None: ''' Add a new message to be delivered into the service. 
:param uid: UID of message to track :param body: Description of message :return: No content, status code ''' session = current_app.db.session # Make sure the message UID doesn't already exist in the DB existing_message = session.query(Message).filter(Message.uid == uid).first() if existing_message: err_str = '{message} already exists'.format(message=existing_message) logger.info(err_str) raise Conflict(err_str) new_message = Message(uid=uid, shortMessage=body['shortMessage'], message=body['message'], deadline=body['deadline']) session.add(new_message) session.flush() policies = [ # Overwrite the frequency object input from the API with a db compatible timedelta object Policy(**{**p, 'frequency': timedelta(**p['frequency']), 'uid': new_message.uid}) for p in body['policies'] ] session.add_all(policies) session.commit() logger.info('{} created.'.format(new_message)) for new_policy in policies: logger.info('{} created.'.format(new_policy)) return None @auth.require_scopes([AUTHENTICATION_SCOPE_PREFIX + 'delete_message']) def delete_message(uid: str) -> None: ''' Delete the message with the specified UID :param uid: UID of the message to delete. :return: No content, status code ''' session = current_app.db.session message = session.query(Message).filter(Message.uid == uid).first() if message: session.delete(message) session.commit() logger.info('{} deleted.'.format(message)) return None else: err_str = 'Message with uid "{}" not found'.format(uid) logger.warning(err_str) raise NotFound(err_str) def determine_message_action(messages: List[Message]) -> Iterator[Tuple[Message, bool]]: current_time = datetime.now() for message in messages: if current_time > message.deadline: yield message, True else: yield message, False def create_identity_preference_url(policy: Policy) -> str: return '{endpoint}/identity/{identity_name}/{urgency}'\ .format(endpoint=current_app.config.get('RELENG_NOTIFICATION_IDENTITY_ENDPOINT'), identity_name=policy.identity, urgency=policy.urgency) def get_identity_url_for_actionable_policies(policies: List[Policy]) -> Iterator[Tuple[Policy, str]]: current_time = datetime.now() for policy in policies: # Check our policy time frame is in effect if policy.stop_timestamp < current_time or current_time < policy.start_timestamp: continue # If we have notified already, only notify according to the frequency if policy.last_notified and current_time - policy.last_notified < policy.frequency: continue identity_preference_url = create_identity_preference_url(policy) yield policy, identity_preference_url @auth.require_scopes([AUTHENTICATION_SCOPE_PREFIX + 'ticktock']) def post_tick_tock() -> dict: ''' Trigger pending notifications according to their notification policies :return: Information about notification triggered by this call in JSON format. 
''' session = current_app.db.session current_time = datetime.now() pending_messages = session.query(Message).all() if not pending_messages: raise NotFound('No pending policies to trigger.') notifications = [] for message, is_past_deadline in determine_message_action(pending_messages): if is_past_deadline: session.delete(message) continue policies = session.query(Policy).filter(Policy.uid == message.uid).all() for policy, identity_preference_url in get_identity_url_for_actionable_policies(policies): try: service_credentials = { 'id': current_app.config['TASKCLUSTER_CLIENT_ID'], 'key': current_app.config['TASKCLUSTER_ACCESS_TOKEN'], 'algorithm': 'sha256', } hawk = mohawk.Sender(service_credentials, identity_preference_url, 'get', content='', content_type='application/json') # Support dev ssl ca cert ssl_dev_ca = current_app.config.get('SSL_DEV_CA') if ssl_dev_ca is not None: assert os.path.isdir(ssl_dev_ca), \ 'SSL_DEV_CA must be a dir with hashed dev ca certs' <|fim▁hole|> 'Content-Type': 'application/json', } identity_preference = get(identity_preference_url, headers=headers, verify=ssl_dev_ca).json()['preferences'].pop() notification_info = send_notifications(message, identity_preference) notifications.append(notification_info) policy.last_notified = current_time except JSONDecodeError: logger.warn('') session.add_all(policies) session.commit() return { 'notifications': notifications, }<|fim▁end|>
headers = { 'Authorization': hawk.request_header,
<|file_name|>raw_volume.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*- /* * Copyright (C) 2019 Canonical Ltd * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 3 as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package builtin import ( "fmt" "regexp" "strings" "github.com/snapcore/snapd/interfaces" "github.com/snapcore/snapd/interfaces/apparmor" "github.com/snapcore/snapd/interfaces/udev" "github.com/snapcore/snapd/snap" ) const rawVolumeSummary = `allows read/write access to specific disk partition` // raw-volume grants full access to a particular disk partition. Since the // volume is device-specific, it is desirable to limit the plugging snap's // connection (eg to avoid situations of intending to grant access to a 'data' // disk on one device but granting access to a 'system' disk on another). // Therefore, require a snap declaration for connecting the interface at all. const rawVolumeBaseDeclarationSlots = ` raw-volume: allow-installation: slot-snap-type: - core - gadget deny-connection: true deny-auto-connection: true ` // Only allow disk device partitions; not loop, ram, CDROM, generic SCSI, // network, tape, raid, etc devices const rawVolumeConnectedPlugAppArmorPath = ` # Description: can access disk partition read/write %s rw, # needed for write access capability sys_admin, # allow read access to sysfs and udev for block devices @{PROC}/devices r, /run/udev/data/b[0-9]*:[0-9]* r, /sys/block/ r, /sys/devices/**/block/** r, ` // The type for this interface type rawVolumeInterface struct{} // Getter for the name of this interface func (iface *rawVolumeInterface) Name() string { return "raw-volume" } func (iface *rawVolumeInterface) StaticInfo() interfaces.StaticInfo { return interfaces.StaticInfo{ Summary: rawVolumeSummary, BaseDeclarationSlots: rawVolumeBaseDeclarationSlots, } } func (iface *rawVolumeInterface) String() string { return iface.Name() } // https://www.kernel.org/doc/Documentation/admin-guide/devices.txt // // For now, only list common devices and skip the following: // - Acorn MFM mfma-mfmb // - ACSI ada-adp // - Parallel port IDE pda-pdd // - Parallel port ATAPI pf0-3 // - USB block device uba-ubz // // The '0' partition number (eg, hda0) is omitted since it refers to the whole // disk. // IDE, MFM, RLL hda-hdt, 1-63 partitions: const hdPat = `hd[a-t]([1-9]|[1-5][0-9]|6[0-3])` // SCSI sda-sdiv, 1-15 partitions: const sdPat = `sd([a-z]|[a-h][a-z]|i[a-v])([1-9]|1[0-5])` // I2O i2o/hda-hddx, 1-15 partitions: const i2oPat = `i2o/hd([a-z]|[a-c][a-z]|d[a-x])([1-9]|1[0-5])` // MMC mmcblk0-999, 1-63 partitions (number of partitions is kernel cmdline // configurable. 
Ubuntu uses 32, so use 64 for headroom):<|fim▁hole|>const mmcPat = `mmcblk([0-9]|[1-9][0-9]{1,2})p([1-9]|[1-5][0-9]|6[0-3])` // NVMe nvme0-99, 1-63 partitions with 1-63 optional namespaces: const nvmePat = `nvme([0-9]|[1-9][0-9])(n([1-9]|[1-5][0-9]|6[0-3])){0,1}p([1-9]|[1-5][0-9]|6[0-3])` // virtio vda-vdz, 1-63 partitions: const vdPat = `vd[a-z]([1-9]|[1-5][0-9]|6[0-3])` var rawVolumePartitionPattern = regexp.MustCompile(fmt.Sprintf("^/dev/(%s|%s|%s|%s|%s|%s)$", hdPat, sdPat, i2oPat, mmcPat, nvmePat, vdPat)) const invalidDeviceNodeSlotPathErrFmt = "slot %q path attribute must be a valid device node" // Check validity of the defined slot func (iface *rawVolumeInterface) BeforePrepareSlot(slot *snap.SlotInfo) error { _, err := verifySlotPathAttribute(&interfaces.SlotRef{Snap: slot.Snap.InstanceName(), Name: slot.Name}, slot, rawVolumePartitionPattern, invalidDeviceNodeSlotPathErrFmt) return err } func (iface *rawVolumeInterface) AppArmorConnectedPlug(spec *apparmor.Specification, plug *interfaces.ConnectedPlug, slot *interfaces.ConnectedSlot) error { cleanedPath, err := verifySlotPathAttribute(slot.Ref(), slot, rawVolumePartitionPattern, invalidDeviceNodeSlotPathErrFmt) if err != nil { return nil } spec.AddSnippet(fmt.Sprintf(rawVolumeConnectedPlugAppArmorPath, cleanedPath)) return nil } func (iface *rawVolumeInterface) UDevConnectedPlug(spec *udev.Specification, plug *interfaces.ConnectedPlug, slot *interfaces.ConnectedSlot) error { cleanedPath, err := verifySlotPathAttribute(slot.Ref(), slot, rawVolumePartitionPattern, invalidDeviceNodeSlotPathErrFmt) if err != nil { return nil } spec.TagDevice(fmt.Sprintf(`KERNEL=="%s"`, strings.TrimPrefix(cleanedPath, "/dev/"))) return nil } func (iface *rawVolumeInterface) AutoConnect(*snap.PlugInfo, *snap.SlotInfo) bool { // Allow what is allowed in the declarations return true } func init() { registerIface(&rawVolumeInterface{}) }<|fim▁end|>
<|file_name|>Player.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package us.aaronweiss.pixalia.core; import us.aaronweiss.pixalia.tools.Vector; public class Player extends Pixal { public Player(String hostname) { super(hostname); } public void setColor(Vector color) { this.color = color; } }<|fim▁end|>
<|file_name|>book.py<|end_file_name|><|fim▁begin|>#: one x = 1<|fim▁hole|>time.sleep(10) #: two #x = 2 print(x)<|fim▁end|>
print(x) import time
<|file_name|>chrome_proxy_benchmark.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from integration_tests import chrome_proxy_measurements as measurements from integration_tests import chrome_proxy_pagesets as pagesets from telemetry import benchmark from telemetry.core.backends.chrome import android_browser_finder ANDROID_CHROME_BROWSERS = [ browser for browser in android_browser_finder.CHROME_PACKAGE_NAMES if 'webview' not in browser] class ChromeProxyLatency(benchmark.Benchmark): tag = 'latency' test = measurements.ChromeProxyLatency page_set = pagesets.Top20PageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.latency.top_20' class ChromeProxyLatencyDirect(benchmark.Benchmark): tag = 'latency_direct' test = measurements.ChromeProxyLatency page_set = pagesets.Top20PageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.latency_direct.top_20' class ChromeProxyLatencySynthetic(ChromeProxyLatency): page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.latency.synthetic' class ChromeProxyLatencySyntheticDirect(ChromeProxyLatencyDirect): page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.latency_direct.synthetic' class ChromeProxyDataSaving(benchmark.Benchmark): tag = 'data_saving' test = measurements.ChromeProxyDataSaving page_set = pagesets.Top20PageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.data_saving.top_20' class ChromeProxyDataSavingDirect(benchmark.Benchmark): tag = 'data_saving_direct' test = measurements.ChromeProxyDataSaving page_set = pagesets.Top20PageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.data_saving_direct.top_20' class ChromeProxyDataSavingSynthetic(ChromeProxyDataSaving): page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.data_saving.synthetic' class ChromeProxyDataSavingSyntheticDirect(ChromeProxyDataSavingDirect): page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.data_saving_direct.synthetic' class ChromeProxyHeaderValidation(benchmark.Benchmark): tag = 'header_validation' test = measurements.ChromeProxyHeaders page_set = pagesets.Top20PageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.header_validation.top_20' class ChromeProxyClientVersion(benchmark.Benchmark): tag = 'client_version' test = measurements.ChromeProxyClientVersion page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_version.synthetic' class ChromeProxyClientType(benchmark.Benchmark): tag = 'client_type' test = measurements.ChromeProxyClientType page_set = pagesets.ClientTypePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_type.client_type' class ChromeProxyLoFi(benchmark.Benchmark): tag = 'lo_fi' test = measurements.ChromeProxyLoFi page_set = pagesets.LoFiPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.lo_fi.lo_fi' class ChromeProxyExpDirective(benchmark.Benchmark): tag = 'exp_directive' test = measurements.ChromeProxyExpDirective page_set = pagesets.ExpDirectivePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.exp_directive.exp_directive' class ChromeProxyBypass(benchmark.Benchmark): tag = 'bypass' test = measurements.ChromeProxyBypass page_set = pagesets.BypassPageSet @classmethod def Name(cls): 
return 'chrome_proxy_benchmark.bypass.bypass' class ChromeProxyCorsBypass(benchmark.Benchmark): tag = 'bypass' test = measurements.ChromeProxyCorsBypass page_set = pagesets.CorsBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.bypass.corsbypass' class ChromeProxyBlockOnce(benchmark.Benchmark): tag = 'block_once' test = measurements.ChromeProxyBlockOnce page_set = pagesets.BlockOncePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.block_once.block_once' @benchmark.Enabled(*ANDROID_CHROME_BROWSERS) # Safebrowsing is enabled for Android and iOS. class ChromeProxySafeBrowsingOn(benchmark.Benchmark): tag = 'safebrowsing_on' test = measurements.ChromeProxySafebrowsingOn page_set = pagesets.SafebrowsingPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_on.safebrowsing' @benchmark.Disabled(*ANDROID_CHROME_BROWSERS) # Safebrowsing is switched off for Android Webview and all desktop platforms. class ChromeProxySafeBrowsingOff(benchmark.Benchmark): tag = 'safebrowsing_off' test = measurements.ChromeProxySafebrowsingOff page_set = pagesets.SafebrowsingPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_off.safebrowsing' class ChromeProxyHTTPFallbackProbeURL(benchmark.Benchmark): tag = 'fallback_probe' test = measurements.ChromeProxyHTTPFallbackProbeURL page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_probe.synthetic' class ChromeProxyHTTPFallbackViaHeader(benchmark.Benchmark): tag = 'fallback_viaheader' test = measurements.ChromeProxyHTTPFallbackViaHeader page_set = pagesets.FallbackViaHeaderPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_viaheader.fallback_viaheader' class ChromeProxyHTTPToDirectFallback(benchmark.Benchmark): tag = 'http_to_direct_fallback' test = measurements.ChromeProxyHTTPToDirectFallback page_set = pagesets.HTTPToDirectFallbackPageSet @classmethod<|fim▁hole|> class ChromeProxyReenableAfterBypass(benchmark.Benchmark): tag = 'reenable_after_bypass' test = measurements.ChromeProxyReenableAfterBypass page_set = pagesets.ReenableAfterBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.reenable_after_bypass.reenable_after_bypass' class ChromeProxySmoke(benchmark.Benchmark): tag = 'smoke' test = measurements.ChromeProxySmoke page_set = pagesets.SmokePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.smoke.smoke'<|fim▁end|>
def Name(cls): return ('chrome_proxy_benchmark.http_to_direct_fallback.' 'http_to_direct_fallback')
<|file_name|>MyMethod.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * A function wrapping interface. * @author John DeNero */ public interface MyMethod<I, O> { public O call(I obj); }<|fim▁end|>
package org.canova.api.berkeley; /**
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime <|fim▁hole|> from pandas.core.api import * from pandas.io.parsers import parseCSV, parseText, parseExcel from pandas.stats.api import * from numpy.testing import Tester class NoseWrapper(Tester): ''' This is simply a monkey patch for numpy.testing.Tester, so that extra_argv can be changed from its default None to ['--exe'] so that the tests can be run the same across platforms. ''' def test(self, label='fast', verbose=1, extra_argv=['--exe'], doctests=False, coverage=False): ''' Run tests for module using nose %(test_header)s doctests : boolean If True, run doctests in module, default False coverage : boolean If True, report coverage of NumPy code, default False (Requires the coverage module: http://nedbatchelder.com/code/modules/coverage.html) ''' # cap verbosity at 3 because nose becomes *very* verbose beyond that verbose = min(verbose, 3) from numpy.testing import utils utils.verbose = verbose if doctests: print "Running unit tests and doctests for %s" % self.package_name else: print "Running unit tests for %s" % self.package_name self._show_system_info() # reset doctest state on every run import doctest doctest.master = None argv, plugins = self.prepare_test_args(label, verbose, extra_argv, doctests, coverage) from numpy.testing.noseclasses import NumpyTestProgram t = NumpyTestProgram(argv=argv, exit=False, plugins=plugins) return t.result test = NoseWrapper().test<|fim▁end|>
import numpy as np from pandas.version import version as __version__ from pandas.info import __doc__
<|file_name|>Image.js<|end_file_name|><|fim▁begin|>/* Image.js * * copyright (c) 2010-2017, Christian Mayer and the CometVisu contributers. * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 3 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA */ /** * */ qx.Class.define('cv.parser.widgets.Image', { type: "static", /* ****************************************************** STATICS ****************************************************** */ statics: { /** * Parses the widgets XML configuration and extracts the given information * to a simple key/value map. * * @param xml {Element} XML-Element * @param path {String} internal path of the widget * @param flavour {String} Flavour of the widget * @param pageType {String} Page type (2d, 3d, ...) */ parse: function (xml, path, flavour, pageType) {<|fim▁hole|> }, getAttributeToPropertyMappings: function () { return { 'width' : { "default": "100%" }, 'height' : {}, 'src' : {}, 'widthfit' : { target: 'widthFit', transform: function(value) { return value === "true"; }} }; } }, defer: function(statics) { // register the parser cv.parser.WidgetParser.addHandler("image", statics); } });<|fim▁end|>
var data = cv.parser.WidgetParser.parseElement(this, xml, path, flavour, pageType, this.getAttributeToPropertyMappings()); cv.parser.WidgetParser.parseRefresh(xml, path); return data;
<|file_name|>app-load.module.ts<|end_file_name|><|fim▁begin|>import { NgModule, APP_INITIALIZER } from '@angular/core'; import { HttpClientModule } from '@angular/common/http'; import { AppLoadService } from './app-load.service'; import { RpcConnectionService } from '../rpc/rpc-connection.service'; export function init_app(appLoadService: AppLoadService): any { return () => appLoadService.initializeApp();<|fim▁hole|> return () => rpcService.initializeRpc(); } @NgModule({ imports: [HttpClientModule], providers: [ AppLoadService, RpcConnectionService, { provide: APP_INITIALIZER, useFactory: init_app, deps: [AppLoadService], multi: true }, { provide: APP_INITIALIZER, useFactory: get_rpc, deps: [RpcConnectionService], multi: true } ] }) export class AppLoadModule { }<|fim▁end|>
} export function get_rpc(rpcService: RpcConnectionService): any {
<|file_name|>MulExpr.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 1998-2008 Caucho Technology -- all rights reserved * * This file is part of Resin(R) Open Source * * Each copy or derived work must preserve the copyright notice and this * notice unmodified. * * Resin Open Source is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * Resin Open Source is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty * of NON-INFRINGEMENT. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with Resin Open Source; if not, write to the * * Free Software Foundation, Inc. * 59 Temple Place, Suite 330 * Boston, MA 02111-1307 USA * * @author Nam Nguyen */ package com.caucho.quercus.lib.gettext.expr; public class MulExpr extends BinaryExpr { public MulExpr(Expr _left, Expr _right) {<|fim▁hole|> public int eval(int n) { return _left.eval(n) * _right.eval(n); } }<|fim▁end|>
super(_left, _right); }
<|file_name|>filter.js<|end_file_name|><|fim▁begin|>/** * Filtering sensitive information */ const _ = require('lodash'); /** * reset option * @param {string|object|array} opt filter option * @param {array} filterKeys filter keys * @param {string|function} replaceChat replace chat or function * @param {boolean} recursion whether recursive , true of false */ const setOption = (option) => { let filterKeys = ['password', 'token', 'authorization']; let replaceChat = '*'; let recursion = false; if (option !== undefined) { if (typeof option === 'string') { filterKeys = [option]; } else if (option instanceof Array && option.length > 0) { filterKeys = option.filter(item => typeof item === 'string'); } else if (_.isPlainObject(option)) { const { filterKeys: fks, recursion: rcs, replaceChat: rpc } = option; recursion = !!rcs; if (fks instanceof Array && fks.length > 0) { filterKeys = fks.filter(item => typeof item === 'string'); } if (typeof rpc === 'string') { replaceChat = rpc; } else { replaceChat = '*'; } } else { console.error(new Error(`option.filter do not support ${typeof option} type !`)); } } return { filterKeys, recursion, replaceChat }; }; /** * replace by replaceChat * @param {string} param content to replace * @param {string|function} replaceChat replace chat or function */ const replace = (param, replaceChat) => { if (typeof replaceChat === 'function') { return replaceChat(param); } return param.replace(/\S/g, '*'); }; <|fim▁hole|> * @param {*} message logger message * @param {object} opt filter option * @param {boolean} hit hit the fileterkeys , default false */ const filter = (message, opt, hit = false) => { const result = message; const { filterKeys, replaceChat } = opt; if (_.isPlainObject(result)) { Object.keys(result).forEach((key) => { const dHit = hit || filterKeys.indexOf(key) > -1; result[key] = filter(result[key], opt, dHit); // if (recursion) { // result[key] = filter(param, opt, true); // } else { // result[key] = replaceChat; // // replace the value of hit key // // eslint-disable-next-line no-return-assign // Object.keys(param).forEach(pk => (filterKeys.indexOf(pk) !== -1 ? result[key] = replaceChat : '')); // } }); return result; } else if (typeof result === 'number') { return replace(result.toString(), replaceChat); } else if (result instanceof Array && result.length > 0) { return result.map(param => filter(param, opt, hit)); } return replace(result, replaceChat); }; /** * filter log message by option do not recursion * @param {*} message logger message * @param {object} opt filter option * @param {array} opt.filterKeys filter keys * @param {string} opt.replaceChat replace chat or function */ const filterNoRecursion = (message, opt) => { const result = message; const { filterKeys, replaceChat } = opt; if (_.isPlainObject(result)) { Object.keys(result).forEach((key) => { if (filterKeys.indexOf(key) === -1) { result[key] = filterNoRecursion(result[key], opt); } else { result[key] = replaceChat; } }); return result; } else if (typeof result === 'number') { return result; } else if (result instanceof Array && result.length > 0) { return result; } return result; }; /** * filter sensitive information * @param {object} message log message * @param {*} option filter option */ const filteringSensitiveInfo = (message, option = false) => { if (!option) { return message; } if (typeof option === 'function') { return option(message); } return filterNoRecursion(message, setOption(option)); }; module.exports = { filteringSensitiveInfo, setOption, };<|fim▁end|>
/** * filter log message by option
<|file_name|>proto_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for tensorflow_fold.util.proto.""" import os # import google3 import tensorflow as tf from tensorflow_fold.util import proto_tools from tensorflow_fold.util import test3_pb2 from tensorflow_fold.util import test_pb2 from google.protobuf import text_format # Make sure SerializedMessageToTree can see our proto files. proto_tools.map_proto_source_tree_path("", os.getcwd()) # Note: Tests run in the bazel root directory, which we will use as the root for # our source protos. proto_tools.import_proto_file("tensorflow_fold/util/test.proto") proto_tools.import_proto_file("tensorflow_fold/util/test3.proto") def MakeCyclicProto(message_str): return text_format.Parse(message_str, test_pb2.CyclicType()) def MakeCyclicProto3(message_str): return text_format.Parse(message_str, test3_pb2.CyclicType3()) def MakeOneAtomProto(message_str): return text_format.Parse(message_str, test_pb2.OneAtom()) class ProtoTest(tf.test.TestCase): def testSerializedMessageToTree(self): example = MakeCyclicProto( "some_same<" " many_int32: 1" " many_int32: 2" " some_same<" " many_int32: 3" " many_int32: 4" " some_bool: false" " >" ">" "some_enum: THAT") result = proto_tools.serialized_message_to_tree(<|fim▁hole|> "tensorflow.fold.CyclicType", example.SerializeToString()) self.assertEqual(result["some_same"]["many_int32"], [1, 2]) self.assertEqual(result["some_same"]["some_same"]["many_int32"], [3, 4]) self.assertEqual(result["some_same"]["some_same"]["some_bool"], False) self.assertEqual(result["many_bool"], []) self.assertEqual(result["some_bool"], None) self.assertEqual(result["some_same"]["many_bool"], []) self.assertEqual(result["some_same"]["some_bool"], None) self.assertEqual(result["some_enum"]["name"], "THAT") self.assertEqual(result["some_enum"]["index"], 1) self.assertEqual(result["some_enum"]["number"], 1) def testSerializedMessageToTreeProto3(self): example = MakeCyclicProto3( "some_same<" " many_int32: 1" " many_int32: 2" " some_same<" " many_int32: 3" " many_int32: 4" " some_bool: false" " >" ">" "some_enum: THAT") result = proto_tools.serialized_message_to_tree( "tensorflow.fold.CyclicType3", example.SerializeToString()) self.assertEqual(result["some_same"]["many_int32"], [1, 2]) self.assertEqual(result["some_same"]["some_same"]["many_int32"], [3, 4]) self.assertEqual(result["some_same"]["some_same"]["some_bool"], False) self.assertEqual(result["many_bool"], []) self.assertEqual(result["some_bool"], False) self.assertEqual(result["some_same"]["many_bool"], []) self.assertEqual(result["some_same"]["some_bool"], False) self.assertEqual(result["some_enum"]["name"], "THAT") self.assertEqual(result["some_enum"]["index"], 1) self.assertEqual(result["some_enum"]["number"], 1) def testSerializedMessageToTreeOneofEmpty(self): empty_proto = MakeOneAtomProto("").SerializeToString() empty_result = proto_tools.serialized_message_to_tree( "tensorflow.fold.OneAtom", empty_proto) 
self.assertEqual(empty_result["atom_type"], None) self.assertEqual(empty_result["some_int32"], None) self.assertEqual(empty_result["some_int64"], None) self.assertEqual(empty_result["some_uint32"], None) self.assertEqual(empty_result["some_uint64"], None) self.assertEqual(empty_result["some_double"], None) self.assertEqual(empty_result["some_float"], None) self.assertEqual(empty_result["some_bool"], None) self.assertEqual(empty_result["some_enum"], None) self.assertEqual(empty_result["some_string"], None) def testSerializedMessageToTreeOneof(self): empty_proto = MakeOneAtomProto("some_string: \"x\"").SerializeToString() empty_result = proto_tools.serialized_message_to_tree( "tensorflow.fold.OneAtom", empty_proto) self.assertEqual(empty_result["atom_type"], "some_string") self.assertEqual(empty_result["some_int32"], None) self.assertEqual(empty_result["some_int64"], None) self.assertEqual(empty_result["some_uint32"], None) self.assertEqual(empty_result["some_uint64"], None) self.assertEqual(empty_result["some_double"], None) self.assertEqual(empty_result["some_float"], None) self.assertEqual(empty_result["some_bool"], None) self.assertEqual(empty_result["some_enum"], None) self.assertEqual(empty_result["some_string"], "x") def testNonConsecutiveEnum(self): name = "tensorflow.fold.NonConsecutiveEnumMessage" msg = test_pb2.NonConsecutiveEnumMessage( the_enum=test_pb2.NonConsecutiveEnumMessage.THREE) self.assertEqual( {"the_enum": {"name": "THREE", "index": 1, "number": 3}}, proto_tools.serialized_message_to_tree(name, msg.SerializeToString())) msg.the_enum = test_pb2.NonConsecutiveEnumMessage.SEVEN self.assertEqual( {"the_enum": {"name": "SEVEN", "index": 0, "number": 7}}, proto_tools.serialized_message_to_tree(name, msg.SerializeToString())) if __name__ == "__main__": tf.test.main()<|fim▁end|>
<|file_name|>AlmSettingsSupport.java<|end_file_name|><|fim▁begin|>/* * SonarQube * Copyright (C) 2009-2022 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version.<|fim▁hole|> * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.server.almsettings.ws; import org.sonar.api.server.ServerSide; import org.sonar.db.DbClient; import org.sonar.db.DbSession; import org.sonar.db.alm.setting.ALM; import org.sonar.db.alm.setting.AlmSettingDto; import org.sonar.db.project.ProjectDto; import org.sonar.server.almsettings.MultipleAlmFeatureProvider; import org.sonar.server.component.ComponentFinder; import org.sonar.server.exceptions.BadRequestException; import org.sonar.server.exceptions.NotFoundException; import org.sonar.server.user.UserSession; import org.sonarqube.ws.AlmSettings; import static java.lang.String.format; import static org.sonar.api.web.UserRole.ADMIN; @ServerSide public class AlmSettingsSupport { private final DbClient dbClient; private final UserSession userSession; private final ComponentFinder componentFinder; private final MultipleAlmFeatureProvider multipleAlmFeatureProvider; public AlmSettingsSupport(DbClient dbClient, UserSession userSession, ComponentFinder componentFinder, MultipleAlmFeatureProvider multipleAlmFeatureProvider) { this.dbClient = dbClient; this.userSession = userSession; this.componentFinder = componentFinder; this.multipleAlmFeatureProvider = multipleAlmFeatureProvider; } public MultipleAlmFeatureProvider getMultipleAlmFeatureProvider() { return multipleAlmFeatureProvider; } public void checkAlmSettingDoesNotAlreadyExist(DbSession dbSession, String almSetting) { dbClient.almSettingDao().selectByKey(dbSession, almSetting) .ifPresent(a -> { throw new IllegalArgumentException(format("An ALM setting with key '%s' already exists", a.getKey())); }); } public void checkAlmMultipleFeatureEnabled(ALM alm) { try (DbSession dbSession = dbClient.openSession(false)) { if (!multipleAlmFeatureProvider.enabled() && !dbClient.almSettingDao().selectByAlm(dbSession, alm).isEmpty()) { throw BadRequestException.create("A " + alm + " setting is already defined"); } } } public ProjectDto getProjectAsAdmin(DbSession dbSession, String projectKey) { return getProject(dbSession, projectKey, ADMIN); } public ProjectDto getProject(DbSession dbSession, String projectKey, String projectPermission) { ProjectDto project = componentFinder.getProjectByKey(dbSession, projectKey); userSession.checkProjectPermission(projectPermission, project); return project; } public AlmSettingDto getAlmSetting(DbSession dbSession, String almSetting) { return dbClient.almSettingDao().selectByKey(dbSession, almSetting) .orElseThrow(() -> new NotFoundException(format("ALM setting with key '%s' cannot be found", almSetting))); } public static AlmSettings.Alm toAlmWs(ALM alm) { switch (alm) { case GITHUB: return AlmSettings.Alm.github; case 
BITBUCKET: return AlmSettings.Alm.bitbucket; case BITBUCKET_CLOUD: return AlmSettings.Alm.bitbucketcloud; case AZURE_DEVOPS: return AlmSettings.Alm.azure; case GITLAB: return AlmSettings.Alm.gitlab; default: throw new IllegalStateException(format("Unknown ALM '%s'", alm.name())); } } }<|fim▁end|>
*
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup import sslserver setup(name="django-sslserver", version=sslserver.__version__, author="Ted Dziuba", author_email="[email protected]", description="An SSL-enabled development server for Django", url="https://github.com/teddziuba/django-sslserver", packages=["sslserver", "sslserver.management", "sslserver.management.commands"], package_dir={"sslserver": "sslserver"}, package_data={"sslserver": ["certs/development.crt", "certs/development.key", "certs/server.csr"]},<|fim▁hole|> "Django >= 1.4"], license="MIT" )<|fim▁end|>
install_requires=["setuptools",
<|file_name|>asin.hpp<|end_file_name|><|fim▁begin|>#ifndef STAN_MATH_REV_FUN_ASIN_HPP #define STAN_MATH_REV_FUN_ASIN_HPP #include <stan/math/prim/fun/asin.hpp> #include <stan/math/prim/fun/abs.hpp> #include <stan/math/rev/core.hpp> #include <stan/math/rev/meta.hpp> #include <stan/math/rev/fun/abs.hpp> #include <stan/math/rev/fun/asinh.hpp> #include <stan/math/rev/fun/value_of_rec.hpp> #include <cmath> #include <complex> namespace stan { namespace math { /** * Return the principal value of the arc sine, in radians, of the * specified variable (cmath). * * The derivative is defined by * * \f$\frac{d}{dx} \arcsin x = \frac{1}{\sqrt{1 - x^2}}\f$. * * \f[ \mbox{asin}(x) = \begin{cases} \textrm{NaN} & \mbox{if } x < -1\\ \arcsin(x) & \mbox{if } -1\leq x\leq 1 \\ \textrm{NaN} & \mbox{if } x > 1\\[6pt] \textrm{NaN} & \mbox{if } x = \textrm{NaN} \end{cases} \f] \f[ \frac{\partial\, \mbox{asin}(x)}{\partial x} = \begin{cases} \textrm{NaN} & \mbox{if } x < -1\\ \frac{\partial\, \arcsin(x)}{\partial x} & \mbox{if } -1\leq x\leq 1 \\ \textrm{NaN} & \mbox{if } x > 1\\[6pt] \textrm{NaN} & \mbox{if } x = \textrm{NaN} \end{cases} \f] \f[ \frac{\partial \, \arcsin(x)}{\partial x} = \frac{1}{\sqrt{1-x^2}} \f] * * @param x Variable in range [-1, 1]. * @return Arc sine of variable, in radians. */ inline var asin(const var& x) { return make_callback_var(std::asin(x.val()), [x](const auto& vi) mutable { x.adj() += vi.adj() / std::sqrt(1.0 - (x.val() * x.val())); }); } /** * Return the principal value of the arc sine, in radians, of the * specified variable (cmath). * * @tparam Varmat a `var_value` with inner Eigen type * @param x Variable with cells in range [-1, 1]. * @return Arc sine of variable, in radians. */ template <typename VarMat, require_var_matrix_t<VarMat>* = nullptr> inline auto asin(const VarMat& x) { return make_callback_var( x.val().array().asin().matrix(), [x](const auto& vi) mutable { x.adj().array() += vi.adj().array() / (1.0 - (x.val().array().square())).sqrt(); }); }<|fim▁hole|>/** * Return the arc sine of the complex argument. * * @param[in] z argument * @return arc sine of the argument */ inline std::complex<var> asin(const std::complex<var>& z) { return stan::math::internal::complex_asin(z); } } // namespace math } // namespace stan #endif<|fim▁end|>
<|file_name|>sleep.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. scale = 1.0 def sleep(secs): import time<|fim▁hole|><|fim▁end|>
time.sleep(secs*scale)
<|file_name|>imports_config.py<|end_file_name|><|fim▁begin|># Example import configuration. import_templates = [{<|fim▁hole|> ('itt', '1'), ('mr', '1'), ('impstp', '1'), ('asa', '1'), ('impjun', '0'), ('dtd', '5'), { 'id': 'dr', 'label': 'Directory containing files to import', 'type': 'directory', 'default': 'files', }, ('clean_old_data', '1'), ('from_today', '1'), ], 'admins': [('root', '[email protected]')], 'uploaders': [('uploader', ['[email protected]'])], 'run_results_notifications': { 'load': ('[email protected]',), 'warn': ('[email protected]',), }, }]<|fim▁end|>
'id': 'my_import', 'label': 'My Import (Trident)', 'defaults': [ ('ds', '16607027920896001'),
<|file_name|>formatagesource0.rs<|end_file_name|><|fim▁begin|>use std::fmt::{self, Formatter, Display}; struct City { name: &'static str, // Latitude lat: f32, // Longitude lon: f32, } impl Display for City { // `f` is a buffer; this method writes the // formatted string into it. fn fmt(&self, f: &mut Formatter) -> fmt::Result { let lat_c = if self.lat >= 0.0 { 'N' } else { 'S' }; let lon_c = if self.lon >= 0.0 { 'E' } else { 'W' }; // `write!` is equivalent to `format!`, except that it writes // the formatted string into a buffer (the first argument). write!(f, "{}: {:.3}°{} {:.3}°{}", self.name, self.lat.abs(), lat_c, self.lon.abs(), lon_c) } } #[derive(Debug)] struct Color {<|fim▁hole|>
red: u8, green: u8, blue: u8, } fn main() { for city in [ City { name: "Dublin", lat: 53.347778, lon: -6.259722 }, City { name: "Oslo", lat: 59.95, lon: 10.75 }, City { name: "Vancouver", lat: 49.25, lon: -123.1 }, ].iter() { println!("{}", *city); } for color in [ Color { red: 128, green: 255, blue: 90 }, Color { red: 0, green: 3, blue: 254 }, Color { red: 0, green: 0, blue: 0 }, ].iter() { // Use the `{}` marker once you have implemented // the fmt::Display trait. println!("{:?}", *color) } }<|fim▁end|>
<|file_name|>test_true.rs<|end_file_name|><|fim▁begin|>use common::util::*; #[test] fn test_exit_code() { new_ucmd!().succeeds();<|fim▁hole|><|fim▁end|>
}
<|file_name|>macrocontainer.controller.js<|end_file_name|><|fim▁begin|>//DO NOT DELETE THIS, this is in use... angular.module('umbraco') .controller("Umbraco.PropertyEditors.MacroContainerController", function($scope, dialogService, entityResource, macroService){ $scope.renderModel = []; $scope.allowOpenButton = true; $scope.allowRemoveButton = true; $scope.sortableOptions = {}; if($scope.model.value){ var macros = $scope.model.value.split('>'); angular.forEach(macros, function(syntax, key){ if(syntax && syntax.length > 10){ //re-add the char we split on syntax = syntax + ">"; var parsed = macroService.parseMacroSyntax(syntax); if(!parsed){ parsed = {}; } parsed.syntax = syntax; collectDetails(parsed); $scope.renderModel.push(parsed); setSortingState($scope.renderModel); } }); } function collectDetails(macro){ macro.details = ""; macro.icon = "icon-settings-alt"; if(macro.macroParamsDictionary){ angular.forEach((macro.macroParamsDictionary), function(value, key){ macro.details += key + ": " + value + " "; }); } } function openDialog(index){ var dialogData = { allowedMacros: $scope.model.config.allowed }; if(index !== null && $scope.renderModel[index]) { var macro = $scope.renderModel[index]; dialogData["macroData"] = macro; } $scope.macroPickerOverlay = {}; $scope.macroPickerOverlay.view = "macropicker"; $scope.macroPickerOverlay.dialogData = dialogData; $scope.macroPickerOverlay.show = true; $scope.macroPickerOverlay.submit = function(model) { var macroObject = macroService.collectValueData(model.selectedMacro, model.macroParams, dialogData.renderingEngine); collectDetails(macroObject); //update the raw syntax and the list... if(index !== null && $scope.renderModel[index]) { $scope.renderModel[index] = macroObject; } else { $scope.renderModel.push(macroObject); } setSortingState($scope.renderModel); $scope.macroPickerOverlay.show = false; $scope.macroPickerOverlay = null; }; $scope.macroPickerOverlay.close = function(oldModel) { $scope.macroPickerOverlay.show = false; $scope.macroPickerOverlay = null; }; } $scope.edit =function(index){ openDialog(index); }; $scope.add = function () { if ($scope.model.config.max && $scope.model.config.max > 0 && $scope.renderModel.length >= $scope.model.config.max) { //cannot add more than the max return; } openDialog(); }; $scope.remove =function(index){ $scope.renderModel.splice(index, 1); setSortingState($scope.renderModel); }; $scope.clear = function() { $scope.model.value = ""; $scope.renderModel = []; }; var unsubscribe = $scope.$on("formSubmitting", function (ev, args) { var syntax = []; angular.forEach($scope.renderModel, function(value, key){ syntax.push(value.syntax); }); $scope.model.value = syntax.join(""); }); //when the scope is destroyed we need to unsubscribe $scope.$on('$destroy', function () { unsubscribe(); });<|fim▁hole|> function trim(str, chr) { var rgxtrim = (!chr) ? new RegExp('^\\s+|\\s+$', 'g') : new RegExp('^'+chr+'+|'+chr+'+$', 'g'); return str.replace(rgxtrim, ''); } function setSortingState(items) { // disable sorting if the list only consist of one item if(items.length > 1) { $scope.sortableOptions.disabled = false; } else { $scope.sortableOptions.disabled = true; } } });<|fim▁end|>
<|file_name|>shipping_container.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # © 2016 Comunitea - Kiko Sanchez <[email protected]> # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0. from odoo import api, fields, models, _ import odoo.addons.decimal_precision as dp class ShippingContainerType(models.Model): _name = "shipping.container.type" name = fields.Char("Container type", required=True) volume = fields.Float("Volumen", help="Container volume (m3)", required=True) length = fields.Float("Length", help="Length(m)") height = fields.Float("Height", help="Height(m)") width = fields.Float("Width", help="Width(m)") @api.onchange('length', 'height', 'width') def onchange_dimensions(self): if self.length and self.height and self.width:<|fim▁hole|> class ShippingContainer(models.Model): _name = "shipping.container" @api.one def _get_moves(self): self.move_ids_count = len(self.move_ids) @api.one def _get_partners(self): self.partner_ids = self.picking_ids.partner_id @api.multi def _available_volume(self): for container in self: volume = container.shipping_container_type_id.volume weight = 0.00 for move in container.move_ids: volume -= move.product_id.volume * move.product_uom_qty weight += move.product_id.weight * move.product_uom_qty container.available_volume = volume container.weight = weight name = fields.Char("Container Ref.", required=True) date_expected = fields.Date("Date expected", required=True) date_shipment = fields.Date("Shipment date") picking_ids = fields.One2many("stock.picking", "shipping_container_id", "Pickings") company_id = fields. \ Many2one("res.company", "Company", required=True, default=lambda self: self.env['res.company']._company_default_get('shipping.container')) harbor_id = fields.Many2one('res.harbor', string="Harbor", required=True) move_ids = fields.One2many('stock.move', 'shipping_container_id', string="Moves") move_ids_count = fields.Integer('Move ids count', compute="_get_moves") harbor_dest_id = fields.Many2one('res.harbor', string="Dest. harbor") state = fields.Selection([('loading', 'Loading'), ('transit', 'Transit'), ('destination', 'Destination')], default='loading') shipping_container_type_id = fields.Many2one('shipping.container.type', 'Type') available_volume = fields.Float("Available volume (m3)", compute="_available_volume") weight = fields.Float("Weight (kgr.)", compute="_available_volume") incoterm_id = fields.Many2one('stock.incoterms', string='Incoterm') _sql_constraints = [ ('name_uniq', 'unique(name)', 'Container name must be unique') ] @api.multi def action_view_move_ids(self): action = self.env.ref( 'shipping_container.container_picking_tree_action').read()[0] action['domain'] = [('id', 'in', self.move_ids.ids)] return action def set_transit(self): self.state = 'transit' def set_destination(self): self.state = 'destination' def set_loading(self): self.state = 'loading' @api.multi def write(self, vals): if vals.get('date_expected', False): for container in self: if vals['date_expected'] != container.date_expected: for pick in container.picking_ids: pick.min_date = vals['date_expected'] return super(ShippingContainer, self).write(vals)<|fim▁end|>
self.volume = self.length * self.height * self.width
<|file_name|>shard_runner.rs<|end_file_name|><|fim▁begin|>use gateway::{InterMessage, ReconnectType, Shard, ShardAction}; use internal::prelude::*; use internal::ws_impl::{ReceiverExt, SenderExt}; use model::event::{Event, GatewayEvent}; use parking_lot::Mutex; use serde::Deserialize; use std::sync::{ mpsc::{ self, Receiver, Sender, TryRecvError }, Arc }; use super::super::super::dispatch::{DispatchEvent, dispatch}; use super::super::super::EventHandler; use super::event::{ClientEvent, ShardStageUpdateEvent}; use super::{ShardClientMessage, ShardId, ShardManagerMessage, ShardRunnerMessage}; use threadpool::ThreadPool; use typemap::ShareMap; use websocket::{ message::{CloseData, OwnedMessage}, WebSocketError }; #[cfg(feature = "framework")] use framework::Framework; #[cfg(feature = "voice")] use super::super::voice::ClientVoiceManager; /// A runner for managing a [`Shard`] and its respective WebSocket client. /// /// [`Shard`]: ../../../gateway/struct.Shard.html pub struct ShardRunner<H: EventHandler + Send + Sync + 'static> { data: Arc<Mutex<ShareMap>>, event_handler: Arc<H>, #[cfg(feature = "framework")] framework: Arc<Mutex<Option<Box<Framework + Send>>>>, manager_tx: Sender<ShardManagerMessage>, // channel to receive messages from the shard manager and dispatches runner_rx: Receiver<InterMessage>, // channel to send messages to the shard runner from the shard manager runner_tx: Sender<InterMessage>, shard: Shard, threadpool: ThreadPool, #[cfg(feature = "voice")] voice_manager: Arc<Mutex<ClientVoiceManager>>, } impl<H: EventHandler + Send + Sync + 'static> ShardRunner<H> { /// Creates a new runner for a Shard. pub fn new(opt: ShardRunnerOptions<H>) -> Self { let (tx, rx) = mpsc::channel(); Self { runner_rx: rx, runner_tx: tx, data: opt.data, event_handler: opt.event_handler, #[cfg(feature = "framework")] framework: opt.framework, manager_tx: opt.manager_tx, shard: opt.shard, threadpool: opt.threadpool, #[cfg(feature = "voice")] voice_manager: opt.voice_manager, } } /// Starts the runner's loop to receive events. /// /// This runs a loop that performs the following in each iteration: /// /// 1. checks the receiver for [`ShardRunnerMessage`]s, possibly from the /// [`ShardManager`], and if there is one, acts on it. /// /// 2. checks if a heartbeat should be sent to the discord Gateway, and if /// so, sends one. /// /// 3. attempts to retrieve a message from the WebSocket, processing it into /// a [`GatewayEvent`]. This will block for 100ms before assuming there is /// no message available. /// /// 4. Checks with the [`Shard`] to determine if the gateway event is /// specifying an action to take (e.g. resuming, reconnecting, heartbeating) /// and then performs that action, if any. /// /// 5. Dispatches the event via the Client. /// /// 6. Go back to 1. /// /// [`GatewayEvent`]: ../../../model/event/enum.GatewayEvent.html /// [`Shard`]: ../../../gateway/struct.Shard.html /// [`ShardManager`]: struct.ShardManager.html /// [`ShardRunnerMessage`]: enum.ShardRunnerMessage.html pub fn run(&mut self) -> Result<()> { debug!("[ShardRunner {:?}] Running", self.shard.shard_info()); loop { if !self.recv()? 
{ return Ok(()); } // check heartbeat if !self.shard.check_heartbeat() { warn!( "[ShardRunner {:?}] Error heartbeating", self.shard.shard_info(), ); return self.request_restart(); } let pre = self.shard.stage(); let (event, action, successful) = self.recv_event(); let post = self.shard.stage(); if post != pre { self.update_manager(); let e = ClientEvent::ShardStageUpdate(ShardStageUpdateEvent { new: post, old: pre, shard_id: ShardId(self.shard.shard_info()[0]), }); self.dispatch(DispatchEvent::Client(e)); } match action { Some(ShardAction::Reconnect(ReconnectType::Reidentify)) => { return self.request_restart() }, Some(other) => { let _ = self.action(&other); }, None => {}, } if let Some(event) = event { self.dispatch(DispatchEvent::Model(event)); } if !successful && !self.shard.stage().is_connecting() { return self.request_restart(); } } } /// Clones the internal copy of the Sender to the shard runner. pub(super) fn runner_tx(&self) -> Sender<InterMessage> { self.runner_tx.clone() } /// Takes an action that a [`Shard`] has determined should happen and then /// does it. /// /// For example, if the shard says that an Identify message needs to be /// sent, this will do that. /// /// # Errors /// /// Returns fn action(&mut self, action: &ShardAction) -> Result<()> { match *action { ShardAction::Reconnect(ReconnectType::Reidentify) => { self.request_restart() }, ShardAction::Reconnect(ReconnectType::Resume) => { self.shard.resume() }, ShardAction::Heartbeat => self.shard.heartbeat(), ShardAction::Identify => self.shard.identify(), } } // Checks if the ID received to shutdown is equivalent to the ID of the // shard this runner is responsible. If so, it shuts down the WebSocket // client. // // Returns whether the WebSocket client is still active. // // If true, the WebSocket client was _not_ shutdown. If false, it was. fn checked_shutdown(&mut self, id: ShardId) -> bool { // First verify the ID so we know for certain this runner is // to shutdown. if id.0 != self.shard.shard_info()[0] { // Not meant for this runner for some reason, don't // shutdown. return true; } let close_data = CloseData::new(1000, String::new()); let msg = OwnedMessage::Close(Some(close_data)); let _ = self.shard.client.send_message(&msg); false } #[inline] fn dispatch(&self, event: DispatchEvent) { dispatch( event, #[cfg(feature = "framework")] &self.framework, &self.data, &self.event_handler, &self.runner_tx, &self.threadpool, self.shard.shard_info()[0], ); } // Handles a received value over the shard runner rx channel. // // Returns a boolean on whether the shard runner can continue. // // This always returns true, except in the case that the shard manager asked // the runner to shutdown. fn handle_rx_value(&mut self, value: InterMessage) -> bool { match value { InterMessage::Client(ShardClientMessage::Manager(x)) => match x { ShardManagerMessage::Restart(id) | ShardManagerMessage::Shutdown(id) => { self.checked_shutdown(id) }, ShardManagerMessage::ShutdownAll => { // This variant should never be received. warn!( "[ShardRunner {:?}] Received a ShutdownAll?", self.shard.shard_info(), ); true }, ShardManagerMessage::ShardUpdate { .. 
} | ShardManagerMessage::ShutdownInitiated => { // nb: not sent here true }, }, InterMessage::Client(ShardClientMessage::Runner(x)) => match x { ShardRunnerMessage::ChunkGuilds { guild_ids, limit, query } => { self.shard.chunk_guilds( guild_ids, limit, query.as_ref().map(String::as_str), ).is_ok() }, ShardRunnerMessage::Close(code, reason) => { let reason = reason.unwrap_or_else(String::new); let data = CloseData::new(code, reason); let msg = OwnedMessage::Close(Some(data)); self.shard.client.send_message(&msg).is_ok() }, ShardRunnerMessage::Message(msg) => { self.shard.client.send_message(&msg).is_ok() }, ShardRunnerMessage::SetGame(game) => { // To avoid a clone of `game`, we do a little bit of // trickery here: // // First, we obtain a reference to the current presence of // the shard, and create a new presence tuple of the new // game we received over the channel as well as the online // status that the shard already had. // // We then (attempt to) send the websocket message with the // status update, expressively returning: // // - whether the message successfully sent // - the original game we received over the channel self.shard.set_game(game);<|fim▁hole|> self.shard.update_presence().is_ok() }, ShardRunnerMessage::SetPresence(status, game) => { self.shard.set_presence(status, game); self.shard.update_presence().is_ok() }, ShardRunnerMessage::SetStatus(status) => { self.shard.set_status(status); self.shard.update_presence().is_ok() }, }, InterMessage::Json(value) => { // Value must be forwarded over the websocket self.shard.client.send_json(&value).is_ok() }, } } #[cfg(feature = "voice")] fn handle_voice_event(&self, event: &Event) { match *event { Event::Ready(_) => { self.voice_manager.lock().set( self.shard.shard_info()[0], self.runner_tx.clone(), ); }, Event::VoiceServerUpdate(ref event) => { if let Some(guild_id) = event.guild_id { let mut manager = self.voice_manager.lock(); let mut search = manager.get_mut(guild_id); if let Some(handler) = search { handler.update_server(&event.endpoint, &event.token); } } }, Event::VoiceStateUpdate(ref event) => { if let Some(guild_id) = event.guild_id { let mut manager = self.voice_manager.lock(); let mut search = manager.get_mut(guild_id); if let Some(handler) = search { handler.update_state(&event.voice_state); } } }, _ => {}, } } // Receives values over the internal shard runner rx channel and handles // them. // // This will loop over values until there is no longer one. // // Requests a restart if the sending half of the channel disconnects. This // should _never_ happen, as the sending half is kept on the runner. // Returns whether the shard runner is in a state that can continue. fn recv(&mut self) -> Result<bool> { loop { match self.runner_rx.try_recv() { Ok(value) => { if !self.handle_rx_value(value) { return Ok(false); } }, Err(TryRecvError::Disconnected) => { warn!( "[ShardRunner {:?}] Sending half DC; restarting", self.shard.shard_info(), ); let _ = self.request_restart(); return Ok(false); }, Err(TryRecvError::Empty) => break, } } // There are no longer any values available. Ok(true) } /// Returns a received event, as well as whether reading the potentially /// present event was successful. 
fn recv_event(&mut self) -> (Option<Event>, Option<ShardAction>, bool) { let gw_event = match self.shard.client.recv_json() { Ok(Some(value)) => { GatewayEvent::deserialize(value).map(Some).map_err(From::from) }, Ok(None) => Ok(None), Err(Error::WebSocket(WebSocketError::IoError(_))) => { // Check that an amount of time at least double the // heartbeat_interval has passed. // // If not, continue on trying to receive messages. // // If it has, attempt to auto-reconnect. { let last = self.shard.last_heartbeat_ack(); let interval = self.shard.heartbeat_interval(); if let (Some(last_heartbeat_ack), Some(interval)) = (last, interval) { let seconds_passed = last_heartbeat_ack.elapsed().as_secs(); let interval_in_secs = interval / 1000; if seconds_passed <= interval_in_secs * 2 { return (None, None, true); } } else { return (None, None, true); } } debug!("Attempting to auto-reconnect"); match self.shard.reconnection_type() { ReconnectType::Reidentify => return (None, None, false), ReconnectType::Resume => { if let Err(why) = self.shard.resume() { warn!("Failed to resume: {:?}", why); return (None, None, false); } }, } return (None, None, true); }, Err(Error::WebSocket(WebSocketError::NoDataAvailable)) => { // This is hit when the websocket client dies this will be // hit every iteration. return (None, None, false); }, Err(why) => Err(why), }; let event = match gw_event { Ok(Some(event)) => Ok(event), Ok(None) => return (None, None, true), Err(why) => Err(why), }; let action = match self.shard.handle_event(&event) { Ok(Some(action)) => Some(action), Ok(None) => None, Err(why) => { error!("Shard handler received err: {:?}", why); return (None, None, true); }, }; if let Ok(GatewayEvent::HeartbeatAck) = event { self.update_manager(); } #[cfg(feature = "voice")] { if let Ok(GatewayEvent::Dispatch(_, ref event)) = event { self.handle_voice_event(&event); } } let event = match event { Ok(GatewayEvent::Dispatch(_, event)) => Some(event), _ => None, }; (event, action, true) } fn request_restart(&self) -> Result<()> { self.update_manager(); debug!( "[ShardRunner {:?}] Requesting restart", self.shard.shard_info(), ); let shard_id = ShardId(self.shard.shard_info()[0]); let msg = ShardManagerMessage::Restart(shard_id); let _ = self.manager_tx.send(msg); #[cfg(feature = "voice")] { self.voice_manager.lock().manager_remove(&shard_id.0); } Ok(()) } fn update_manager(&self) { let _ = self.manager_tx.send(ShardManagerMessage::ShardUpdate { id: ShardId(self.shard.shard_info()[0]), latency: self.shard.latency(), stage: self.shard.stage(), }); } } /// Options to be passed to [`ShardRunner::new`]. /// /// [`ShardRunner::new`]: struct.ShardRunner.html#method.new pub struct ShardRunnerOptions<H: EventHandler + Send + Sync + 'static> { pub data: Arc<Mutex<ShareMap>>, pub event_handler: Arc<H>, #[cfg(feature = "framework")] pub framework: Arc<Mutex<Option<Box<Framework + Send>>>>, pub manager_tx: Sender<ShardManagerMessage>, pub shard: Shard, pub threadpool: ThreadPool, #[cfg(feature = "voice")] pub voice_manager: Arc<Mutex<ClientVoiceManager>>, }<|fim▁end|>
<|file_name|>response.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] from ggrc import db from ggrc.models.mixins import ( deferred, Noted, Described, Hyperlinked, WithContact, Titled, Slugged, ) from ggrc.models.object_document import Documentable from ggrc.models.object_person import Personable from ggrc.models.relationship import Relatable from ggrc.models.request import Request class Response(Noted, Described, Hyperlinked, WithContact, Titled, Slugged, db.Model): __tablename__ = 'responses' __mapper_args__ = { 'polymorphic_on': 'response_type', } _title_uniqueness = False _slug_uniqueness = False # Override `Titled.title` to provide default="" title = deferred( db.Column(db.String, nullable=False, default=""), 'Response') VALID_STATES = (u'Assigned', u'Submitted', u'Accepted', u'Rejected') VALID_TYPES = (u'documentation', u'interview', u'population sample') request_id = deferred( db.Column(db.Integer, db.ForeignKey('requests.id'), nullable=False), 'Response') response_type = db.Column(db.Enum(*VALID_TYPES), nullable=False) status = deferred(db.Column(db.String, nullable=False), 'Response') population_worksheet_id = deferred( db.Column(db.Integer, db.ForeignKey('documents.id'), nullable=True), 'Response') population_count = deferred(db.Column(db.Integer, nullable=True), 'Response') sample_worksheet_id = deferred( db.Column(db.Integer, db.ForeignKey('documents.id'), nullable=True), 'Response') sample_count = deferred(db.Column(db.Integer, nullable=True), 'Response') sample_evidence_id = deferred( db.Column(db.Integer, db.ForeignKey('documents.id'), nullable=True), 'Response') population_worksheet = db.relationship( "Document", foreign_keys="PopulationSampleResponse.population_worksheet_id" ) sample_worksheet = db.relationship( "Document", foreign_keys="PopulationSampleResponse.sample_worksheet_id" ) sample_evidence = db.relationship( "Document", foreign_keys="PopulationSampleResponse.sample_evidence_id" ) @staticmethod def _extra_table_args(cls): return ( db.Index('population_worksheet_document', 'population_worksheet_id'), db.Index('sample_evidence_document', 'sample_evidence_id'), db.Index('sample_worksheet_document', 'sample_worksheet_id'), ) _publish_attrs = [ 'request', 'status',<|fim▁hole|> _sanitize_html = [ 'description', ] _aliases = { "description": "Response", "request": { "display_name": "Request", "mandatory": True, "filter_by": "_filter_by_request", }, "response_type": { "display_name": "Response Type", "mandatory": True, }, "status": "Status", "title": None, "secondary_contact": None, "notes": None, } def _display_name(self): return u'Response with id={0} for Audit "{1}"'.format( self.id, self.request.audit.display_name) @classmethod def _filter_by_request(cls, predicate): return Request.query.filter( (Request.id == cls.request_id) & predicate(Request.slug) ).exists() @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Response, cls).eager_query() return query.options( orm.joinedload('request')) class DocumentationResponse(Relatable, Documentable, Personable, Response): __mapper_args__ = { 'polymorphic_identity': 'documentation' } _table_plural = 'documentation_responses' _publish_attrs = [] _sanitize_html = [] class InterviewResponse(Relatable, Documentable, Personable, Response): __mapper_args__ = { 'polymorphic_identity': 'interview' } 
_table_plural = 'interview_responses' meetings = db.relationship( 'Meeting', backref='response', cascade='all, delete-orphan' ) _publish_attrs = [ 'meetings', ] _sanitize_html = [] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(InterviewResponse, cls).eager_query() return query.options( orm.subqueryload('meetings')) class PopulationSampleResponse(Relatable, Documentable, Personable, Response): __mapper_args__ = { 'polymorphic_identity': 'population sample' } _table_plural = 'population_sample_responses' _publish_attrs = [ 'population_worksheet', 'population_count', 'sample_worksheet', 'sample_count', 'sample_evidence', ] _sanitize_html = [ 'population_count', 'sample_count', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(PopulationSampleResponse, cls).eager_query() return query.options( orm.joinedload('population_worksheet'), orm.joinedload('sample_worksheet'), orm.joinedload('sample_evidence'))<|fim▁end|>
'response_type', ]
<|file_name|>better_conv_net.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import sys sys.path.insert(0, '../data_loader/') import load from theano.tensor.nnet.conv import conv2d from theano.tensor.signal.downsample import max_pool_2d # load data x_train, t_train, x_test, t_test = load.cifar10(dtype=theano.config.floatX, grayscale=False) labels_test = np.argmax(t_test, axis=1) # reshape data x_train = x_train.reshape((x_train.shape[0], 3, 32, 32)) x_test = x_test.reshape((x_test.shape[0], 3, 32, 32)) # define symbolic Theano variables x = T.tensor4() t = T.matrix() # define model: neural network def floatX(x): return np.asarray(x, dtype=theano.config.floatX) def init_weights(shape): return theano.shared(floatX(np.random.randn(*shape) * 0.1)) def momentum(cost, params, learning_rate, momentum): grads = theano.grad(cost, params) updates = [] for p, g in zip(params, grads): mparam_i = theano.shared(np.zeros(p.get_value().shape, dtype=theano.config.floatX)) v = momentum * mparam_i - learning_rate * g updates.append((mparam_i, v)) updates.append((p, p + v)) return updates def model(x, w_c1, b_c1, w_c2, b_c2, w_h3, b_h3, w_o, b_o): c1 = T.maximum(0, conv2d(x, w_c1) + b_c1.dimshuffle('x', 0, 'x', 'x')) p1 = max_pool_2d(c1, (3, 3)) c2 = T.maximum(0, conv2d(p1, w_c2) + b_c2.dimshuffle('x', 0, 'x', 'x')) p2 = max_pool_2d(c2, (2, 2)) p2_flat = p2.flatten(2) h3 = T.maximum(0, T.dot(p2_flat, w_h3) + b_h3) p_y_given_x = T.nnet.softmax(T.dot(h3, w_o) + b_o) return p_y_given_x w_c1 = init_weights((4, 3, 3, 3)) b_c1 = init_weights((4,)) w_c2 = init_weights((8, 4, 3, 3)) b_c2 = init_weights((8,)) w_h3 = init_weights((8 * 4 * 4, 100)) b_h3 = init_weights((100,)) w_o = init_weights((100, 10)) b_o = init_weights((10,)) params = [w_c1, b_c1, w_c2, b_c2, w_h3, b_h3, w_o, b_o] p_y_given_x = model(x, *params) y = T.argmax(p_y_given_x, axis=1) cost = T.mean(T.nnet.categorical_crossentropy(p_y_given_x, t)) updates = momentum(cost, params, learning_rate=0.01, momentum=0.9) # compile theano functions train = theano.function([x, t], cost, updates=updates) predict = theano.function([x], y) # train model batch_size = 50 for i in range(50): print "iteration {}".format(i + 1) for start in range(0, len(x_train), batch_size): x_batch = x_train[start:start + batch_size] t_batch = t_train[start:start + batch_size] cost = train(x_batch, t_batch) predictions_test = predict(x_test) accuracy = np.mean(predictions_test == labels_test) print "accuracy: {}\n".format(accuracy)<|fim▁end|>
import theano import theano.tensor as T import numpy as np
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>export default {<|fim▁hole|> content: { width: '100%', padding: 10, marginRight: 20, }, header: { display: 'flex', justifyContent: 'space-between', alignItems: 'center', }, };<|fim▁end|>
<|file_name|>test_vision.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest from unittest import mock import pytest from google.cloud.vision import enums from google.cloud.vision_v1 import ProductSearchClient from google.cloud.vision_v1.proto.image_annotator_pb2 import ( AnnotateImageResponse, EntityAnnotation, SafeSearchAnnotation, ) from google.cloud.vision_v1.proto.product_search_service_pb2 import Product, ProductSet, ReferenceImage from google.protobuf.json_format import MessageToDict from parameterized import parameterized from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id PROJECT_ID_TEST = 'project-id' PROJECT_ID_TEST_2 = 'project-id-2' LOC_ID_TEST = 'loc-id' LOC_ID_TEST_2 = 'loc-id-2' PRODUCTSET_ID_TEST = 'ps-id' PRODUCTSET_ID_TEST_2 = 'ps-id-2' PRODUCTSET_NAME_TEST = f'projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/productSets/{PRODUCTSET_ID_TEST}' PRODUCT_ID_TEST = 'p-id' PRODUCT_ID_TEST_2 = 'p-id-2' PRODUCT_NAME_TEST = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}" PRODUCT_NAME = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}" REFERENCE_IMAGE_ID_TEST = 'ri-id' REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id' ANNOTATE_IMAGE_REQUEST = { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], } BATCH_ANNOTATE_IMAGE_REQUEST = [ { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], }, { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], }, ] REFERENCE_IMAGE_NAME_TEST = ( f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/" f"{PRODUCTSET_ID_TEST}/referenceImages/{REFERENCE_IMAGE_ID_TEST}" ) REFERENCE_IMAGE_TEST = ReferenceImage(name=REFERENCE_IMAGE_GEN_ID_TEST) REFERENCE_IMAGE_WITHOUT_ID_NAME = ReferenceImage() DETECT_TEST_IMAGE = {"source": {"image_uri": "https://foo.com/image.jpg"}} DETECT_TEST_ADDITIONAL_PROPERTIES = {"test-property-1": "test-value-1", "test-property-2": "test-value-2"} class TestGcpVisionHook(unittest.TestCase): def setUp(self): with mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__', new=mock_base_gcp_hook_default_project_id, ): self.hook = CloudVisionHook(gcp_conn_id='test') @mock.patch( "airflow.providers.google.cloud.hooks.vision.CloudVisionHook.client_info", new_callable=mock.PropertyMock, ) 
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook._get_credentials") @mock.patch("airflow.providers.google.cloud.hooks.vision.ProductSearchClient") def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info): result = self.hook.get_conn() mock_client.assert_called_once_with( credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value ) assert mock_client.return_value == result assert self.hook._client == result @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_explicit_id(self, get_conn): # Given create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = None parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When result = self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then # ProductSet ID was provided explicitly in the method call above, should be returned from the method assert result == PRODUCTSET_ID_TEST create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=PRODUCTSET_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_autogenerated_id(self, get_conn): # Given autogenerated_id = 'autogen-id' response_product_set = ProductSet( name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) ) create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = response_product_set parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When result = self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST ) # Then # ProductSet ID was not provided in the method call above. Should be extracted from the API response # and returned. assert result == autogenerated_id create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=None, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn): # Given response_product_set = None create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = response_product_set parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then # API response was wrong (None) and thus ProductSet ID extraction should fail. err = ctx.value assert 'Unable to get name from response...' 
in str(err) create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=None, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_get_productset(self, get_conn): # Given name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) response_product_set = ProductSet(name=name) get_product_set_method = get_conn.return_value.get_product_set get_product_set_method.return_value = response_product_set # When response = self.hook.get_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response assert response == MessageToDict(response_product_set) get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_no_explicit_name(self, get_conn): # Given product_set = ProductSet() update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = product_set productset_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST ) # When result = self.hook.update_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product_set) update_product_set_method.assert_called_once_with( product_set=ProductSet(name=productset_name), metadata=None, retry=None, timeout=None, update_mask=None, ) @parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name( self, location, product_set_id, get_conn ): # Given update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = None product_set = ProductSet() # When with pytest.raises(AirflowException) as ctx: self.hook.update_product_set( location=location, product_set_id=product_set_id, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ERR_UNABLE_TO_CREATE.format(label='ProductSet', id_label='productset_id') in str(err) update_product_set_method.assert_not_called() @parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_explicit_name_missing_params_for_constructed_name( self, location, product_set_id, get_conn ): # Given explicit_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 ) product_set = ProductSet(name=explicit_ps_name) update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = product_set # When result = self.hook.update_product_set( location=location, product_set_id=product_set_id, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product_set) update_product_set_method.assert_called_once_with( product_set=ProductSet(name=explicit_ps_name), metadata=None, retry=None, timeout=None, 
update_mask=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_explicit_name_different_from_constructed(self, get_conn): # Given update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = None explicit_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 ) product_set = ProductSet(name=explicit_ps_name) template_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST ) # When # Location and product_set_id are passed in addition to a ProductSet with an explicit name, # but both names differ (constructed != explicit). # Should throw AirflowException in this case. with pytest.raises(AirflowException) as ctx: self.hook.update_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value # self.assertIn("The required parameter 'project_id' is missing", str(err)) assert err assert ( ERR_DIFF_NAMES.format( explicit_name=explicit_ps_name, constructed_name=template_ps_name, label="ProductSet", id_label="productset_id", ) in str(err) ) update_product_set_method.assert_not_called() @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_delete_productset(self, get_conn): # Given delete_product_set_method = get_conn.return_value.delete_product_set delete_product_set_method.return_value = None name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) # When response = self.hook.delete_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response is None delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn', **{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST}, ) def test_create_reference_image_explicit_id(self, get_conn): # Given create_reference_image_method = get_conn.return_value.create_reference_image # When result = self.hook.create_reference_image( project_id=PROJECT_ID_TEST, location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME, reference_image_id=REFERENCE_IMAGE_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == REFERENCE_IMAGE_ID_TEST create_reference_image_method.assert_called_once_with( parent=PRODUCT_NAME, reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME, reference_image_id=REFERENCE_IMAGE_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn', **{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST}, ) def test_create_reference_image_autogenerated_id(self, get_conn): # Given create_reference_image_method = get_conn.return_value.create_reference_image # When result = self.hook.create_reference_image( project_id=PROJECT_ID_TEST, location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, reference_image=REFERENCE_IMAGE_TEST, reference_image_id=REFERENCE_IMAGE_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == REFERENCE_IMAGE_GEN_ID_TEST 
create_reference_image_method.assert_called_once_with( parent=PRODUCT_NAME, reference_image=REFERENCE_IMAGE_TEST, reference_image_id=REFERENCE_IMAGE_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_add_product_to_product_set(self, get_conn): # Given add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set # When self.hook.add_product_to_product_set( product_set_id=PRODUCTSET_ID_TEST, product_id=PRODUCT_ID_TEST, location=LOC_ID_TEST, project_id=PROJECT_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method add_product_to_product_set_method.assert_called_once_with( name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None ) # remove_product_from_product_set @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_remove_product_from_product_set(self, get_conn): # Given remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set # When self.hook.remove_product_from_product_set( product_set_id=PRODUCTSET_ID_TEST, product_id=PRODUCT_ID_TEST, location=LOC_ID_TEST, project_id=PROJECT_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method remove_product_from_product_set_method.assert_called_once_with( name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client') def test_annotate_image(self, annotator_client_mock): # Given annotate_image_method = annotator_client_mock.annotate_image # When self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST) # Then # Product ID was provided explicitly in the method call above, should be returned from the method annotate_image_method.assert_called_once_with( request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client') def test_batch_annotate_images(self, annotator_client_mock): # Given batch_annotate_images_method = annotator_client_mock.batch_annotate_images # When self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST) # Then # Product ID was provided explicitly in the method call above, should be returned from the method batch_annotate_images_method.assert_called_once_with( requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_explicit_id(self, get_conn): # Given create_product_method = get_conn.return_value.create_product create_product_method.return_value = None parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When result = self.hook.create_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == PRODUCT_ID_TEST create_product_method.assert_called_once_with( parent=parent, product=product, product_id=PRODUCT_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id(self, get_conn): # Given autogenerated_id = 'autogen-p-id' 
response_product = Product( name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) ) create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When result = self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # Product ID was not provided in the method call above. Should be extracted from the API response # and returned. assert result == autogenerated_id create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn): # Given wrong_name = 'wrong_name_not_a_correct_path' response_product = Product(name=wrong_name) create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # API response was wrong (wrong name format) and thus ProductSet ID extraction should fail. err = ctx.value assert 'Unable to get id from name' in str(err) create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id_wrong_api_response(self, get_conn): # Given response_product = None create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # API response was wrong (None) and thus ProductSet ID extraction should fail. err = ctx.value assert 'Unable to get name from response...' 
in str(err) create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_no_explicit_name(self, get_conn): # Given product = Product() update_product_method = get_conn.return_value.update_product update_product_method.return_value = product product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When result = self.hook.update_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product) update_product_method.assert_called_once_with( product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None ) @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_no_explicit_name_and_missing_params_for_constructed_name( self, location, product_id, get_conn ): # Given update_product_method = get_conn.return_value.update_product update_product_method.return_value = None product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.update_product( location=location, product_id=product_id, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ERR_UNABLE_TO_CREATE.format(label='Product', id_label='product_id') in str(err) update_product_method.assert_not_called() @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_explicit_name_missing_params_for_constructed_name( self, location, product_id, get_conn ): # Given explicit_p_name = ProductSearchClient.product_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 ) product = Product(name=explicit_p_name) update_product_method = get_conn.return_value.update_product update_product_method.return_value = product # When result = self.hook.update_product( location=location, product_id=product_id, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product) update_product_method.assert_called_once_with( product=Product(name=explicit_p_name), metadata=None, retry=None, timeout=None, update_mask=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_explicit_name_different_from_constructed(self, get_conn): # Given update_product_method = get_conn.return_value.update_product update_product_method.return_value = None explicit_p_name = ProductSearchClient.product_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 ) product = Product(name=explicit_p_name) template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When # Location and product_id are passed in addition to a Product with an explicit name, # but both names differ (constructed != explicit). # Should throw AirflowException in this case. 
with pytest.raises(AirflowException) as ctx: self.hook.update_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ( ERR_DIFF_NAMES.format( explicit_name=explicit_p_name,<|fim▁hole|> ) in str(err) ) update_product_method.assert_not_called() @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_delete_product(self, get_conn): # Given delete_product_method = get_conn.return_value.delete_product delete_product_method.return_value = None name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When response = self.hook.delete_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response is None delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.text_detection(image=DETECT_TEST_IMAGE) # Then detect_text_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text_with_additional_properties(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.text_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then detect_text_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.text_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_document_text_detection(self, annotator_client_mock): # Given document_text_detection_method = annotator_client_mock.document_text_detection document_text_detection_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.document_text_detection(image=DETECT_TEST_IMAGE) # Then document_text_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_document_text_detection_with_additional_properties(self, annotator_client_mock): # Given document_text_detection_method = annotator_client_mock.document_text_detection document_text_detection_method.return_value = AnnotateImageResponse( 
text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.document_text_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then document_text_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_document_text_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.document_text_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.document_text_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection(self, annotator_client_mock): # Given label_detection_method = annotator_client_mock.label_detection label_detection_method.return_value = AnnotateImageResponse( label_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.label_detection(image=DETECT_TEST_IMAGE) # Then label_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection_with_additional_properties(self, annotator_client_mock): # Given label_detection_method = annotator_client_mock.label_detection label_detection_method.return_value = AnnotateImageResponse( label_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.label_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then label_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.label_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.label_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection(self, annotator_client_mock): # Given safe_search_detection_method = annotator_client_mock.safe_search_detection safe_search_detection_method.return_value = AnnotateImageResponse( safe_search_annotation=SafeSearchAnnotation( adult="VERY_UNLIKELY", spoof="VERY_UNLIKELY", medical="VERY_UNLIKELY", violence="VERY_UNLIKELY", racy="VERY_UNLIKELY", ) ) # When self.hook.safe_search_detection(image=DETECT_TEST_IMAGE) # Then safe_search_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection_with_additional_properties(self, annotator_client_mock): # Given safe_search_detection_method = annotator_client_mock.safe_search_detection safe_search_detection_method.return_value = 
AnnotateImageResponse( safe_search_annotation=SafeSearchAnnotation( adult="VERY_UNLIKELY", spoof="VERY_UNLIKELY", medical="VERY_UNLIKELY", violence="VERY_UNLIKELY", racy="VERY_UNLIKELY", ) ) # When self.hook.safe_search_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then safe_search_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.safe_search_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.safe_search_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err)<|fim▁end|>
constructed_name=template_p_name, label="Product", id_label="product_id",
<|file_name|>test5.py<|end_file_name|><|fim▁begin|>import os import CTK UPLOAD_DIR = "/tmp" def ok (filename, target_dir, target_file, params): txt = "<h1>It worked!</h1>" txt += "<pre>%s</pre>" %(os.popen("ls -l '%s'" %(os.path.join(target_dir, target_file))).read()) txt += "<p>Params: %s</p>" %(str(params)) txt += "<p>Filename: %s</p>" %(filename) return txt class default: def __init__ (self): self.page = CTK.Page () self.page += CTK.RawHTML ("<h1>Direct Upload with params</h1>") self.page += CTK.Uploader({'handler': ok, 'target_dir': UPLOAD_DIR}, {'var':'foo'}) self.page += CTK.RawHTML ("<h1>Temporal Upload without params</h1>") self.page += CTK.Uploader({'handler': ok, 'target_dir': UPLOAD_DIR}, direct=False) def __call__ (self): return self.page.Render()<|fim▁hole|><|fim▁end|>
CTK.publish ('', default) CTK.run (port=8000)
<|file_name|>ApplicationException.ts<|end_file_name|><|fim▁begin|>export default class ApplicationException extends Error { constructor(message?: string) { super(message); Object.setPrototypeOf(this, ApplicationException.prototype); }<|fim▁hole|>}<|fim▁end|>
<|file_name|>stringify-mark.js<|end_file_name|><|fim▁begin|>const test = require('tape'); const Mark = require('./../mark.js'); function stringArrayBuffer(str) { var buffer = new ArrayBuffer(str.length); var bytes = new Uint8Array(buffer); str.split('').forEach(function(str, i) { bytes[i] = str.charCodeAt(0); }); return buffer; } test('Stringify JSON object', function(assert) { assert.equal(Mark.stringify(Mark.parse(`{a:12.4, b:true, c:false, d:'str', e:null, g:1, h:[1,2,3], i:-12, j:[], k:{}, l:'', m:"", n:0, p:1e-2}`)), `{a:12.4 b:true c:false d:"str" e:null g:1 h:[1 2 3] i:-12 j:[] k:{} l:"" m:"" n:0 p:0.01}`, "Stringify JSON object"); assert.end() ; }); test('Stringify Mark object', function(assert) { assert.equal(Mark.stringify(Mark.parse('{obj}')), '{obj}', "Stringify {obj}"); assert.equal(Mark.stringify(Mark.parse('{div width:10}')), '{div width:10}', "Stringify {div width:10}"); assert.equal(Mark.stringify(Mark.parse('{div "text"}')), '{div "text"}', 'Stringify {div "text"}'); assert.equal(Mark.stringify(Mark.parse("{div 'text'}")), '{div "text"}', "Stringify {div 'text'}"); assert.equal(Mark.stringify(Mark.parse('{div {br}}')), '{div {br}}', "Stringify {div {br}}"); assert.equal(Mark.stringify(Mark.parse('{div width:null}')), '{div width:null}', "Stringify property with null value}"); // undefined value handling var t = {obj:undefined}; assert.equal(Mark.stringify(t), '{}', "Stringify undefined property"); assert.equal(Mark.stringify([1, null, undefined]), '[1 null null]', "Stringify undefined value in array"); // JSON inside Mark assert.equal(Mark.stringify(Mark.parse('{div {width:10}}')), '{div {width:10}}', "Stringify {div {width:10}}"); // stringify with identation assert.equal(Mark.stringify(Mark.parse('{div width:10 (!--comment--) "test" {br}}'), {space:' '}), '{div width:10 \n (!--comment--) \n "test" \n {br}\n}', "Stringify with identation"); // stringify omitting comma assert.equal(Mark.stringify(Mark.parse('{div width:10, height:"15px", margin:[5 10 10 5]}')), '{div width:10 height:"15px" margin:[5 10 10 5]}', "Stringify without comma"); <|fim▁hole|> var doc = Mark('doc', {mime:'text/html', data:stringArrayBuffer("<h1>Mark binary!</h1>")}); assert.equal(Mark.stringify(doc), '{doc mime:"text/html" data:[#PGgxPk1hcmsgYmluYXJ5ITwvaDE+]}', "Stringify nested binary data"); // stringify base85 data var bin = stringArrayBuffer('hello'); bin.encoding = 'a85'; assert.equal(Mark.stringify(bin), "[#~BOu!rDZ~]", "Stringify base85"); assert.equal(Mark.stringify(Mark("[#~\n@p\ns7\ntD.3~]")), "[#[email protected]~]", "Stringify base85"); assert.equal(Mark.stringify(Mark("[#~ @<5pm \rBfIs ~]")), "[#~@<5pmBfIs~]", "Parse base85 of 'ascii85'"); assert.end(); });<|fim▁end|>
// stringify base64 data assert.equal(Mark.stringify(stringArrayBuffer('Hello')), '[#SGVsbG8=]', "Stringify binary data 'hello'"); assert.equal(Mark.stringify(stringArrayBuffer('Hello worlds!')), '[#SGVsbG8gd29ybGRzIQ==]', "Stringify binary data 'Hello worlds!'");
<|file_name|>FullscreenBtn.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; import * as fullscreen from 'src/singletons/fullscreen'; interface Props { target:HTMLElement; } interface State { isFullscreen:boolean; } export default class FullscreenBtn extends React.Component<Props, State> { constructor() { super(); this.state = {isFullscreen: false}; } render() { return (<|fim▁hole|> e.preventDefault(); e.stopPropagation(); this.state.isFullscreen ? fullscreen.exit() : fullscreen.enter(this.props.target); }} > <i className="material-icons"> {this.state.isFullscreen ? 'fullscreen_exit' : 'fullscreen' } </i> </button> ); } componentDidMount() { fullscreen.onChange(() => this.setState({ isFullscreen: fullscreen.check() })); } }<|fim▁end|>
<button className="fullscreen-toggle" onClick={(e) => {
<|file_name|>disambiguation_fire.js<|end_file_name|><|fim▁begin|>var d = require('../dtrace-provider'); var dtp = d.createDTraceProvider('test'); dtp.addProbe('probe1', 'int', 'int'); dtp.addProbe('probe2', 'int', 'int'); dtp.enable(); var dtp2 = d.createDTraceProvider('test'); dtp2.addProbe('probe3', 'int', 'int'); dtp2.addProbe('probe1', 'int', 'int'); dtp2.enable(); <|fim▁hole|> var dtp3 = d.createDTraceProvider('test', 'mymod1'); dtp3.addProbe('probe1', 'int', 'int'); dtp3.addProbe('probe2', 'int', 'int'); dtp3.enable(); var dtp4 = d.createDTraceProvider('test', 'mymod2'); dtp4.addProbe('probe1', 'int', 'int'); dtp4.addProbe('probe3', 'int', 'int'); dtp4.enable(); dtp.fire('probe1', function () { return ([12, 3]); }); dtp2.fire('probe1', function () { return ([12, 73]); }); dtp3.fire('probe1', function () { return ([12, 3]); }); dtp4.fire('probe1', function () { return ([12, 73]); });<|fim▁end|>
<|file_name|>marginalia-strings.js<|end_file_name|><|fim▁begin|>/* * Languages for annotation Javascript */ /* * Fetch a localized string * This is a function so that it can be replaced with another source of strings if desired * (e.g. in a database). The application uses short English-language strings as keys, so * that if the language source is lacking the key can be returned instead. */ function getLocalized( s ) { return LocalizedAnnotationStrings[ s ]; } LocalizedAnnotationStrings = { 'public annotation' : 'This annotation is public.', 'private annotation' : 'This annotation is private.', 'delete annotation button' : 'Delete this annotation.', 'annotation link button' : 'Link to another document.', 'annotation link label' : 'Select a document to link to.', 'delete annotation link button' : 'Remove this link.', 'annotation expand edit button' : 'Click to display margin note editor', 'annotation collapse edit button' : 'Click to display margin note drop-down list', 'annotation quote button' : 'Quote this annotation in a discussion post.', 'browser support of W3C range required for annotation creation' : 'Your browser does not support the W3C range standard, so you cannot create annotations.', 'select text to annotate' : 'You must select some text to annotate.', 'invalid selection' : 'Selection range is not valid.', 'corrupt XML from service' : 'An attempt to retrieve annotations from the server returned corrupt XML data.', 'note too long' : 'Please limit your margin note to 250 characters.', <|fim▁hole|> 'quote not found' : 'The highlighted passage could not be found', 'create overlapping edits' : 'You may not create overlapping edits', 'lang' : 'en' };<|fim▁end|>
'quote too long' : 'The passage you have attempted to highlight is too long. It may not exceed 1000 characters.', 'zero length quote' : 'You must select some text to annotate.',
<|file_name|>day01.py<|end_file_name|><|fim▁begin|>from __future__ import print_function f = open('inputs/input_01.txt') contents = f.read() print("Floor:", contents.count('(') - contents.count(')')) # Part Two change = {'(': 1, ')': -1}<|fim▁hole|>floor = 0 position = 1 for c in contents: if c in change: floor += change[c] if floor == -1: print("Basement entered at position:", position) break position += 1<|fim▁end|>
<|file_name|>loaders.py<|end_file_name|><|fim▁begin|>import os import logging from superdesk import get_resource_service from jinja2.loaders import FileSystemLoader, ModuleLoader, ChoiceLoader, DictLoader, PrefixLoader from liveblog.mongo_util import decode as mongodecode __all__ = ['ThemeTemplateLoader', 'CompiledThemeTemplateLoader'] logger = logging.getLogger('superdesk') class ThemeTemplateLoader(FileSystemLoader): """ Theme template loader for jinja2 SEO themes. """ def __init__(self, theme, encoding='utf-8', followlinks=False): theme_name = theme['name'] themes = get_resource_service('themes') theme_dirname = themes.get_theme_path(theme_name) self.searchpath = [os.path.join(theme_dirname, 'templates')] parent_theme = theme.get('extends')<|fim▁hole|> self.encoding = encoding self.followlinks = followlinks class CompiledThemeTemplateLoader(ChoiceLoader): def __init__(self, theme): """ A Mixed logic template loader module. It will use Compiled theme template for current theme and will also use FileSystemLoader like in order to enable inheritance """ self.loaders = [] theme_name = theme['name'] themes = get_resource_service('themes') parent_theme = theme.get('extends') files = theme.get('files', {'templates': {}}) if files.get('templates'): self.addDictonary(theme) if parent_theme: parent = themes.find_one(req=None, name=parent_theme) self.addDictonary(parent) else: compiled = themes.get_theme_compiled_templates_path(theme_name) self.loaders.append(ModuleLoader(compiled)) if parent_theme: parent_compiled = themes.get_theme_compiled_templates_path(parent_theme) self.loaders.append(ModuleLoader(parent_compiled)) # let's now add the parent theme prefix loader if parent_theme: prefix_loader = self._parent_prefix_loader(parent_theme) self.loaders.append(prefix_loader) def _parent_prefix_loader(self, name): """ Creates a PrefixLoader in order to be able to extends parent theme templates using as prefix the parent theme name Example: {% extends 'parent_theme_name/template_name.html' %} {% include 'parent_theme_name/template_name.html' %} Args: name (`str`): Parent theme name Returns: PrefixLoader instance with parent_name as prefix """ themes = get_resource_service('themes') parent_dirname = themes.get_theme_path(name) search_paths = [os.path.join(parent_dirname, 'templates')] return PrefixLoader({name: FileSystemLoader(search_paths)}) def addDictonary(self, theme): """ Add template files as dictionary in the loaders. """ files = theme.get('files', {'templates': {}}) if files.get('templates'): compiled = {} for file, content in files.get('templates').items(): compiled[mongodecode(file)] = content self.loaders.append(DictLoader(compiled))<|fim▁end|>
if parent_theme: parent_dirname = themes.get_theme_path(parent_theme) self.searchpath.append(os.path.join(parent_dirname, 'templates'))
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use fixture_tests::Fixture; use graphql_test_helpers::apply_transform_for_test; use relay_transforms::generate_id_field; <|fim▁hole|> apply_transform_for_test(fixture, |program| Ok(generate_id_field(program))) }<|fim▁end|>
pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> {
<|file_name|>test_endpoints_tmdb.py<|end_file_name|><|fim▁begin|># coding=utf-8 """Unit tests for mapi/endpoints/tmdb.py.""" import pytest from mapi.endpoints import tmdb_find, tmdb_movies, tmdb_search_movies from mapi.exceptions import MapiNotFoundException, MapiProviderException from tests import JUNK_TEXT GOONIES_IMDB_ID = "tt0089218" GOONIES_TMDB_ID = 9340 JUNK_IMDB_ID = "tt1234567890" @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_find__imdb_success(tmdb_api_key): expected_top_level_keys = { "movie_results", "person_results", "tv_episode_results", "tv_results", "tv_season_results", } expected_movie_results_keys = { "adult", "backdrop_path", "genre_ids", "id", "original_language", "original_title", "overview", "poster_path", "popularity", "release_date", "title", "video", "vote_average", "vote_count", } result = tmdb_find(tmdb_api_key, "imdb_id", GOONIES_IMDB_ID) assert isinstance(result, dict) assert set(result.keys()) == expected_top_level_keys assert len(result.get("movie_results", {})) > 0 assert expected_movie_results_keys == set( result.get("movie_results", {})[0].keys() ) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_find__api_key_fail(): with pytest.raises(MapiProviderException): tmdb_find(JUNK_TEXT, "imdb_id", GOONIES_IMDB_ID, cache=False) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_find__invalid_id_imdb(tmdb_api_key): with pytest.raises(MapiProviderException): tmdb_find(tmdb_api_key, "imdb_id", JUNK_TEXT, cache=False) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_find__not_found(tmdb_api_key): with pytest.raises(MapiNotFoundException): tmdb_find(tmdb_api_key, "imdb_id", JUNK_IMDB_ID) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_movies__success(tmdb_api_key): expected_top_level_keys = { "adult", "backdrop_path", "belongs_to_collection", "budget", "genres", "homepage", "id", "imdb_id", "original_language", "original_title", "overview", "popularity", "poster_path", "production_companies", "production_countries", "release_date", "revenue", "runtime", "spoken_languages", "status", "tagline", "title", "video", "vote_average", "vote_count", } result = tmdb_movies(tmdb_api_key, GOONIES_TMDB_ID) assert isinstance(result, dict) assert set(result.keys()) == expected_top_level_keys assert result.get("original_title") == "The Goonies" def test_tmdb_movies__api_key_fail(): with pytest.raises(MapiProviderException): tmdb_movies(JUNK_TEXT, "", cache=False) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_movies__id_tmdb_fail(tmdb_api_key): with pytest.raises(MapiProviderException): tmdb_movies(tmdb_api_key, JUNK_TEXT, cache=False) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_movies__not_found(tmdb_api_key): with pytest.raises(MapiNotFoundException): tmdb_movies(tmdb_api_key, "1" * 10) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_search_movies__success(tmdb_api_key): expected_top_level_keys = { "page", "results", "total_pages", "total_results", } expected_results_keys = { "adult", "backdrop_path", "genre_ids", "id", "original_language", "original_title", "overview", "popularity", "poster_path", "release_date", "title", "video", "vote_average", "vote_count", } result = tmdb_search_movies(tmdb_api_key, "the goonies", 1985) assert isinstance(result, dict) assert set(result.keys()) == expected_top_level_keys assert isinstance(result["results"], list) assert expected_results_keys == set(result.get("results", [{}])[0].keys()) assert len(result["results"]) == 1 assert result["results"][0]["original_title"] == "The Goonies" 
result = tmdb_search_movies(tmdb_api_key, "the goonies") assert len(result["results"]) > 1 def test_tmdb_search_movies__bad_api_key(): with pytest.raises(MapiProviderException): tmdb_search_movies(JUNK_TEXT, "the goonies", cache=False) <|fim▁hole|> tmdb_search_movies(tmdb_api_key, JUNK_TEXT, cache=False) @pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_search_movies__bad_year(tmdb_api_key): with pytest.raises(MapiProviderException): tmdb_search_movies( tmdb_api_key, "the goonies", year=JUNK_TEXT, cache=False )<|fim▁end|>
@pytest.mark.usefixtures("tmdb_api_key") def test_tmdb_search_movies__bad_title(tmdb_api_key): with pytest.raises(MapiNotFoundException):
<|file_name|>hdr_encoder.rs<|end_file_name|><|fim▁begin|>use color::Rgb; use hdr::{rgbe8, RGBE8Pixel, SIGNATURE}; use std::io::{Result, Write}; /// Radiance HDR encoder pub struct HDREncoder<W: Write> { w: W, } impl<W: Write> HDREncoder<W> { /// Creates encoder pub fn new(w: W) -> HDREncoder<W> { HDREncoder { w } } /// Encodes the image ```data``` /// that has dimensions ```width``` and ```height``` pub fn encode(mut self, data: &[Rgb<f32>], width: usize, height: usize) -> Result<()> { assert!(data.len() >= width * height); let w = &mut self.w; try!(w.write_all(SIGNATURE)); try!(w.write_all(b"\n")); try!(w.write_all(b"# Rust HDR encoder\n")); try!(w.write_all(b"FORMAT=32-bit_rle_rgbe\n\n")); try!(w.write_all(format!("-Y {} +X {}\n", height, width).as_bytes())); if width < 8 || width > 32_768 { for &pix in data { try!(write_rgbe8(w, to_rgbe8(pix))); } } else { // new RLE marker contains scanline width let marker = rgbe8(2, 2, (width / 256) as u8, (width % 256) as u8); // buffers for encoded pixels let mut bufr = Vec::with_capacity(width); bufr.resize(width, 0); let mut bufg = Vec::with_capacity(width); bufg.resize(width, 0); let mut bufb = Vec::with_capacity(width); bufb.resize(width, 0); let mut bufe = Vec::with_capacity(width); bufe.resize(width, 0); let mut rle_buf = Vec::with_capacity(width); for scanline in data.chunks(width) { for ((((r, g), b), e), &pix) in bufr.iter_mut() .zip(bufg.iter_mut()) .zip(bufb.iter_mut()) .zip(bufe.iter_mut()) .zip(scanline.iter()) { let cp = to_rgbe8(pix); *r = cp.c[0]; *g = cp.c[1]; *b = cp.c[2]; *e = cp.e; } try!(write_rgbe8(w, marker)); // New RLE encoding marker rle_buf.clear(); rle_compress(&bufr[..], &mut rle_buf); try!(w.write_all(&rle_buf[..])); rle_buf.clear(); rle_compress(&bufg[..], &mut rle_buf); try!(w.write_all(&rle_buf[..])); rle_buf.clear(); rle_compress(&bufb[..], &mut rle_buf); try!(w.write_all(&rle_buf[..])); rle_buf.clear(); rle_compress(&bufe[..], &mut rle_buf); try!(w.write_all(&rle_buf[..])); } } Ok(()) } } #[derive(Debug, PartialEq, Eq)] enum RunOrNot { Run(u8, usize), Norun(usize, usize), } use self::RunOrNot::{Norun, Run}; const RUN_MAX_LEN: usize = 127; const NORUN_MAX_LEN: usize = 128; struct RunIterator<'a> { data: &'a [u8], curidx: usize, } impl<'a> RunIterator<'a> { fn new(data: &'a [u8]) -> RunIterator<'a> { RunIterator { data, curidx: 0 } } } impl<'a> Iterator for RunIterator<'a> { type Item = RunOrNot; fn next(&mut self) -> Option<Self::Item> { if self.curidx == self.data.len() { None } else { let cv = self.data[self.curidx]; let crun = self.data[self.curidx..] 
.iter() .take_while(|&&v| v == cv) .take(RUN_MAX_LEN) .count(); let ret = if crun > 2 { Run(cv, crun) } else { Norun(self.curidx, crun) }; self.curidx += crun; Some(ret) } } } struct NorunCombineIterator<'a> { runiter: RunIterator<'a>, prev: Option<RunOrNot>, } impl<'a> NorunCombineIterator<'a> { fn new(data: &'a [u8]) -> NorunCombineIterator<'a> { NorunCombineIterator { runiter: RunIterator::new(data), prev: None, } } } // Combines sequential noruns produced by RunIterator impl<'a> Iterator for NorunCombineIterator<'a> { type Item = RunOrNot; fn next(&mut self) -> Option<Self::Item> { loop {<|fim▁hole|> // Just return stored run return Some(Run(c, len)); } Some(Norun(idx, len)) => { // Let's see if we need to continue norun match self.runiter.next() { Some(Norun(_, len1)) => { // norun continues let clen = len + len1; // combined length if clen == NORUN_MAX_LEN { return Some(Norun(idx, clen)); } else if clen > NORUN_MAX_LEN { // combined norun exceeds maximum length. store extra part of norun self.prev = Some(Norun(idx + NORUN_MAX_LEN, clen - NORUN_MAX_LEN)); // then return maximal norun return Some(Norun(idx, NORUN_MAX_LEN)); } else { // len + len1 < NORUN_MAX_LEN self.prev = Some(Norun(idx, len + len1)); // combine and continue loop } } Some(Run(c, len1)) => { // Run encountered. Store it self.prev = Some(Run(c, len1)); return Some(Norun(idx, len)); // and return combined norun } None => { // End of sequence return Some(Norun(idx, len)); // return combined norun } } } // End match self.prev.take() == Some(NoRun()) None => { // No norun to combine match self.runiter.next() { Some(Norun(idx, len)) => { self.prev = Some(Norun(idx, len)); // store for combine and continue the loop } Some(Run(c, len)) => { // Some run. Just return it return Some(Run(c, len)); } None => { // That's all, folks return None; } } } // End match self.prev.take() == None } // End match } // End loop } } // Appends RLE compressed ```data``` to ```rle``` fn rle_compress(data: &[u8], rle: &mut Vec<u8>) { rle.clear(); if data.is_empty() { rle.push(0); // Technically correct. It means read next 0 bytes. 
return; } // Task: split data into chunks of repeating (max 127) and non-repeating bytes (max 128) // Prepend non-repeating chunk with its length // Replace repeating byte with (run length + 128) and the byte for rnr in NorunCombineIterator::new(data) { match rnr { Run(c, len) => { assert!(len <= 127); rle.push(128u8 + len as u8); rle.push(c); } Norun(idx, len) => { assert!(len <= 128); rle.push(len as u8); rle.extend_from_slice(&data[idx..idx + len]); } } } } fn write_rgbe8<W: Write>(w: &mut W, v: RGBE8Pixel) -> Result<()> { let buf: [u8; 4] = unsafe { // It's safe, RGBE8Pixel doesn't implement Drop and it is repr(C) ::std::mem::transmute(v) }; w.write_all(&buf[..]) } /// Converts ```Rgb<f32>``` into ```RGBE8Pixel``` pub fn to_rgbe8(pix: Rgb<f32>) -> RGBE8Pixel { let pix = pix.data; let mx = f32::max(pix[0], f32::max(pix[1], pix[2])); if mx <= 0.0 { RGBE8Pixel { c: [0, 0, 0], e: 0 } } else { // let (frac, exp) = mx.frexp(); // unstable yet let exp = mx.log2().floor() as i32 + 1; let mul = f32::powi(2.0, exp); let mut conv = [0u8; 3]; for (cv, &sv) in conv.iter_mut().zip(pix.iter()) { *cv = f32::trunc(sv / mul * 256.0) as u8; } RGBE8Pixel { c: conv, e: (exp + 128) as u8, } } } #[test] fn to_rgbe8_test() { use hdr::rgbe8; let test_cases = vec![rgbe8(0, 0, 0, 0), rgbe8(1, 1, 128, 128)]; for &pix in &test_cases { assert_eq!(pix, to_rgbe8(pix.to_hdr())); } for mc in 128..255 { // TODO: use inclusive range when stable let pix = rgbe8(mc, mc, mc, 100); assert_eq!(pix, to_rgbe8(pix.to_hdr())); let pix = rgbe8(mc, 0, mc, 130); assert_eq!(pix, to_rgbe8(pix.to_hdr())); let pix = rgbe8(0, 0, mc, 140); assert_eq!(pix, to_rgbe8(pix.to_hdr())); let pix = rgbe8(1, 0, mc, 150); assert_eq!(pix, to_rgbe8(pix.to_hdr())); let pix = rgbe8(1, mc, 10, 128); assert_eq!(pix, to_rgbe8(pix.to_hdr())); for c in 0..255 { // Radiance HDR seems to be pre IEEE 754. 
// exponent can be -128 (represented as 0u8), so some colors cannot be represented in normalized f32 // Let's exclude exponent value of -128 (0u8) from testing let pix = rgbe8(1, mc, c, if c == 0 { 1 } else { c }); assert_eq!(pix, to_rgbe8(pix.to_hdr())); } } fn relative_dist(a: Rgb<f32>, b: Rgb<f32>) -> f32 { // maximal difference divided by maximal value let max_diff = a.data .iter() .zip(b.data.iter()) .fold(0.0, |diff, (&a, &b)| f32::max(diff, (a - b).abs())); let max_val = a.data .iter() .chain(b.data.iter()) .fold(0.0, |maxv, &a| f32::max(maxv, a)); if max_val == 0.0 { 0.0 } else { max_diff / max_val } } let test_values = vec![ 0.000_001, 0.000_02, 0.000_3, 0.004, 0.05, 0.6, 7.0, 80.0, 900.0, 1_000.0, 20_000.0, 300_000.0, ]; for &r in &test_values { for &g in &test_values { for &b in &test_values { let c1 = Rgb([r, g, b]); let c2 = to_rgbe8(c1).to_hdr(); let rel_dist = relative_dist(c1, c2); // Maximal value is normalized to the range 128..256, thus we have 1/128 precision assert!( rel_dist <= 1.0 / 128.0, "Relative distance ({}) exceeds 1/128 for {:?} and {:?}", rel_dist, c1, c2 ); } } } } #[test] fn runiterator_test() { let data = []; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), None); let data = [5]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Norun(0, 1))); assert_eq!(run_iter.next(), None); let data = [1, 1]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Norun(0, 2))); assert_eq!(run_iter.next(), None); let data = [0, 0, 0]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Run(0u8, 3))); assert_eq!(run_iter.next(), None); let data = [0, 0, 1, 1]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Norun(0, 2))); assert_eq!(run_iter.next(), Some(Norun(2, 2))); assert_eq!(run_iter.next(), None); let data = [0, 0, 0, 1, 1]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Run(0u8, 3))); assert_eq!(run_iter.next(), Some(Norun(3, 2))); assert_eq!(run_iter.next(), None); let data = [1, 2, 2, 2]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Norun(0, 1))); assert_eq!(run_iter.next(), Some(Run(2u8, 3))); assert_eq!(run_iter.next(), None); let data = [1, 1, 2, 2, 2]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Norun(0, 2))); assert_eq!(run_iter.next(), Some(Run(2u8, 3))); assert_eq!(run_iter.next(), None); let data = [2; 128]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Run(2u8, 127))); assert_eq!(run_iter.next(), Some(Norun(127, 1))); assert_eq!(run_iter.next(), None); let data = [2; 129]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Run(2u8, 127))); assert_eq!(run_iter.next(), Some(Norun(127, 2))); assert_eq!(run_iter.next(), None); let data = [2; 130]; let mut run_iter = RunIterator::new(&data[..]); assert_eq!(run_iter.next(), Some(Run(2u8, 127))); assert_eq!(run_iter.next(), Some(Run(2u8, 3))); assert_eq!(run_iter.next(), None); } #[test] fn noruncombine_test() { fn a<T>(mut v: Vec<T>, mut other: Vec<T>) -> Vec<T> { v.append(&mut other); v } let v = vec![]; let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), None); let v = vec![1]; let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Norun(0, 1))); assert_eq!(rsi.next(), None); let v = vec![2, 2]; let mut rsi = NorunCombineIterator::new(&v[..]); 
assert_eq!(rsi.next(), Some(Norun(0, 2))); assert_eq!(rsi.next(), None); let v = vec![3, 3, 3]; let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Run(3, 3))); assert_eq!(rsi.next(), None); let v = vec![4, 4, 3, 3, 3]; let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Norun(0, 2))); assert_eq!(rsi.next(), Some(Run(3, 3))); assert_eq!(rsi.next(), None); let v = vec![40; 400]; let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Run(40, 127))); assert_eq!(rsi.next(), Some(Run(40, 127))); assert_eq!(rsi.next(), Some(Run(40, 127))); assert_eq!(rsi.next(), Some(Run(40, 19))); assert_eq!(rsi.next(), None); let v = a(a(vec![5; 3], vec![6; 129]), vec![7, 3, 7, 10, 255]); let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Run(5, 3))); assert_eq!(rsi.next(), Some(Run(6, 127))); assert_eq!(rsi.next(), Some(Norun(130, 7))); assert_eq!(rsi.next(), None); let v = a(a(vec![5; 2], vec![6; 129]), vec![7, 3, 7, 7, 255]); let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Norun(0, 2))); assert_eq!(rsi.next(), Some(Run(6, 127))); assert_eq!(rsi.next(), Some(Norun(129, 7))); assert_eq!(rsi.next(), None); let v: Vec<_> = ::std::iter::repeat(()) .flat_map(|_| (0..2)) .take(257) .collect(); let mut rsi = NorunCombineIterator::new(&v[..]); assert_eq!(rsi.next(), Some(Norun(0, 128))); assert_eq!(rsi.next(), Some(Norun(128, 128))); assert_eq!(rsi.next(), Some(Norun(256, 1))); assert_eq!(rsi.next(), None); }<|fim▁end|>
match self.prev.take() { Some(Run(c, len)) => {
<|file_name|>TestGoto.java<|end_file_name|><|fim▁begin|>package brennus.asm; import static brennus.model.ExistingType.VOID; import static brennus.model.ExistingType.existing; import static brennus.model.Protection.PUBLIC; import static junit.framework.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import brennus.Builder; import brennus.MethodBuilder; import brennus.SwitchBuilder; import brennus.ThenBuilder; import brennus.asm.TestGeneration.DynamicClassLoader; import brennus.model.FutureType; import brennus.printer.TypePrinter; import org.junit.Test; public class TestGoto { abstract public static class FSA { private List<String> states = new ArrayList<String>(); abstract public void exec(); public void state(String p) { states.add(p); } public List<String> getStates() { return states; } } abstract public static class FSA2 { private List<String> states = new ArrayList<String>(); abstract public void exec(Iterator<Integer> it); public void state(String p) { states.add(p); } public List<String> getStates() { return states; } } @Test public void testGoto() throws Exception { FutureType testClass = new Builder()<|fim▁hole|> .label("a") .exec().callOnThis("state").literal("a").endCall().endExec() .gotoLabel("c") .label("b") .exec().callOnThis("state").literal("b").endCall().endExec() .gotoLabel("end") .label("c") .exec().callOnThis("state").literal("c").endCall().endExec() .gotoLabel("b") .label("end") .endMethod() .endClass(); // new TypePrinter().print(testClass); DynamicClassLoader cl = new DynamicClassLoader(); cl.define(testClass); Class<?> generated = (Class<?>)cl.loadClass("brennus.asm.TestGoto$TestClass"); FSA fsa = (FSA)generated.newInstance(); fsa.exec(); assertEquals(Arrays.asList("a", "c", "b"), fsa.getStates()); } @Test public void testFSA() throws Exception { int[][] fsa = { {0,1,2,3}, {0,1,2,3}, {0,1,2,3}, {0,1,2,3} }; MethodBuilder m = new Builder() .startClass("brennus.asm.TestGoto$TestClass2", existing(FSA2.class)) .startMethod(PUBLIC, VOID, "exec").param(existing(Iterator.class), "it") .gotoLabel("start") .label("start"); for (int i = 0; i < fsa.length; i++) { m = m.label("s_"+i) .exec().callOnThis("state").literal("s_"+i).endCall().endExec(); SwitchBuilder<ThenBuilder<MethodBuilder>> s = m.ifExp().get("it").callNoParam("hasNext").thenBlock() .switchOn().get("it").callNoParam("next").switchBlock(); for (int j = 0; j < fsa[i].length; j++) { int to = fsa[i][j]; s = s.caseBlock(j) .gotoLabel("s_"+to) .endCase(); } m = s.endSwitch() .elseBlock() .gotoLabel("end") .endIf(); } FutureType testClass = m.label("end").endMethod().endClass(); new TypePrinter().print(testClass); Logger.getLogger("brennus").setLevel(Level.FINEST); Logger.getLogger("brennus").addHandler(new Handler() { public void publish(LogRecord record) { System.out.println(record.getMessage()); } public void flush() { System.out.flush(); } public void close() throws SecurityException { System.out.flush(); } }); DynamicClassLoader cl = new DynamicClassLoader(); cl.define(testClass); Class<?> generated = (Class<?>)cl.loadClass("brennus.asm.TestGoto$TestClass2"); FSA2 compiledFSA = (FSA2)generated.newInstance(); compiledFSA.exec(Arrays.asList(3,2,1).iterator()); assertEquals(Arrays.asList("s_0", "s_3", "s_2", "s_1"), compiledFSA.getStates()); } }<|fim▁end|>
.startClass("brennus.asm.TestGoto$TestClass", existing(FSA.class)) .startMethod(PUBLIC, VOID, "exec")
<|file_name|>multiple_files.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(rand)] use std::fs::File; use std::io::prelude::*; use std::path::Path; use std::process::Command; use std::__rand::{thread_rng, Rng}; use std::{char, env}; // creates unicode_input_multiple_files_{main,chars}.rs, where the // former imports the latter. `_chars` just contains an identifier // made up of random characters, because will emit an error message // about the ident being in the wrong place, with a span (and creating // this span used to upset the compiler). fn random_char() -> char { let mut rng = thread_rng(); // a subset of the XID_start Unicode table (ensuring that the // compiler doesn't fail with an "unrecognised token" error) let (lo, hi): (u32, u32) = match rng.gen_range(1u32, 4u32 + 1) { 1 => (0x41, 0x5a), 2 => (0xf8, 0x1ba), 3 => (0x1401, 0x166c), _ => (0x10400, 0x1044f) }; char::from_u32(rng.gen_range(lo, hi + 1)).unwrap() } fn main() { let args: Vec<String> = env::args().collect(); let rustc = &args[1]; let tmpdir = Path::new(&args[2]); let main_file = tmpdir.join("unicode_input_multiple_files_main.rs"); { let _ = File::create(&main_file).unwrap() .write_all(b"mod unicode_input_multiple_files_chars;").unwrap(); } for _ in 0..100 { { let randoms = tmpdir.join("unicode_input_multiple_files_chars.rs"); let mut w = File::create(&randoms).unwrap();<|fim▁hole|> write!(&mut w, "{}", random_char()).unwrap(); } } // rustc is passed to us with --out-dir and -L etc., so we // can't exec it directly let result = Command::new("sh") .arg("-c") .arg(&format!("{} {}", rustc, main_file.display())) .output().unwrap(); let err = String::from_utf8_lossy(&result.stderr); // positive test so that this test will be updated when the // compiler changes. assert!(err.contains("expected item, found")) } }<|fim▁end|>
for _ in 0..30 {
<|file_name|>unilateral.rs<|end_file_name|><|fim▁begin|>use crate::{ algo::TarjanScc, core::{ property::{ proxy_remove_edge_where_weight, proxy_remove_vertex, HasVertexGraph, RemoveEdge, RemoveVertex, Subgraph, Weak, }, Directed, Ensure, Graph, GraphDerefMut, }, }; use std::borrow::Borrow; /// A marker trait for graphs that are unilaterally connected. /// /// A graph is unilaterally connected if, for each pair of vertices, there /// exists at least 1 path from one of them to the other. This is contrasted /// with strongly connected graphs, where there must exist a path from either to /// the other (i.e. a path in each direction). /// /// The distinction between unilaterally and strongly connected only exists for /// directed graphs, for undirected ones, they are equal. For this reason, the /// companion ensurer graph `UnilateralGraph` only allows directed graphs. /// For undirected graph, simply use `ConnectedGraph`. /// /// For type safety reasons, the trait itself does not restrict directedness. pub trait Unilateral: Weak { } #[derive(Clone, Debug)] pub struct UnilateralGraph<C: Ensure>(C) where C::Graph: Graph<Directedness = Directed>; impl<C: Ensure> Ensure for UnilateralGraph<C> where C::Graph: Graph<Directedness = Directed>, { fn ensure_unvalidated(c: Self::Ensured, _: ()) -> Self { Self(c) } fn validate(c: &Self::Ensured, _: &()) -> bool { if let Ok(graph) = HasVertexGraph::ensure(c.graph(), ()) { // Algorithm: First use Tarjan's Strongly Connected Component (SCC) algorithm to // find SCCs and then check whether every component has an edge to the next one // in the list. Note: Tarjan's algorithm produces SCCs in reverse topological // order, so we don't need to sort, just check the first has an edge to it from // the next. let mut tarjan = TarjanScc::new(&graph); let mut scc_current = tarjan.next(); while let Some(scc1) = &scc_current { let scc_next = tarjan.next(); if let Some(scc2) = &scc_next { if scc2.reaches(scc1).is_none() { return false; } } scc_current = scc_next; } } true } } impl<C: Ensure + GraphDerefMut> RemoveVertex for UnilateralGraph<C> where C::Graph: RemoveVertex<Directedness = Directed>, { fn remove_vertex(&mut self, v: impl Borrow<Self::Vertex>) -> Result<Self::VertexWeight, ()> { proxy_remove_vertex::<UnilateralGraph<_>, _>(self.0.graph_mut(), v.borrow()) } } impl<C: Ensure + GraphDerefMut> RemoveEdge for UnilateralGraph<C> where C::Graph: RemoveEdge<Directedness = Directed>, { fn remove_edge_where_weight<F>( &mut self, source: impl Borrow<Self::Vertex>, sink: impl Borrow<Self::Vertex>, f: F, ) -> Result<Self::EdgeWeight, ()> where F: Fn(&Self::EdgeWeight) -> bool, { proxy_remove_edge_where_weight::<UnilateralGraph<_>, _, _>( self.0.graph_mut(), source.borrow(), sink.borrow(),<|fim▁hole|>} impl<C: Ensure> Weak for UnilateralGraph<C> where C::Graph: Graph<Directedness = Directed> {} impl<C: Ensure> Unilateral for UnilateralGraph<C> where C::Graph: Graph<Directedness = Directed> {} impl_ensurer! { use<C> UnilateralGraph<C>: Ensure, Unilateral, Weak, RemoveVertex, RemoveEdge, // A new vertex would be unconnected to the rest of the graph NewVertex as (self.0) : C where C::Graph: Graph<Directedness=Directed> }<|fim▁end|>
f, ) }
<|file_name|>bounds.rs<|end_file_name|><|fim▁begin|>// This trait implements the print marker: `{:?}`. use std::fmt::Debug; trait HasArea { fn area(&self) -> f64; } impl HasArea for Rectangle { fn area(&self) -> f64 { self.length * self.height } } #[derive(Debug)] struct Rectangle { length: f64, height: f64 } #[allow(dead_code)] struct Triangle { length: f64, height: f64 } // The generic type `T` must implement `Debug`. It works no matter what the concrete type is. fn print_debug<T: Debug>(t: &T) { println!("{:?}", t); } // `T` must implement `HasArea`. Any function with this bound can access // `HasArea`'s `area` function. fn area<T: HasArea>(t: &T) -> f64 { t.area() } fn main() { let rectangle = Rectangle { length: 3.0, height: 4.0 }; let _triangle = Triangle { length: 3.0, height: 4.0 }; print_debug(&rectangle);<|fim▁hole|> // ^ Try this: uncomment the statements above. // | Error: `Debug` or `HasArea` is not implemented. }<|fim▁end|>
println!("Area: {}", area(&rectangle)); //print_debug(&_triangle); //println!("Area: {}", area(&_triangle));
<|file_name|>test_middleware.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob import webob.dec import webob.exc from nova.api import ec2 from nova import context from nova import exception from nova import flags from nova import test from nova import utils from xml.etree.ElementTree import fromstring as xml_to_tree FLAGS = flags.FLAGS @webob.dec.wsgify def conditional_forbid(req): """Helper wsgi app returns 403 if param 'die' is 1.""" if 'die' in req.params and req.params['die'] == '1': raise webob.exc.HTTPForbidden() return 'OK' class LockoutTestCase(test.TestCase): """Test case for the Lockout middleware.""" def setUp(self): # pylint: disable=C0103 super(LockoutTestCase, self).setUp() utils.set_time_override() self.lockout = ec2.Lockout(conditional_forbid) def tearDown(self): # pylint: disable=C0103 utils.clear_time_override() super(LockoutTestCase, self).tearDown() def _send_bad_attempts(self, access_key, num_attempts=1): """Fail x.""" for i in xrange(num_attempts): req = webob.Request.blank('/?AWSAccessKeyId=%s&die=1' % access_key) self.assertEqual(req.get_response(self.lockout).status_int, 403) def _is_locked_out(self, access_key):<|fim▁hole|> def test_lockout(self): self._send_bad_attempts('test', FLAGS.lockout_attempts) self.assertTrue(self._is_locked_out('test')) def test_timeout(self): self._send_bad_attempts('test', FLAGS.lockout_attempts) self.assertTrue(self._is_locked_out('test')) utils.advance_time_seconds(FLAGS.lockout_minutes * 60) self.assertFalse(self._is_locked_out('test')) def test_multiple_keys(self): self._send_bad_attempts('test1', FLAGS.lockout_attempts) self.assertTrue(self._is_locked_out('test1')) self.assertFalse(self._is_locked_out('test2')) utils.advance_time_seconds(FLAGS.lockout_minutes * 60) self.assertFalse(self._is_locked_out('test1')) self.assertFalse(self._is_locked_out('test2')) def test_window_timeout(self): self._send_bad_attempts('test', FLAGS.lockout_attempts - 1) self.assertFalse(self._is_locked_out('test')) utils.advance_time_seconds(FLAGS.lockout_window * 60) self._send_bad_attempts('test', FLAGS.lockout_attempts - 1) self.assertFalse(self._is_locked_out('test')) class ExecutorTestCase(test.TestCase): def setUp(self): super(ExecutorTestCase, self).setUp() self.executor = ec2.Executor() def _execute(self, invoke): class Fake(object): pass fake_ec2_request = Fake() fake_ec2_request.invoke = invoke fake_wsgi_request = Fake() fake_wsgi_request.environ = { 'nova.context': context.get_admin_context(), 'ec2.request': fake_ec2_request, } return self.executor(fake_wsgi_request) def _extract_message(self, result): tree = xml_to_tree(result.body) return tree.findall('./Errors')[0].find('Error/Message').text def test_instance_not_found(self): def not_found(context): raise exception.InstanceNotFound(instance_id=5) result = 
self._execute(not_found) self.assertIn('i-00000005', self._extract_message(result)) def test_snapshot_not_found(self): def not_found(context): raise exception.SnapshotNotFound(snapshot_id=5) result = self._execute(not_found) self.assertIn('snap-00000005', self._extract_message(result)) def test_volume_not_found(self): def not_found(context): raise exception.VolumeNotFound(volume_id=5) result = self._execute(not_found) self.assertIn('vol-00000005', self._extract_message(result))<|fim▁end|>
"""Sends a test request to see if key is locked out.""" req = webob.Request.blank('/?AWSAccessKeyId=%s' % access_key) return (req.get_response(self.lockout).status_int == 403)
<|file_name|>TabBarIcon.js<|end_file_name|><|fim▁begin|>import React, { PureComponent } from 'react'; import { Animated, View, StyleSheet } from 'react-native'; var babelPluginFlowReactPropTypes_proptype_Style = require('../../TypeDefinition').babelPluginFlowReactPropTypes_proptype_Style || require('prop-types').any; <|fim▁hole|>var babelPluginFlowReactPropTypes_proptype_NavigationScreenProp = require('../../TypeDefinition').babelPluginFlowReactPropTypes_proptype_NavigationScreenProp || require('prop-types').any; var babelPluginFlowReactPropTypes_proptype_NavigationState = require('../../TypeDefinition').babelPluginFlowReactPropTypes_proptype_NavigationState || require('prop-types').any; var babelPluginFlowReactPropTypes_proptype_NavigationAction = require('../../TypeDefinition').babelPluginFlowReactPropTypes_proptype_NavigationAction || require('prop-types').any; var babelPluginFlowReactPropTypes_proptype_TabScene = require('./TabView').babelPluginFlowReactPropTypes_proptype_TabScene || require('prop-types').any; export default class TabBarIcon extends PureComponent { render() { const { position, scene, navigation, activeTintColor, inactiveTintColor, style } = this.props; const { route, index } = scene; const { routes } = navigation.state; // Prepend '-1', so there are always at least 2 items in inputRange const inputRange = [-1, ...routes.map((x, i) => i)]; const activeOpacity = position.interpolate({ inputRange, outputRange: inputRange.map(i => i === index ? 1 : 0) }); const inactiveOpacity = position.interpolate({ inputRange, outputRange: inputRange.map(i => i === index ? 0 : 1) }); // We render the icon twice at the same position on top of each other: // active and inactive one, so we can fade between them. return <View style={style}> <Animated.View style={[styles.icon, { opacity: activeOpacity }]}> {this.props.renderIcon({ route, index, focused: true, tintColor: activeTintColor })} </Animated.View> <Animated.View style={[styles.icon, { opacity: inactiveOpacity }]}> {this.props.renderIcon({ route, index, focused: false, tintColor: inactiveTintColor })} </Animated.View> </View>; } } TabBarIcon.propTypes = { activeTintColor: require('prop-types').string.isRequired, inactiveTintColor: require('prop-types').string.isRequired, scene: babelPluginFlowReactPropTypes_proptype_TabScene, position: require('prop-types').any.isRequired, navigation: babelPluginFlowReactPropTypes_proptype_NavigationScreenProp, renderIcon: require('prop-types').func.isRequired, style: babelPluginFlowReactPropTypes_proptype_Style }; TabBarIcon.propTypes = { activeTintColor: require('prop-types').string.isRequired, inactiveTintColor: require('prop-types').string.isRequired, scene: babelPluginFlowReactPropTypes_proptype_TabScene, position: require('prop-types').any.isRequired, navigation: babelPluginFlowReactPropTypes_proptype_NavigationScreenProp, renderIcon: require('prop-types').func.isRequired, style: babelPluginFlowReactPropTypes_proptype_Style }; const styles = StyleSheet.create({ icon: { // We render the icon twice at the same position on top of each other: // active and inactive one, so we can fade between them: // Cover the whole iconContainer: position: 'absolute', top: 0, left: 0, right: 0, bottom: 0, alignItems: 'center', justifyContent: 'center' } });<|fim▁end|>
<|file_name|>0009_simple_multiplication_with_stdin.rs<|end_file_name|><|fim▁begin|>// Compute product of two numbers while accepting value from standard input. use std::io; // imports io library from standard library fn main() { let mut a = String::new(); //creates new, empty String let mut b = String::new(); <|fim▁hole|> let c: u32; println!("Enter value a:"); io::stdin().read_line(&mut a) .ok() .expect("Failed to read value"); println!("Enter value b:"); io::stdin().read_line(&mut b) .ok() .expect("Failed to read value"); //Shadowing lets us to re-use the old name. // parse() method on String converts the String into number let a: u32 = a.trim().parse() .ok() .expect("Please type a number"); let b: u32 = b.trim().parse() .ok() .expect("Please type a number"); c = a * b; println!("Product of {} * {} is {} ", a, b, c); }<|fim▁end|>
<|file_name|>rxncon2boolnet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 import logging import os import sys import click import click_log import colorama from typing import Optional from rxncon.input.excel_book.excel_book import ExcelBook from rxncon.simulation.boolean.boolean_model import SmoothingStrategy, KnockoutStrategy, OverexpressionStrategy from rxncon.simulation.boolean.boolnet_from_boolean_model import QuantitativeContingencyStrategy, \ boolnet_strs_from_rxncon <|fim▁hole|> def write_boolnet(excel_filename: str, smoothing_strategy: SmoothingStrategy, knockout_strategy: KnockoutStrategy, overexpression_strategy: OverexpressionStrategy, k_plus_strategy: QuantitativeContingencyStrategy, k_minus_strategy: QuantitativeContingencyStrategy, base_name: Optional[str] = None): if not base_name: base_name = os.path.splitext(os.path.basename(excel_filename))[0] base_path = os.path.dirname(excel_filename) boolnet_model_filename = os.path.join(base_path, '{0}.boolnet'.format(base_name)) boolnet_symbol_filename = os.path.join(base_path, '{0}_symbols.csv'.format(base_name)) boolnet_initial_val_filename = os.path.join(base_path, '{0}_initial_vals.csv'.format(base_name)) print('Reading in Excel file [{}] ...'.format(excel_filename)) excel_book = ExcelBook(excel_filename) rxncon_system = excel_book.rxncon_system print('Constructed rxncon system: [{} reactions], [{} contingencies]' .format(len(rxncon_system.reactions), len(rxncon_system.contingencies))) print('Generating BoolNet output using smoothing strategy [{}] ...'.format(smoothing_strategy.name)) model_str, symbol_str, initial_val_str = boolnet_strs_from_rxncon(rxncon_system, smoothing_strategy, knockout_strategy, overexpression_strategy, k_plus_strategy, k_minus_strategy) print('Writing BoolNet model file [{}] ...'.format(boolnet_model_filename)) with open(boolnet_model_filename, mode='w') as f: f.write(model_str) print('Writing BoolNet symbol file [{}] ...'.format(boolnet_symbol_filename)) with open(boolnet_symbol_filename, mode='w') as f: f.write(symbol_str) print('Writing BoolNet initial value file [{}] ...'.format(boolnet_initial_val_filename)) with open(boolnet_initial_val_filename, mode='w') as f: f.write(initial_val_str) valid_smoothing_strategies = [strategy.value for strategy in SmoothingStrategy.__members__.values()] # type: ignore valid_knockout_strategies = [strategy.value for strategy in KnockoutStrategy.__members__.values()] # type: ignore valid_overexpression_strategies = [strategy.value for strategy in OverexpressionStrategy.__members__.values()] # type: ignore valid_quantitative_contingency_strategies = [strategy.value for strategy in QuantitativeContingencyStrategy.__members__.values()] # type: ignore def validate_smoothing_strategy(ctx, param, value): try: SmoothingStrategy(value) return value except ValueError: raise click.BadParameter('Valid strategies are: {}'.format(', '.join(valid_smoothing_strategies))) def validate_knockout_strategy(ctx, param, value): try: KnockoutStrategy(value) return value except ValueError: raise click.BadParameter('Valid strategies are: {}'.format(', '.join(valid_knockout_strategies))) def validate_overexpression_strategy(ctx, param, value): try: OverexpressionStrategy(value) return value except ValueError: raise click.BadParameter('Valid strategies are: {}'.format(', '.join(valid_overexpression_strategies))) def validate_quantitative_contingency_strategy(ctx, param, value): try: QuantitativeContingencyStrategy(value) return value except ValueError: raise click.BadParameter( 'Valid 
strategies are: {}'.format(', '.join(valid_quantitative_contingency_strategies))) @click.command() @click.option('--smoothing', default='smooth_production_sources', help='Smoothing strategy. Default: smooth_production_sources. Choices: {}'.format( ', '.join(valid_smoothing_strategies)), callback=validate_smoothing_strategy) @click.option('--knockout', default='no_knockout', help='Generate knockouts. Default: no_knockout. Choices: {}'.format(', '.join(valid_knockout_strategies)), callback=validate_knockout_strategy) @click.option('--overexpression', default='no_overexpression', help='Generate overexpressions. Default: no_overexpression. Choices: {}'.format( ', '.join(valid_overexpression_strategies)), callback=validate_overexpression_strategy) @click.option('--k_plus', default='strict', help='Strategy for handling k+ contingencies. Default: strict. Choices: {}'.format( ', '.join(valid_quantitative_contingency_strategies)), callback=validate_quantitative_contingency_strategy) @click.option('--k_minus', default='strict', help='Strategy for handling k- contingencies. Default: strict. Choices: {}'.format( ', '.join(valid_quantitative_contingency_strategies)), callback=validate_quantitative_contingency_strategy) @click.option('--output', default=None, help='Base name for output files. Default: \'fn\' for input file \'fn.xls\'') @click.argument('excel_file') @click_log.simple_verbosity_option(default='WARNING') @click_log.init() def run(overexpression, knockout, smoothing, output, excel_file, k_plus, k_minus): smoothing_strategy = SmoothingStrategy(smoothing) knockout_strategy = KnockoutStrategy(knockout) overexpression_strategy = OverexpressionStrategy(overexpression) k_plus_strategy = QuantitativeContingencyStrategy(k_plus) k_minus_strategy = QuantitativeContingencyStrategy(k_minus) write_boolnet(excel_file, smoothing_strategy, knockout_strategy, overexpression_strategy, k_plus_strategy, k_minus_strategy, output) def setup_logging_colors(): click_log.ColorFormatter.colors = { 'error': dict(fg='red'), 'exception': dict(fg='red'), 'critical': dict(fg='red'), 'debug': dict(fg='yellow'), 'warning': dict(fg='yellow'), 'info': dict(fg='yellow') } def format(self, record): if not record.exc_info: level = record.levelname.lower() if level in self.colors: padding_size = 7 # Assume just INFO / DEBUG entries. prefix = click.style('{}: '.format(level).ljust(padding_size), **self.colors[level]) prefix += click.style('{} '.format(record.name), fg='blue') msg = record.msg if isinstance(msg, bytes): msg = msg.decode(sys.getfilesystemencoding(), 'replace') elif not isinstance(msg, str): msg = str(msg) record.msg = '\n'.join(prefix + x for x in msg.splitlines()) return logging.Formatter.format(self, record) click_log.ColorFormatter.format = format if __name__ == '__main__': try: setup_logging_colors() run() except Exception as e: print('ERROR: {}\n{}\nPlease re-run this command with the \'-v DEBUG\' option.'.format(type(e), e))<|fim▁end|>
colorama.init() LOGGER = logging.getLogger(__name__)
<|file_name|>register_test.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 The btcsuite developers // Copyright (c) 2015-2020 The Decred developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package dcrjson import ( "errors" "reflect" "sort" "testing" ) // Register methods for testing purposes. This does not conflict with // registration performed by external packages as they are done in separate // builds. func init() { MustRegister("getblock", (*testGetBlockCmd)(nil), 0) MustRegister("getblockcount", (*testGetBlockCountCmd)(nil), 0) MustRegister("session", (*testSessionCmd)(nil), UFWebsocketOnly) MustRegister("help", (*testHelpCmd)(nil), 0) } type testGetBlockCmd struct { Hash string Verbose *bool `jsonrpcdefault:"true"` VerboseTx *bool `jsonrpcdefault:"false"` } type testGetBlockCountCmd struct{} type testSessionCmd struct{} type testHelpCmd struct { Command *string } // TestUsageFlagStringer tests the stringized output for the UsageFlag type. func TestUsageFlagStringer(t *testing.T) { t.Parallel() tests := []struct { in UsageFlag want string }{ {0, "0x0"}, {1, "0x0"}, // was UFWalletOnly {UFWebsocketOnly, "UFWebsocketOnly"}, {UFNotification, "UFNotification"}, {UFWebsocketOnly | UFNotification, "UFWebsocketOnly|UFNotification"}, {1 | UFWebsocketOnly | UFNotification | (1 << 31), "UFWebsocketOnly|UFNotification|0x80000000"}, } // Detect additional usage flags that don't have the stringer added. numUsageFlags := 0 highestUsageFlagBit := highestUsageFlagBit for highestUsageFlagBit > 1 { numUsageFlags++ highestUsageFlagBit >>= 1 } if len(tests)-3 != numUsageFlags { t.Errorf("It appears a usage flag was added without adding " + "an associated stringer test") } t.Logf("Running %d tests", len(tests)) for i, test := range tests { result := test.in.String() if result != test.want { t.Errorf("String #%d\n got: %s want: %s", i, result, test.want) continue } } } // TestRegisterCmdErrors ensures the RegisterCmd function returns the expected // error when provided with invalid types. 
func TestRegisterCmdErrors(t *testing.T) { t.Parallel() tests := []struct { name string method string cmdFunc func() interface{} flags UsageFlag err error }{ { name: "duplicate method", method: "getblock", cmdFunc: func() interface{} { return struct{}{} }, err: ErrDuplicateMethod, }, { name: "invalid usage flags", method: "registertestcmd", cmdFunc: func() interface{} { return 0 }, flags: highestUsageFlagBit, err: ErrInvalidUsageFlags, }, { name: "invalid type", method: "registertestcmd", cmdFunc: func() interface{} { return 0 }, err: ErrInvalidType, }, { name: "invalid type 2", method: "registertestcmd", cmdFunc: func() interface{} { return &[]string{} }, err: ErrInvalidType, }, { name: "embedded field", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ int } return (*test)(nil) }, err: ErrEmbeddedType, }, { name: "unexported field", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ a int } return (*test)(nil) }, err: ErrUnexportedField, }, { name: "unsupported field type 1", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A **int } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "unsupported field type 2", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A chan int } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "unsupported field type 3", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A complex64 } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "unsupported field type 4", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A complex128 } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "unsupported field type 5", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A func() } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "unsupported field type 6", method: "registertestcmd", cmdFunc: func() interface{} { type test struct{ A interface{} } return (*test)(nil) }, err: ErrUnsupportedFieldType, }, { name: "required after optional", method: "registertestcmd", cmdFunc: func() interface{} { type test struct { A *int B int }<|fim▁hole|> { name: "non-optional with default", method: "registertestcmd", cmdFunc: func() interface{} { type test struct { A int `jsonrpcdefault:"1"` } return (*test)(nil) }, err: ErrNonOptionalDefault, }, { name: "mismatched default", method: "registertestcmd", cmdFunc: func() interface{} { type test struct { A *int `jsonrpcdefault:"1.7"` } return (*test)(nil) }, err: ErrMismatchedDefault, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { err := Register(test.method, test.cmdFunc(), test.flags) if !errors.Is(err, test.err) { t.Errorf("Test #%d (%s): mismatched error - got %v, "+ "want %v", i, test.name, err, test.err) continue } } } // TestMustRegisterCmdPanic ensures the MustRegisterCmd function panics when // used to register an invalid type. func TestMustRegisterCmdPanic(t *testing.T) { t.Parallel() // Setup a defer to catch the expected panic to ensure it actually // paniced. defer func() { if err := recover(); err == nil { t.Error("MustRegisterCmd did not panic as expected") } }() // Intentionally try to register an invalid type to force a panic. MustRegister("panicme", 0, 0) } // TestRegisteredCmdMethods tests the RegisteredCmdMethods function ensure it // works as expected. 
func TestRegisteredCmdMethods(t *testing.T) { t.Parallel() // Ensure the registered methods for plain string methods are returned. methods := RegisteredMethods("") if len(methods) == 0 { t.Fatal("RegisteredCmdMethods: no methods") } // Ensure the returned methods are sorted. sortedMethods := make([]string, len(methods)) copy(sortedMethods, methods) sort.Strings(sortedMethods) if !reflect.DeepEqual(sortedMethods, methods) { t.Fatal("RegisteredCmdMethods: methods are not sorted") } }<|fim▁end|>
return (*test)(nil) }, err: ErrNonOptionalField, },
<|file_name|>eval_assignment.rs<|end_file_name|><|fim▁begin|>use crate::helpers::{values::*, *}; use ostrov::errors::RuntimeError::*; #[test] fn returns_expression() { assert_eval_val( "(define x 0) (set! x (+ x 1))", unspecified(), ); } #[test] fn overwrites_variables() {<|fim▁hole|> x", "1", ); } #[test] fn overwrites_variables_on_upper_scopes() { assert_eval( "(define x 0) (define (f) (set! x (+ x 1))) (f) (f) (f) x", "3", ); } #[test] fn overwrites_variables_in_captured_scopes() { assert_eval( "(define (gen-counter) (define counter 0) (lambda () (set! counter (+ counter 1)) counter)) (define count (gen-counter)) (count) (count) (count)", "3", ); } #[test] fn malformed_variable_name() { assert_eval_err("(set! 3 3)", MalformedExpression); } #[test] fn unknown_variable() { assert_eval_err("(set! x 3)", UnboundVariable("x".into())); } #[test] fn wrong_arguments_number() { assert_eval_err("(set!)", BadArity(Some("set!".into()))); assert_eval_err("(set! x)", BadArity(Some("set!".into()))); assert_eval_err("(set! x 2 3)", BadArity(Some("set!".into()))); }<|fim▁end|>
assert_eval( "(define x 0) (set! x (+ x 1))
<|file_name|>flowtime.js<|end_file_name|><|fim▁begin|>/*! * Flowtime.js * http://marcolago.com/flowtime-js/ * MIT licensed * * Copyright (C) 2012-2013 Marco Lago, http://marcolago.com */ var Flowtime = (function () { /** * test if the device is touch enbled */ var isTouchDevice = 'ontouchstart' in document.documentElement; /** * test if the HTML History API's where available * this value can be overridden to disable the History API */ var pushHistory = window.history.pushState; /** * application constants */ var SECTION_CLASS = "ft-section"; var SECTION_SELECTOR = "." + SECTION_CLASS; var PAGE_CLASS = "ft-page"; var PAGE_SELECTOR = "." + PAGE_CLASS; var FRAGMENT_CLASS = "ft-fragment"; var FRAGMENT_SELECTOR = "." + FRAGMENT_CLASS; var FRAGMENT_REVEALED_CLASS = "revealed"; var FRAGMENT_ACTUAL_CLASS = "actual"; var FRAGMENT_REVEALED_TEMP_CLASS = "revealed-temp"; var DEFAULT_PROGRESS_CLASS = "ft-default-progress"; var DEFAULT_PROGRESS_SELECTOR = "." + DEFAULT_PROGRESS_CLASS; var SECTION_THUMB_CLASS = "ft-section-thumb"; var SECTION_THUMB_SELECTOR = "." + SECTION_THUMB_CLASS; var PAGE_THUMB_CLASS = "ft-page-thumb"; var PAGE_THUMB_SELECTOR = "." + PAGE_THUMB_CLASS; /** * events */ var NAVIGATION_EVENT = "flowtimenavigation"; /** * application variables */ var ftContainer = document.querySelector(".flowtime"); // cached reference to .flowtime element var html = document.querySelector("html"); // cached reference to html element var body = document.querySelector("body"); // cached reference to body element var useHash = false; // if true the engine uses only the hash change logic var currentHash = ""; // the hash string of the current section / page pair var pastIndex = { section:0, page:0 }; // section and page indexes of the past page var isOverview = false; // Boolean status for the overview var siteName = document.title; // cached base string for the site title var overviewCachedDest; // caches the destination before performing an overview zoom out for navigation back purposes var overviewFixedScaleFactor = 22; // fixed scale factor for overview variant var defaultProgress = null; // default progress bar reference var _fragmentsOnSide = false; // enable or disable fragments navigation when navigating from sections var _fragmentsOnBack = true; // shows or hide fragments when navigating back to a page var _slideInPx = false; // calculate the slide position in px instead of %, use in case the % mode does not works var _sectionsSlideToTop = false; // if true navigation with right or left arrow go to the first page of the section var _useOverviewVariant = false; // use an alternate overview layout and navigation (experimental - useful in case of rendering issues) var _twoStepsSlide = false; // not yet implemented! 
slides up or down before, then slides to the page var _showProgress = false; // show or hide the default progress indicator (leave false if you want to implement a custom progress indicator) var _parallaxInPx = false; // if false the parallax movement is calulated in % values, if true in pixels var defaultParallaxX = 50; // the default parallax horizontal value used when no data-parallax value were specified var defaultParallaxY = 50; // the default parallax vertical value used when no data-parallax value were specified var parallaxEnabled = document.querySelector(".parallax") != null; // performance tweak, if there is no elements with .parallax class disable the dom manipulation to boost performances /** * test the base support */ var browserSupport = true; try { var htmlClass = document.querySelector("html").className.toLowerCase(); if (htmlClass.indexOf("ie7") != -1 || htmlClass.indexOf("ie8") != -1 || htmlClass.indexOf("lt-ie9") != -1 ) { browserSupport = false; } } catch(e) { browserSupport = false; } /** * add "ft-absolute-nav" hook class to body * to set the CSS properties * needed for application scrolling */ if (browserSupport) { Brav1Toolbox.addClass(body, "ft-absolute-nav"); } /* ## ## ### ## ## #### ###### ### ######## #### ####### ## ## ## ## ### ######## ######## #### ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ### ### ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## #### #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ######## ## ### ## #### ######### ## ## ## ## ## ######### ## ## ## ## ## #### ## ## ######### ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### #### ###### ## ## ## #### ####### ## ## ## ## ## ## ## ## ## #### ## ## */ /** * NavigationMatrix is the Object who store the navigation grid structure * and which expose all the methods to get and set the navigation destinations */ var NavigationMatrix = (function () { var sections; // HTML Collection of .flowtime > .ft-section elements var sectionsArray; // multi-dimensional array containing the pages' array var allPages; // HTML Collection of .flowtime .ft-page elements var fragments; // HTML Collection of .fragment elements var fragmentsArray; // multi-dimensional array containing the per page fragments' array var fr = []; // multi-dimensional array containing the index of the current active fragment per page var parallaxElements = []; // array containing all elements with parrallax var sectionsLength = 0; // cached total number of .ft-section elements var pagesLength = 0; // cached max number of .page elements var pagesTotalLength = 0; // cached total number of .page elements var p = 0; // index of the current section viewved or higlighted var sp = 0; // index of the current page viewved or higlighted var pCache = 0; // cache index of the current section var spCache = 0; // cache index of the current page var hilited; // the current page higlighted, useful for overview mode /** * update the navigation matrix array * this is a publicy exposed method * useful for updating the matrix whne the site structure changes at runtime */ function _updateMatrix() { sectionsArray = []; parallaxElements = []; fragments = document.querySelectorAll(FRAGMENT_SELECTOR); fragmentsArray = []; sections = ftContainer.querySelectorAll(".flowtime > " + SECTION_SELECTOR); allPages = ftContainer.querySelectorAll(".flowtime " + PAGE_SELECTOR); // for (var i = 0; i < 
sections.length; i++) { var pagesArray = []; var section = sections[i]; fragmentsArray[i] = []; fr[i] = []; // if (section.getAttribute("data-id")) { section.setAttribute("data-id", "__" + unsafeAttr(section.getAttribute("data-id"))); // prevents attributes starting with a number } section.setAttribute("data-prog", "__" + (i + 1)); section.index = i; section.setAttribute("id", ""); // pages = section.querySelectorAll(PAGE_SELECTOR); pagesTotalLength += pages.length; pagesLength = Math.max(pagesLength, pages.length); // sets the pages max number for overview purposes for (var ii = 0; ii < pages.length; ii++) { var _sp = pages[ii]; if (_sp.getAttribute("data-id")) { _sp.setAttribute("data-id", "__" + unsafeAttr(_sp.getAttribute("data-id"))); // prevents attributes starting with a number } _sp.setAttribute("data-prog", "__" + (ii + 1)); _sp.index = ii; _sp.setAttribute("id", ""); // set data-title attributes to pages that doesn't have one and have at least an h1 heading element inside if (!_sp.getAttribute("data-title")) { var heading = _sp.querySelector("h1"); if (heading != null && heading.textContent.lenght != "") { _sp.setAttribute("data-title", heading.textContent); } } // store parallax data on elements setParallax(_sp, i, ii); // pagesArray.push(_sp); // var subFragments = _sp.querySelectorAll(FRAGMENT_SELECTOR); fragmentsArray[i][ii] = subFragments; fr[i][ii] = -1; } sectionsArray.push(pagesArray); } // sectionsLength = sections.length; // sets the sections max number for overview purposes resetScroll(); _updateOffsets(); } /** * stores parallax data directly on the dome elements with a data-parallax attribute * data are stored on a multi dimensional array ordered per section and per page to easily manage the position */ function setParallax(page, sectionIndex, pageIndex) { if (parallaxEnabled) { if (parallaxElements[sectionIndex] == undefined) { parallaxElements[sectionIndex] = []; } if (parallaxElements[sectionIndex][pageIndex] == undefined) { parallaxElements[sectionIndex][pageIndex] = []; } // var pxs = page.querySelectorAll(".parallax"); if (pxs.length > 0) { for (var i = 0; i < pxs.length; i++) { var el = pxs[i]; var pX = defaultParallaxX; var pY = defaultParallaxY; if (el.getAttribute("data-parallax") != null) { var pValues = el.getAttribute("data-parallax").split(","); pX = pY = pValues[0]; if (pValues.length > 1) { pY = pValues[1]; } } el.pX = pX; el.pY = pY; parallaxElements[sectionIndex][pageIndex].push(el); } } } } function _getParallaxElements() { return parallaxElements; } /** * cache the position for every page, useful when navigatin in pixels or when attaching a page after scrolling */ function _updateOffsets () { for (var i = 0; i < allPages.length; i++) { var _sp = allPages[i]; _sp.x = _sp.offsetLeft + _sp.parentNode.offsetLeft; _sp.y = _sp.offsetTop + _sp.parentNode.offsetTop; } }; /** * returns the next section in navigation * @param top Boolean if true the next page will be the first page in the next array; if false the next section will be the same index page in the next array * @param fos Boolean value of _fragmentsOnSide * @param io Boolean value of isOverview */ function _getNextSection(top, fos, io) { var sub = sp; var toTop = top == !_sectionsSlideToTop; if (fos == true && fragmentsArray[p][sp].length > 0 && fr[p][sp] < fragmentsArray[p][sp].length - 1 && toTop != true && io == false) { _showFragment(p, sp); } else { sub = 0; if (toTop == true && p + 1 < sectionsArray.length - 1) { sub = 0; } else if (toTop != true || _fragmentsOnBack == true || p + 1 > 
sectionsArray.length - 1) { sub = sp; } p = Math.min(p + 1, sectionsArray.length - 1); return _getNearestPage(sectionsArray[p], sub, io); } return hiliteOrNavigate(sectionsArray[p][sp], io); } /** * returns the prev section in navigation * @param top Boolean if true the next section will be the first page in the prev array; if false the prev section will be the same index page in the prev array * @param fos Boolean value of _fragmentsOnSide * @param io Boolean value of isOverview */ function _getPrevSection(top, fos, io) { var sub = sp; var toTop = top == !_sectionsSlideToTop; if (fos == true && fragmentsArray[p][sp].length > 0 && fr[p][sp] >= 0 && toTop != true && io == false) { _hideFragment(p, sp); } else { var sub = 0; sub = 0; if (toTop == true && p - 1 >= 0) { sub = 0; } else if (toTop != true || _fragmentsOnBack == true || p - 1 < 0) { sub = sp; } p = Math.max(p - 1, 0); return _getNearestPage(sectionsArray[p], sub, io); } return hiliteOrNavigate(sectionsArray[p][sp], io); } /** * checks if there is a valid page in the current section array * if the passed page is not valid thne check which is the first valid page in the array * then returns the page * @param p Number the section index in the sections array * @param sub Number the page index in the sections->page array * @param io Boolean value of isOverview */ function _getNearestPage(pg, sub, io) { var nsp = pg[sub]; if (nsp == undefined) { for (var i = sub; i >= 0; i--) { if (pg[i] != undefined) { nsp = pg[i]; sub = i; break; } } } sp = sub; if (!isOverview) { _updateFragments(); } return hiliteOrNavigate(nsp, io); } /** * returns the next page in navigation * if the next page is not in the current section array returns the first page in the next section array * @param jump Boolean if true jumps over the fragments directly to the next page * @param io Boolean value of isOverview */ function _getNextPage(jump, io) { if (fragmentsArray[p][sp].length > 0 && fr[p][sp] < fragmentsArray[p][sp].length - 1 && jump != true && io == false) { _showFragment(p, sp); } else { if (sectionsArray[p][sp + 1] == undefined && sectionsArray[p + 1] != undefined) { p += 1; sp = 0; } else { sp = Math.min(sp + 1, sectionsArray[p].length - 1); } } return hiliteOrNavigate(sectionsArray[p][sp], io); } /** * returns the prev page in navigation * if the prev page is not in the current section array returns the last page in the prev section array * @param jump Boolean if true jumps over the fragments directly to the prev page * @param io Boolean value of isOverview */ function _getPrevPage(jump, io) { if (fragmentsArray[p][sp].length > 0 && fr[p][sp] >= 0 && jump != true && io == false) { _hideFragment(p, sp); } else { if (sp == 0 && sectionsArray[p - 1] != undefined) { p -= 1; sp = sectionsArray[p].length - 1; } else { sp = Math.max(sp - 1, 0); } } return hiliteOrNavigate(sectionsArray[p][sp], io); } /** * returns the destination page or * if the application is in overview mode * switch the active page without returning a destination * @param d HTMLElement the candidate destination * @param io Boolean value of isOverview */ function hiliteOrNavigate(d, io) { if (io == true) { _switchActivePage(d); return; } else { return d; } } /** * show a single fragment inside the specified section / page * the fragment index parameter is optional, if passed force the specified fragment to show * otherwise the method shows the current fragment * @param fp Number the section index * @param fsp Number the page index * @param f Number the fragment index (optional) */ function 
_showFragment(fp, fsp, f) { if (f != undefined) { fr[fp][fsp] = f; } else { f = fr[fp][fsp] += 1; } for (var i = 0; i <= f; i++) { Brav1Toolbox.addClass(fragmentsArray[fp][fsp][i], FRAGMENT_REVEALED_CLASS); Brav1Toolbox.removeClass(fragmentsArray[fp][fsp][i], FRAGMENT_ACTUAL_CLASS); } Brav1Toolbox.addClass(fragmentsArray[fp][fsp][f], FRAGMENT_ACTUAL_CLASS); } /** * hide a single fragment inside the specified section / page * the fragment index parameter is optional, if passed force the specified fragment to hide * otherwise the method hides the current fragment * @param fp Number the section index * @param fsp Number the page index * @param f Number the fragment index (optional) */ function _hideFragment(fp, fsp, f) { if (f != undefined) { fr[fp][fsp] = f; } else { f = fr[fp][fsp]; } for (var i = 0; i < fragmentsArray[fp][fsp].length; i++) { if (i >= f) { Brav1Toolbox.removeClass(fragmentsArray[fp][fsp][i], FRAGMENT_REVEALED_CLASS); Brav1Toolbox.removeClass(fragmentsArray[fp][fsp][i], FRAGMENT_REVEALED_TEMP_CLASS); } Brav1Toolbox.removeClass(fragmentsArray[fp][fsp][i], FRAGMENT_ACTUAL_CLASS); } f -= 1; if (f >= 0) { Brav1Toolbox.addClass(fragmentsArray[fp][fsp][f], FRAGMENT_ACTUAL_CLASS); } fr[fp][fsp] = f; } /** * show all the fragments or the fragments in the specified page * adds a temporary class which does not override the current status of fragments */ function _showFragments() { for (var i = 0; i < fragments.length; i++) { Brav1Toolbox.addClass(fragments[i], FRAGMENT_REVEALED_TEMP_CLASS); } } /** * hide all the fragments or the fragments in the specified page * removes a temporary class which does not override the current status of fragments */ function _hideFragments() { for (var i = 0; i < fragments.length; i++) { Brav1Toolbox.removeClass(fragments[i], FRAGMENT_REVEALED_TEMP_CLASS); } } function _updateFragments() { for (var ip = 0; ip < fragmentsArray.length; ip++) { var frp = fragmentsArray[ip]; for (var isp = 0; isp < frp.length; isp++) { var frsp = frp[isp]; if (frsp.length > 0) { // there are fragments if (ip > p) { // previous section for (var f = frsp.length - 1; f >= 0; f--) { _hideFragment(ip, isp, f); } } else if (ip < p) { // next section for (var f = 0; f < frsp.length; f++) { _showFragment(ip, isp, f); } } else if (ip == p) { // same section if (isp > sp) { // previous page for (var f = frsp.length - 1; f >= 0; f--) { _hideFragment(ip, isp, f); } } else if (isp < sp) { // next page for (var f = 0; f < frsp.length; f++) { _showFragment(ip, isp, f); } } else if (isp == sp) { // same page if (_fragmentsOnBack == true && (pastIndex.section > NavigationMatrix.getPageIndex().section || pastIndex.page > NavigationMatrix.getPageIndex().page)) { for (var f = 0; f < frsp.length; f++) { _showFragment(ip, isp, f); } } else { for (var f = frsp.length - 1; f >= 0; f--) { _hideFragment(ip, isp, f); } } if (_fragmentsOnBack == false) { fr[ip][isp] = -1 } else { if (pastIndex.section > NavigationMatrix.getPageIndex().section || pastIndex.page > NavigationMatrix.getPageIndex().page) { fr[ip][isp] = frsp.length - 1; } else { fr[ip][isp] = -1 } } } } } } } } /** * returns the current section index */ function _getSection(h) { if (h) { // TODO return the index of the section by hash } return p; } /** * returns the current page index */ function _getPage(h) { if (h) { // TODO return the index of the page by hash } return sp; } /** * returns the sections collection */ function _getSections() { return sections; } /** * returns the pages collection inside the passed section index */ function 
_getPages(i) { return sectionsArray[i]; } /** * returns the pages collection of all pages in the presentation */ function _getAllPages() { return allPages; } /** * returns the number of sections */ function _getSectionsLength() { return sectionsLength; } /** * returns the max number of pages */ function _getPagesLength() { return pagesLength; } /** * returns the total number of pages */ function _getPagesTotalLength() { return pagesTotalLength; } /** * returns a object with the index of the current section and page */ function _getPageIndex(d) { var pIndex = p; var spIndex = sp; if (d != undefined) { pIndex = d.parentNode.index; //parseInt(d.parentNode.getAttribute("data-prog").replace(/__/, "")) - 1; spIndex = d.index; //parseInt(d.getAttribute("data-prog").replace(/__/, "")) - 1; } return { section: pIndex, page: spIndex }; } function _getSectionByIndex(i) { return sections[i]; } function _getPageByIndex(i, pi) { return sectionsArray[pi][i]; } function _getCurrentSection() { return sections[p]; } function _getCurrentPage() { return sectionsArray[p][sp]; } function _getCurrentFragment() { return fragmentsArray[p][sp][_getCurrentFragmentIndex()]; } function _getCurrentFragmentIndex() { return fr[p][sp]; } function _hasNextSection() { return p < sections.length - 1; } function _hasPrevSection() { return p > 0; } function _hasNextPage() { return sp < sectionsArray[p].length - 1; } function _hasPrevPage() { return sp > 0; } /** * get a progress value calculated on the total number of pages */ function _getProgress() { if (p == 0 && sp == 0) { return 0; } var c = 0; for (var i = 0; i < p; i++) { c += sectionsArray[i].length; } c += sectionsArray[p][sp].index + 1; return c; } /** * get a composed hash based on current section and page */ function _getHash(d) { if (d != undefined) { sp = _getPageIndex(d).page; p = _getPageIndex(d).section; } var h = ""; // append to h the value of data-id attribute or, if data-id is not defined, the data-prog attribute var _p = sections[p]; h += getPageId(_p); if (sectionsArray[p].length > 1) { var _sp = sectionsArray[p][sp]; h += "/" + getPageId(_sp); } return h; } /** * expose the method to set the page from a hash */ function _setPage(h) { var elem = getElementByHash(h); if (elem) { var pElem = elem.parentNode; for (var i = 0; i < sectionsArray.length; i++) { var pa = sectionsArray[i]; if (sections[i] === pElem) { p = i; for (var ii = 0; ii < pa.length; ii++) { if (pa[ii] === elem) { sp = ii; break; } } } } _updateFragments(); } return elem; } function _switchActivePage(d, navigate) { var sIndex = d.parentNode.index; for (var i = 0; i < sectionsArray.length; i++) { var pa = sectionsArray[i]; for (var ii = 0; ii < pa.length; ii++) { var spa = pa[ii]; // Brav1Toolbox.removeClass(spa, "past-section"); Brav1Toolbox.removeClass(spa, "future-section"); Brav1Toolbox.removeClass(spa, "past-page"); Brav1Toolbox.removeClass(spa, "future-page"); // if (spa !== d) { Brav1Toolbox.removeClass(spa, "hilite"); if (isOverview == false && spa !== _getCurrentPage()) { Brav1Toolbox.removeClass(spa, "actual"); } if (i < sIndex) { Brav1Toolbox.addClass(spa, "past-section"); } else if (i > sIndex) { Brav1Toolbox.addClass(spa, "future-section"); } if (spa.index < d.index) { Brav1Toolbox.addClass(spa, "past-page"); } else if (spa.index > d.index) { Brav1Toolbox.addClass(spa, "future-page"); } } } } Brav1Toolbox.addClass(d, "hilite"); if (navigate) { setActual(d); } hilited = d; } function _getCurrentHilited() { return hilited; } function setActual(d) { Brav1Toolbox.addClass(d, 
"actual"); } _updateMatrix(); // update the navigation matrix on the first run return { update: _updateMatrix, updateFragments: _updateFragments, showFragments: _showFragments, hideFragments: _hideFragments, getSection: _getSection, getPage: _getPage, getSections: _getSections, getPages: _getPages, getAllPages: _getAllPages, getNextSection: _getNextSection, getPrevSection: _getPrevSection, getNextPage: _getNextPage, getPrevPage: _getPrevPage, getSectionsLength: _getSectionsLength, getPagesLength: _getPagesLength, getPagesTotalLength: _getPagesTotalLength, getPageIndex: _getPageIndex, getSectionByIndex: _getSectionByIndex, getPageByIndex: _getPageByIndex, getCurrentSection: _getCurrentSection, getCurrentPage: _getCurrentPage, getCurrentFragment: _getCurrentFragment, getCurrentFragmentIndex: _getCurrentFragmentIndex, getProgress: _getProgress, getHash: _getHash, setPage: _setPage, switchActivePage: _switchActivePage, getCurrentHilited: _getCurrentHilited, hasNextSection: _hasNextSection, hasPrevSection: _hasPrevSection, hasNextPage: _hasNextPage, hasPrevPage: _hasPrevPage, updateOffsets: _updateOffsets, getParallaxElements: _getParallaxElements } })(); /* ## ## ### ## ## #### ###### ### ######## #### ####### ## ## ######## ## ## ######## ## ## ######## ###### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ### ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ###### ## ## ###### ## ## ## ## ###### ## #### ######### ## ## ## ## ## ######### ## ## ## ## ## #### ## ## ## ## ## #### ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ### ## ## ## ## ## ## ## ### #### ###### ## ## ## #### ####### ## ## ######## ### ######## ## ## ## ###### */ /** * add a listener for event delegation * used for navigation purposes */ if (browserSupport) { if (isTouchDevice) { Brav1Toolbox.addListener(document, "touchend", onNavClick, false); } else { Brav1Toolbox.addListener(document, "click", onNavClick, false); } } function onNavClick(e) { var href = e.target.getAttribute("href"); // links with href starting with # if (href && href.substr(0,1) == "#") { e.target.blur(); e.preventDefault(); var h = href; var dest = NavigationMatrix.setPage(h); navigateTo(dest, true, true); } // pages in oveview mode if (isOverview) { var dest = e.target; while (dest && !Brav1Toolbox.hasClass(dest, PAGE_CLASS)) { dest = dest.parentNode; } if (Brav1Toolbox.hasClass(dest, PAGE_CLASS)) { e.preventDefault(); navigateTo(dest, null, true); } } // thumbs in the default progress indicator if (Brav1Toolbox.hasClass(e.target, PAGE_THUMB_CLASS)) { e.preventDefault(); var pTo = Number(unsafeAttr(e.target.getAttribute("data-section"))); var spTo = Number(unsafeAttr(e.target.getAttribute("data-page"))); _gotoPage(pTo, spTo); } } /** * set callback for onpopstate event * uses native history API to manage navigation * but uses the # for client side navigation on return */ if (useHash == false && window.history.pushState) { window.onpopstate = onPopState; } else { useHash = true; } // function onPopState(e) { useHash = false; var h; if (e.state) { h = e.state.token.replace("#/", ""); } else { h = document.location.hash.replace("#/", ""); } var dest = NavigationMatrix.setPage(h); navigateTo(dest, false); } /** * set callback for hashchange event * this callback is used not only when onpopstate event wasn't available * but also when the user resize the window or for the firs visit on the site */ 
Brav1Toolbox.addListener(window, "hashchange", onHashChange); // /** * @param e Event the hashChange Event * @param d Boolean force the hash change */ function onHashChange(e, d) { if (useHash || d) { var h = document.location.hash.replace("#/", ""); var dest = NavigationMatrix.setPage(h); navigateTo(dest, false); } } /* ######## ####### ## ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ####### ####### ###### ## ## */ var _ftX = ftContainer.offsetX; var _ftY = 0; var _touchStartX = 0; var _touchStartY = 0; var _deltaX = 0; var _deltaY = 0; var _dragging = 0; var _dragAxis = "x"; var _swipeLimit = 100; html.addEventListener("touchstart", onTouchStart, false); html.addEventListener("touchmove", onTouchMove, false); html.addEventListener("touchend", onTouchEnd, false); function onTouchStart(e) { _deltaX = 0; _deltaY = 0; e.preventDefault(); e = getTouchEvent(e); _touchStartX = e.clientX; _touchStartY = e.clientY; _dragging = 1; var initOffset = getInitOffset(); _ftX = initOffset.x; _ftY = initOffset.y; } function onTouchMove(e) { e.preventDefault(); e = getTouchEvent(e); _deltaX = e.clientX - _touchStartX; _deltaY = e.clientY - _touchStartY; } function onTouchEnd(e) { // e.preventDefault(); e = getTouchEvent(e); _dragging = 0; _dragAxis = Math.abs(_deltaX) >= Math.abs(_deltaY) ? "x" : "y"; if (_dragAxis == "x" && Math.abs(_deltaX) >= _swipeLimit) { if (_deltaX > 0) { _prevSection(); return; } else if (_deltaX < 0) { _nextSection(); return; } } else { if (_deltaY > 0 && Math.abs(_deltaY) >= _swipeLimit) { _prevPage(); return; } else if (_deltaY < 0) { _nextPage(); return; } } } function getTouchEvent(e) { if (e.touches) { e = e.touches[0]; } return e; } function getInitOffset() { var off = ftContainer.style[Brav1Toolbox.getPrefixed("transform")]; // X var indexX = off.indexOf("translateX(") + 11; var offX = off.substring(indexX, off.indexOf(")", indexX)); if (offX.indexOf("%") != -1) { offX = offX.replace("%", ""); offX = (parseInt(offX) / 100) * window.innerWidth; } else if (offX.indexOf("px") != -1) { offX = parseInt(offX.replace("px", "")); } // Y var indexY = off.indexOf("translateY(") + 11; var offY = off.substring(indexY, off.indexOf(")", indexY)); if (offY.indexOf("%") != -1) { offY = offY.replace("%", ""); offY = (parseInt(offY) / 100) * window.innerHeight; } else if (offY.indexOf("px") != -1) { offY = parseInt(offY.replace("px", "")); } return { x:offX, y:offY }; } /* ###### ###### ######## ####### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ## ######## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ###### ## ## ####### ######## ######## */ /** * native scroll management */ var scrollEventEnabled = true; Brav1Toolbox.addListener(window, "scroll", onNativeScroll); function onNativeScroll(e) { e.preventDefault(); resetScroll(); } /* ######## ######## ###### #### ######## ######## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######## ###### ###### ## ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######## ###### #### ######## ######## */ /** * monitoring function that triggers hashChange when resizing window */ var resizeMonitor = (function _resizeMonitor() { var ticker = NaN; function _enable() { _disable(); if (!isOverview) { ticker = setTimeout(doResizeHandler, 300); } } function _disable() { clearTimeout(ticker); } function doResizeHandler() { NavigationMatrix.updateOffsets(); navigateTo(); } 
Brav1Toolbox.addListener(window, "resize", _enable); window.addEventListener("orientationchange", _enable, false); return { enable: _enable, disable: _disable, } })(); /* ## ## ######## #### ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ####### ## #### ######## ###### */ /** * returns the element by parsing the hash * @param h String the hash string to evaluate */ function getElementByHash(h) { if (h.length > 0) { var aHash = h.replace("#/", "").split("/"); // TODO considerare l'ultimo slash come nullo var p = document.querySelector(SECTION_SELECTOR + "[data-id=__" + aHash[0] + "]") || document.querySelector(SECTION_SELECTOR + "[data-prog=__" + aHash[0] + "]"); if (p != null) { var sp = null; if (aHash.length > 1) { sp = p.querySelector(PAGE_SELECTOR + "[data-id=__" + aHash[1] + "]") || p.querySelector(PAGE_SELECTOR + "[data-prog=__" + aHash[1] + "]"); } if (sp == null) { sp = p.querySelector(PAGE_SELECTOR); } return sp; } } return; } /** * public method to force navigation updates */ function _updateNavigation() { NavigationMatrix.update(); onHashChange(null, true); } /** * builds and sets the title of the document parsing the attributes of the current section * if a data-title is available in a page and or in a section then it will be used * otherwise it will be used a formatted version of the hash string */ function setTitle(h) { var t = siteName; var ht = NavigationMatrix.getCurrentPage().getAttribute("data-title"); if (ht == null) { var hs = h.split("/"); for (var i = 0; i < hs.length; i++) { t += " | " + hs[i]; } } else { if (NavigationMatrix.getCurrentSection().getAttribute("data-title") != null) { t += " | " + NavigationMatrix.getCurrentSection().getAttribute("data-title"); } t += " | " + ht } document.title = t; } /** * returns a clean string of navigation atributes of the passed page * if there is a data-id attribute it will be returned * otherwise will be returned the data-prog attribute */ function getPageId(d) { return (d.getAttribute("data-id") != null ? d.getAttribute("data-id").replace(/__/, "") : d.getAttribute("data-prog").replace(/__/, "")); } /** * returns a safe version of an attribute value * adding __ in front of the value */ function safeAttr(a) { if (a.substr(0,2) != "__") { return "__" + a; } else { return a; } } /** * clean the save value of an attribute * removing __ from the beginning of the value */ function unsafeAttr(a) { if (a.substr(0,2) == "__") { return a.replace(/__/, ""); } else { return a; } } /* ## ## ### ## ## #### ###### ### ######## ######## ######## ####### ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ###### ## ## ## ## #### ######### ## ## ## ## ## ######### ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### #### ###### ## ## ## ######## ## ####### */ /** * navigation transition logic * @param dest HTMLElement the page to go to * @param push Boolean if true the hash string were pushed to the history API * @param linked Boolean if true triggers a forced update of all the fragments in the pages, used when navigating from links or overview */ function navigateTo(dest, push, linked) { push = push == false ? 
push : true; // if dest doesn't exist then go to homepage if (!dest) { if (NavigationMatrix.getCurrentPage() != null) { dest = NavigationMatrix.getCurrentPage(); } else { dest = document.querySelector(PAGE_SELECTOR); } push = true; } // checks what properties use for navigation and set the style navigate(dest); // moveParallax(dest); // if (isOverview) { _toggleOverview(false, false); } // var h = NavigationMatrix.getHash(dest); if (linked == true) { NavigationMatrix.updateFragments(); } // set history properties var pageIndex = NavigationMatrix.getPageIndex(dest); if (pastIndex.section != pageIndex.section || pastIndex.page != pageIndex.page) { if (pushHistory != null && push != false && NavigationMatrix.getCurrentFragmentIndex() == -1) { var stateObj = { token: h }; var nextHash = "#/" + h; currentHash = nextHash; window.history.pushState(stateObj, null, currentHash); } else { document.location.hash = "/" + h; } } // set the title setTitle(h); // // dispatches an event populated with navigation data fireNavigationEvent(); // cache the section and page index, useful to determine the direction of the next navigation pastIndex = pageIndex; NavigationMatrix.switchActivePage(dest, true); // if (_showProgress) { updateProgress(); } } function fireNavigationEvent() { var pageIndex = NavigationMatrix.getPageIndex(); Brav1Toolbox.dispatchEvent(NAVIGATION_EVENT, { section: NavigationMatrix.getCurrentSection(), page: NavigationMatrix.getCurrentPage(), sectionIndex: pageIndex.section, pageIndex: pageIndex.page, pastSectionIndex: pastIndex.section, pastPageIndex: pastIndex.page, prevSection: NavigationMatrix.hasPrevSection(), nextSection: NavigationMatrix.hasNextSection(), prevPage: NavigationMatrix.hasPrevPage(), nextPage: NavigationMatrix.hasNextPage(), fragment: NavigationMatrix.getCurrentFragment(), fragmentIndex: NavigationMatrix.getCurrentFragmentIndex(), isOverview: isOverview, progress: NavigationMatrix.getProgress(), total: NavigationMatrix.getPagesTotalLength() } ); } /** * check the availability of transform CSS property * if transform is not available then fallbacks to position absolute behaviour */ function navigate(dest) { var x; var y; var pageIndex = NavigationMatrix.getPageIndex(dest); if (_slideInPx == true) { // calculate the coordinates of the destination x = dest.x; y = dest.y; } else { // calculate the index of the destination page x = pageIndex.section; y = pageIndex.page; } // if (Brav1Toolbox.testCSS("transform")) { if (_slideInPx) { ftContainer.style[Brav1Toolbox.getPrefixed("transform")] = "translateX(" + -x + "px) translateY(" + -y + "px)"; } else { ftContainer.style[Brav1Toolbox.getPrefixed("transform")] = "translateX(" + -x * 100 + "%) translateY(" + -y * 100 + "%)"; } } else { if (_slideInPx) { ftContainer.style.top = -y + "px"; ftContainer.style.left = -x + "px"; } else { ftContainer.style.top = -y * 100 + "%"; ftContainer.style.left = -x * 100 + "%"; } } resetScroll(); } function moveParallax(dest) { if (parallaxEnabled) { var pageIndex = NavigationMatrix.getPageIndex(dest); var pxElements = NavigationMatrix.getParallaxElements(); for (var i = 0; i < pxElements.length; i++) { var pxSection = pxElements[i]; if (pxSection != undefined) { for (var ii = 0; ii < pxSection.length; ii++) { var pxPage = pxSection[ii]; if (pxPage != undefined) { for (var iii = 0; iii < pxPage.length; iii++) { var pxElement = pxPage[iii] var pX = 0; var pY = 0; // sections if (pageIndex.section < i) { pX = pxElement.pX; } else if (pageIndex.section > i) { pX = -pxElement.pX; } // pages if 
(pageIndex.page < ii) { pY = pxElement.pY; } else if (pageIndex.page > ii) { pY = -pxElement.pY; } // animation if (_parallaxInPx) { pxElement.style[Brav1Toolbox.getPrefixed("transform")] = "translateX(" + pX + "px) translateY(" + pY + "px)"; } else { pxElement.style[Brav1Toolbox.getPrefixed("transform")] = "translateX(" + pX + "%) translateY(" + pY + "%)"; } } } } } } } } function resetScroll() { window.scrollTo(0,0); // fix the eventually occurred page scrolling resetting the scroll values to 0 } /* ######## ######## ####### ###### ######## ######## ###### ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######## ######## ## ## ## #### ######## ###### ###### ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ####### ###### ## ## ######## ###### ###### */ var defaultProgress = null; var progressFill = null; function buildProgressIndicator() { var domFragment = document.createDocumentFragment(); // create the progress container div defaultProgress = document.createElement("div"); defaultProgress.className = DEFAULT_PROGRESS_CLASS; domFragment.appendChild(defaultProgress); // loop through sections for (var i = 0; i < NavigationMatrix.getSectionsLength(); i++) { var pDiv = document.createElement("div"); pDiv.setAttribute("data-section", "__" + i); pDiv.className = SECTION_THUMB_CLASS; Brav1Toolbox.addClass(pDiv, "thumb-section-" + i); // loop through pages var spArray = NavigationMatrix.getPages(i) for (var ii = 0; ii < spArray.length; ii++) { var spDiv = document.createElement("div"); spDiv.className = PAGE_THUMB_CLASS; spDiv.setAttribute("data-section", "__" + i); spDiv.setAttribute("data-page", "__" + ii); Brav1Toolbox.addClass(spDiv, "thumb-page-" + ii); pDiv.appendChild(spDiv); }; defaultProgress.appendChild(pDiv); }; body.appendChild(defaultProgress); } function hideProgressIndicator() { if (defaultProgress != null) { body.removeChild(defaultProgress); defaultProgress = null; } } function updateProgress() { if (defaultProgress != null) { var spts = defaultProgress.querySelectorAll(PAGE_THUMB_SELECTOR); for (var i = 0; i < spts.length; i++) { var spt = spts[i]; var pTo = Number(unsafeAttr(spt.getAttribute("data-section"))); var spTo = Number(unsafeAttr(spt.getAttribute("data-page"))); if (pTo == NavigationMatrix.getPageIndex().section && spTo == NavigationMatrix.getPageIndex().page) { Brav1Toolbox.addClass(spts[i], "actual"); } else { Brav1Toolbox.removeClass(spts[i], "actual"); } } } } function _getDefaultProgress() { return defaultProgress; } /* ####### ## ## ######## ######## ## ## #### ######## ## ## ## ## ### ## ## ### ###### ######## ## ## ######## ## ## ######## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ### ## ## ### ## ## ## ## ## ## ### ### ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### #### ## ## #### ## ## ## ## ## #### #### ## #### ## ## ## ## ## ## ###### ######## ## ## ## ###### ## ## ## ## ### ## ## ## ## ## ## ## ## ## #### ###### ## ### ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######### ## #### ######### ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ### ## ####### ### ######## ## ## ### #### ######## ### ### ## ## ## ## ## ## ## ## ###### ######## ## ## ######## ## ## ## */ /** * switch from the overview states */ function _toggleOverview(back, navigate) { if (isOverview) { zoomIn(back, navigate); } else { overviewCachedDest = NavigationMatrix.getCurrentPage(); zoomOut(); } 
} /** * zoom in the view to focus on the current section / page */ function zoomIn(back, navigate) { isOverview = false; Brav1Toolbox.removeClass(body, "ft-overview"); NavigationMatrix.hideFragments(); navigate = navigate === false ? false : true; if (navigate == true) { if (back == true) { navigateTo(overviewCachedDest); } else { navigateTo(); } } } /** * zoom out the view for an overview of all the sections / pages */ function zoomOut() { isOverview = true; Brav1Toolbox.addClass(body, "ft-overview"); NavigationMatrix.showFragments(); // if (_useOverviewVariant == false) { overviewZoomTypeA(true); } else { overviewZoomTypeB(true); } fireNavigationEvent(); } function overviewZoomTypeA(out) { // ftContainer scale version if (out) { var scaleX = 100 / NavigationMatrix.getSectionsLength(); var scaleY = 100 / NavigationMatrix.getPagesLength(); var scale = Math.min(scaleX, scaleY) * 0.9; var offsetX = (100 - NavigationMatrix.getSectionsLength() * scale) / 2; var offsetY = (100 - NavigationMatrix.getPagesLength() * scale) / 2; ftContainer.style[Brav1Toolbox.getPrefixed("transform")] = "translate(" + offsetX + "%, " + offsetY + "%) scale(" + scale/100 + ", " + scale/100 + ")"; } } function overviewZoomTypeB(out) { // ftContainer scale alternative version if (out) { var scale = overviewFixedScaleFactor // Math.min(scaleX, scaleY) * 0.9; var pIndex = NavigationMatrix.getPageIndex(); var offsetX = 50 - (scale * pIndex.section) - (scale / 2); var offsetY = 50 - (scale * pIndex.page) - (scale / 2); ftContainer.style[Brav1Toolbox.getPrefixed("transform")] = "translate(" + offsetX + "%, " + offsetY + "%) scale(" + scale/100 + ", " + scale/100 + ")"; } } /* ## ## ######## ## ## ######## ####### ### ######## ######## ## ## ### ## ## #### ###### ### ######## #### ####### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ##### ###### ## ######## ## ## ## ## ######## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ######### ## ## ## ## ## #### ######### ## ## ## ## ## ######### ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ######## ## ######## ####### ## ## ## ## ######## ## ## ## ## ### #### ###### ## ## ## #### ####### ## ## */ /** * KEYBOARD NAVIGATION */ Brav1Toolbox.addListener(window, "keydown", onKeyDown); Brav1Toolbox.addListener(window, "keyup", onKeyUp); function onKeyDown(e) { var tag = e.target.tagName; if (tag != "INPUT" && tag != "TEXTAREA" && tag != "SELECT") { if (e.keyCode >= 37 && e.keyCode <= 40) { e.preventDefault(); } } } function onKeyUp(e) { var tag = e.target.tagName; var elem; if (tag != "INPUT" && tag != "TEXTAREA" && tag != "SELECT") { e.preventDefault(); switch (e.keyCode) { case 27 : // esc _toggleOverview(true); break; case 33 : // pag up _gotoTop(); break; case 34 : // pag down _gotoBottom(); break; case 35 : // end _gotoEnd(); break; case 36 : // home _gotoHome(); break; case 37 : // left _prevSection(e.shiftKey); break; case 39 : // right _nextSection(e.shiftKey); break; case 38 : // up _prevPage(e.shiftKey); break; case 40 : // down _nextPage(e.shiftKey); break; case 13 : // return { if (isOverview) { _gotoPage(NavigationMatrix.getCurrentHilited()); } break; } default : break; } } } /* ######## ## ## ######## ## #### ###### ### ######## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## 
## ## ## ## ## ######## ## ## ######## ## ## ## ## ## ######## ## ## ## ## ## ## ## ## ## ######### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ####### ######## ######## #### ###### ## ## ## #### */ /** * triggers the first animation when visiting the site * if the hash is not empty */ function _start() { // init and configuration if (_showProgress && defaultProgress == null) { buildProgressIndicator(); } // start navigation if (document.location.hash.length > 0) { Brav1Toolbox.addClass(ftContainer, "no-transition"); onHashChange(null, true); Brav1Toolbox.removeClass(ftContainer, "no-transition"); } else { if (_start.arguments.length > 0) { _gotoPage.apply(this, _start.arguments); } else { _gotoPage(0,0); updateProgress(); } } } /* * Public API to go to the next section * @param top Boolean if true the next section will be the first page in the next array; if false the next section will be the same index page in the next array */ function _nextSection(top) { var d = NavigationMatrix.getNextSection(top, _fragmentsOnSide, isOverview); if (d != undefined) { navigateTo(d); } else { if (isOverview && _useOverviewVariant) { zoomOut(); } } } /* * Public API to go to the prev section * */ function _prevSection(top) { var d = NavigationMatrix.getPrevSection(top, _fragmentsOnSide, isOverview); if (d != undefined) { navigateTo(d); } else { if (isOverview && _useOverviewVariant) { zoomOut(); } } } /* * Public API to go to the next page */ function _nextPage(jump) { var d = NavigationMatrix.getNextPage(jump, isOverview); if (d != undefined) { navigateTo(d); } else { if (isOverview && _useOverviewVariant) { zoomOut(); } } } /* * Public API to go to the prev page */ function _prevPage(jump) { var d = NavigationMatrix.getPrevPage(jump, isOverview); if (d != undefined) { navigateTo(d); } else { if (isOverview && _useOverviewVariant) { zoomOut(); } } } /* * Public API to go to a specified section / page * the method accepts vary parameters: * if two numbers were passed it assumes that the first is the section index and the second is the page index; * if an object is passed it assumes that the object has a section property and a page property to get the indexes to navigate; * if an HTMLElement is passed it assumes the element is a destination page */ function _gotoPage() { var args = _gotoPage.arguments; if (args.length > 0) { if (args.length == 1) { if (Brav1Toolbox.typeOf(args[0]) === "Object") { var o = args[0]; var p = o.section; var sp = o.page; if (p != null && p != undefined) { var pd = document.querySelector(SECTION_SELECTOR + "[data-id=" + safeAttr(p) + "]"); if (sp != null && sp != undefined) { var spd = pd.querySelector(PAGE_SELECTOR + "[data-id=" + safeAttr(sp) + "]"); if (spd != null) { navigateTo(spd); return; } } } } else if (args[0].nodeName != undefined) { navigateTo(args[0], null, true); } } if (Brav1Toolbox.typeOf(args[0]) === "Number" || args[0] === 0) { var spd = NavigationMatrix.getPageByIndex(args[1], args[0]); navigateTo(spd); return; } } } function _gotoHome() { _gotoPage(0,0); } function _gotoEnd() { var sl = NavigationMatrix.getSectionsLength() - 1; _gotoPage(sl, NavigationMatrix.getPages(sl).length - 1); } function _gotoTop() { var pageIndex = NavigationMatrix.getPageIndex(); _gotoPage(pageIndex.section, 0); } function _gotoBottom() { var pageIndex = NavigationMatrix.getPageIndex(); _gotoPage(pageIndex.section, NavigationMatrix.getPages(pageIndex.section).length - 1); } function _addEventListener(type, handler, useCapture) { Brav1Toolbox.addListener(document, type, handler, 
useCapture); } /* ###### ######## ######## ######## ######## ######## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ###### ## ## ###### ######## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ######## ## ## ######## ## ## ###### */ function _setFragmentsOnSide(v) { _fragmentsOnSide = v; _setFragmentsOnBack(v); } function _setFragmentsOnBack(v) { _fragmentsOnBack = v; } function _setUseHistory(v) { pushHistory = v; } function _setSlideInPx(v) { _slideInPx = v; navigateTo(); } function _setSectionsSlideToTop(v) { _sectionsSlideToTop = v; } function _setGridNavigation(v) { _sectionsSlideToTop = !v; } function _setUseOverviewVariant(v) { _useOverviewVariant = v; } function _setTwoStepsSlide(v) { _twoStepsSlide = v; } function _setShowProgress(v) { _showProgress = v; if (_showProgress) { if (defaultProgress == null) { buildProgressIndicator(); } updateProgress(); } else { if (defaultProgress != null) { hideProgressIndicator(); } } } function _setDefaultParallaxValues(x, y) { defaultParallaxX = x; defaultParallaxY = y == undefined ? defaultParallaxX : y; NavigationMatrix.update(); } function _setParallaxInPx(v) { _parallaxInPx = v; } /** * return object for public methods */ return { start: _start, updateNavigation: _updateNavigation, nextSection: _nextSection, prevSection: _prevSection, next: _nextPage, prev: _prevPage,<|fim▁hole|> gotoPage: _gotoPage, gotoHome: _gotoHome, gotoTop: _gotoTop, gotoBottom: _gotoBottom, gotoEnd: _gotoEnd, toggleOverview: _toggleOverview, fragmentsOnSide: _setFragmentsOnSide, fragmentsOnBack: _setFragmentsOnBack, useHistory: _setUseHistory, slideInPx: _setSlideInPx, sectionsSlideToTop: _setSectionsSlideToTop, gridNavigation: _setGridNavigation, useOverviewVariant: _setUseOverviewVariant, twoStepsSlide: _setTwoStepsSlide, showProgress: _setShowProgress, addEventListener: _addEventListener, defaultParallaxValues: _setDefaultParallaxValues, parallaxInPx: _setParallaxInPx, getDefaultProgress: _getDefaultProgress }; })();<|fim▁end|>
nextFragment: _nextPage,
prevFragment: _prevPage,
<|file_name|>x_testsuite_test.go<|end_file_name|><|fim▁begin|>package servo_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"

	"github.com/fgrosse/servo"
	"fmt"
)

func TestServo(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Servo Test Suite")
}

type TestBundle struct {}

func (b *TestBundle) Boot(kernel *servo.Kernel) {
	kernel.RegisterType("test_bundle.my_type", NewService)
}

type SomeService struct {}

func NewRecursiveService(*SomeService) *SomeService {
	return &SomeService{}
}

func NewService() *SomeService {
	return &SomeService{}
}

func NewServiceWithParam(param interface{}) *SomeService {
	panic(param)
	return &SomeService{}
}

// ServerMock is a non blocking noop implementation of servo.Server
type ServerMock struct {
	RunHasBeenCalled bool
	ReturnError      bool

	Parameter1, Parameter2 string
}

func NewServerMockWithParams(param1, param2 string) *ServerMock {
	Expect(param1).To(Equal("foo"), `NewServerMockWithParams should always be called with the values "foo" and "bar"`)
	Expect(param2).To(Equal("bar"), `NewServerMockWithParams should always be called with the values "foo" and "bar"`)
	return &ServerMock{
		Parameter1: param1,
		Parameter2: param2,
	}
}

func (s *ServerMock) Run() error {
	s.RunHasBeenCalled = true
	if s.ReturnError {
		return fmt.Errorf("ServerMock was told to return an error!")
	}
<|fim▁hole|>}<|fim▁end|>
	return nil
<|file_name|>AboutActivity.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2017 SamuelGjk <[email protected]> * * This file is part of DiyCode * * DiyCode is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * DiyCode is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with DiyCode. If not, see <http://www.gnu.org/licenses/>. */ package moe.yukinoneko.diycode.module.about; import android.os.Bundle; import android.support.v7.app.AlertDialog; import android.support.v7.widget.AppCompatTextView; import android.webkit.WebView; import java.util.Locale; import butterknife.BindView; import butterknife.OnClick; import moe.yukinoneko.diycode.R; import moe.yukinoneko.diycode.base.BaseActivity; import moe.yukinoneko.diycode.tool.Tools; /** * MVPPlugin * 邮箱 [email protected] */ public class AboutActivity extends BaseActivity { @BindView(R.id.text_app_version) AppCompatTextView textAppVersion; @Override protected int provideContentViewId() { return R.layout.activity_about; }<|fim▁hole|> @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); textAppVersion.setText(String.format(Locale.getDefault(), "Version %s", Tools.getVersionName(this))); } @OnClick(R.id.card_open_source_licenses) public void onViewClicked() { WebView v = new WebView(this); v.loadUrl("file:///android_asset/licenses.html"); new AlertDialog.Builder(this) .setView(v) .setNegativeButton(R.string.close, null) .show(); } }<|fim▁end|>
<|file_name|>pnacl_coordinator.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "native_client/src/trusted/plugin/pnacl_coordinator.h" #include <utility> #include <vector> #include "native_client/src/include/checked_cast.h" #include "native_client/src/include/portability_io.h" #include "native_client/src/shared/platform/nacl_check.h" #include "native_client/src/trusted/plugin/local_temp_file.h" #include "native_client/src/trusted/plugin/manifest.h" #include "native_client/src/trusted/plugin/plugin.h" #include "native_client/src/trusted/plugin/plugin_error.h" #include "native_client/src/trusted/plugin/pnacl_translate_thread.h" #include "native_client/src/trusted/plugin/service_runtime.h" #include "native_client/src/trusted/plugin/temporary_file.h" #include "native_client/src/trusted/service_runtime/include/sys/stat.h" #include "ppapi/c/pp_bool.h" #include "ppapi/c/pp_errors.h" #include "ppapi/c/ppb_file_io.h" #include "ppapi/c/private/ppb_uma_private.h" #include "ppapi/cpp/file_io.h" namespace { const char kPnaclTempDir[] = "/.pnacl"; const uint32_t kCopyBufSize = 512 << 10; } namespace plugin { ////////////////////////////////////////////////////////////////////// // Pnacl-specific manifest support. ////////////////////////////////////////////////////////////////////// class PnaclManifest : public Manifest { public: PnaclManifest(const pp::URLUtil_Dev* url_util, bool use_extension) : url_util_(url_util), manifest_base_url_(PnaclUrls::GetBaseUrl(use_extension)) { // TODO(jvoung): get rid of use_extension when we no longer rely // on the chrome webstore extension. Most of this Manifest stuff // can also be simplified then. } virtual ~PnaclManifest() { } virtual bool GetProgramURL(nacl::string* full_url, nacl::string* cache_identity, ErrorInfo* error_info, bool* pnacl_translate) const { // Does not contain program urls. UNREFERENCED_PARAMETER(full_url); UNREFERENCED_PARAMETER(cache_identity); UNREFERENCED_PARAMETER(error_info); UNREFERENCED_PARAMETER(pnacl_translate); PLUGIN_PRINTF(("PnaclManifest does not contain a program\n")); error_info->SetReport(ERROR_MANIFEST_GET_NEXE_URL, "pnacl manifest does not contain a program."); return false; } virtual bool ResolveURL(const nacl::string& relative_url, nacl::string* full_url, ErrorInfo* error_info) const { // Does not do general URL resolution, simply appends relative_url to // the end of manifest_base_url_. UNREFERENCED_PARAMETER(error_info); *full_url = manifest_base_url_ + relative_url; return true; } virtual bool GetFileKeys(std::set<nacl::string>* keys) const { // Does not support enumeration. PLUGIN_PRINTF(("PnaclManifest does not support key enumeration\n")); UNREFERENCED_PARAMETER(keys); return false; } virtual bool ResolveKey(const nacl::string& key, nacl::string* full_url, nacl::string* cache_identity, ErrorInfo* error_info, bool* pnacl_translate) const { // All of the extension files are native (do not require pnacl translate). *pnacl_translate = false; // Do not cache these entries. *cache_identity = ""; // We can only resolve keys in the files/ namespace. const nacl::string kFilesPrefix = "files/"; size_t files_prefix_pos = key.find(kFilesPrefix); if (files_prefix_pos == nacl::string::npos) { error_info->SetReport(ERROR_MANIFEST_RESOLVE_URL, "key did not start with files/"); return false; } // Append what follows files to the pnacl URL prefix. 
nacl::string key_basename = key.substr(kFilesPrefix.length()); return ResolveURL(key_basename, full_url, error_info); } private: NACL_DISALLOW_COPY_AND_ASSIGN(PnaclManifest); const pp::URLUtil_Dev* url_util_; nacl::string manifest_base_url_; }; // TEMPORARY: ld needs to look up dynamic libraries in the nexe's manifest // until metadata is complete in pexes. This manifest lookup allows looking // for whether a resource requested by ld is in the nexe manifest first, and // if not, then consults the extension manifest. // TODO(sehr,jvoung,pdox): remove this when metadata is correct. class PnaclLDManifest : public Manifest { public: PnaclLDManifest(const Manifest* nexe_manifest, const Manifest* extension_manifest) : nexe_manifest_(nexe_manifest), extension_manifest_(extension_manifest) { CHECK(nexe_manifest != NULL); CHECK(extension_manifest != NULL); } virtual ~PnaclLDManifest() { } virtual bool GetProgramURL(nacl::string* full_url, nacl::string* cache_identity, ErrorInfo* error_info, bool* pnacl_translate) const { if (nexe_manifest_->GetProgramURL(full_url, cache_identity, error_info, pnacl_translate)) { return true; } return extension_manifest_->GetProgramURL(full_url, cache_identity, error_info, pnacl_translate); } virtual bool ResolveURL(const nacl::string& relative_url, nacl::string* full_url, ErrorInfo* error_info) const { if (nexe_manifest_->ResolveURL(relative_url, full_url, error_info)) { return true; } return extension_manifest_->ResolveURL(relative_url, full_url, error_info); } virtual bool GetFileKeys(std::set<nacl::string>* keys) const { if (nexe_manifest_->GetFileKeys(keys)) { return true; } return extension_manifest_->GetFileKeys(keys); } virtual bool ResolveKey(const nacl::string& key, nacl::string* full_url, nacl::string* cache_identity, ErrorInfo* error_info, bool* pnacl_translate) const { if (nexe_manifest_->ResolveKey(key, full_url, cache_identity, error_info, pnacl_translate)) { return true; } return extension_manifest_->ResolveKey(key, full_url, cache_identity, error_info, pnacl_translate); } private: NACL_DISALLOW_COPY_AND_ASSIGN(PnaclLDManifest); const Manifest* nexe_manifest_; const Manifest* extension_manifest_; }; ////////////////////////////////////////////////////////////////////// // UMA stat helpers. ////////////////////////////////////////////////////////////////////// namespace { // Assume translation time metrics *can be* large. // Up to 12 minutes. const int64_t kTimeLargeMin = 10; // in ms const int64_t kTimeLargeMax = 720000; // in ms const uint32_t kTimeLargeBuckets = 100; const int32_t kSizeKBMin = 1; const int32_t kSizeKBMax = 512*1024; // very large .pexe / .nexe. const uint32_t kSizeKBBuckets = 100; const int32_t kRatioMin = 10; const int32_t kRatioMax = 10*100; // max of 10x difference. const uint32_t kRatioBuckets = 100; const int32_t kKBPSMin = 1; const int32_t kKBPSMax = 30*1000; // max of 30 MB / sec. 
const uint32_t kKBPSBuckets = 100; const PPB_UMA_Private* GetUMAInterface() { pp::Module *module = pp::Module::Get(); DCHECK(module); return static_cast<const PPB_UMA_Private*>( module->GetBrowserInterface(PPB_UMA_PRIVATE_INTERFACE)); } void HistogramTime(const std::string& name, int64_t ms) { if (ms < 0) return; const PPB_UMA_Private* ptr = GetUMAInterface(); if (ptr == NULL) return; ptr->HistogramCustomTimes(pp::Var(name).pp_var(), ms, kTimeLargeMin, kTimeLargeMax, kTimeLargeBuckets); } void HistogramSizeKB(const std::string& name, int32_t kb) { if (kb < 0) return; const PPB_UMA_Private* ptr = GetUMAInterface(); if (ptr == NULL) return; ptr->HistogramCustomCounts(pp::Var(name).pp_var(), kb, kSizeKBMin, kSizeKBMax, kSizeKBBuckets); } void HistogramRatio(const std::string& name, int64_t a, int64_t b) { if (a < 0 || b <= 0) return; const PPB_UMA_Private* ptr = GetUMAInterface(); if (ptr == NULL) return; ptr->HistogramCustomCounts(pp::Var(name).pp_var(), 100 * a / b, kRatioMin, kRatioMax, kRatioBuckets); } void HistogramKBPerSec(const std::string& name, double kb, double s) { if (kb < 0.0 || s <= 0.0) return; const PPB_UMA_Private* ptr = GetUMAInterface(); if (ptr == NULL) return; ptr->HistogramCustomCounts(pp::Var(name).pp_var(), static_cast<int64_t>(kb / s), kKBPSMin, kKBPSMax, kKBPSBuckets); } void HistogramEnumerateTranslationCache(bool hit) { const PPB_UMA_Private* ptr = GetUMAInterface(); if (ptr == NULL) return; ptr->HistogramEnumeration(pp::Var("NaCl.Perf.PNaClCache.IsHit").pp_var(), hit, 2); } } // namespace ////////////////////////////////////////////////////////////////////// // The coordinator class. ////////////////////////////////////////////////////////////////////// // Out-of-line destructor to keep it from getting put in every .o where // callback_source.h is included template<> CallbackSource<FileStreamData>::~CallbackSource() {} PnaclCoordinator* PnaclCoordinator::BitcodeToNative( Plugin* plugin, const nacl::string& pexe_url, const nacl::string& cache_identity, const pp::CompletionCallback& translate_notify_callback) { PLUGIN_PRINTF(("PnaclCoordinator::BitcodeToNative (plugin=%p, pexe=%s)\n", static_cast<void*>(plugin), pexe_url.c_str())); PnaclCoordinator* coordinator = new PnaclCoordinator(plugin, pexe_url, cache_identity, translate_notify_callback); coordinator->pnacl_init_time_ = NaClGetTimeOfDayMicroseconds(); coordinator->off_the_record_ = plugin->nacl_interface()->IsOffTheRecord(); PLUGIN_PRINTF(("PnaclCoordinator::BitcodeToNative (manifest=%p, " "off_the_record=%d)\n", reinterpret_cast<const void*>(coordinator->manifest_.get()), coordinator->off_the_record_)); // Load llc and ld. std::vector<nacl::string> resource_urls; resource_urls.push_back(PnaclUrls::GetLlcUrl()); resource_urls.push_back(PnaclUrls::GetLdUrl()); pp::CompletionCallback resources_cb = coordinator->callback_factory_.NewCallback( &PnaclCoordinator::ResourcesDidLoad); coordinator->resources_.reset( new PnaclResources(plugin, coordinator, coordinator->manifest_.get(), resource_urls, resources_cb)); CHECK(coordinator->resources_ != NULL); coordinator->resources_->StartLoad(); // ResourcesDidLoad will be invoked when all resources have been received. 
return coordinator; } int32_t PnaclCoordinator::GetLoadedFileDesc(int32_t pp_error, const nacl::string& url, const nacl::string& component) { PLUGIN_PRINTF(("PnaclCoordinator::GetLoadedFileDesc (pp_error=%" NACL_PRId32", url=%s, component=%s)\n", pp_error, url.c_str(), component.c_str())); ErrorInfo error_info; int32_t file_desc_ok_to_close = plugin_->GetPOSIXFileDesc(url); if (pp_error != PP_OK || file_desc_ok_to_close == NACL_NO_FILE_DESC) { if (pp_error == PP_ERROR_ABORTED) { plugin_->ReportLoadAbort(); } else { ReportPpapiError(ERROR_PNACL_RESOURCE_FETCH, pp_error, component + " load failed."); } return NACL_NO_FILE_DESC; } return file_desc_ok_to_close; } PnaclCoordinator::PnaclCoordinator( Plugin* plugin, const nacl::string& pexe_url, const nacl::string& cache_identity, const pp::CompletionCallback& translate_notify_callback) : translate_finish_error_(PP_OK), plugin_(plugin), translate_notify_callback_(translate_notify_callback), file_system_(new pp::FileSystem(plugin, PP_FILESYSTEMTYPE_LOCALTEMPORARY)), manifest_(new PnaclManifest( plugin->url_util(), plugin::PnaclUrls::UsePnaclExtension(plugin))), pexe_url_(pexe_url), cache_identity_(cache_identity), error_already_reported_(false), off_the_record_(false), pnacl_init_time_(0), pexe_size_(0), pexe_bytes_compiled_(0), expected_pexe_size_(-1) { PLUGIN_PRINTF(("PnaclCoordinator::PnaclCoordinator (this=%p, plugin=%p)\n", static_cast<void*>(this), static_cast<void*>(plugin))); callback_factory_.Initialize(this); ld_manifest_.reset(new PnaclLDManifest(plugin_->manifest(), manifest_.get())); } PnaclCoordinator::~PnaclCoordinator() { PLUGIN_PRINTF(("PnaclCoordinator::~PnaclCoordinator (this=%p, " "translate_thread=%p\n", static_cast<void*>(this), translate_thread_.get())); // Stopping the translate thread will cause the translate thread to try to // run translation_complete_callback_ on the main thread. This destructor is // running from the main thread, and by the time it exits, callback_factory_ // will have been destroyed. This will result in the cancellation of // translation_complete_callback_, so no notification will be delivered. if (translate_thread_.get() != NULL) { translate_thread_->AbortSubprocesses(); } } void PnaclCoordinator::ReportNonPpapiError(enum PluginErrorCode err_code, const nacl::string& message) { error_info_.SetReport(err_code, nacl::string("PnaclCoordinator: ") + message); ExitWithError(); } void PnaclCoordinator::ReportPpapiError(enum PluginErrorCode err_code, int32_t pp_error, const nacl::string& message) { nacl::stringstream ss; ss << "PnaclCoordinator: " << message << " (pp_error=" << pp_error << ")."; error_info_.SetReport(err_code, ss.str()); ExitWithError(); } void PnaclCoordinator::ExitWithError() { PLUGIN_PRINTF(("PnaclCoordinator::ExitWithError (error_code=%d, " "message='%s')\n", error_info_.error_code(), error_info_.message().c_str())); plugin_->ReportLoadError(error_info_); // Free all the intermediate callbacks we ever created. // Note: this doesn't *cancel* the callbacks from the factories attached // to the various helper classes (e.g., pnacl_resources). Thus, those // callbacks may still run asynchronously. We let those run but ignore // any other errors they may generate so that they do not end up running // translate_notify_callback_, which has already been freed. 
callback_factory_.CancelAll(); if (!error_already_reported_) { error_already_reported_ = true; translate_notify_callback_.Run(PP_ERROR_FAILED); } else { PLUGIN_PRINTF(("PnaclCoordinator::ExitWithError an earlier error was " "already reported -- Skipping.\n")); } } // Signal that Pnacl translation completed normally. void PnaclCoordinator::TranslateFinished(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::TranslateFinished (pp_error=%" NACL_PRId32")\n", pp_error)); // Bail out if there was an earlier error (e.g., pexe load failure). if (translate_finish_error_ != PP_OK) { ExitWithError(); return; } // Bail out if there is an error from the translation thread. if (pp_error != PP_OK) { ExitWithError(); return; } // If there are no errors, report stats from this thread (the main thread). const plugin::PnaclTimeStats& time_stats = translate_thread_->GetTimeStats(); HistogramTime("NaCl.Perf.PNaClLoadTime.LoadCompiler", time_stats.pnacl_llc_load_time / NACL_MICROS_PER_MILLI); HistogramTime("NaCl.Perf.PNaClLoadTime.CompileTime", time_stats.pnacl_compile_time / NACL_MICROS_PER_MILLI); HistogramKBPerSec("NaCl.Perf.PNaClLoadTime.CompileKBPerSec", pexe_size_ / 1024.0, time_stats.pnacl_compile_time / 1000000.0); HistogramTime("NaCl.Perf.PNaClLoadTime.LoadLinker", time_stats.pnacl_ld_load_time / NACL_MICROS_PER_MILLI); HistogramTime("NaCl.Perf.PNaClLoadTime.LinkTime", time_stats.pnacl_link_time / NACL_MICROS_PER_MILLI); HistogramSizeKB("NaCl.Perf.Size.Pexe", static_cast<int64_t>(pexe_size_ / 1024)); struct nacl_abi_stat stbuf; struct NaClDesc* desc = temp_nexe_file_->read_wrapper()->desc(); int stat_ret; if (0 != (stat_ret = (*((struct NaClDescVtbl const *) desc->base.vtbl)-> Fstat)(desc, &stbuf))) { PLUGIN_PRINTF(("PnaclCoordinator::TranslateFinished can't stat nexe.\n")); } else { size_t nexe_size = stbuf.nacl_abi_st_size; HistogramSizeKB("NaCl.Perf.Size.PNaClTranslatedNexe", static_cast<int64_t>(nexe_size / 1024)); HistogramRatio("NaCl.Perf.Size.PexeNexeSizePct", pexe_size_, nexe_size); } // The nexe is written to the temp_nexe_file_. We must Reset() the file // pointer to be able to read it again from the beginning. temp_nexe_file_->Reset(); if (cache_identity_ != "" && cached_nexe_file_ != NULL) { // We are using a cache, but had a cache miss, which is why we did the // translation. Reset cached_nexe_file_ to have a random name, // for scratch purposes, before renaming to the final cache_identity_. cached_nexe_file_.reset(new LocalTempFile(plugin_, file_system_.get(), nacl::string(kPnaclTempDir))); pp::CompletionCallback cb = callback_factory_.NewCallback( &PnaclCoordinator::CachedNexeOpenedForWrite); cached_nexe_file_->OpenWrite(cb); } else { // For now, tolerate bitcode that is missing a cache identity, and // tolerate the lack of caching in incognito mode. 
PLUGIN_PRINTF(("PnaclCoordinator -- not caching.\n")); NexeReadDidOpen(PP_OK); } } void PnaclCoordinator::CachedNexeOpenedForWrite(int32_t pp_error) { if (pp_error != PP_OK) { if (pp_error == PP_ERROR_NOACCESS) { ReportPpapiError( ERROR_PNACL_CACHE_FILEOPEN_NOACCESS, pp_error, "PNaCl translation cache failed to open file for write " "(no access)."); return; } if (pp_error == PP_ERROR_NOQUOTA) { ReportPpapiError( ERROR_PNACL_CACHE_FILEOPEN_NOQUOTA, pp_error, "PNaCl translation cache failed to open file for write " "(no quota)."); return; } if (pp_error == PP_ERROR_NOSPACE) { ReportPpapiError( ERROR_PNACL_CACHE_FILEOPEN_NOSPACE, pp_error, "PNaCl translation cache failed to open file for write " "(no space)."); return; } if (pp_error == PP_ERROR_NOTAFILE) { ReportPpapiError(ERROR_PNACL_CACHE_FILEOPEN_NOTAFILE, pp_error, "PNaCl translation cache failed to open file for write." " File already exists as a directory."); return; } ReportPpapiError(ERROR_PNACL_CACHE_FILEOPEN_OTHER, pp_error, "PNaCl translation cache failed to open file for write."); return; } // Copy the contents from temp_nexe_file_ -> cached_nexe_file_, // then rename the cached_nexe_file_ file to the cache id. int64_t cur_offset = 0; nacl::DescWrapper* read_wrapper = temp_nexe_file_->read_wrapper(); char buf[kCopyBufSize]; int32_t num_read = nacl::assert_cast<int32_t>(read_wrapper->Read(buf, sizeof buf)); // Hit EOF or something. if (num_read == 0) { NexeWasCopiedToCache(PP_OK); return; } if (num_read < 0) { PLUGIN_PRINTF(("PnaclCoordinator::CachedNexeOpenedForWrite read failed " "(error=%"NACL_PRId32")\n", num_read)); NexeWasCopiedToCache(PP_ERROR_FAILED); return; } pp::CompletionCallback cb = callback_factory_.NewCallback( &PnaclCoordinator::DidCopyNexeToCachePartial, num_read, cur_offset); cached_nexe_file_->write_file_io()->Write(cur_offset, buf, num_read, cb); } void PnaclCoordinator::DidCopyNexeToCachePartial(int32_t pp_error, int32_t num_read_prev, int64_t cur_offset) { PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial " "(pp_error=%"NACL_PRId32", num_read_prev=%"NACL_PRId32"" ", cur_offset=%"NACL_PRId64").\n", pp_error, num_read_prev, cur_offset)); // Assume we are done. if (pp_error == PP_OK) { NexeWasCopiedToCache(PP_OK); return; } if (pp_error < PP_OK) { PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial failed (err=%" NACL_PRId32")\n", pp_error)); NexeWasCopiedToCache(pp_error); return; } // Check if we wrote as much as we read. nacl::DescWrapper* read_wrapper = temp_nexe_file_->read_wrapper(); if (pp_error != num_read_prev) { PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial partial " "write (bytes_written=%"NACL_PRId32" vs " "read=%"NACL_PRId32")\n", pp_error, num_read_prev)); CHECK(pp_error < num_read_prev); // Seek back to re-read the bytes that were not written. nacl_off64_t seek_result = read_wrapper->Seek(pp_error - num_read_prev, SEEK_CUR); if (seek_result < 0) { PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial seek failed " "(err=%"NACL_PRId64")\n", seek_result)); NexeWasCopiedToCache(PP_ERROR_FAILED); return; } } int64_t next_offset = cur_offset + pp_error; char buf[kCopyBufSize]; int32_t num_read = nacl::assert_cast<int32_t>(read_wrapper->Read(buf, sizeof buf)); PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial read (bytes=%" NACL_PRId32")\n", num_read)); // Hit EOF or something. 
if (num_read == 0) { NexeWasCopiedToCache(PP_OK); return; } if (num_read < 0) { PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial read failed " "(error=%"NACL_PRId32")\n", num_read)); NexeWasCopiedToCache(PP_ERROR_FAILED); return; } pp::CompletionCallback cb = callback_factory_.NewCallback( &PnaclCoordinator::DidCopyNexeToCachePartial, num_read, next_offset); PLUGIN_PRINTF(("PnaclCoordinator::CopyNexeToCache Writing (" "bytes=%"NACL_PRId32", buf=%p, file_io=%p)\n", num_read, buf, cached_nexe_file_->write_file_io())); cached_nexe_file_->write_file_io()->Write(next_offset, buf, num_read, cb); } void PnaclCoordinator::NexeWasCopiedToCache(int32_t pp_error) { if (pp_error != PP_OK) {<|fim▁hole|> // before returning. pp::CompletionCallback cb = callback_factory_.NewCallback( &PnaclCoordinator::CorruptCacheFileWasDeleted, pp_error); cached_nexe_file_->Delete(cb); return; } // Rename the cached_nexe_file_ file to the cache id, to finalize. pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::NexeFileWasRenamed); cached_nexe_file_->Rename(cache_identity_, cb); } void PnaclCoordinator::CorruptCacheFileWasDeleted(int32_t delete_pp_error, int32_t orig_pp_error) { if (delete_pp_error != PP_OK) { // The cache file was certainly already opened by the time we tried // to write to it, so it should certainly be deletable. PLUGIN_PRINTF(("PnaclCoordinator::CorruptCacheFileWasDeleted " "delete failed with pp_error=%"NACL_PRId32"\n", delete_pp_error)); // fall through and report the original error. } // Report the original error that caused us to consider the // cache file corrupted. if (orig_pp_error == PP_ERROR_NOQUOTA) { ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_COPY_NOQUOTA, orig_pp_error, "Failed to copy translated nexe to cache (no quota)."); return; } if (orig_pp_error == PP_ERROR_NOSPACE) { ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_COPY_NOSPACE, orig_pp_error, "Failed to copy translated nexe to cache (no space)."); return; } ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_COPY_OTHER, orig_pp_error, "Failed to copy translated nexe to cache."); return; } void PnaclCoordinator::NexeFileWasRenamed(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::NexeFileWasRenamed (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { if (pp_error == PP_ERROR_NOACCESS) { ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_RENAME_NOACCESS, pp_error, "Failed to finalize cached translation (no access)."); return; } else if (pp_error != PP_ERROR_FILEEXISTS) { ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_RENAME_OTHER, pp_error, "Failed to finalize cached translation."); return; } else { // pp_error == PP_ERROR_FILEEXISTS. // NOTE: if the file already existed, it looks like the rename will // happily succeed. However, we should add a test for this. // Could be a hash collision, or it could also be two tabs racing to // translate the same pexe. We may want UMA stats to know if this happens. // For now, assume that it is a race and try to continue. // If there is truly a corrupted file, then sel_ldr should prevent the // file from loading due to the file size not matching the ELF header. 
PLUGIN_PRINTF(("PnaclCoordinator::NexeFileWasRenamed file existed\n")); } } cached_nexe_file_->FinishRename(); int64_t total_time = NaClGetTimeOfDayMicroseconds() - pnacl_init_time_; HistogramTime("NaCl.Perf.PNaClLoadTime.TotalUncachedTime", total_time / NACL_MICROS_PER_MILLI); HistogramKBPerSec("NaCl.Perf.PNaClLoadTime.TotalUncachedKBPerSec", pexe_size_ / 1024.0, total_time / 1000000.0); // Open the cache file for reading. pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::NexeReadDidOpen); cached_nexe_file_->OpenRead(cb); } void PnaclCoordinator::NexeReadDidOpen(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::NexeReadDidOpen (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { if (pp_error == PP_ERROR_FILENOTFOUND) { ReportPpapiError(ERROR_PNACL_CACHE_FETCH_NOTFOUND, pp_error, "Failed to open translated nexe (not found)."); return; } if (pp_error == PP_ERROR_NOACCESS) { ReportPpapiError(ERROR_PNACL_CACHE_FETCH_NOACCESS, pp_error, "Failed to open translated nexe (no access)."); return; } ReportPpapiError(ERROR_PNACL_CACHE_FETCH_OTHER, pp_error, "Failed to open translated nexe."); return; } // Transfer ownership of cache/temp file's wrapper to the coordinator. if (cached_nexe_file_ != NULL) { translated_fd_.reset(cached_nexe_file_->release_read_wrapper()); } else { translated_fd_.reset(temp_nexe_file_->release_read_wrapper()); } translate_notify_callback_.Run(pp_error); } void PnaclCoordinator::ResourcesDidLoad(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::ResourcesDidLoad (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { // Finer-grained error code should have already been reported by // the PnaclResources class. return; } if (!off_the_record_) { // Open the local temporary FS to see if we get a hit in the cache. pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::FileSystemDidOpen); int32_t open_error = file_system_->Open(0, cb); if (open_error != PP_OK_COMPLETIONPENDING) { // At this point, no async request has kicked off to check for // permissions, space, etc., so the only error that can be detected // now is that an open() is already in progress (or a really terrible // error). if (pp_error == PP_ERROR_INPROGRESS) { ReportPpapiError( ERROR_PNACL_CACHE_OPEN_INPROGRESS, pp_error, "File system for PNaCl translation cache failed to open " "(in progress)."); return; } ReportPpapiError( ERROR_PNACL_CACHE_OPEN_OTHER, pp_error, "File system for PNaCl translation cache failed to open."); } } else { // We don't have a cache, so do the non-cached codepath. 
CachedFileDidOpen(PP_ERROR_FAILED); } } void PnaclCoordinator::FileSystemDidOpen(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::FileSystemDidOpen (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { if (pp_error == PP_ERROR_NOACCESS) { ReportPpapiError( ERROR_PNACL_CACHE_OPEN_NOACCESS, pp_error, "File system for PNaCl translation cache failed to open " "(no access)."); return; } if (pp_error == PP_ERROR_NOQUOTA) { ReportPpapiError( ERROR_PNACL_CACHE_OPEN_NOQUOTA, pp_error, "File system for PNaCl translation cache failed to open " "(no quota)."); return; } if (pp_error == PP_ERROR_NOSPACE) { ReportPpapiError( ERROR_PNACL_CACHE_OPEN_NOSPACE, pp_error, "File system for PNaCl translation cache failed to open " "(no space)."); return; } ReportPpapiError(ERROR_PNACL_CACHE_OPEN_OTHER, pp_error, "File system for PNaCl translation cache failed to open."); } dir_ref_.reset(new pp::FileRef(*file_system_, kPnaclTempDir)); // Attempt to create the directory. pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::DirectoryWasCreated); dir_ref_->MakeDirectory(cb); } void PnaclCoordinator::DirectoryWasCreated(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::DirectoryWasCreated (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_ERROR_FILEEXISTS && pp_error != PP_OK) { // Directory did not exist and could not be created. if (pp_error == PP_ERROR_NOACCESS) { ReportPpapiError( ERROR_PNACL_CACHE_DIRECTORY_CREATE, pp_error, "PNaCl translation cache directory creation/check failed " "(no access)."); return; } ReportPpapiError( ERROR_PNACL_CACHE_DIRECTORY_CREATE, pp_error, "PNaCl translation cache directory creation/check failed."); return; } if (cache_identity_ != "") { cached_nexe_file_.reset(new LocalTempFile(plugin_, file_system_.get(), nacl::string(kPnaclTempDir), cache_identity_)); pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::CachedFileDidOpen); cached_nexe_file_->OpenRead(cb); } else { // For now, tolerate lack of cache identity... CachedFileDidOpen(PP_ERROR_FAILED); } } void PnaclCoordinator::CachedFileDidOpen(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::CachedFileDidOpen (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error == PP_OK) { HistogramEnumerateTranslationCache(true); NexeReadDidOpen(PP_OK); return; } // Otherwise, the cache file is missing, or the cache simply // cannot be created (e.g., incognito mode), so we must translate. HistogramEnumerateTranslationCache(false); // Create the translation thread object immediately. This ensures that any // pieces of the file that get downloaded before the compilation thread // is accepting SRPCs won't get dropped. translate_thread_.reset(new PnaclTranslateThread()); if (translate_thread_ == NULL) { ReportNonPpapiError(ERROR_PNACL_THREAD_CREATE, "could not allocate translation thread."); return; } // We also want to open the object file now so the // translator can start writing to it during streaming translation. 
obj_file_.reset(new TempFile(plugin_)); pp::CompletionCallback obj_cb = callback_factory_.NewCallback(&PnaclCoordinator::ObjectFileDidOpen); obj_file_->Open(obj_cb); streaming_downloader_.reset(new FileDownloader()); streaming_downloader_->Initialize(plugin_); pp::CompletionCallback cb = callback_factory_.NewCallback( &PnaclCoordinator::BitcodeStreamDidFinish); if (!streaming_downloader_->OpenStream(pexe_url_, cb, this)) { ReportNonPpapiError(ERROR_PNACL_PEXE_FETCH_OTHER, nacl::string("failed to open stream ") + pexe_url_); } } void PnaclCoordinator::BitcodeStreamDidFinish(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::BitcodeStreamDidFinish (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { // Defer reporting the error and cleanup until after the translation // thread returns, because it may be accessing the coordinator's // objects or writing to the files. translate_finish_error_ = pp_error; if (pp_error == PP_ERROR_ABORTED) { error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_ABORTED, "PnaclCoordinator: pexe load failed (aborted)."); } if (pp_error == PP_ERROR_NOACCESS) { error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_NOACCESS, "PnaclCoordinator: pexe load failed (no access)."); } else { nacl::stringstream ss; ss << "PnaclCoordinator: pexe load failed (pp_error=" << pp_error << ")."; error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_OTHER, ss.str()); } translate_thread_->AbortSubprocesses(); } else { // Compare download completion pct (100% now), to compile completion pct. HistogramRatio("NaCl.Perf.PNaClLoadTime.PctCompiledWhenFullyDownloaded", pexe_bytes_compiled_, pexe_size_); } } void PnaclCoordinator::BitcodeStreamGotData(int32_t pp_error, FileStreamData data) { PLUGIN_PRINTF(("PnaclCoordinator::BitcodeStreamGotData (pp_error=%" NACL_PRId32", data=%p)\n", pp_error, data ? &(*data)[0] : 0)); DCHECK(translate_thread_.get()); translate_thread_->PutBytes(data, pp_error); // If pp_error > 0, then it represents the number of bytes received. if (data && pp_error > 0) { pexe_size_ += pp_error; } } StreamCallback PnaclCoordinator::GetCallback() { return callback_factory_.NewCallbackWithOutput( &PnaclCoordinator::BitcodeStreamGotData); } void PnaclCoordinator::BitcodeGotCompiled(int32_t pp_error, int64_t bytes_compiled) { // If we don't know the expected total yet, ask. pexe_bytes_compiled_ += bytes_compiled; if (expected_pexe_size_ == -1) { int64_t amount_downloaded; // dummy variable. streaming_downloader_->GetDownloadProgress(&amount_downloaded, &expected_pexe_size_); } bool length_computable = (expected_pexe_size_ != -1); plugin_->EnqueueProgressEvent(plugin::Plugin::kProgressEventProgress, pexe_url_, (length_computable ? plugin::Plugin::LENGTH_IS_COMPUTABLE : plugin::Plugin::LENGTH_IS_NOT_COMPUTABLE), pexe_bytes_compiled_, expected_pexe_size_); } pp::CompletionCallback PnaclCoordinator::GetCompileProgressCallback( int64_t bytes_compiled) { return callback_factory_.NewCallback(&PnaclCoordinator::BitcodeGotCompiled, bytes_compiled); } void PnaclCoordinator::GetCurrentProgress(int64_t* bytes_loaded, int64_t* bytes_total) { *bytes_loaded = pexe_bytes_compiled_; *bytes_total = expected_pexe_size_; } void PnaclCoordinator::ObjectFileDidOpen(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::ObjectFileDidOpen (pp_error=%" NACL_PRId32")\n", pp_error)); if (pp_error != PP_OK) { ReportPpapiError(ERROR_PNACL_CREATE_TEMP, pp_error, "Failed to open scratch object file."); return; } // Create the nexe file for connecting ld and sel_ldr. 
// Start translation when done with this last step of setup! temp_nexe_file_.reset(new TempFile(plugin_)); pp::CompletionCallback cb = callback_factory_.NewCallback(&PnaclCoordinator::RunTranslate); temp_nexe_file_->Open(cb); } void PnaclCoordinator::RunTranslate(int32_t pp_error) { PLUGIN_PRINTF(("PnaclCoordinator::RunTranslate (pp_error=%" NACL_PRId32")\n", pp_error)); // Invoke llc followed by ld off the main thread. This allows use of // blocking RPCs that would otherwise block the JavaScript main thread. pp::CompletionCallback report_translate_finished = callback_factory_.NewCallback(&PnaclCoordinator::TranslateFinished); CHECK(translate_thread_ != NULL); translate_thread_->RunTranslate(report_translate_finished, manifest_.get(), ld_manifest_.get(), obj_file_.get(), temp_nexe_file_.get(), &error_info_, resources_.get(), this, plugin_); } } // namespace plugin<|fim▁end|>
// Try to delete the partially written not-yet-committed cache file before // returning. We pass the current pp_error along so that it can be reported
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! The future site of an IRC server library.
//! The server module is currently unimplemented. Visit<|fim▁hole|>//! https://github.com/aatxe/irc/issues/22 to contribute!<|fim▁end|>
<|file_name|>test_state.py<|end_file_name|><|fim▁begin|>from fastpm.state import StateVector, Matter, Baryon, CDM, NCDM
from runtests.mpi import MPITest

from nbodykit.cosmology import Planck15 as cosmo

import numpy

BoxSize = 100.
Q = numpy.zeros((100, 3))

@MPITest([1, 4])
def test_create(comm):
    matter = Matter(cosmo, BoxSize, Q, comm)
    cdm = CDM(cosmo, BoxSize, Q, comm)
    cdm.a['S'] = 1.0
    cdm.a['P'] = 1.0
    baryon = Baryon(cosmo, BoxSize, Q, comm)
    baryon.a['S'] = 1.0
    baryon.a['P'] = 1.0
    state = StateVector(cosmo, {'0': baryon, '1' : cdm}, comm)<|fim▁hole|>
    state.save("state")<|fim▁end|>
    state.a['S'] = 1.0
    state.a['P'] = 1.0
<|file_name|>BitArray.cpp<|end_file_name|><|fim▁begin|>#include "BitArray.h"

namespace Rapid {

void BitArrayT::append(char const * Bytes, std::size_t Size)<|fim▁hole|>
std::size_t BitArrayT::size() const
{
	return mBytes.size() * 8;
}

bool BitArrayT::operator[](std::size_t Index) const
{
	auto ByteIndex = Index / 8;
	auto BitIndex = Index % 8;
	return static_cast<std::uint8_t>(mBytes[ByteIndex]) >> BitIndex & 0x1;
}

}<|fim▁end|>
{
	mBytes.append(Bytes, Size);
}
<|file_name|>auth.go<|end_file_name|><|fim▁begin|>// Copyright 2015 go-dockerclient authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package docker import ( "bytes" "encoding/base64" "encoding/json" "errors" "fmt" "io" "io/ioutil" "net/http" "os" "path" "strings" ) // ErrCannotParseDockercfg is the error returned by NewAuthConfigurations when the dockercfg cannot be parsed. var ErrCannotParseDockercfg = errors.New("failed to read authentication from dockercfg") // AuthConfiguration represents authentication options to use in the PushImage // method. It represents the authentication in the Docker index server. type AuthConfiguration struct { Username string `json:"username,omitempty"` Password string `json:"password,omitempty"` Email string `json:"email,omitempty"` ServerAddress string `json:"serveraddress,omitempty"` // IdentityToken can be supplied with the identitytoken response of the AuthCheck call // see https://pkg.go.dev/github.com/docker/docker/api/types?tab=doc#AuthConfig // It can be used in place of password not in conjunction with it IdentityToken string `json:"identitytoken,omitempty"` // RegistryToken can be supplied with the registrytoken RegistryToken string `json:"registrytoken,omitempty"` } func (c AuthConfiguration) isEmpty() bool { return c == AuthConfiguration{} } func (c AuthConfiguration) headerKey() string { return "X-Registry-Auth" } // AuthConfigurations represents authentication options to use for the // PushImage method accommodating the new X-Registry-Config header type AuthConfigurations struct { Configs map[string]AuthConfiguration `json:"configs"` } func (c AuthConfigurations) isEmpty() bool { return len(c.Configs) == 0 } func (c AuthConfigurations) headerKey() string { return "X-Registry-Config" } // AuthConfigurations119 is used to serialize a set of AuthConfigurations // for Docker API >= 1.19. type AuthConfigurations119 map[string]AuthConfiguration func (c AuthConfigurations119) isEmpty() bool { return len(c) == 0 } func (c AuthConfigurations119) headerKey() string { return "X-Registry-Config" } // dockerConfig represents a registry authentation configuration from the // .dockercfg file. type dockerConfig struct { Auth string `json:"auth"` Email string `json:"email"` IdentityToken string `json:"identitytoken"` RegistryToken string `json:"registrytoken"` } // NewAuthConfigurationsFromFile returns AuthConfigurations from a path containing JSON // in the same format as the .dockercfg file. func NewAuthConfigurationsFromFile(path string) (*AuthConfigurations, error) { r, err := os.Open(path) if err != nil { return nil, err } return NewAuthConfigurations(r) } func cfgPaths(dockerConfigEnv string, homeEnv string) []string { var paths []string if dockerConfigEnv != "" { paths = append(paths, path.Join(dockerConfigEnv, "plaintext-passwords.json")) paths = append(paths, path.Join(dockerConfigEnv, "config.json")) } if homeEnv != "" { paths = append(paths, path.Join(homeEnv, ".docker", "plaintext-passwords.json")) paths = append(paths, path.Join(homeEnv, ".docker", "config.json")) paths = append(paths, path.Join(homeEnv, ".dockercfg")) } return paths } // NewAuthConfigurationsFromDockerCfg returns AuthConfigurations from // system config files. 
The following files are checked in the order listed: // - $DOCKER_CONFIG/config.json if DOCKER_CONFIG set in the environment, // - $HOME/.docker/config.json // - $HOME/.dockercfg func NewAuthConfigurationsFromDockerCfg() (*AuthConfigurations, error) { err := fmt.Errorf("no docker configuration found") var auths *AuthConfigurations pathsToTry := cfgPaths(os.Getenv("DOCKER_CONFIG"), os.Getenv("HOME")) for _, path := range pathsToTry { auths, err = NewAuthConfigurationsFromFile(path) if err == nil { return auths, nil } } return auths, err } // NewAuthConfigurations returns AuthConfigurations from a JSON encoded string in the // same format as the .dockercfg file. func NewAuthConfigurations(r io.Reader) (*AuthConfigurations, error) { var auth *AuthConfigurations confs, err := parseDockerConfig(r) if err != nil { return nil, err } auth, err = authConfigs(confs) if err != nil { return nil, err } return auth, nil } func parseDockerConfig(r io.Reader) (map[string]dockerConfig, error) { buf := new(bytes.Buffer) buf.ReadFrom(r) byteData := buf.Bytes() confsWrapper := struct { Auths map[string]dockerConfig `json:"auths"` }{} if err := json.Unmarshal(byteData, &confsWrapper); err == nil { if len(confsWrapper.Auths) > 0 { return confsWrapper.Auths, nil } } var confs map[string]dockerConfig if err := json.Unmarshal(byteData, &confs); err != nil { return nil, err } return confs, nil } // authConfigs converts a dockerConfigs map to a AuthConfigurations object. func authConfigs(confs map[string]dockerConfig) (*AuthConfigurations, error) { c := &AuthConfigurations{ Configs: make(map[string]AuthConfiguration), } for reg, conf := range confs { if conf.Auth == "" { continue } data, err := base64.StdEncoding.DecodeString(conf.Auth) if err != nil { return nil, err } userpass := strings.SplitN(string(data), ":", 2) if len(userpass) != 2 { return nil, ErrCannotParseDockercfg } authConfig := AuthConfiguration{ Email: conf.Email, Username: userpass[0], Password: userpass[1], ServerAddress: reg, } // if identitytoken provided then zero the password and set it if conf.IdentityToken != "" { authConfig.Password = "" authConfig.IdentityToken = conf.IdentityToken } // if registrytoken provided then zero the password and set it if conf.RegistryToken != "" { authConfig.Password = ""<|fim▁hole|> authConfig.RegistryToken = conf.RegistryToken } c.Configs[reg] = authConfig } return c, nil } // AuthStatus returns the authentication status for Docker API versions >= 1.23. type AuthStatus struct { Status string `json:"Status,omitempty" yaml:"Status,omitempty" toml:"Status,omitempty"` IdentityToken string `json:"IdentityToken,omitempty" yaml:"IdentityToken,omitempty" toml:"IdentityToken,omitempty"` } // AuthCheck validates the given credentials. It returns nil if successful. // // For Docker API versions >= 1.23, the AuthStatus struct will be populated, otherwise it will be empty.` // // See https://goo.gl/6nsZkH for more details. func (c *Client) AuthCheck(conf *AuthConfiguration) (AuthStatus, error) { var authStatus AuthStatus if conf == nil { return authStatus, errors.New("conf is nil") } resp, err := c.do(http.MethodPost, "/auth", doOptions{data: conf}) if err != nil { return authStatus, err } defer resp.Body.Close() data, err := ioutil.ReadAll(resp.Body) if err != nil { return authStatus, err } if len(data) == 0 { return authStatus, nil } if err := json.Unmarshal(data, &authStatus); err != nil { return authStatus, err } return authStatus, nil }<|fim▁end|>
<|file_name|>test_site_yearly_aggregator.py<|end_file_name|><|fim▁begin|>__author__ = 'Bohdan Mushkevych' import unittest from settings import enable_test_mode enable_test_mode() from db.model.raw_data import DOMAIN_NAME, TIMEPERIOD from constants import PROCESS_SITE_YEARLY from tests import monthly_fixtures<|fim▁hole|>from tests import yearly_fixtures from tests.test_abstract_worker import AbstractWorkerUnitTest from workers.site_yearly_aggregator import SiteYearlyAggregator class SiteYearlyAggregatorUnitTest(AbstractWorkerUnitTest): def virtual_set_up(self): super(SiteYearlyAggregatorUnitTest, self).constructor(baseclass=SiteYearlyAggregator, process_name=PROCESS_SITE_YEARLY, output_prefix='EXPECTED_SITE_YEARLY', output_module=yearly_fixtures, generate_output=False, compare_results=True) monthly_fixtures.clean_site_entries() return monthly_fixtures.generated_site_entries() def virtual_tear_down(self): monthly_fixtures.clean_site_entries() def _get_key(self, obj): return obj[DOMAIN_NAME], obj[TIMEPERIOD] def test_aggregation(self): super(SiteYearlyAggregatorUnitTest, self).perform_aggregation() if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>decoder.py<|end_file_name|><|fim▁begin|>import logging import os import subprocess import threading import util logger = logging.getLogger('rt.decoder') class Decoder: def close(self, force=False): if not force: self.lock.acquire() self.decoder.stdin.close() self.decoder.wait() if not force: self.lock.release() def decode(self, sentence, grammar=None): '''Threadsafe, FIFO''' self.lock.acquire() input = '<seg grammar="{g}">{s}</seg>\n'.format(s=sentence, g=grammar) if grammar else '{}\n'.format(sentence) self.decoder.stdin.write(input) hyp = self.decoder.stdout.readline().strip() self.lock.release() return hyp class CdecDecoder(Decoder): def __init__(self, config, weights): cdec_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) decoder = os.path.join(cdec_root, 'decoder', 'cdec') decoder_cmd = [decoder, '-c', config, '-w', weights] logger.info('Executing: {}'.format(' '.join(decoder_cmd))) self.decoder = util.popen_io(decoder_cmd) self.lock = util.FIFOLock() class MIRADecoder(Decoder): def __init__(self, config, weights, metric='ibm_bleu'): cdec_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) mira = os.path.join(cdec_root, 'training', 'mira', 'kbest_cut_mira') # optimizer=2 step=0.001 best=500, k=500, uniq, stream, metric mira_cmd = [mira, '-c', config, '-w', weights, '-o', '2', '-C', '0.001', '-b', '500', '-k', '500', '-u', '-t', '-m', metric] logger.info('Executing: {}'.format(' '.join(mira_cmd))) self.decoder = util.popen_io(mira_cmd) self.lock = util.FIFOLock() def get_weights(self): '''Threadsafe, FIFO''' self.lock.acquire() self.decoder.stdin.write('WEIGHTS ||| WRITE\n') weights = self.decoder.stdout.readline().strip() self.lock.release() return weights def set_weights(self, w_line): '''Threadsafe, FIFO''' self.lock.acquire() try: # Check validity for w_str in w_line.split(): (k, v) = w_str.split('=') float(v) self.decoder.stdin.write('WEIGHTS ||| {}\n'.format(w_line)) self.lock.release() except: self.lock.release()<|fim▁hole|> def update(self, sentence, grammar, reference): '''Threadsafe, FIFO''' self.lock.acquire() input = 'LEARN ||| <seg grammar="{g}">{s}</seg> ||| {r}\n'.format(s=sentence, g=grammar, r=reference) self.decoder.stdin.write(input) log = self.decoder.stdout.readline().strip() self.lock.release() return log<|fim▁end|>
raise Exception('Invalid weights line: {}'.format(w_line))
<|file_name|>TopicAdapter.java<|end_file_name|><|fim▁begin|>package com.example.denizalp.thefirst; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.Button;<|fim▁hole|> import com.bounswe.group7.model.Topics; import org.w3c.dom.Text; import java.util.List; /** * Created by denizalp on 18/12/16. */ public class TopicAdapter extends BaseAdapter { private LayoutInflater mInflater; private List<Topics> mTopicList; private String currentToken; private Activity activity; public TopicAdapter(Activity activity, List<Topics> topicList, String currentToken){ this.activity = activity; mInflater = (LayoutInflater) activity.getSystemService(Context.LAYOUT_INFLATER_SERVICE); mTopicList = topicList; this.currentToken = currentToken; } @Override public int getCount() { return mTopicList.size(); } @Override public Object getItem(int position) { return mTopicList.get(position); } @Override public long getItemId(int position) { return mTopicList.get(position).getTopicId(); } @Override public View getView(int position, View convertView, ViewGroup parent) { View singleTopicView = mInflater.inflate(R.layout.activity_single_topic, null); Topics topic = mTopicList.get(position); TextView textView = (TextView) singleTopicView.findViewById(R.id.textView8); Button button = (Button) singleTopicView.findViewById(R.id.button38); Intent toTopic = new Intent(activity, ShowTopicPage.class); textView.setText(topic.getHeader()); button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { toTopic.putExtra("topicId",topic.getTopicId()); activity.startActivity(toTopic); } }); return singleTopicView; } }<|fim▁end|>
import android.widget.TextView;
<|file_name|>NonAsciiCharacterFix.java<|end_file_name|><|fim▁begin|>package uk.ac.ebi.embl.api.validation.fixer.entry; import uk.ac.ebi.embl.api.entry.Entry; import uk.ac.ebi.embl.api.entry.Text; import uk.ac.ebi.embl.api.entry.feature.Feature; import uk.ac.ebi.embl.api.entry.qualifier.Qualifier; import uk.ac.ebi.embl.api.entry.reference.Person; import uk.ac.ebi.embl.api.entry.reference.Reference; import uk.ac.ebi.embl.api.validation.Severity; import uk.ac.ebi.embl.api.validation.ValidationResult; import uk.ac.ebi.embl.api.validation.ValidationScope; import uk.ac.ebi.embl.api.validation.annotation.Description; import uk.ac.ebi.embl.api.validation.annotation.ExcludeScope; import uk.ac.ebi.embl.api.validation.check.entry.EntryValidationCheck; import uk.ac.ebi.embl.api.validation.helper.Utils; /** * Fix works for certain non-ascii characters only. Check Utils.removeAccents limitations. * If it is not possible to transliterate certain chars, it will be caught in and rejected * by AsciiCharacterCheck. */ @Description("Non-ascii characters fixed from \"{0}\" to \"{1}\".") @ExcludeScope(validationScope = {ValidationScope.NCBI, ValidationScope.NCBI_MASTER}) public class NonAsciiCharacterFix extends EntryValidationCheck { private static final String ASCII_CHARACTER_FIX = "AsciiCharacterFix_1"; public ValidationResult check(Entry entry) { result = new ValidationResult(); if (entry == null) return result; attemptFix(entry.getComment()); attemptFix(entry.getDescription()); for (Reference reference : entry.getReferences()) { if (reference.getPublication() != null) { String pubTitle = reference.getPublication().getTitle(); if (pubTitle != null) { String fixedPubTitle = fixedStr(pubTitle); if (!fixedPubTitle.equals(pubTitle)) { reference.getPublication().setTitle(fixedPubTitle); reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, pubTitle, fixedPubTitle); } } if (reference.getPublication().getAuthors() != null) { for (Person author : reference.getPublication().getAuthors()) { String firstName = author.getFirstName(); if (firstName != null) { String fixedFirstName = fixedStr(firstName); if (!fixedFirstName.equals(firstName)) { author.setFirstName(fixedFirstName); reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, firstName, fixedFirstName); } } String surname = author.getSurname(); if (surname != null) { String fixedSurname = fixedStr(surname); if (!fixedSurname.equals(surname)) { author.setSurname(fixedSurname); reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, surname, fixedSurname);<|fim▁hole|> } } } for (Feature feature : entry.getFeatures()) { for (Qualifier qualifier : feature.getQualifiers()) { if (qualifier.getName().equals(Qualifier.COUNTRY_QUALIFIER_NAME) || qualifier.getName().equals(Qualifier.ISOLATE_QUALIFIER_NAME) ) { String qualifierValue = qualifier.getValue(); if (qualifierValue != null) { String fixedVal = fixedStr(qualifierValue); if (!fixedVal.equals(qualifierValue)) { qualifier.setValue(fixedVal); reportMessage(Severity.FIX, qualifier.getOrigin(), ASCII_CHARACTER_FIX, qualifierValue, fixedVal); } } } } } return result; } private void attemptFix(Text text) { if (text != null && text.getText() != null) { if (Utils.hasNonAscii(text.getText())) { String fixed = Utils.removeAccents(text.getText()); if (!fixed.equals(text.getText())) { text.setText(fixed); reportMessage(Severity.FIX, text.getOrigin(), ASCII_CHARACTER_FIX, text.getText(), fixed); } } } } private String fixedStr(String str) { if (Utils.hasNonAscii(str)) { return 
Utils.removeAccents(str); } return str; } }<|fim▁end|>
} } }
<|file_name|>TemplateComponent.js<|end_file_name|><|fim▁begin|>/*! * Copyright 2002 - 2015 Webdetails, a Pentaho company. All rights reserved. * * This software was developed by Webdetails and is provided under the terms * of the Mozilla Public License, Version 2.0, or any later version. You may not use * this file except in compliance with the license. If you need a copy of the license, * please go to http://mozilla.org/MPL/2.0/. The Initial Developer is Webdetails. * * Software distributed under the Mozilla Public License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to * the license for the specific language governing your rights and limitations. */ define([ './UnmanagedComponent', '../dashboard/Utils', '../Logger', '../lib/jquery', 'amd!../lib/underscore', 'amd!../lib/mustache-wax', '../addIns/templateTypes', 'css!./TemplateComponent' ], function(UnmanagedComponent, Utils, Logger, $, _, Mustache) { return UnmanagedComponent.extend({ defaults: { templateType: 'mustache', template: '<div>{{items}}</div>', rootElement: 'items', formatters: {}, events: [], postProcess: function() {} }, messages: { error: { noData: "No data available.", invalidTemplate: "Invalid template.", invalidTemplateType: "Invalid template type.", generic: "Invalid options defined. Please check the template component properties." }, success: {}, warning: {}, info: {}, config: { style: { success: {icon: "comment", type: "success"}, error: {icon: "remove-sign", type: "danger"}, info: {icon: "info-sign", type: "info"}, warning: {icon: "exclamation-sign", style: "warning"} }, template: "<div class='alert alert-<%=type%>' role='alert'>" + " <span class='glyphicon glyphicon-<%=icon%>' aria-hidden='true'></span> " + " <span> <%=msg%> </span>" + "</div>" } }, init: function() { $.extend(true, this, Utils.ev(this.extendableOptions)); $.extend(true, this.defaults, Utils.ev(this.options)); }, update: function() { _.bindAll(this, 'redraw', 'init', 'processData', 'renderTemplate', 'attachEvents', 'processMessage', 'template', 'applyFormatter', 'applyAddin', 'processAddins'); this.init(); this.triggerQuery(this.chartDefinition, this.redraw); }, redraw: function(data) { this.model = this.processData(data); var htmlResult = this.renderTemplate(this.template, this.templateType, this.model); var $target = this.placeholder();<|fim▁hole|> this.attachEvents(this.eventSelector, this.eventType, this.eventHandler); } }, getUID: function() { return 'xxxxxxxx'.replace(/[xy]/g, function(c) { var r = Math.random() * 16 | 0, v = c === 'x' ? 
r : (r & 0x3 | 0x8); return v.toString(16); }); }, applyFormatter: function(model, formatter, id) { var formatHandler = Utils.propertiesArrayToObject(this.formatters)[formatter]; if(_.isFunction(formatHandler)) { return formatHandler.call(this, model, id); } else { return model; } }, applyAddin: function(model, addin, id) { var UID = this.name + "_" + addin + this.getUID(); this.addins = this.addins || []; this.addins.push({uid: UID, model: model, addin: addin, id: id}); return '<div id="' + UID + '" class="' + addin + '"/>'; }, processAddins: function($target, data) { var myself = this; _.each(this.addins, function(elem) { myself.handleAddin(_.first($target.find('#' + elem.uid)), elem.model, elem.addin, data, elem.id); }); }, handleAddin: function(target, model, addInName, data, id) { var addIn = this.getAddIn("templateType", addInName); var state = {value: model, data: data, id: id}; addIn.call(target, state, this.getAddInOptions("templateType", addIn.getName())); }, // Transform qyeryResult.dataset to JSON format to be used in Templates processData: function(queryResult) { if(!_.isFunction(this.modelHandler)) { var hasData = queryResult.queryInfo != null ? queryResult.queryInfo.totalRows > 0 : queryResult.resultset.length > 0; if(hasData) { var data = []; _.each(queryResult.resultset, function(row) { data.push(_.extend({}, row)); }); var model = {}; model[this.rootElement] = data; return model; } else { return ""; } } else { return this.modelHandler(queryResult); } }, // Apply template based on the result of a query. Creates a template based (mustache or underscore) view data object and apply columns format renderTemplate: function(template, templateType, model) { var html = ""; var myself = this; if((!_.isEmpty(model))) { var helpers = { formatter: function(data, formatter, id) { return myself.applyFormatter(data, formatter, id); }, addin: function(data, addin, id) { return myself.applyAddin(data, addin, id); } }; try { switch(templateType.toUpperCase()) { case 'UNDERSCORE': model = _.defaults({}, model, Utils.propertiesArrayToObject(helpers)); html = _.template(Utils.ev(template), model); break; case 'MUSTACHE': Mustache.Formatters = helpers; html = Mustache.render(Utils.ev(template), model); break; default: html = this.processMessage('invalidTemplateType', 'error'); break; } } catch(e) { html = this.processMessage('invalidTemplate', 'error'); } } else { html = this.processMessage('noData', 'error'); } return html; }, // bind click to created cards attachEvents: function() { var myself = this; _.each(this.events, function(elem) { var separator = ',', handler = _.first(elem).split(separator), eventHandler = _.last(elem), event = _.first(handler).trim(), selector = _.last(handler).trim(); if(_.isFunction(eventHandler)) { myself.placeholder(selector).on(event, _.bind(eventHandler, myself)); } }); }, processMessage: function(message, type) { var completeMsg = { msg: this.messages[type][message] || message || "", type: this.messages.config.style[type].type || "info", icon: this.messages.config.style[type].icon || "comment" }; Logger.log(completeMsg.msg, type); return _.template(this.messages.config.template, completeMsg); } }); });<|fim▁end|>
$target.empty().append(htmlResult); this.processAddins($target, data); if(!_.isEmpty(this.events)) {
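The completion above finishes the redraw step of TemplateComponent.js, and the surrounding prompt shows a getUID helper that builds a short random hex id by substituting 'x' and 'y' placeholders. A rough Python rendering of that substitution idea, assuming only the standard library; the function name is illustrative:

import random

def get_uid(pattern="xxxxxxxx"):
    """Replace each 'x' with a random hex digit and each 'y' with one of 8..b,
    mirroring the JavaScript replace callback in the prompt above."""
    out = []
    for c in pattern:
        r = random.randrange(16)
        if c == "x":
            out.append(format(r, "x"))
        elif c == "y":
            out.append(format((r & 0x3) | 0x8, "x"))
        else:
            out.append(c)
    return "".join(out)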
<|file_name|>test_session.py<|end_file_name|><|fim▁begin|>""" Tests for login and logout. """ import datetime from unittest.mock import patch import responses import quilt3 from .utils import QuiltTestCase class TestSession(QuiltTestCase): @patch('quilt3.session.open_url') @patch('quilt3.session.input', return_value='123456') @patch('quilt3.session.login_with_token') def test_login(self, mock_login_with_token, mock_input, mock_open_url): quilt3.login() url = quilt3.session.get_registry_url() mock_open_url.assert_called_with(f'{url}/login') mock_login_with_token.assert_called_with('123456') @patch('quilt3.session._save_auth') @patch('quilt3.session._save_credentials') def test_login_with_token(self, mock_save_credentials, mock_save_auth): url = quilt3.session.get_registry_url() mock_auth = dict( refresh_token='refresh-token', access_token='access-token', expires_at=123456789 ) self.requests_mock.add( responses.POST, f'{url}/api/token', json=mock_auth, status=200 ) self.requests_mock.add( responses.GET, f'{url}/api/auth/get_credentials', json=dict( AccessKeyId='access-key', SecretAccessKey='secret-key', SessionToken='session-token', Expiration="2019-05-28T23:58:07+00:00" ), status=200 ) quilt3.session.login_with_token('123456') mock_save_auth.assert_called_with({url: mock_auth}) mock_save_credentials.assert_called_with(dict( access_key='access-key', secret_key='secret-key', token='session-token', expiry_time="2019-05-28T23:58:07+00:00" )) @patch('quilt3.session._save_credentials') @patch('quilt3.session._load_credentials') def test_create_botocore_session(self, mock_load_credentials, mock_save_credentials): def format_date(date): return date.replace(tzinfo=datetime.timezone.utc, microsecond=0).isoformat() # Test good credentials. future_date = datetime.datetime.utcnow() + datetime.timedelta(hours=1) mock_load_credentials.return_value = dict( access_key='access-key', secret_key='secret-key', token='session-token', expiry_time=format_date(future_date) ) session = quilt3.session.create_botocore_session() credentials = session.get_credentials() assert credentials.access_key == 'access-key' assert credentials.secret_key == 'secret-key' assert credentials.token == 'session-token' mock_save_credentials.assert_not_called() # Test expired credentials. 
past_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) mock_load_credentials.return_value = dict( access_key='access-key', secret_key='secret-key', token='session-token', expiry_time=format_date(past_date) ) url = quilt3.session.get_registry_url() self.requests_mock.add( responses.GET, f'{url}/api/auth/get_credentials', json=dict( AccessKeyId='access-key2', SecretAccessKey='secret-key2', SessionToken='session-token2', Expiration=format_date(future_date) ), status=200 ) session = quilt3.session.create_botocore_session()<|fim▁hole|> assert credentials.access_key == 'access-key2' assert credentials.secret_key == 'secret-key2' assert credentials.token == 'session-token2' mock_save_credentials.assert_called() def test_logged_in(self): registry_url = quilt3.session.get_registry_url() other_registry_url = registry_url + 'other' mock_auth = dict( refresh_token='refresh-token', access_token='access-token', expires_at=123456789, ) with patch('quilt3.session._load_auth', return_value={registry_url: mock_auth}) as mocked_load_auth: assert quilt3.logged_in() == 'https://example.com' mocked_load_auth.assert_called_once() with patch('quilt3.session._load_auth', return_value={other_registry_url: mock_auth}) as mocked_load_auth: assert quilt3.logged_in() is None mocked_load_auth.assert_called_once()<|fim▁end|>
credentials = session.get_credentials()
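The test above drives the credential-refresh path: stored credentials carry an ISO-8601 expiry_time, and a new botocore session is expected to fetch fresh keys once that timestamp is in the past. A small sketch of that expiry check under the same timestamp format the test produces; the helper name is not part of quilt3:

import datetime

def needs_refresh(expiry_time):
    """True when the stored ISO-8601 expiry timestamp is already in the past."""
    expires = datetime.datetime.fromisoformat(expiry_time)
    now = datetime.datetime.now(datetime.timezone.utc)
    return expires <= now

# e.g. needs_refresh("2019-05-28T23:58:07+00:00") -> True by now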
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Keras initializer serialization / deserialization.""" import tensorflow.compat.v2 as tf import threading from tensorflow.python import tf2 from keras.initializers import initializers_v1 from keras.initializers import initializers_v2 from keras.utils import generic_utils from keras.utils import tf_inspect as inspect from tensorflow.python.ops import init_ops from tensorflow.python.util.tf_export import keras_export # LOCAL.ALL_OBJECTS is meant to be a global mutable. Hence we need to make it # thread-local to avoid concurrent mutations. LOCAL = threading.local() def populate_deserializable_objects(): """Populates dict ALL_OBJECTS with every built-in initializer. """ global LOCAL if not hasattr(LOCAL, 'ALL_OBJECTS'): LOCAL.ALL_OBJECTS = {} LOCAL.GENERATED_WITH_V2 = None if LOCAL.ALL_OBJECTS and LOCAL.GENERATED_WITH_V2 == tf.__internal__.tf2.enabled(): # Objects dict is already generated for the proper TF version: # do nothing. return LOCAL.ALL_OBJECTS = {} LOCAL.GENERATED_WITH_V2 = tf.__internal__.tf2.enabled() <|fim▁hole|> LOCAL.ALL_OBJECTS['HeNormalV2'] = initializers_v2.HeNormal LOCAL.ALL_OBJECTS['HeUniformV2'] = initializers_v2.HeUniform LOCAL.ALL_OBJECTS['IdentityV2'] = initializers_v2.Identity LOCAL.ALL_OBJECTS['LecunNormalV2'] = initializers_v2.LecunNormal LOCAL.ALL_OBJECTS['LecunUniformV2'] = initializers_v2.LecunUniform LOCAL.ALL_OBJECTS['OnesV2'] = initializers_v2.Ones LOCAL.ALL_OBJECTS['OrthogonalV2'] = initializers_v2.Orthogonal LOCAL.ALL_OBJECTS['RandomNormalV2'] = initializers_v2.RandomNormal LOCAL.ALL_OBJECTS['RandomUniformV2'] = initializers_v2.RandomUniform LOCAL.ALL_OBJECTS['TruncatedNormalV2'] = initializers_v2.TruncatedNormal LOCAL.ALL_OBJECTS['VarianceScalingV2'] = initializers_v2.VarianceScaling LOCAL.ALL_OBJECTS['ZerosV2'] = initializers_v2.Zeros # Out of an abundance of caution we also include these aliases that have # a non-zero probability of having been included in saved configs in the past. LOCAL.ALL_OBJECTS['glorot_normalV2'] = initializers_v2.GlorotNormal LOCAL.ALL_OBJECTS['glorot_uniformV2'] = initializers_v2.GlorotUniform LOCAL.ALL_OBJECTS['he_normalV2'] = initializers_v2.HeNormal LOCAL.ALL_OBJECTS['he_uniformV2'] = initializers_v2.HeUniform LOCAL.ALL_OBJECTS['lecun_normalV2'] = initializers_v2.LecunNormal LOCAL.ALL_OBJECTS['lecun_uniformV2'] = initializers_v2.LecunUniform if tf.__internal__.tf2.enabled(): # For V2, entries are generated automatically based on the content of # initializers_v2.py. v2_objs = {} base_cls = initializers_v2.Initializer generic_utils.populate_dict_with_module_objects( v2_objs, [initializers_v2], obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls)) for key, value in v2_objs.items(): LOCAL.ALL_OBJECTS[key] = value # Functional aliases. 
LOCAL.ALL_OBJECTS[generic_utils.to_snake_case(key)] = value else: # V1 initializers. v1_objs = { 'Constant': tf.compat.v1.constant_initializer, 'GlorotNormal': tf.compat.v1.glorot_normal_initializer, 'GlorotUniform': tf.compat.v1.glorot_uniform_initializer, 'Identity': tf.compat.v1.initializers.identity, 'Ones': tf.compat.v1.ones_initializer, 'Orthogonal': tf.compat.v1.orthogonal_initializer, 'VarianceScaling': tf.compat.v1.variance_scaling_initializer, 'Zeros': tf.compat.v1.zeros_initializer, 'HeNormal': initializers_v1.HeNormal, 'HeUniform': initializers_v1.HeUniform, 'LecunNormal': initializers_v1.LecunNormal, 'LecunUniform': initializers_v1.LecunUniform, 'RandomNormal': initializers_v1.RandomNormal, 'RandomUniform': initializers_v1.RandomUniform, 'TruncatedNormal': initializers_v1.TruncatedNormal, } for key, value in v1_objs.items(): LOCAL.ALL_OBJECTS[key] = value # Functional aliases. LOCAL.ALL_OBJECTS[generic_utils.to_snake_case(key)] = value # More compatibility aliases. LOCAL.ALL_OBJECTS['normal'] = LOCAL.ALL_OBJECTS['random_normal'] LOCAL.ALL_OBJECTS['uniform'] = LOCAL.ALL_OBJECTS['random_uniform'] LOCAL.ALL_OBJECTS['one'] = LOCAL.ALL_OBJECTS['ones'] LOCAL.ALL_OBJECTS['zero'] = LOCAL.ALL_OBJECTS['zeros'] # For backwards compatibility, we populate this file with the objects # from ALL_OBJECTS. We make no guarantees as to whether these objects will # using their correct version. populate_deserializable_objects() globals().update(LOCAL.ALL_OBJECTS) # Utility functions @keras_export('keras.initializers.serialize') def serialize(initializer): return generic_utils.serialize_keras_object(initializer) @keras_export('keras.initializers.deserialize') def deserialize(config, custom_objects=None): """Return an `Initializer` object from its config.""" populate_deserializable_objects() return generic_utils.deserialize_keras_object( config, module_objects=LOCAL.ALL_OBJECTS, custom_objects=custom_objects, printable_module_name='initializer') @keras_export('keras.initializers.get') def get(identifier): """Retrieve a Keras initializer by the identifier. The `identifier` may be the string name of a initializers function or class ( case-sensitively). >>> identifier = 'Ones' >>> tf.keras.initializers.deserialize(identifier) <...keras.initializers.initializers_v2.Ones...> You can also specify `config` of the initializer to this function by passing dict containing `class_name` and `config` as an identifier. Also note that the `class_name` must map to a `Initializer` class. >>> cfg = {'class_name': 'Ones', 'config': {}} >>> tf.keras.initializers.deserialize(cfg) <...keras.initializers.initializers_v2.Ones...> In the case that the `identifier` is a class, this method will return a new instance of the class by its constructor. Args: identifier: String or dict that contains the initializer name or configurations. Returns: Initializer instance base on the input identifier. Raises: ValueError: If the input identifier is not a supported type or in a bad format. """ if identifier is None: return None if isinstance(identifier, dict): return deserialize(identifier) elif isinstance(identifier, str): identifier = str(identifier) return deserialize(identifier) elif callable(identifier): if inspect.isclass(identifier): identifier = identifier() return identifier else: raise ValueError('Could not interpret initializer identifier: ' + str(identifier))<|fim▁end|>
# Compatibility aliases (need to exist in both V1 and V2). LOCAL.ALL_OBJECTS['ConstantV2'] = initializers_v2.Constant LOCAL.ALL_OBJECTS['GlorotNormalV2'] = initializers_v2.GlorotNormal LOCAL.ALL_OBJECTS['GlorotUniformV2'] = initializers_v2.GlorotUniform
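The completion above registers every V2 initializer under both its class name and a snake_case functional alias produced by generic_utils.to_snake_case. A rough sketch of that aliasing step with a stand-in CamelCase-to-snake_case converter; the converter is an assumption about the conversion, not the Keras implementation:

import re

def to_snake_case(name):
    """Rough CamelCase -> snake_case, e.g. 'GlorotUniform' -> 'glorot_uniform'."""
    s = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", s).lower()

ALL_OBJECTS = {}
for cls_name in ["GlorotUniform", "HeNormal", "RandomUniform"]:
    ALL_OBJECTS[cls_name] = cls_name                   # stand-in for the class object
    ALL_OBJECTS[to_snake_case(cls_name)] = cls_name    # functional alias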
<|file_name|>ASTTypeCast.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query.internal.parse; import antlr.*; import org.apache.geode.cache.query.internal.QCompiler; <|fim▁hole|> public ASTTypeCast() {} public ASTTypeCast(Token t) { super(t); } @Override public void compile(QCompiler compiler) { super.compile(compiler); // there's a type on the stack now compiler.typecast(); } }<|fim▁end|>
public class ASTTypeCast extends GemFireAST { private static final long serialVersionUID = -6368577668325776355L;
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>extern crate central; #[macro_use] extern crate log; extern crate env_logger; use central::types::{Storage, Handle}; use std::collections::HashMap; use std::time::Duration; use std::thread; use std::convert::Into; #[derive(Debug)] pub struct Credential { pub user: String, pub password: String <|fim▁hole|> format!("{}:{}", self.user, self.password) } } #[test] fn test_central_publish() { let _ = env_logger::init(); let credential = Credential { user: String::from("test"), password: String::from("test") }; let mut handlers : HashMap<String, Handle> = HashMap::new(); handlers.insert(String::from("hello"), Box::new(move |value: Result<String, ()>| { assert_eq!(value, Ok(String::from("test:test"))); })); let mut store = central::storage::RedisStore::create("redis://127.0.0.1", "central", handlers).unwrap(); store.set("hello", credential); let value = store.get("hello").unwrap(); assert_eq!(value, "test:test"); thread::sleep(Duration::from_secs(1)); }<|fim▁end|>
} impl Into<String> for Credential { fn into(self) -> String {
<|file_name|>pyminer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright (c) 2011 The Bitcoin developers // Copyright (c) 2014 Dyffy, Inc. # Distributed under the MIT/X11 software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # import time import json import pprint import hashlib import struct import re import base64 import httplib import sys from multiprocessing import Process ERR_SLEEP = 15 MAX_NONCE = 1000000L settings = {} pp = pprint.PrettyPrinter(indent=4) class SidecoinRPC: OBJID = 1 def __init__(self, host, port, username, password): authpair = "%s:%s" % (username, password) self.authhdr = "Basic %s" % (base64.b64encode(authpair)) self.conn = httplib.HTTPConnection(host, port, False, 30) def rpc(self, method, params=None): self.OBJID += 1 obj = { 'version' : '1.1', 'method' : method, 'id' : self.OBJID } if params is None: obj['params'] = [] else: obj['params'] = params self.conn.request('POST', '/', json.dumps(obj), { 'Authorization' : self.authhdr, 'Content-type' : 'application/json' }) resp = self.conn.getresponse() if resp is None: print "JSON-RPC: no response" return None body = resp.read() resp_obj = json.loads(body) if resp_obj is None: print "JSON-RPC: cannot JSON-decode body" return None if 'error' in resp_obj and resp_obj['error'] != None: return resp_obj['error'] if 'result' not in resp_obj: print "JSON-RPC: no result in object" return None return resp_obj['result'] def getblockcount(self): return self.rpc('getblockcount') def getwork(self, data=None): return self.rpc('getwork', data) def uint32(x): return x & 0xffffffffL def bytereverse(x): return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) | (((x) >> 8) & 0x0000ff00) | ((x) >> 24) )) def bufreverse(in_buf): out_words = [] for i in range(0, len(in_buf), 4): word = struct.unpack('@I', in_buf[i:i+4])[0] out_words.append(struct.pack('@I', bytereverse(word))) return ''.join(out_words) def wordreverse(in_buf): out_words = [] for i in range(0, len(in_buf), 4): out_words.append(in_buf[i:i+4]) out_words.reverse() return ''.join(out_words) class Miner: def __init__(self, id): self.id = id self.max_nonce = MAX_NONCE def work(self, datastr, targetstr): # decode work data hex string to binary static_data = datastr.decode('hex') static_data = bufreverse(static_data) # the first 76b of 80b do not change blk_hdr = static_data[:76] # decode 256-bit target value targetbin = targetstr.decode('hex') targetbin = targetbin[::-1] # byte-swap and dword-swap targetbin_str = targetbin.encode('hex') target = long(targetbin_str, 16) # pre-hash first 76b of block header static_hash = hashlib.sha256() static_hash.update(blk_hdr) for nonce in xrange(self.max_nonce): # encode 32-bit nonce value nonce_bin = struct.pack("<I", nonce) # hash final 4b, the nonce value hash1_o = static_hash.copy() hash1_o.update(nonce_bin) hash1 = hash1_o.digest() # sha256 hash of sha256 hash hash_o = hashlib.sha256() hash_o.update(hash1) hash = hash_o.digest() # quick test for winning solution: high 32 bits zero? 
if hash[-4:] != '\0\0\0\0': continue # convert binary hash to 256-bit Python long hash = bufreverse(hash) hash = wordreverse(hash) hash_str = hash.encode('hex') l = long(hash_str, 16) # proof-of-work test: hash < target if l < target: print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,) return (nonce + 1, nonce_bin) else: print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,) # return (nonce + 1, nonce_bin) return (nonce + 1, None) def submit_work(self, rpc, original_data, nonce_bin): nonce_bin = bufreverse(nonce_bin) nonce = nonce_bin.encode('hex') solution = original_data[:152] + nonce + original_data[160:256] param_arr = [ solution ] result = rpc.getwork(param_arr) print time.asctime(), "--> Upstream RPC result:", result def iterate(self, rpc): work = rpc.getwork() if work is None: time.sleep(ERR_SLEEP) return if 'data' not in work or 'target' not in work: time.sleep(ERR_SLEEP) return time_start = time.time() (hashes_done, nonce_bin) = self.work(work['data'], work['target']) time_end = time.time() time_diff = time_end - time_start self.max_nonce = long( (hashes_done * settings['scantime']) / time_diff) if self.max_nonce > 0xfffffffaL: self.max_nonce = 0xfffffffaL if settings['hashmeter']: print "HashMeter(%d): %d hashes, %.2f Khash/sec" % ( self.id, hashes_done, (hashes_done / 1000.0) / time_diff) if nonce_bin is not None: self.submit_work(rpc, work['data'], nonce_bin) def loop(self): rpc = SidecoinRPC(settings['host'], settings['port'], settings['rpcuser'], settings['rpcpass']) if rpc is None: return while True: self.iterate(rpc) def miner_thread(id): miner = Miner(id) miner.loop() if __name__ == '__main__': if len(sys.argv) != 2: print "Usage: pyminer.py CONFIG-FILE" sys.exit(1) f = open(sys.argv[1]) for line in f: # skip comment lines m = re.search('^\s*#', line) if m: continue # parse key=value lines m = re.search('^(\w+)\s*=\s*(\S.*)$', line) if m is None: continue settings[m.group(1)] = m.group(2) f.close() if 'host' not in settings: settings['host'] = '127.0.0.1' if 'port' not in settings: settings['port'] = 8332 if 'threads' not in settings: settings['threads'] = 1<|fim▁hole|> settings['scantime'] = 30L if 'rpcuser' not in settings or 'rpcpass' not in settings: print "Missing username and/or password in cfg file" sys.exit(1) settings['port'] = int(settings['port']) settings['threads'] = int(settings['threads']) settings['hashmeter'] = int(settings['hashmeter']) settings['scantime'] = long(settings['scantime']) thr_list = [] for thr_id in range(settings['threads']): p = Process(target=miner_thread, args=(thr_id,)) p.start() thr_list.append(p) time.sleep(1) # stagger threads print settings['threads'], "mining threads started" print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port']) try: for thr_proc in thr_list: thr_proc.join() except KeyboardInterrupt: pass print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])<|fim▁end|>
if 'hashmeter' not in settings: settings['hashmeter'] = 0 if 'scantime' not in settings:
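The mining loop above appends a packed 32-bit nonce to a 76-byte header, double SHA-256 hashes it, and accepts the nonce when the byte-reversed digest, read as a 256-bit integer, falls below the target. A condensed sketch of that acceptance test, assuming the header bytes are already in the byte order the script prepares; the helper name is illustrative:

import hashlib
import struct

def meets_target(blk_hdr_76, nonce, target):
    """Double SHA-256 of header+nonce, compared against the target as above."""
    digest = hashlib.sha256(
        hashlib.sha256(blk_hdr_76 + struct.pack("<I", nonce)).digest()
    ).digest()
    # The original reverses words and bytes before comparing; reading the
    # digest little-endian yields the same integer.
    value = int.from_bytes(digest, "little")
    return value < target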
<|file_name|>api_task_status.py<|end_file_name|><|fim▁begin|>from flask_restful import Resource, Api from flask_restful_swagger import swagger from flauthority import app from flauthority import api, app, celery, auth from ModelClasses import AnsibleCommandModel, AnsiblePlaybookModel, AnsibleExtraArgsModel import celery_runner class TaskStatus(Resource): @swagger.operation( notes='Get the status of an certificate generation task/job', nickname='taskstatus', parameters=[ { "name": "task_id", "description": "The ID of the task/job to get status for", "required": True, "allowMultiple": False, "dataType": 'string', "paramType": "path" } ]) @auth.login_required def get(self, task_id): task = celery_runner.generate_certificate.AsyncResult(task_id) if task.state == 'PENDING': result = "Task not found" <|fim▁hole|> 'description': "Task is currently running", 'returncode': None} else: try: return_code = task.info['returncode'] description = task.info['description'] if return_code is 0: result_obj = {'Status': "SUCCESS", 'description': description} else: result_obj = {'Status': "FLAUTHORITY_TASK_FAILURE", 'description': description, 'returncode': return_code} except: result_obj = {'Status': "CELERY_FAILURE"} return result_obj api.add_resource(TaskStatus, '/api/taskstatus/<string:task_id>')<|fim▁end|>
resp = app.make_response((result, 404)) return resp elif task.state == 'PROGRESS': result_obj = {'Status': "PROGRESS",
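The resource above maps a Celery task's state onto three outcomes: PENDING is treated as not found, PROGRESS reports a running job, and any other state is unpacked from task.info with a fallback for malformed results. A compact sketch of that mapping detached from Flask; the names are illustrative and the not-found value stands in for the 404 response:

def task_status(state, info):
    """Translate a Celery state/info pair into the response shape used above."""
    if state == "PENDING":
        return {"Status": "NOT_FOUND"}
    if state == "PROGRESS":
        return {"Status": "PROGRESS",
                "description": "Task is currently running",
                "returncode": None}
    try:
        rc = info["returncode"]
        desc = info["description"]
    except (TypeError, KeyError):
        return {"Status": "CELERY_FAILURE"}
    if rc == 0:
        return {"Status": "SUCCESS", "description": desc}
    return {"Status": "FLAUTHORITY_TASK_FAILURE", "description": desc, "returncode": rc}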
<|file_name|>path.py<|end_file_name|><|fim▁begin|># (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import shutil from errno import EEXIST from ansible.errors import AnsibleError from ansible.module_utils._text import to_bytes, to_native, to_text __all__ = ['unfrackpath', 'makedirs_safe'] def unfrackpath(path, follow=True, basedir=None): ''' Returns a path that is free of symlinks (if follow=True), environment variables, relative path traversals and symbols (~) :arg path: A byte or text string representing a path to be canonicalized<|fim▁hole|> :returns: An absolute path with symlinks, environment variables, and tilde expanded. Note that this does not check whether a path exists. example:: '$HOME/../../var/mail' becomes '/var/spool/mail' ''' b_basedir = to_bytes(basedir, errors='surrogate_or_strict', nonstring='passthru') if b_basedir is None: b_basedir = to_bytes(os.getcwd(), errors='surrogate_or_strict') elif os.path.isfile(b_basedir): b_basedir = os.path.dirname(b_basedir) b_final_path = os.path.expanduser(os.path.expandvars(to_bytes(path, errors='surrogate_or_strict'))) if not os.path.isabs(b_final_path): b_final_path = os.path.join(b_basedir, b_final_path) if follow: b_final_path = os.path.realpath(b_final_path) return to_text(os.path.normpath(b_final_path), errors='surrogate_or_strict') def makedirs_safe(path, mode=None): ''' A *potentially insecure* way to ensure the existence of a directory chain. The "safe" in this function's name refers only to its ability to ignore `EEXIST` in the case of multiple callers operating on the same part of the directory chain. This function is not safe to use under world-writable locations when the first level of the path to be created contains a predictable component. Always create a randomly-named element first if there is any chance the parent directory might be world-writable (eg, /tmp) to prevent symlink hijacking and potential disclosure or modification of sensitive file contents. :arg path: A byte or text string representing a directory chain to be created :kwarg mode: If given, the mode to set the directory to :raises AnsibleError: If the directory cannot be created and does not already exist. :raises UnicodeDecodeError: if the path is not decodable in the utf-8 encoding. 
''' rpath = unfrackpath(path) b_rpath = to_bytes(rpath) if not os.path.exists(b_rpath): try: if mode: os.makedirs(b_rpath, mode) else: os.makedirs(b_rpath) except OSError as e: if e.errno != EEXIST: raise AnsibleError("Unable to create local directories(%s): %s" % (to_native(rpath), to_native(e))) def basedir(source): """ returns directory for inventory or playbook """ source = to_bytes(source, errors='surrogate_or_strict') dname = None if os.path.isdir(source): dname = source elif source in [None, '', '.']: dname = os.getcwd() elif os.path.isfile(source): dname = os.path.dirname(source) if dname: # don't follow symlinks for basedir, enables source re-use dname = os.path.abspath(dname) return to_text(dname, errors='surrogate_or_strict') def cleanup_tmp_file(path, warn=False): """ Removes temporary file or directory. Optionally display a warning if unable to remove the file or directory. :arg path: Path to file or directory to be removed :kwarg warn: Whether or not to display a warning when the file or directory cannot be removed """ try: if os.path.exists(path): try: if os.path.isdir(path): shutil.rmtree(path) elif os.path.isfile(path): os.unlink(path) except Exception as e: if warn: # Importing here to avoid circular import from ansible.utils.display import Display display = Display() display.display(u'Unable to remove temporary file {0}'.format(to_text(e))) except Exception: pass<|fim▁end|>
    :arg follow: A boolean to indicate whether symlinks should be resolved or not
    :raises UnicodeDecodeError: If the canonicalized version of the path
        contains non-utf8 byte sequences.
    :rtype: A text string (unicode on python2, str on python3).
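The unfrackpath docstring above carries its own example, '$HOME/../../var/mail' becomes '/var/spool/mail'. A stripped-down sketch of the same expansion pipeline on plain str paths, assuming follow=True and no basedir handling; this is not the Ansible implementation, which operates on bytes:

import os

def unfrack(path):
    """Expand ~ and environment variables, then resolve symlinks and '..'."""
    expanded = os.path.expandvars(os.path.expanduser(path))
    if not os.path.isabs(expanded):
        expanded = os.path.join(os.getcwd(), expanded)
    return os.path.normpath(os.path.realpath(expanded))

# e.g. unfrack('$HOME/../../var/mail') -> '/var/spool/mail' on a typical Linux layout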
<|file_name|>Parser.cpp<|end_file_name|><|fim▁begin|>#include "Parser.h" namespace hiapi { namespace server { Parser::Parser(hiaux::HttpRequestPtr _req): state(READING_SIZE), m_got_request(false) { <|fim▁hole|>void Parser::parse() { while (true) { if (m_cur_token.size() == 0) return; if (state == READING_SIZE) { if (m_cur_token[0] == 'k') { if (m_cur_token.size() == 1) { m_cur_token.clear(); return; } else { m_cur_token = m_cur_token.substr(1, m_cur_token.size() - 1); continue; } } size_t endlpos = m_cur_token.find("\n"); if (endlpos == std::string::npos) return; std::string size_str = m_cur_token.substr(0, endlpos); m_cur_token = m_cur_token.substr(size_str.size() + 1, m_cur_token.size() - size_str.size() + 1); //std::cout << "Parser::parse size_str: " << size_str << " m_cur_token:" << m_cur_token << std::endl; m_size = string_to_uint64(size_str); if (m_size > HIAPI_BINREQUEST_MAX_SIZE) throw hiaux::RequestParsingEx(); state = READING_MESSAGE; } if (state == READING_MESSAGE) { //std::cout << "state == READING_MESSAGE size: " << m_size << " got: " << m_cur_token.size() << std::endl; if (m_cur_token.size() >= m_size) { std::string cur_req_str = m_cur_token.substr(0, m_size); m_cur_request = RequestPtr(new Request(cur_req_str)); m_got_request = true; m_cur_token = m_cur_token.substr(cur_req_str.size(), m_cur_token.size() - cur_req_str.size()); state = READING_SIZE; } return; } } } void Parser::execute(const std::string &_d) { //return; m_cur_token.append(_d); //std::cout << "Parser::execute " << m_cur_token << std::endl; parse(); } bool Parser::hasRequest() { return m_got_request; } hiaux::CustomRequestPtr Parser::getRequest() { m_got_request = false; RequestPtr req = m_cur_request; parse(); return req; } Parser::~Parser() { } } }<|fim▁end|>
}
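The parser above consumes a stream of size-prefixed frames, skipping stray 'k' keep-alive bytes before the size line and rejecting frames larger than a fixed cap. A small sketch of that framing loop, assuming the same newline-terminated size header; the cap value and names are illustrative:

MAX_SIZE = 1 << 20  # stand-in for HIAPI_BINREQUEST_MAX_SIZE

def parse_frames(buf):
    """Return (complete payloads, unconsumed tail) from a '<size>\\n<payload>' stream."""
    out = []
    while True:
        buf = buf.lstrip("k")              # keep-alive bytes between frames
        head, sep, rest = buf.partition("\n")
        if not sep:
            break                          # size line not complete yet
        size = int(head)
        if size > MAX_SIZE:
            raise ValueError("frame too large")
        if len(rest) < size:
            break                          # payload not complete yet
        out.append(rest[:size])
        buf = rest[size:]
    return out, buf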
<|file_name|>ClusterMembership.java<|end_file_name|><|fim▁begin|>/* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ /* * ClusterMembership.java * Copyright (C) 2004 Mark Hall * */ package weka.filters.unsupervised.attribute; import weka.filters.Filter; import weka.filters.UnsupervisedFilter; import weka.filters.unsupervised.attribute.Remove; import weka.clusterers.Clusterer; import weka.clusterers.DensityBasedClusterer; import weka.core.Attribute; import weka.core.Instances; import weka.core.Instance; import weka.core.OptionHandler; import weka.core.Range; import weka.core.FastVector; import weka.core.Option; import weka.core.Utils; import java.util.Enumeration; import java.util.Vector; /** * A filter that uses a clusterer to obtain cluster membership values * for each input instance and outputs them as new instances. The * clusterer needs to be a density-based clusterer. If * a (nominal) class is set, then the clusterer will be run individually * for each class.<p> * * Valid filter-specific options are: <p> * * Full class name of clusterer to use. Clusterer options may be * specified at the end following a -- .(required)<p> * * -I range string <br> * The range of attributes the clusterer should ignore. Note: * the class attribute (if set) is automatically ignored during clustering.<p> * * @author Mark Hall ([email protected]) * @author Eibe Frank * @version $Revision: 1.7 $ */ public class ClusterMembership extends Filter implements UnsupervisedFilter, OptionHandler { /** The clusterer */ protected DensityBasedClusterer m_clusterer = new weka.clusterers.EM(); /** Array for storing the clusterers */ protected DensityBasedClusterer[] m_clusterers; /** Range of attributes to ignore */ protected Range m_ignoreAttributesRange; /** Filter for removing attributes */ protected Filter m_removeAttributes; /** The prior probability for each class */ protected double[] m_priors; /** * Sets the format of the input instances. * * @param instanceInfo an Instances object containing the input instance * structure (any instances contained in the object are ignored - only the * structure is required). * @return true if the outputFormat may be collected immediately * @exception Exception if the inputFormat can't be set successfully */ public boolean setInputFormat(Instances instanceInfo) throws Exception { super.setInputFormat(instanceInfo); m_removeAttributes = null; m_priors = null; return false; } /** * Signify that this batch of input to the filter is finished. 
* * @return true if there are instances pending output * @exception IllegalStateException if no input structure has been defined */ public boolean batchFinished() throws Exception { if (getInputFormat() == null) { throw new IllegalStateException("No input instance format defined"); } if (outputFormatPeek() == null) { Instances toFilter = getInputFormat(); Instances[] toFilterIgnoringAttributes; // Make subsets if class is nominal if ((toFilter.classIndex() >= 0) && toFilter.classAttribute().isNominal()) { toFilterIgnoringAttributes = new Instances[toFilter.numClasses()]; for (int i = 0; i < toFilter.numClasses(); i++) { toFilterIgnoringAttributes[i] = new Instances(toFilter, toFilter.numInstances()); } for (int i = 0; i < toFilter.numInstances(); i++) { toFilterIgnoringAttributes[(int)toFilter.instance(i).classValue()].add(toFilter.instance(i)); } m_priors = new double[toFilter.numClasses()]; for (int i = 0; i < toFilter.numClasses(); i++) { toFilterIgnoringAttributes[i].compactify(); m_priors[i] = toFilterIgnoringAttributes[i].sumOfWeights(); } Utils.normalize(m_priors); } else { toFilterIgnoringAttributes = new Instances[1]; toFilterIgnoringAttributes[0] = toFilter; m_priors = new double[1]; m_priors[0] = 1; } // filter out attributes if necessary if (m_ignoreAttributesRange != null || toFilter.classIndex() >= 0) { m_removeAttributes = new Remove(); String rangeString = ""; if (m_ignoreAttributesRange != null) { rangeString += m_ignoreAttributesRange.getRanges(); } if (toFilter.classIndex() >= 0) { if (rangeString.length() > 0) { rangeString += (","+(toFilter.classIndex()+1)); } else { rangeString = ""+(toFilter.classIndex()+1); } } ((Remove)m_removeAttributes).setAttributeIndices(rangeString); ((Remove)m_removeAttributes).setInvertSelection(false); ((Remove)m_removeAttributes).setInputFormat(toFilter); for (int i = 0; i < toFilterIgnoringAttributes.length; i++) { toFilterIgnoringAttributes[i] = Filter.useFilter(toFilterIgnoringAttributes[i], m_removeAttributes); } } // build the clusterers if ((toFilter.classIndex() <= 0) || !toFilter.classAttribute().isNominal()) { m_clusterers = DensityBasedClusterer.makeCopies(m_clusterer, 1); m_clusterers[0].buildClusterer(toFilterIgnoringAttributes[0]); } else { m_clusterers = DensityBasedClusterer.makeCopies(m_clusterer, toFilter.numClasses()); for (int i = 0; i < m_clusterers.length; i++) { if (toFilterIgnoringAttributes[i].numInstances() == 0) { m_clusterers[i] = null; } else { m_clusterers[i].buildClusterer(toFilterIgnoringAttributes[i]); } } } // create output dataset FastVector attInfo = new FastVector(); for (int j = 0; j < m_clusterers.length; j++) { if (m_clusterers[j] != null) { for (int i = 0; i < m_clusterers[j].numberOfClusters(); i++) { attInfo.addElement(new Attribute("pCluster_" + j + "_" + i)); } } } if (toFilter.classIndex() >= 0) { attInfo.addElement(toFilter.classAttribute().copy()); } attInfo.trimToSize(); Instances filtered = new Instances(toFilter.relationName()+"_clusterMembership", attInfo, 0); if (toFilter.classIndex() >= 0) { filtered.setClassIndex(filtered.numAttributes() - 1); } setOutputFormat(filtered); // build new dataset for (int i = 0; i < toFilter.numInstances(); i++) { convertInstance(toFilter.instance(i)); } } flushInput(); m_NewBatch = true; return (numPendingOutput() != 0); } /** * Input an instance for filtering. Ordinarily the instance is processed * and made available for output immediately. Some filters require all * instances be read before producing output. 
* * @param instance the input instance * @return true if the filtered instance may now be * collected with output(). * @exception IllegalStateException if no input format has been defined. */ public boolean input(Instance instance) throws Exception { if (getInputFormat() == null) { throw new IllegalStateException("No input instance format defined"); } if (m_NewBatch) { resetQueue(); m_NewBatch = false; } if (outputFormatPeek() != null) { convertInstance(instance); return true; } bufferInput(instance); return false; } /** * Converts logs back to density values. */ protected double[] logs2densities(int j, Instance in) throws Exception { double[] logs = m_clusterers[j].logJointDensitiesForInstance(in); for (int i = 0; i < logs.length; i++) { logs[i] += Math.log(m_priors[j]); } return logs; } /** * Convert a single instance over. The converted instance is added to * the end of the output queue. * * @param instance the instance to convert */ protected void convertInstance(Instance instance) throws Exception { // set up values double [] instanceVals = new double[outputFormatPeek().numAttributes()]; double [] tempvals; if (instance.classIndex() >= 0) { tempvals = new double[outputFormatPeek().numAttributes() - 1]; } else { tempvals = new double[outputFormatPeek().numAttributes()]; } int pos = 0; for (int j = 0; j < m_clusterers.length; j++) { if (m_clusterers[j] != null) { double [] probs; if (m_removeAttributes != null) { m_removeAttributes.input(instance); probs = logs2densities(j, m_removeAttributes.output()); } else { probs = logs2densities(j, instance); } System.arraycopy(probs, 0, tempvals, pos, probs.length); pos += probs.length;<|fim▁hole|> } tempvals = Utils.logs2probs(tempvals); System.arraycopy(tempvals, 0, instanceVals, 0, tempvals.length); if (instance.classIndex() >= 0) { instanceVals[instanceVals.length - 1] = instance.classValue(); } push(new Instance(instance.weight(), instanceVals)); } /** * Returns an enumeration describing the available options. * * @return an enumeration of all the available options. */ public Enumeration listOptions() { Vector newVector = new Vector(2); newVector. addElement(new Option("\tFull name of clusterer to use (required).\n" + "\teg: weka.clusterers.EM", "W", 1, "-W <clusterer name>")); newVector. addElement(new Option("\tThe range of attributes the clusterer should ignore." +"\n\t(the class attribute is automatically ignored)", "I", 1,"-I <att1,att2-att4,...>")); return newVector.elements(); } /** * Parses the options for this object. Valid options are: <p> * * -W clusterer string <br> * Full class name of clusterer to use. Clusterer options may be * specified at the end following a -- .(required)<p> * * -I range string <br> * The range of attributes the clusterer should ignore. Note: * the class attribute (if set) is automatically ignored during clustering.<p> * * @param options the list of options as an array of strings * @exception Exception if an option is not supported */ public void setOptions(String[] options) throws Exception { String clustererString = Utils.getOption('W', options); if (clustererString.length() == 0) { throw new Exception("A clusterer must be specified" + " with the -W option."); } setDensityBasedClusterer((DensityBasedClusterer)Utils. forName(DensityBasedClusterer.class, clustererString, Utils.partitionOptions(options))); setIgnoredAttributeIndices(Utils.getOption('I', options)); Utils.checkForRemainingOptions(options); } /** * Gets the current settings of the filter. 
* * @return an array of strings suitable for passing to setOptions */ public String [] getOptions() { String [] clustererOptions = new String [0]; if ((m_clusterer != null) && (m_clusterer instanceof OptionHandler)) { clustererOptions = ((OptionHandler)m_clusterer).getOptions(); } String [] options = new String [clustererOptions.length + 5]; int current = 0; if (!getIgnoredAttributeIndices().equals("")) { options[current++] = "-I"; options[current++] = getIgnoredAttributeIndices(); } if (m_clusterer != null) { options[current++] = "-W"; options[current++] = getDensityBasedClusterer().getClass().getName(); } options[current++] = "--"; System.arraycopy(clustererOptions, 0, options, current, clustererOptions.length); current += clustererOptions.length; while (current < options.length) { options[current++] = ""; } return options; } /** * Returns a string describing this filter * * @return a description of the filter suitable for * displaying in the explorer/experimenter gui */ public String globalInfo() { return "A filter that uses a density-based clusterer to generate cluster " + "membership values; filtered instances are composed of these values " + "plus the class attribute (if set in the input data). If a (nominal) " + "class attribute is set, the clusterer is run separately for each " + "class. The class attribute (if set) and any user-specified " + "attributes are ignored during the clustering operation"; } /** * Returns a description of this option suitable for display * as a tip text in the gui. * * @return description of this option */ public String clustererTipText() { return "The clusterer that will generate membership values for the instances."; } /** * Set the clusterer for use in filtering * * @param newClusterer the clusterer to use */ public void setDensityBasedClusterer(DensityBasedClusterer newClusterer) { m_clusterer = newClusterer; } /** * Get the clusterer used by this filter * * @return the clusterer used */ public DensityBasedClusterer getDensityBasedClusterer() { return m_clusterer; } /** * Returns the tip text for this property * * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String ignoredAttributeIndicesTipText() { return "The range of attributes to be ignored by the clusterer. eg: first-3,5,9-last"; } /** * Gets ranges of attributes to be ignored. * * @return a string containing a comma-separated list of ranges */ public String getIgnoredAttributeIndices() { if (m_ignoreAttributesRange == null) { return ""; } else { return m_ignoreAttributesRange.getRanges(); } } /** * Sets the ranges of attributes to be ignored. If provided string * is null, no attributes will be ignored. * * @param rangeList a string representing the list of attributes. * eg: first-3,5,6-last * @exception IllegalArgumentException if an invalid range list is supplied */ public void setIgnoredAttributeIndices(String rangeList) { if ((rangeList == null) || (rangeList.length() == 0)) { m_ignoreAttributesRange = null; } else { m_ignoreAttributesRange = new Range(); m_ignoreAttributesRange.setRanges(rangeList); } } /** * Main method for testing this class. * * @param argv should contain arguments to the filter: use -h for help */ public static void main(String [] argv) { try { if (Utils.getFlag('b', argv)) { Filter.batchFilterFile(new ClusterMembership(), argv); } else { Filter.filterFile(new ClusterMembership(), argv); } } catch (Exception ex) { System.out.println(ex.getMessage()); } } }<|fim▁end|>
}
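The filter above sums each instance's per-cluster log joint density with the log class prior, then converts the whole vector with Utils.logs2probs before emitting the new attribute values. A minimal sketch of that log-space normalisation as a standard log-sum-exp step; this is an assumption about what logs2probs computes, not its source:

import math

def logs2probs(logs):
    """Normalise a vector of log densities into probabilities that sum to 1."""
    m = max(logs)                          # subtract the max for numerical stability
    exps = [math.exp(v - m) for v in logs]
    total = sum(exps)
    return [v / total for v in exps]

# e.g. logs2probs([-2.0, -1.0, -3.0]) -> roughly [0.24, 0.67, 0.09]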
<|file_name|>timer.rs<|end_file_name|><|fim▁begin|>//! Timers that can invoke a callback on an interval. use widget_prelude::*; use ::callback::Callback; /// A timer that can invoke a callback on a configurable interval. /// /// ##Note: Not a Renderable Widget /// While this type can be dereferenced and converted to `BaseWidget`, it is *not* a renderable /// widget and adding it to a container will have no visual effect. /// /// ##Note: Resource Usage /// This struct should be freed by calling `.destroy()` on it when it is no longer in use to free /// any resources it has allocated. Otherwise, it will be freed when `kiss_ui::show_gui()` returns. pub struct Timer(IUPPtr); impl Timer { /// Create a new timer with a default interval. /// /// TODO: Document default interval. pub fn new() -> Timer { unsafe { let ptr = ::iup_sys::IupTimer(); Self::from_ptr(ptr) } }<|fim▁hole|> self.set_int_attribute(::attrs::TIME, time as i32); self } /// Set a callback to be invoked when the timer interval elapses. /// The callback will be invoked on every interval until `.stop()` is called. pub fn set_on_interval<Cb>(self, on_interval: Cb) -> Self where Cb: Callback<Self> { callback_impl! { ::attrs::ACTION_CB, self, on_interval, Timer } self } /// Start the timer. The callback will be invoked when the next interval elapses. pub fn start(self) -> Self { self.set_bool_attribute(::attrs::RUN, true); self } /// Stop the timer. The callback will not be invoked until the timer is restarted. pub fn stop(self) -> Self { self.set_bool_attribute(::attrs::RUN, false); self } } impl_widget! { Timer, "timer" } impl Destroy for Timer {}<|fim▁end|>
/// Set the timer interval in milliseconds. pub fn set_interval(self, time: u32) -> Self {
<|file_name|>volume.py<|end_file_name|><|fim▁begin|># Copyright 2011 OpenStack Foundation # (c) Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Volume drivers for libvirt.""" import glob import os import time import urllib2 from oslo.config import cfg from oslo.utils import strutils import six import six.moves.urllib.parse as urlparse from nova import exception from nova.i18n import _ from nova.i18n import _LE from nova.i18n import _LW from nova.openstack.common import log as logging from nova.openstack.common import loopingcall from nova.openstack.common import processutils from nova import paths from nova.storage import linuxscsi from nova import utils from nova.virt.libvirt import config as vconfig from nova.virt.libvirt import utils as libvirt_utils LOG = logging.getLogger(__name__) volume_opts = [ cfg.IntOpt('num_iscsi_scan_tries', default=5, help='Number of times to rescan iSCSI target to find volume'), cfg.IntOpt('num_iser_scan_tries', default=5, help='Number of times to rescan iSER target to find volume'), cfg.StrOpt('rbd_user', help='The RADOS client name for accessing rbd volumes'), cfg.StrOpt('rbd_secret_uuid', help='The libvirt UUID of the secret for the rbd_user' 'volumes'), cfg.StrOpt('nfs_mount_point_base', default=paths.state_path_def('mnt'), help='Directory where the NFS volume is mounted on the' ' compute node'), cfg.StrOpt('nfs_mount_options', help='Mount options passedf to the NFS client. See section ' 'of the nfs man page for details'), cfg.IntOpt('num_aoe_discover_tries', default=3, help='Number of times to rediscover AoE target to find volume'), cfg.StrOpt('glusterfs_mount_point_base', default=paths.state_path_def('mnt'), help='Directory where the glusterfs volume is mounted on the ' 'compute node'), cfg.BoolOpt('iscsi_use_multipath', default=False, help='Use multipath connection of the iSCSI volume'), cfg.BoolOpt('iser_use_multipath', default=False, help='Use multipath connection of the iSER volume'), cfg.StrOpt('scality_sofs_config', help='Path or URL to Scality SOFS configuration file'), cfg.StrOpt('scality_sofs_mount_point', default='$state_path/scality', help='Base dir where Scality SOFS shall be mounted'), cfg.ListOpt('qemu_allowed_storage_drivers', default=[], help='Protocols listed here will be accessed directly ' 'from QEMU. 
Currently supported protocols: [gluster]') ] CONF = cfg.CONF CONF.register_opts(volume_opts, 'libvirt') class LibvirtBaseVolumeDriver(object): """Base class for volume drivers.""" def __init__(self, connection, is_block_dev): self.connection = connection self.is_block_dev = is_block_dev def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = vconfig.LibvirtConfigGuestDisk() conf.driver_name = libvirt_utils.pick_disk_driver_name( self.connection._get_hypervisor_version(), self.is_block_dev ) conf.source_device = disk_info['type'] conf.driver_format = "raw" conf.driver_cache = "none" conf.target_dev = disk_info['dev'] conf.target_bus = disk_info['bus'] conf.serial = connection_info.get('serial') # Support for block size tuning data = {} if 'data' in connection_info: data = connection_info['data'] if 'logical_block_size' in data: conf.logical_block_size = data['logical_block_size'] if 'physical_block_size' in data: conf.physical_block_size = data['physical_block_size'] # Extract rate_limit control parameters if 'qos_specs' in data and data['qos_specs']: tune_opts = ['total_bytes_sec', 'read_bytes_sec', 'write_bytes_sec', 'total_iops_sec', 'read_iops_sec', 'write_iops_sec'] specs = data['qos_specs'] if isinstance(specs, dict): for k, v in specs.iteritems(): if k in tune_opts: new_key = 'disk_' + k setattr(conf, new_key, v) else: LOG.warn(_LW('Unknown content in connection_info/' 'qos_specs: %s'), specs) # Extract access_mode control parameters if 'access_mode' in data and data['access_mode']: access_mode = data['access_mode'] if access_mode in ('ro', 'rw'): conf.readonly = access_mode == 'ro' else: LOG.error(_LE('Unknown content in ' 'connection_info/access_mode: %s'), access_mode) raise exception.InvalidVolumeAccessMode( access_mode=access_mode) return conf def connect_volume(self, connection_info, disk_info): """Connect the volume. 
Returns xml for libvirt.""" return self.get_config(connection_info, disk_info) def disconnect_volume(self, connection_info, disk_dev): """Disconnect the volume.""" pass class LibvirtVolumeDriver(LibvirtBaseVolumeDriver): """Class for volumes backed by local file.""" def __init__(self, connection): super(LibvirtVolumeDriver, self).__init__(connection, is_block_dev=True) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtVolumeDriver, self).get_config(connection_info, disk_info) conf.source_type = "block" conf.source_path = connection_info['data']['device_path'] return conf class LibvirtFakeVolumeDriver(LibvirtBaseVolumeDriver): """Driver to attach fake volumes to libvirt.""" def __init__(self, connection): super(LibvirtFakeVolumeDriver, self).__init__(connection, is_block_dev=True) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtFakeVolumeDriver, self).get_config(connection_info, disk_info) conf.source_type = "network" conf.source_protocol = "fake" conf.source_name = "fake" return conf class LibvirtNetVolumeDriver(LibvirtBaseVolumeDriver): """Driver to attach Network volumes to libvirt.""" def __init__(self, connection): super(LibvirtNetVolumeDriver, self).__init__(connection, is_block_dev=False) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtNetVolumeDriver, self).get_config(connection_info, disk_info) netdisk_properties = connection_info['data'] conf.source_type = "network" conf.source_protocol = connection_info['driver_volume_type'] conf.source_name = netdisk_properties.get('name') conf.source_hosts = netdisk_properties.get('hosts', []) conf.source_ports = netdisk_properties.get('ports', []) auth_enabled = netdisk_properties.get('auth_enabled') if (conf.source_protocol == 'rbd' and CONF.libvirt.rbd_secret_uuid): conf.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid auth_enabled = True # Force authentication locally if CONF.libvirt.rbd_user: conf.auth_username = CONF.libvirt.rbd_user if auth_enabled: conf.auth_username = (conf.auth_username or netdisk_properties['auth_username']) conf.auth_secret_type = netdisk_properties['secret_type'] conf.auth_secret_uuid = (conf.auth_secret_uuid or netdisk_properties['secret_uuid']) return conf class LibvirtISCSIVolumeDriver(LibvirtBaseVolumeDriver): """Driver to attach Network volumes to libvirt.""" def __init__(self, connection): super(LibvirtISCSIVolumeDriver, self).__init__(connection, is_block_dev=True) self.num_scan_tries = CONF.libvirt.num_iscsi_scan_tries self.use_multipath = CONF.libvirt.iscsi_use_multipath def _run_iscsiadm(self, iscsi_properties, iscsi_command, **kwargs): check_exit_code = kwargs.pop('check_exit_code', 0) (out, err) = utils.execute('iscsiadm', '-m', 'node', '-T', iscsi_properties['target_iqn'], '-p', iscsi_properties['target_portal'], *iscsi_command, run_as_root=True, check_exit_code=check_exit_code) msg = ('iscsiadm %(command)s: stdout=%(out)s stderr=%(err)s' % {'command': iscsi_command, 'out': out, 'err': err}) # NOTE(bpokorny): iscsi_command can contain passwords so we need to # sanitize the password in the message. 
LOG.debug(strutils.mask_password(msg)) return (out, err) def _iscsiadm_update(self, iscsi_properties, property_key, property_value, **kwargs): iscsi_command = ('--op', 'update', '-n', property_key, '-v', property_value) return self._run_iscsiadm(iscsi_properties, iscsi_command, **kwargs) def _get_target_portals_from_iscsiadm_output(self, output): # return both portals and iqns # # as we are parsing a command line utility, allow for the # possibility that additional debug data is spewed in the # stream, and only grab actual ip / iqn lines. targets = [] for data in [line.split() for line in output.splitlines()]: if len(data) == 2 and data[1].startswith('iqn.'): targets.append(data) return targets def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtISCSIVolumeDriver, self).get_config(connection_info, disk_info) conf.source_type = "block" conf.source_path = connection_info['data']['host_device'] return conf @utils.synchronized('connect_volume') def connect_volume(self, connection_info, disk_info): """Attach the volume to instance_name.""" iscsi_properties = connection_info['data'] if self.use_multipath: # multipath installed, discovering other targets if available # multipath should be configured on the nova-compute node, # in order to fit storage vendor out = self._run_iscsiadm_bare(['-m', 'discovery', '-t', 'sendtargets', '-p', iscsi_properties['target_portal']], check_exit_code=[0, 255])[0] \ or "" for ip, iqn in self._get_target_portals_from_iscsiadm_output(out): props = iscsi_properties.copy() props['target_portal'] = ip props['target_iqn'] = iqn self._connect_to_iscsi_portal(props) self._rescan_iscsi() else: self._connect_to_iscsi_portal(iscsi_properties) # Detect new/resized LUNs for existing sessions self._run_iscsiadm(iscsi_properties, ("--rescan",)) host_device = self._get_host_device(iscsi_properties) # The /dev/disk/by-path/... node is not always present immediately # TODO(justinsb): This retry-with-delay is a pattern, move to utils? tries = 0 disk_dev = disk_info['dev'] while not os.path.exists(host_device): if tries >= self.num_scan_tries: raise exception.NovaException(_("iSCSI device not found at %s") % (host_device)) LOG.warn(_LW("ISCSI volume not yet found at: %(disk_dev)s. " "Will rescan & retry. 
Try number: %(tries)s"), {'disk_dev': disk_dev, 'tries': tries}) # The rescan isn't documented as being necessary(?), but it helps self._run_iscsiadm(iscsi_properties, ("--rescan",)) tries = tries + 1 if not os.path.exists(host_device): time.sleep(tries ** 2) if tries != 0: LOG.debug("Found iSCSI node %(disk_dev)s " "(after %(tries)s rescans)", {'disk_dev': disk_dev, 'tries': tries}) if self.use_multipath: # we use the multipath device instead of the single path device self._rescan_multipath() multipath_device = self._get_multipath_device_name(host_device) if multipath_device is not None: host_device = multipath_device connection_info['data']['host_device'] = host_device return self.get_config(connection_info, disk_info) @utils.synchronized('connect_volume') def disconnect_volume(self, connection_info, disk_dev): """Detach the volume from instance_name.""" iscsi_properties = connection_info['data'] host_device = self._get_host_device(iscsi_properties) multipath_device = None if self.use_multipath: multipath_device = self._get_multipath_device_name(host_device) super(LibvirtISCSIVolumeDriver, self).disconnect_volume(connection_info, disk_dev) if self.use_multipath and multipath_device: return self._disconnect_volume_multipath_iscsi(iscsi_properties, multipath_device) # NOTE(vish): Only disconnect from the target if no luns from the # target are in use. device_prefix = ("/dev/disk/by-path/ip-%s-iscsi-%s-lun-" % (iscsi_properties['target_portal'], iscsi_properties['target_iqn'])) devices = self.connection._get_all_block_devices() devices = [dev for dev in devices if dev.startswith(device_prefix)] if not devices: self._disconnect_from_iscsi_portal(iscsi_properties) elif host_device not in devices: # Delete device if LUN is not in use by another instance self._delete_device(host_device) def _delete_device(self, device_path): device_name = os.path.basename(os.path.realpath(device_path)) delete_control = '/sys/block/' + device_name + '/device/delete' if os.path.exists(delete_control): # Copy '1' from stdin to the device delete control file utils.execute('cp', '/dev/stdin', delete_control, process_input='1', run_as_root=True) else: LOG.warn(_LW("Unable to delete volume device %s"), device_name) def _remove_multipath_device_descriptor(self, disk_descriptor): disk_descriptor = disk_descriptor.replace('/dev/mapper/', '') try: self._run_multipath(['-f', disk_descriptor], check_exit_code=[0, 1]) except processutils.ProcessExecutionError as exc: # Because not all cinder drivers need to remove the dev mapper, # here just logs a warning to avoid affecting those drivers in # exceptional cases. LOG.warn(_LW('Failed to remove multipath device descriptor ' '%(dev_mapper)s. Exception message: %(msg)s') % {'dev_mapper': disk_descriptor, 'msg': exc.message}) def _disconnect_volume_multipath_iscsi(self, iscsi_properties, multipath_device): self._rescan_iscsi() self._rescan_multipath() block_devices = self.connection._get_all_block_devices() devices = [] for dev in block_devices: if "/mapper/" in dev: devices.append(dev) else: mpdev = self._get_multipath_device_name(dev) if mpdev: devices.append(mpdev) # Do a discovery to find all targets. # Targets for multiple paths for the same multipath device # may not be the same. 
out = self._run_iscsiadm_bare(['-m', 'discovery', '-t', 'sendtargets', '-p', iscsi_properties['target_portal']], check_exit_code=[0, 255])[0] \ or "" ips_iqns = self._get_target_portals_from_iscsiadm_output(out) if not devices: # disconnect if no other multipath devices self._disconnect_mpath(iscsi_properties, ips_iqns) return # Get a target for all other multipath devices other_iqns = [self._get_multipath_iqn(device) for device in devices] # Get all the targets for the current multipath device current_iqns = [iqn for ip, iqn in ips_iqns] in_use = False for current in current_iqns: if current in other_iqns: in_use = True break # If no other multipath device attached has the same iqn # as the current device if not in_use: # disconnect if no other multipath devices with same iqn self._disconnect_mpath(iscsi_properties, ips_iqns) return elif multipath_device not in devices: # delete the devices associated w/ the unused multipath self._delete_mpath(iscsi_properties, multipath_device, ips_iqns) # else do not disconnect iscsi portals, # as they are used for other luns, # just remove multipath mapping device descriptor self._remove_multipath_device_descriptor(multipath_device) return def _connect_to_iscsi_portal(self, iscsi_properties): # NOTE(vish): If we are on the same host as nova volume, the # discovery makes the target so we don't need to # run --op new. Therefore, we check to see if the # target exists, and if we get 255 (Not Found), then # we run --op new. This will also happen if another # volume is using the same target. try: self._run_iscsiadm(iscsi_properties, ()) except processutils.ProcessExecutionError as exc: # iscsiadm returns 21 for "No records found" after version 2.0-871 if exc.exit_code in [21, 255]: self._reconnect(iscsi_properties) else: raise if iscsi_properties.get('auth_method'): self._iscsiadm_update(iscsi_properties, "node.session.auth.authmethod", iscsi_properties['auth_method']) self._iscsiadm_update(iscsi_properties, "node.session.auth.username", iscsi_properties['auth_username']) self._iscsiadm_update(iscsi_properties, "node.session.auth.password", iscsi_properties['auth_password']) # duplicate logins crash iscsiadm after load, # so we scan active sessions to see if the node is logged in. 
out = self._run_iscsiadm_bare(["-m", "session"], run_as_root=True, check_exit_code=[0, 1, 21])[0] or "" portals = [{'portal': p.split(" ")[2], 'iqn': p.split(" ")[3]} for p in out.splitlines() if p.startswith("tcp:")] stripped_portal = iscsi_properties['target_portal'].split(",")[0] if len(portals) == 0 or len([s for s in portals if stripped_portal == s['portal'].split(",")[0] and s['iqn'] == iscsi_properties['target_iqn']] ) == 0: try: self._run_iscsiadm(iscsi_properties, ("--login",), check_exit_code=[0, 255]) except processutils.ProcessExecutionError as err: # as this might be one of many paths, # only set successful logins to startup automatically if err.exit_code in [15]: self._iscsiadm_update(iscsi_properties, "node.startup", "automatic") return self._iscsiadm_update(iscsi_properties, "node.startup", "automatic") def _disconnect_from_iscsi_portal(self, iscsi_properties): self._iscsiadm_update(iscsi_properties, "node.startup", "manual", check_exit_code=[0, 21, 255]) self._run_iscsiadm(iscsi_properties, ("--logout",), check_exit_code=[0, 21, 255]) self._run_iscsiadm(iscsi_properties, ('--op', 'delete'), check_exit_code=[0, 21, 255]) def _get_multipath_device_name(self, single_path_device): device = os.path.realpath(single_path_device) out = self._run_multipath(['-ll', device], check_exit_code=[0, 1])[0] mpath_line = [line for line in out.splitlines() if "scsi_id" not in line] # ignore udev errors if len(mpath_line) > 0 and len(mpath_line[0]) > 0: return "/dev/mapper/%s" % mpath_line[0].split(" ")[0] return None def _get_iscsi_devices(self): try: devices = list(os.walk('/dev/disk/by-path'))[0][-1] except IndexError: return [] return [entry for entry in devices if entry.startswith("ip-")] def _delete_mpath(self, iscsi_properties, multipath_device, ips_iqns): entries = self._get_iscsi_devices() # Loop through ips_iqns to construct all paths iqn_luns = [] for ip, iqn in ips_iqns: iqn_lun = '%s-lun-%s' % (iqn, iscsi_properties.get('target_lun', 0)) iqn_luns.append(iqn_lun) for dev in ['/dev/disk/by-path/%s' % dev for dev in entries]: for iqn_lun in iqn_luns: if iqn_lun in dev: self._delete_device(dev) self._rescan_multipath() def _disconnect_mpath(self, iscsi_properties, ips_iqns): for ip, iqn in ips_iqns: props = iscsi_properties.copy() props['target_portal'] = ip props['target_iqn'] = iqn self._disconnect_from_iscsi_portal(props) self._rescan_multipath() def _get_multipath_iqn(self, multipath_device): entries = self._get_iscsi_devices() for entry in entries: entry_real_path = os.path.realpath("/dev/disk/by-path/%s" % entry) entry_multipath = self._get_multipath_device_name(entry_real_path) if entry_multipath == multipath_device: return entry.split("iscsi-")[1].split("-lun")[0] return None def _run_iscsiadm_bare(self, iscsi_command, **kwargs): check_exit_code = kwargs.pop('check_exit_code', 0) (out, err) = utils.execute('iscsiadm', *iscsi_command, run_as_root=True, check_exit_code=check_exit_code) LOG.debug("iscsiadm %(command)s: stdout=%(out)s stderr=%(err)s", {'command': iscsi_command, 'out': out, 'err': err}) return (out, err) def _run_multipath(self, multipath_command, **kwargs): check_exit_code = kwargs.pop('check_exit_code', 0) (out, err) = utils.execute('multipath', *multipath_command, run_as_root=True, check_exit_code=check_exit_code) LOG.debug("multipath %(command)s: stdout=%(out)s stderr=%(err)s", {'command': multipath_command, 'out': out, 'err': err}) return (out, err) def _rescan_iscsi(self): self._run_iscsiadm_bare(('-m', 'node', '--rescan'), check_exit_code=[0, 1, 21, 255]) 
self._run_iscsiadm_bare(('-m', 'session', '--rescan'), check_exit_code=[0, 1, 21, 255]) def _rescan_multipath(self): self._run_multipath(['-r'], check_exit_code=[0, 1, 21]) def _get_host_device(self, iscsi_properties): return ("/dev/disk/by-path/ip-%s-iscsi-%s-lun-%s" % (iscsi_properties['target_portal'], iscsi_properties['target_iqn'], iscsi_properties.get('target_lun', 0))) def _reconnect(self, iscsi_properties): self._run_iscsiadm(iscsi_properties, ('--op', 'new')) class LibvirtISERVolumeDriver(LibvirtISCSIVolumeDriver): """Driver to attach Network volumes to libvirt.""" def __init__(self, connection): super(LibvirtISERVolumeDriver, self).__init__(connection) self.num_scan_tries = CONF.libvirt.num_iser_scan_tries self.use_multipath = CONF.libvirt.iser_use_multipath def _get_multipath_iqn(self, multipath_device): entries = self._get_iscsi_devices() for entry in entries: entry_real_path = os.path.realpath("/dev/disk/by-path/%s" % entry) entry_multipath = self._get_multipath_device_name(entry_real_path) if entry_multipath == multipath_device: return entry.split("iser-")[1].split("-lun")[0] return None def _get_host_device(self, iser_properties): time.sleep(1) host_device = None device = ("ip-%s-iscsi-%s-lun-%s" % (iser_properties['target_portal'], iser_properties['target_iqn'], iser_properties.get('target_lun', 0))) look_for_device = glob.glob('/dev/disk/by-path/*%s' % device) if look_for_device: host_device = look_for_device[0] return host_device def _reconnect(self, iser_properties): self._run_iscsiadm(iser_properties, ('--interface', 'iser', '--op', 'new')) class LibvirtNFSVolumeDriver(LibvirtBaseVolumeDriver): """Class implements libvirt part of volume driver for NFS.""" def __init__(self, connection): """Create back-end to nfs.""" super(LibvirtNFSVolumeDriver, self).__init__(connection, is_block_dev=False) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtNFSVolumeDriver, self).get_config(connection_info, disk_info) path = os.path.join(CONF.libvirt.nfs_mount_point_base, utils.get_hash_str(connection_info['data']['export'])) path = os.path.join(path, connection_info['data']['name']) conf.source_type = 'file' conf.source_path = path conf.driver_format = connection_info['data'].get('format', 'raw') return conf def connect_volume(self, connection_info, disk_info): """Connect the volume. 
Returns xml for libvirt.""" options = connection_info['data'].get('options') self._ensure_mounted(connection_info['data']['export'], options) return self.get_config(connection_info, disk_info) def disconnect_volume(self, connection_info, disk_dev): """Disconnect the volume.""" export = connection_info['data']['export'] mount_path = os.path.join(CONF.libvirt.nfs_mount_point_base, utils.get_hash_str(export)) try: utils.execute('umount', mount_path, run_as_root=True) except processutils.ProcessExecutionError as exc: if ('device is busy' in exc.message or 'target is busy' in exc.message): LOG.debug("The NFS share %s is still in use.", export) else: LOG.exception(_LE("Couldn't unmount the NFS share %s"), export) def _ensure_mounted(self, nfs_export, options=None): """@type nfs_export: string @type options: string """ mount_path = os.path.join(CONF.libvirt.nfs_mount_point_base, utils.get_hash_str(nfs_export)) if not libvirt_utils.is_mounted(mount_path, nfs_export): self._mount_nfs(mount_path, nfs_export, options, ensure=True) return mount_path def _mount_nfs(self, mount_path, nfs_share, options=None, ensure=False): """Mount nfs export to mount path.""" utils.execute('mkdir', '-p', mount_path) # Construct the NFS mount command. nfs_cmd = ['mount', '-t', 'nfs'] if CONF.libvirt.nfs_mount_options is not None: nfs_cmd.extend(['-o', CONF.libvirt.nfs_mount_options]) if options is not None: nfs_cmd.extend(options.split(' ')) nfs_cmd.extend([nfs_share, mount_path]) try: utils.execute(*nfs_cmd, run_as_root=True) except processutils.ProcessExecutionError as exc: if ensure and 'already mounted' in exc.message: LOG.warn(_LW("%s is already mounted"), nfs_share) else: raise class LibvirtAOEVolumeDriver(LibvirtBaseVolumeDriver): """Driver to attach AoE volumes to libvirt.""" def __init__(self, connection): super(LibvirtAOEVolumeDriver, self).__init__(connection, is_block_dev=True) def _aoe_discover(self): """Call aoe-discover (aoe-tools) AoE Discover.""" (out, err) = utils.execute('aoe-discover', run_as_root=True, check_exit_code=0) return (out, err) def _aoe_revalidate(self, aoedev): """Revalidate the LUN Geometry (When an AoE ID is reused).""" (out, err) = utils.execute('aoe-revalidate', aoedev, run_as_root=True, check_exit_code=0) return (out, err) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtAOEVolumeDriver, self).get_config(connection_info, disk_info) shelf = connection_info['data']['target_shelf'] lun = connection_info['data']['target_lun'] aoedev = 'e%s.%s' % (shelf, lun) aoedevpath = '/dev/etherd/%s' % (aoedev) conf.source_type = "block" conf.source_path = aoedevpath return conf def connect_volume(self, connection_info, mount_device): shelf = connection_info['data']['target_shelf'] lun = connection_info['data']['target_lun'] aoedev = 'e%s.%s' % (shelf, lun) aoedevpath = '/dev/etherd/%s' % (aoedev) if os.path.exists(aoedevpath): # NOTE(jbr_): If aoedevpath already exists, revalidate the LUN. self._aoe_revalidate(aoedev) else: # NOTE(jbr_): If aoedevpath does not exist, do a discover. self._aoe_discover() # NOTE(jbr_): Device path is not always present immediately def _wait_for_device_discovery(aoedevpath, mount_device): tries = self.tries if os.path.exists(aoedevpath): raise loopingcall.LoopingCallDone() if self.tries >= CONF.libvirt.num_aoe_discover_tries: raise exception.NovaException(_("AoE device not found at %s") % (aoedevpath)) LOG.warn(_LW("AoE volume not yet found at: %(aoedevpath)s. 
" "Try number: %(tries)s"), {'aoedevpath': aoedevpath, 'tries': tries}) self._aoe_discover() self.tries = self.tries + 1 self.tries = 0 timer = loopingcall.FixedIntervalLoopingCall( _wait_for_device_discovery, aoedevpath, mount_device) timer.start(interval=2).wait() tries = self.tries if tries != 0: LOG.debug("Found AoE device %(aoedevpath)s " "(after %(tries)s rediscover)", {'aoedevpath': aoedevpath, 'tries': tries}) return self.get_config(connection_info, mount_device) class LibvirtGlusterfsVolumeDriver(LibvirtBaseVolumeDriver): """Class implements libvirt part of volume driver for GlusterFS.""" def __init__(self, connection): """Create back-end to glusterfs.""" super(LibvirtGlusterfsVolumeDriver, self).__init__(connection, is_block_dev=False) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtGlusterfsVolumeDriver, self).get_config(connection_info, disk_info) data = connection_info['data'] if 'gluster' in CONF.libvirt.qemu_allowed_storage_drivers: vol_name = data['export'].split('/')[1] source_host = data['export'].split('/')[0][:-1] conf.source_ports = ['24007'] conf.source_type = 'network' conf.source_protocol = 'gluster' conf.source_hosts = [source_host] conf.source_name = '%s/%s' % (vol_name, data['name']) else: path = os.path.join(CONF.libvirt.glusterfs_mount_point_base, utils.get_hash_str(data['export'])) path = os.path.join(path, data['name']) conf.source_type = 'file' conf.source_path = path conf.driver_format = connection_info['data'].get('format', 'raw') return conf def connect_volume(self, connection_info, mount_device): data = connection_info['data'] if 'gluster' not in CONF.libvirt.qemu_allowed_storage_drivers: self._ensure_mounted(data['export'], data.get('options')) return self.get_config(connection_info, mount_device) def disconnect_volume(self, connection_info, disk_dev): """Disconnect the volume.""" if 'gluster' in CONF.libvirt.qemu_allowed_storage_drivers: return export = connection_info['data']['export'] mount_path = os.path.join(CONF.libvirt.glusterfs_mount_point_base, utils.get_hash_str(export)) try: utils.execute('umount', mount_path, run_as_root=True) except processutils.ProcessExecutionError as exc: if 'target is busy' in exc.message: LOG.debug("The GlusterFS share %s is still in use.", export) else: LOG.exception(_LE("Couldn't unmount the GlusterFS share %s"), export) def _ensure_mounted(self, glusterfs_export, options=None): """@type glusterfs_export: string @type options: string """ mount_path = os.path.join(CONF.libvirt.glusterfs_mount_point_base, utils.get_hash_str(glusterfs_export)) if not libvirt_utils.is_mounted(mount_path, glusterfs_export): self._mount_glusterfs(mount_path, glusterfs_export, options, ensure=True) return mount_path def _mount_glusterfs(self, mount_path, glusterfs_share, options=None, ensure=False): """Mount glusterfs export to mount path.""" utils.execute('mkdir', '-p', mount_path) gluster_cmd = ['mount', '-t', 'glusterfs'] if options is not None: gluster_cmd.extend(options.split(' ')) gluster_cmd.extend([glusterfs_share, mount_path]) try: utils.execute(*gluster_cmd, run_as_root=True) except processutils.ProcessExecutionError as exc: if ensure and 'already mounted' in exc.message: LOG.warn(_LW("%s is already mounted"), glusterfs_share) else: raise class LibvirtFibreChannelVolumeDriver(LibvirtBaseVolumeDriver): """Driver to attach Fibre Channel Network volumes to libvirt.""" def __init__(self, connection): super(LibvirtFibreChannelVolumeDriver, self).__init__(connection, 
is_block_dev=False) def _get_pci_num(self, hba): # NOTE(walter-boring) # device path is in format of # /sys/devices/pci0000:00/0000:00:03.0/0000:05:00.3/host2/fc_host/host2 # sometimes an extra entry exists before the host2 value # we always want the value prior to the host2 value pci_num = None if hba is not None: if "device_path" in hba: index = 0 device_path = hba['device_path'].split('/') for value in device_path: if value.startswith('host'): break index = index + 1 if index > 0: pci_num = device_path[index - 1] return pci_num def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtFibreChannelVolumeDriver, self).get_config(connection_info, disk_info) conf.source_type = "block" conf.source_path = connection_info['data']['device_path'] return conf @utils.synchronized('connect_volume') def connect_volume(self, connection_info, disk_info): """Attach the volume to instance_name.""" fc_properties = connection_info['data'] mount_device = disk_info["dev"] ports = fc_properties['target_wwn'] wwns = [] # we support a list of wwns or a single wwn if isinstance(ports, list): for wwn in ports: wwns.append(str(wwn)) elif isinstance(ports, six.string_types): wwns.append(str(ports)) # We need to look for wwns on every hba # because we don't know ahead of time # where they will show up. hbas = libvirt_utils.get_fc_hbas_info() host_devices = [] for hba in hbas: pci_num = self._get_pci_num(hba) if pci_num is not None: for wwn in wwns: target_wwn = "0x%s" % wwn.lower() host_device = ("/dev/disk/by-path/pci-%s-fc-%s-lun-%s" % (pci_num, target_wwn, fc_properties.get('target_lun', 0))) host_devices.append(host_device) if len(host_devices) == 0: # this is empty because we don't have any FC HBAs msg = _("We are unable to locate any Fibre Channel devices") raise exception.NovaException(msg) # The /dev/disk/by-path/... node is not always present immediately # We only need to find the first device. Once we see the first device # multipath will have any others. def _wait_for_device_discovery(host_devices, mount_device): tries = self.tries for device in host_devices: LOG.debug("Looking for Fibre Channel dev %(device)s", {'device': device}) if os.path.exists(device): self.host_device = device # get the /dev/sdX device. This is used # to find the multipath device. self.device_name = os.path.realpath(device) raise loopingcall.LoopingCallDone()<|fim▁hole|> LOG.warn(_LW("Fibre volume not yet found at: %(mount_device)s. " "Will rescan & retry. Try number: %(tries)s"), {'mount_device': mount_device, 'tries': tries}) linuxscsi.rescan_hosts(hbas) self.tries = self.tries + 1 self.host_device = None self.device_name = None self.tries = 0 timer = loopingcall.FixedIntervalLoopingCall( _wait_for_device_discovery, host_devices, mount_device) timer.start(interval=2).wait() tries = self.tries if self.host_device is not None and self.device_name is not None: LOG.debug("Found Fibre Channel volume %(mount_device)s " "(after %(tries)s rescans)", {'mount_device': mount_device, 'tries': tries}) # see if the new drive is part of a multipath # device. If so, we'll use the multipath device. 
mdev_info = linuxscsi.find_multipath_device(self.device_name) if mdev_info is not None: LOG.debug("Multipath device discovered %(device)s", {'device': mdev_info['device']}) device_path = mdev_info['device'] connection_info['data']['device_path'] = device_path connection_info['data']['devices'] = mdev_info['devices'] connection_info['data']['multipath_id'] = mdev_info['id'] else: # we didn't find a multipath device. # so we assume the kernel only sees 1 device device_path = self.host_device device_info = linuxscsi.get_device_info(self.device_name) connection_info['data']['device_path'] = device_path connection_info['data']['devices'] = [device_info] return self.get_config(connection_info, disk_info) @utils.synchronized('connect_volume') def disconnect_volume(self, connection_info, mount_device): """Detach the volume from instance_name.""" super(LibvirtFibreChannelVolumeDriver, self).disconnect_volume(connection_info, mount_device) # If this is a multipath device, we need to search again # and make sure we remove all the devices. Some of them # might not have shown up at attach time. if 'multipath_id' in connection_info['data']: multipath_id = connection_info['data']['multipath_id'] mdev_info = linuxscsi.find_multipath_device(multipath_id) devices = mdev_info['devices'] LOG.debug("devices to remove = %s", devices) else: # only needed when multipath-tools work improperly devices = connection_info['data'].get('devices', []) LOG.warn(_LW("multipath-tools probably work improperly. " "devices to remove = %s.") % devices) # There may have been more than 1 device mounted # by the kernel for this volume. We have to remove # all of them for device in devices: linuxscsi.remove_device(device) class LibvirtScalityVolumeDriver(LibvirtBaseVolumeDriver): """Scality SOFS Nova driver. Provide hypervisors with access to sparse files on SOFS. """ def __init__(self, connection): """Create back-end to SOFS and check connection.""" super(LibvirtScalityVolumeDriver, self).__init__(connection, is_block_dev=False) def get_config(self, connection_info, disk_info): """Returns xml for libvirt.""" conf = super(LibvirtScalityVolumeDriver, self).get_config(connection_info, disk_info) path = os.path.join(CONF.libvirt.scality_sofs_mount_point, connection_info['data']['sofs_path']) conf.source_type = 'file' conf.source_path = path # The default driver cache policy is 'none', and this causes # qemu/kvm to open the volume file with O_DIRECT, which is # rejected by FUSE (on kernels older than 3.3). Scality SOFS # is FUSE based, so we must provide a more sensible default. conf.driver_cache = 'writethrough' return conf def connect_volume(self, connection_info, disk_info): """Connect the volume. 
Returns xml for libvirt.""" self._check_prerequisites() self._mount_sofs() return self.get_config(connection_info, disk_info) def _check_prerequisites(self): """Sanity checks before attempting to mount SOFS.""" # config is mandatory config = CONF.libvirt.scality_sofs_config if not config: msg = _LW("Value required for 'scality_sofs_config'") LOG.warn(msg) raise exception.NovaException(msg) # config can be a file path or a URL, check it if urlparse.urlparse(config).scheme == '': # turn local path into URL config = 'file://%s' % config try: urllib2.urlopen(config, timeout=5).close() except urllib2.URLError as e: msg = _LW("Cannot access 'scality_sofs_config': %s") % e LOG.warn(msg) raise exception.NovaException(msg) # mount.sofs must be installed if not os.access('/sbin/mount.sofs', os.X_OK): msg = _LW("Cannot execute /sbin/mount.sofs") LOG.warn(msg) raise exception.NovaException(msg) def _mount_sofs(self): config = CONF.libvirt.scality_sofs_config mount_path = CONF.libvirt.scality_sofs_mount_point sysdir = os.path.join(mount_path, 'sys') if not os.path.isdir(mount_path): utils.execute('mkdir', '-p', mount_path) if not os.path.isdir(sysdir): utils.execute('mount', '-t', 'sofs', config, mount_path, run_as_root=True) if not os.path.isdir(sysdir): msg = _LW("Cannot mount Scality SOFS, check syslog for errors") LOG.warn(msg) raise exception.NovaException(msg)<|fim▁end|>
if self.tries >= CONF.libvirt.num_iscsi_scan_tries: msg = _("Fibre Channel device not found.") raise exception.NovaException(msg)
<|file_name|>_a4c_pre_configure_source.py<|end_file_name|><|fim▁begin|>from cloudify import ctx from cloudify.exceptions import NonRecoverableError from cloudify.state import ctx_parameters as inputs import subprocess import os import re import sys import time import threading import platform from StringIO import StringIO from cloudify_rest_client import CloudifyClient from cloudify import utils if 'MANAGER_REST_PROTOCOL' in os.environ and os.environ['MANAGER_REST_PROTOCOL'] == "https": client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port(), protocol='https', trust_all=True) else: client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port()) def convert_env_value_to_string(envDict): for key, value in envDict.items(): envDict[str(key)] = str(envDict.pop(key)) def get_attribute_user(ctx): if get_attribute(ctx, 'user'): return get_attribute(ctx, 'user') else: return get_attribute(ctx, 'cloudify_agent')['user'] def get_attribute_key(ctx): if get_attribute(ctx, 'key'): return get_attribute(ctx, 'key') else: return get_attribute(ctx, 'cloudify_agent')['key'] def get_host(entity): if entity.instance.relationships: for relationship in entity.instance.relationships: if 'cloudify.relationships.contained_in' in relationship.type_hierarchy: return relationship.target return None def has_attribute_mapping(entity, attribute_name): ctx.logger.info('Check if it exists mapping for attribute {0} in {1}'.format(attribute_name, entity.node.properties)) mapping_configuration = entity.node.properties.get('_a4c_att_' + attribute_name, None) if mapping_configuration is not None: if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name: return False else: return True return False def process_attribute_mapping(entity, attribute_name, data_retriever_function): # This is where attribute mapping is defined in the cloudify type mapping_configuration = entity.node.properties['_a4c_att_' + attribute_name] ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration)) # If the mapping configuration exist and if it concerns SELF then just get attribute of the mapped attribute name # Else if it concerns TARGET then follow the relationship and retrieved the mapped attribute name from the TARGET if mapping_configuration['parameters'][0] == 'SELF': return data_retriever_function(entity, mapping_configuration['parameters'][1]) elif mapping_configuration['parameters'][0] == 'TARGET' and entity.instance.relationships: for relationship in entity.instance.relationships: if mapping_configuration['parameters'][1] in relationship.type_hierarchy: return data_retriever_function(relationship.target, mapping_configuration['parameters'][2]) return "" def get_nested_attribute(entity, attribute_names): deep_properties = get_attribute(entity, attribute_names[0]) attribute_names_iter = iter(attribute_names) next(attribute_names_iter) for attribute_name in attribute_names_iter: if deep_properties is None: return "" else: deep_properties = deep_properties.get(attribute_name, None) return deep_properties def _all_instances_get_nested_attribute(entity, attribute_names): return None def get_attribute(entity, attribute_name): if has_attribute_mapping(entity, attribute_name): # First check if any mapping exist for attribute mapped_value = process_attribute_mapping(entity, attribute_name, get_attribute) ctx.logger.info('Mapping exists for attribute {0} with value 
{1}'.format(attribute_name, mapped_value)) return mapped_value # No mapping exist, try to get directly the attribute from the entity attribute_value = entity.instance.runtime_properties.get(attribute_name, None) if attribute_value is not None: ctx.logger.info('Found the attribute {0} with value {1} on the node {2}'.format(attribute_name, attribute_value, entity.node.id)) return attribute_value # Attribute retrieval fails, fall back to property property_value = entity.node.properties.get(attribute_name, None) if property_value is not None: return property_value # Property retrieval fails, fall back to host instance host = get_host(entity) if host is not None: ctx.logger.info('Attribute not found {0} go up to the parent node {1}'.format(attribute_name, host.node.id)) return get_attribute(host, attribute_name) # Nothing is found return "" def _all_instances_get_attribute(entity, attribute_name): result_map = {} # get all instances data using cfy rest client # we have to get the node using the rest client with node_instance.node_id # then we will have the relationships node = client.nodes.get(ctx.deployment.id, entity.node.id) all_node_instances = client.node_instances.list(ctx.deployment.id, entity.node.id) for node_instance in all_node_instances: prop_value = __recursively_get_instance_data(node, node_instance, attribute_name) if prop_value is not None: ctx.logger.info('Found the property/attribute {0} with value {1} on the node {2} instance {3}'.format(attribute_name, prop_value, entity.node.id, node_instance.id)) result_map[node_instance.id + '_'] = prop_value return result_map def get_property(entity, property_name): # Try to get the property value on the node property_value = entity.node.properties.get(property_name, None) if property_value is not None: ctx.logger.info('Found the property {0} with value {1} on the node {2}'.format(property_name, property_value, entity.node.id)) return property_value # No property found on the node, fall back to the host host = get_host(entity) if host is not None: ctx.logger.info('Property not found {0} go up to the parent node {1}'.format(property_name, host.node.id)) return get_property(host, property_name) return "" def get_instance_list(node_id): result = '' all_node_instances = client.node_instances.list(ctx.deployment.id, node_id) for node_instance in all_node_instances: if len(result) > 0: result += ',' result += node_instance.id return result def get_host_node_name(instance): for relationship in instance.relationships: if 'cloudify.relationships.contained_in' in relationship.type_hierarchy: return relationship.target.node.id return None def __get_relationship(node, target_name, relationship_type): for relationship in node.relationships: if relationship.get('target_id') == target_name and relationship_type in relationship.get('type_hierarchy'): return relationship return None def __has_attribute_mapping(node, attribute_name): ctx.logger.info('Check if it exists mapping for attribute {0} in {1}'.format(attribute_name, node.properties)) mapping_configuration = node.properties.get('_a4c_att_' + attribute_name, None) if mapping_configuration is not None: if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name: return False else: return True return False def __process_attribute_mapping(node, node_instance, attribute_name, data_retriever_function): # This is where attribute mapping is defined in the cloudify type mapping_configuration = node.properties['_a4c_att_' + attribute_name] 
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration)) # If the mapping configuration exist and if it concerns SELF then just get attribute of the mapped attribute name # Else if it concerns TARGET then follow the relationship and retrieved the mapped attribute name from the TARGET if mapping_configuration['parameters'][0] == 'SELF': return data_retriever_function(node, node_instance, mapping_configuration['parameters'][1]) elif mapping_configuration['parameters'][0] == 'TARGET' and node_instance.relationships: for rel in node_instance.relationships: relationship = __get_relationship(node, rel.get('target_name'), rel.get('type')) if mapping_configuration['parameters'][1] in relationship.get('type_hierarchy'): target_instance = client.node_instances.get(rel.get('target_id')) target_node = client.nodes.get(ctx.deployment.id, target_instance.node_id) return data_retriever_function(target_node, target_instance, mapping_configuration['parameters'][2]) return None def __recursively_get_instance_data(node, node_instance, attribute_name): if __has_attribute_mapping(node, attribute_name): return __process_attribute_mapping(node, node_instance, attribute_name, __recursively_get_instance_data) attribute_value = node_instance.runtime_properties.get(attribute_name, None) if attribute_value is not None: return attribute_value elif node_instance.relationships: for rel in node_instance.relationships: # on rel we have target_name, target_id (instanceId), type relationship = __get_relationship(node, rel.get('target_name'), rel.get('type')) if 'cloudify.relationships.contained_in' in relationship.get('type_hierarchy'): parent_instance = client.node_instances.get(rel.get('target_id')) parent_node = client.nodes.get(ctx.deployment.id, parent_instance.node_id) return __recursively_get_instance_data(parent_node, parent_instance, attribute_name) return None else: return None def download(child_rel_path, child_abs_path, download_dir): artifact_downloaded_path = ctx.download_resource(child_abs_path) new_file = os.path.join(download_dir, child_rel_path) new_file_dir = os.path.dirname(new_file) if not os.path.exists(new_file_dir): os.makedirs(new_file_dir) os.rename(artifact_downloaded_path, new_file) ctx.logger.info('Downloaded artifact from path ' + child_abs_path + ', it\'s available now at ' + new_file) return new_file def download_artifacts(artifacts, download_dir): downloaded_artifacts = {} os.makedirs(download_dir) for artifact_name, artifact_ref in artifacts.items(): ctx.logger.info('Download artifact ' + artifact_name) if isinstance(artifact_ref, basestring):<|fim▁hole|> downloaded_artifacts[artifact_name] = download(os.path.basename(artifact_ref), artifact_ref, download_dir) else: child_download_dir = os.path.join(download_dir, artifact_name) for child_path in artifact_ref: download(child_path['relative_path'], child_path['absolute_path'], child_download_dir) downloaded_artifacts[artifact_name] = child_download_dir return downloaded_artifacts env_map = {} env_map['TARGET_NODE'] = ctx.target.node.id env_map['TARGET_INSTANCE'] = ctx.target.instance.id env_map['TARGET_INSTANCES'] = get_instance_list(ctx.target.node.id) env_map['SOURCE_NODE'] = ctx.source.node.id env_map['SOURCE_INSTANCE'] = ctx.source.instance.id env_map['SOURCE_INSTANCES'] = get_instance_list(ctx.source.node.id) env_map['A4C_EXECUTION_HOST'] = get_attribute(ctx.source, 'ip_address') env_map['A4C_EXECUTION_USER'] = get_attribute_user(ctx.source) env_map['A4C_EXECUTION_KEY'] = 
get_attribute_key(ctx.source) env_map['DB_IP'] = get_attribute(ctx.target, 'ip_address') env_map['DB_PORT'] = r'3306' env_map['DB_NAME'] = r'wordpress' env_map['DB_USER'] = r'pass' env_map['DB_PASSWORD'] = r'pass' other_instances_map = _all_instances_get_attribute(ctx.target, 'ip_address') if other_instances_map is not None: for other_instances_key in other_instances_map: env_map[other_instances_key + 'DB_IP'] = other_instances_map[other_instances_key] node_artifacts = { "configs": [ { "relative_path": "mysqld_charset.cnf", "absolute_path": "_a4c_artifact/Mysql/configs/configs/mysqld_charset.cnf" } ] } relationship_artifacts = { } artifacts = node_artifacts.copy() artifacts.update(relationship_artifacts) download_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'downloads') env_map.update(download_artifacts(artifacts, download_dir)) if inputs.get('process', None) is not None and inputs['process'].get('env', None) is not None: ctx.logger.info('Operation is executed with environment variable {0}'.format(inputs['process']['env'])) env_map.update(inputs['process']['env']) def parse_output(output): # by convention, the last output is the result of the operation last_output = None outputs = {} pattern = re.compile('EXPECTED_OUTPUT_(\w+)=(.*)') for line in output.splitlines(): match = pattern.match(line) if match is None: last_output = line else: output_name = match.group(1) output_value = match.group(2) outputs[output_name] = output_value return {'last_output': last_output, 'outputs': outputs} def execute(script_path, process, outputNames, command_prefix=None, cwd=None): os.chmod(script_path, 0755) on_posix = 'posix' in sys.builtin_module_names env = os.environ.copy() process_env = process.get('env', {}) env.update(process_env) if outputNames is not None: env['EXPECTED_OUTPUTS'] = outputNames if platform.system() == 'Windows': wrapper_path = ctx.download_resource("scriptWrapper.bat") else: wrapper_path = ctx.download_resource("scriptWrapper.sh") os.chmod(wrapper_path, 0755) command = '{0} {1}'.format(wrapper_path, script_path) else: command = script_path if command_prefix is not None: command = "{0} {1}".format(command_prefix, command) ctx.logger.info('Executing: {0} in env {1}'.format(command, env)) process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd, bufsize=1, close_fds=on_posix) return_code = None stdout_consumer = OutputConsumer(process.stdout) stderr_consumer = OutputConsumer(process.stderr) while True: return_code = process.poll() if return_code is not None: break time.sleep(0.1) stdout_consumer.join() stderr_consumer.join() parsed_output = parse_output(stdout_consumer.buffer.getvalue()) if outputNames is not None: outputNameList = outputNames.split(';') for outputName in outputNameList: ctx.logger.info('Ouput name: {0} value : {1}'.format(outputName, parsed_output['outputs'].get(outputName, None))) if return_code != 0: error_message = "Script {0} encountered error with return code {1} and standard output {2}, error output {3}".format(command, return_code, stdout_consumer.buffer.getvalue(), stderr_consumer.buffer.getvalue()) error_message = str(unicode(error_message, errors='ignore')) ctx.logger.error(error_message) raise NonRecoverableError(error_message) else: ok_message = "Script {0} executed normally with standard output {1} and error output {2}".format(command, stdout_consumer.buffer.getvalue(), stderr_consumer.buffer.getvalue()) ok_message = str(unicode(ok_message, errors='ignore')) 
ctx.logger.info(ok_message) return parsed_output class OutputConsumer(object): def __init__(self, out): self.out = out self.buffer = StringIO() self.consumer = threading.Thread(target=self.consume_output) self.consumer.daemon = True self.consumer.start() def consume_output(self): for line in iter(self.out.readline, b''): self.buffer.write(line) self.out.close() def join(self): self.consumer.join() new_script_process = {'env': env_map} operationOutputNames = None convert_env_value_to_string(new_script_process['env']) parsed_output = execute(ctx.download_resource('_a4c_impl_artifact/Wordpress_Mysql/wordpressConnectToMysqlMysql/tosca.interfaces.relationship.Configure/pre_configure_source/config_wordpress_for_mysql.sh'), new_script_process, operationOutputNames) outputs = parsed_output['outputs'].items() for k,v in outputs: ctx.logger.info('Output name: {0} value: {1}'.format(k, v)) ctx.source.instance.runtime_properties['_a4c_OO:tosca.interfaces.relationship.Configure:pre_configure_source:{0}'.format(k)] = v ctx.source.instance.runtime_properties['wordpress_url'] = r'http://' + get_attribute(ctx.source, 'public_ip_address') + r':' + r'80' + r'/' ctx.source.instance.update() ctx.target.instance.update()<|fim▁end|>
<|file_name|>soupselect.py<|end_file_name|><|fim▁begin|>""" soupselect.py - https://code.google.com/p/soupselect/ CSS selector support for BeautifulSoup. soup = BeautifulSoup('<html>...') select(soup, 'div') - returns a list of div elements select(soup, 'div#main ul a') - returns a list of links inside a ul inside div#main """ import re from bs4 import BeautifulSoup tag_re = re.compile('^[a-z0-9]+$') attribselect_re = re.compile( r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' + r'=?"?(?P<value>[^\]"]*)"?\]$' ) # /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ # \---/ \---/\-------------/ \-------/ # | | | | # | | | The value # | | ~,|,^,$,* or = # | Attribute # Tag def attribute_checker(operator, attribute, value=''): """ Takes an operator, attribute and optional value; returns a function that will return True for elements that match that combination. """ return { '=': lambda el: el.get(attribute) == value, # attribute includes value as one of a set of space separated tokens '~': lambda el: value in el.get(attribute, '').split(), # attribute starts with value '^': lambda el: el.get(attribute, '').startswith(value), # attribute ends with value '$': lambda el: el.get(attribute, '').endswith(value), # attribute contains value '*': lambda el: value in el.get(attribute, ''), # attribute is either exactly value or starts with value- '|': lambda el: el.get(attribute, '') == value \ or el.get(attribute, '').startswith('%s-' % value), }.get(operator, lambda el: attribute in el) def select(soup, selector): """ soup should be a BeautifulSoup instance; selector is a CSS selector specifying the elements you want to retrieve. """ tokens = selector.split() current_context = [soup] for token in tokens: m = attribselect_re.match(token) if m:<|fim▁hole|> # Attribute selector tag, attribute, operator, value = m.groups() if not tag: tag = True checker = attribute_checker(operator, attribute, value) found = [] for context in current_context: found.extend([el for el in context.findAll(tag) if checker(el)]) current_context = found continue if '#' in token: # ID selector tag, id = token.split('#', 1) if not tag: tag = True el = current_context[0].find(tag, {'id': id}) if not el: return [] # No match current_context = [el] continue if '.' in token: # Class selector tag, klass = token.split('.', 1) if not tag: tag = True found = [] for context in current_context: found.extend( context.findAll(tag, {'class': lambda attr: attr and klass in attr.split()} ) ) current_context = found continue if token == '*': # Star selector found = [] for context in current_context: found.extend(context.findAll(True)) current_context = found continue # Here we should just have a regular tag if not tag_re.match(token): return [] found = [] for context in current_context: found.extend(context.findAll(token)) current_context = found return current_context def monkeypatch(BeautifulSoupClass=None): """ If you don't explicitly state the class to patch, defaults to the most common import location for BeautifulSoup. """ if not BeautifulSoupClass: from BeautifulSoup import BeautifulSoup as BeautifulSoupClass BeautifulSoupClass.findSelect = select def unmonkeypatch(BeautifulSoupClass=None): if not BeautifulSoupClass: from BeautifulSoup import BeautifulSoup as BeautifulSoupClass delattr(BeautifulSoupClass, 'findSelect') def cssFind(html, selector): """ Parse ``html`` with class:`BeautifulSoup.BeautifulSoup` and use :func:`.select` on the result. Added by Espen A. Kristiansen to make it even easier to use for testing. 
""" soup = BeautifulSoup(html) return select(soup, selector) def cssGet(html, selector): """ Same as :func:`.cssFind`, but returns the first match. Added by Espen A. Kristiansen to make it even easier to use for testing. """ try: return cssFind(html, selector)[0] except IndexError as e: raise IndexError('Could not find {}.'.format(selector)) def cssExists(html, selector): """ Same as :func:`.cssFind`, but returns ``True`` if the selector matches at least one item. Added by Espen A. Kristiansen to make it even easier to use for testing. """ matches = cssFind(html, selector) return bool(len(matches)) def prettyhtml(html): return BeautifulSoup(html).prettify() def normalize_whitespace(html): return re.sub('(\s|\\xa0)+', ' ', html).strip()<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate notify; use notify::{RecommendedWatcher, Watcher}; use std::sync::mpsc::channel; use std::env; fn main() { // Create a channel to receive the events. let (tx, rx) = channel(); // Automatically select the best implementation for your platform. // You can also access each implementation directly e.g. INotifyWatcher. let mut watcher: RecommendedWatcher = Watcher::new(tx).unwrap(); let current_path = env::current_dir().unwrap(); watcher.watch(current_path.as_path()).unwrap(); loop { match rx.recv() { Ok(event) => { println!("Recv {:?}", event); if let Some(path) = event.path { if let Ok(op) = event.op { println!("path: {:?}, op: {:?}", path, op); } } },<|fim▁hole|> println!("Error: {}", error) } } }<|fim▁end|>
Err(error) =>
<|file_name|>markduplicates.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python from genomon_pipeline.stage_task import * <|fim▁hole|> task_name = "markduplicates" script_template = """ #!/bin/bash # # Set SGE # #$ -S /bin/bash # set shell in UGE #$ -cwd # execute at the submitted dir pwd # print current working directory hostname # print hostname date # print date set -xv set -o pipefail {biobambam}/bammarkduplicates M={out_prefix}.metrics tmpfile={out_prefix}.tmp markthreads=2 rewritebam=1 rewritebamlevel=1 index=1 md5=1 {input_bam_files} O={out_bam} """ def __init__(self, qsub_option, script_dir): super(Markduplicates, self).__init__(qsub_option, script_dir)<|fim▁end|>
class Markduplicates(Stage_task):
<|file_name|>bulk_hash.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ crate_anon/linkage/bulk_hash.py =============================================================================== Copyright (C) 2015-2021 Rudolf Cardinal ([email protected]). This file is part of CRATE. CRATE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. CRATE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CRATE. If not, see <https://www.gnu.org/licenses/>. =============================================================================== Tool to hash multiple IDs from the command line. Test code to look at different types of digest:<|fim▁hole|>.. code-block:: python import hashlib import hmac msg = "This is an ex-parrot!" key = "voom" key_bytes = str(key).encode('utf-8') msg_bytes = str(msg).encode('utf-8') digestmod = hashlib.sha256 hmac_obj = hmac.new(key=key_bytes, msg=msg_bytes, digestmod=digestmod) # These are the two default kinds of digest: print(hmac_obj.digest()) # 8-bit binary print(hmac_obj.hexdigest()) # hexadecimal # Hex carries 4 bits per character. There are other possibilities, # notably: # - Base64 with 6 bits per character; # - Base32 with 5 bits per character. """ import argparse import logging from typing import Optional, TextIO from cardinal_pythonlib.file_io import ( gen_noncomment_lines, smart_open, writeline_nl, ) from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger from cardinal_pythonlib.hash import ( HashMethods, make_hasher, ) log = logging.getLogger(__name__) def get_first_noncomment_line(filename: str) -> Optional[str]: try: with open(filename) as f: return next(gen_noncomment_lines(f)) except StopIteration: return None def bulk_hash(input_filename: str, output_filename: str, hash_method: str, key: str, keep_id: bool = True): """ Hash lines from one file to another. Args: input_filename: input filename, or "-" for stdin output_filename: output filename, or "-" for stdin hash_method: method to use; e.g. ``HMAC_SHA256`` key: secret key for hasher keep_id: produce CSV with ``hash,id`` pairs, rather than just lines with the hashes? Note that the hash precedes the ID with the ``keep_id`` option, which works best if the ID might contain commas. """ log.info(f"Reading from: {input_filename}") log.info(f"Writing to: {output_filename}") log.info(f"Using hash method: {hash_method}") log.info(f"keep_id: {keep_id}") log.debug(f"Using key: {key!r}") # NB security warning in help hasher = make_hasher(hash_method=hash_method, key=key) with smart_open(input_filename, "rt") as i: # type: TextIO with smart_open(output_filename, "wt") as o: # type: TextIO for line in gen_noncomment_lines(i): hashed = hasher.hash(line) if line else "" outline = f"{hashed},{line}" if keep_id else hashed # log.debug(f"{line!r} -> {hashed!r}") writeline_nl(o, outline) def main() -> None: """ Command-line entry point. """ # noinspection PyTypeChecker parser = argparse.ArgumentParser( description="Hash IDs in bulk, using a cryptographic hash function.", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( 'infile', type=str, help="Input file, or '-' for stdin. 
" "Use one line per thing to be hashed. " "Comments (marked with '#') and blank lines are ignored. " "Lines have whitespace stripped left and right.") parser.add_argument( '--outfile', type=str, default="-", help="Output file, or '-' for stdout. " "One line will be written for every input line. " "Blank lines will be written for commented or blank input.") parser.add_argument( '--key', type=str, help="Secret key for hasher (warning: may be visible in process list; " "see also --keyfile)") parser.add_argument( '--keyfile', type=str, help="File whose first noncomment line contains the secret key for " "the hasher. (It will be whitespace-stripped right and left.)") parser.add_argument( '--method', choices=[HashMethods.HMAC_MD5, HashMethods.HMAC_SHA256, HashMethods.HMAC_SHA512], default=HashMethods.HMAC_MD5, help="Hash method") parser.add_argument( '--keepid', action="store_true", help="Produce CSV output with (hash,id) rather than just the hash") parser.add_argument( '--verbose', '-v', action="store_true", help="Be verbose (NB will write key to stderr)") args = parser.parse_args() main_only_quicksetup_rootlogger(logging.DEBUG if args.verbose else logging.INFO) assert bool(args.key) != bool(args.keyfile), ( "Specify either --key or --keyfile (and not both)." ) if args.keyfile: key = get_first_noncomment_line(args.keyfile) assert key, f"No key found in keyfile: {args.keyfile}" else: key = args.key bulk_hash( input_filename=args.infile, output_filename=args.outfile, hash_method=args.method, key=key, keep_id=args.keepid, ) if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>type_collector.rs<|end_file_name|><|fim▁begin|>//! Collecting type items. use std::collections::BTreeSet; use super::context::{BindgenContext, ItemId}; /// A set of items. pub type ItemSet = BTreeSet<ItemId>; /// Collect all the type items referenced by this item. pub trait TypeCollector { /// If a particular type needs extra information beyond what it has in /// `self` and `context` to find its referenced type items, its /// implementation can define this associated type, forcing callers to pass /// the needed information through. type Extra; /// Add each type item referenced by `self` into the `types` set. fn collect_types(&self, context: &BindgenContext,<|fim▁hole|> types: &mut ItemSet, extra: &Self::Extra); }<|fim▁end|>
<|file_name|>commands.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------- # Copyright (c) 2014--, The Qiita Development Team. # # Distributed under the terms of the BSD 3-clause License. # # The full license is in the file LICENSE, distributed with this software. # ----------------------------------------------------------------------------- from os.path import join, isdir from shutil import rmtree from tarfile import open as taropen from tempfile import mkdtemp from os import environ from traceback import format_exc from moi.job import system_call from qiita_db.artifact import Artifact from qiita_db.logger import LogEntry from qiita_core.qiita_settings import qiita_config from qiita_ware.ebi import EBISubmission from qiita_ware.exceptions import ComputeError, EBISubmissionError def submit_EBI(preprocessed_data_id, action, send): """Submit a preprocessed data to EBI Parameters ---------- preprocessed_data_id : int The preprocesssed data id action : %s The action to perform with this data send : bool True to actually send the files """ # step 1: init and validate ebi_submission = EBISubmission(preprocessed_data_id, action) # step 2: generate demux fastq files ebi_submission.study.ebi_submission_status = 'submitting' try: ebi_submission.generate_demultiplexed_fastq() except: error_msg = format_exc() if isdir(ebi_submission.full_ebi_dir): rmtree(ebi_submission.full_ebi_dir) ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg LogEntry.create('Runtime', error_msg, info={'ebi_submission': preprocessed_data_id}) raise # step 3: generate and write xml files ebi_submission.generate_xml_files() if send: # step 4: sending sequences if action != 'MODIFY': old_ascp_pass = environ.get('ASPERA_SCP_PASS', '') environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass LogEntry.create('Runtime', ("Submitting sequences for pre_processed_id: " "%d" % preprocessed_data_id)) try: for cmd in ebi_submission.generate_send_sequences_cmd(): try: stdout, stderr, _ = system_call(cmd) except Exception as e: stdout = '' stderr = str(e) le = LogEntry.create( 'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)), info={'ebi_submission': preprocessed_data_id}) ebi_submission.study.ebi_submission_status = ( "failed: ASCP submission, log id: %d" % le.id) raise ComputeError("EBI Submission failed! Log id: " "%d" % le.id) finally: open(ebi_submission.ascp_reply, 'a').write( 'stdout:\n%s\n\nstderr: %s' % (stdout, stderr)) finally: environ['ASPERA_SCP_PASS'] = old_ascp_pass LogEntry.create('Runtime', ('Submission of sequences of pre_processed_id: ' '%d completed successfully' % preprocessed_data_id)) # step 5: sending xml and parsing answer xmls_cmds = ebi_submission.generate_curl_command() LogEntry.create('Runtime', ("Submitting XMLs for pre_processed_id: " "%d" % preprocessed_data_id)) try: xml_content, stderr, _ = system_call(xmls_cmds) except Exception as e: xml_content = '' stderr = str(e) le = LogEntry.create( 'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)), info={'ebi_submission': preprocessed_data_id}) ebi_submission.study.ebi_submission_status = ( "failed: XML submission, log id: %d" % le.id) raise ComputeError("EBI Submission failed! 
Log id: %d" % le.id) else: LogEntry.create('Runtime', ('Submission of sequences of pre_processed_id: ' '%d completed successfully' % preprocessed_data_id)) finally: open(ebi_submission.curl_reply, 'w').write( 'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr)) try: st_acc, sa_acc, bio_acc, ex_acc, run_acc = \ ebi_submission.parse_EBI_reply(xml_content) except EBISubmissionError as e: le = LogEntry.create( 'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)), info={'ebi_submission': preprocessed_data_id}) ebi_submission.study.ebi_submission_status = ( "failed: XML parsing, log id: %d" % le.id) raise ComputeError("EBI Submission failed! Log id: %d" % le.id) ebi_submission.study.ebi_submission_status = 'submitted' if action == 'ADD': if st_acc: ebi_submission.study.ebi_study_accession = st_acc if sa_acc: ebi_submission.sample_template.ebi_sample_accessions = sa_acc if bio_acc: ebi_submission.sample_template.biosample_accessions = bio_acc if ex_acc: ebi_submission.prep_template.ebi_experiment_accessions = ex_acc ebi_submission.artifact.ebi_run_accessions = run_acc else: st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None return st_acc, sa_acc, bio_acc, ex_acc, run_acc def submit_VAMPS(artifact_id): """Submit artifact to VAMPS Parameters ---------- artifact_id : int The artifact id Raises ------ ComputeError - If the artifact cannot be submitted to VAMPS - If the artifact is associated with more than one prep template """ artifact = Artifact(artifact_id) if not artifact.can_be_submitted_to_vamps: raise ComputeError("Artifact %d cannot be submitted to VAMPS" % artifact_id) study = artifact.study sample_template = study.sample_template prep_templates = artifact.prep_templates if len(prep_templates) > 1: raise ComputeError( "Multiple prep templates associated with the artifact: %s" % artifact_id) prep_template = prep_templates[0] # Also need to check that is not submitting (see item in #1523) if artifact.is_submitted_to_vamps: raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id) # Generating a tgz targz_folder = mkdtemp(prefix=qiita_config.working_dir) targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id, prep_template.id, artifact_id)) targz = taropen(targz_fp, mode='w:gz') # adding sample/prep samp_fp = join(targz_folder, 'sample_metadata.txt') sample_template.to_file(samp_fp) targz.add(samp_fp, arcname='sample_metadata.txt') prep_fp = join(targz_folder, 'prep_metadata.txt') prep_template.to_file(prep_fp) targz.add(prep_fp, arcname='prep_metadata.txt') # adding preprocessed data for _, fp, fp_type in artifact.filepaths: if fp_type == 'preprocessed_fasta': targz.add(fp, arcname='preprocessed_fasta.fna') targz.close() # submitting cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F " "press=UploadFile %s" % (qiita_config.vamps_user, qiita_config.vamps_pass, targz_fp, qiita_config.vamps_url)) obs, _, _ = system_call(cmd) exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n" "<body>\n</body>\n</html>") if obs != exp: return False else: artifact.is_submitted_to_vamps = True<|fim▁hole|><|fim▁end|>
return True
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>SECRET_KEY = 'fake-key' DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } } INSTALLED_APPS = [ "django_nose", "tests", ] TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'<|fim▁hole|>NOSE_ARGS = [ '--with-coverage', '--cover-package=search_views', ]<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""The ign_sismologia component."""<|fim▁end|>
<|file_name|>condvar.rs<|end_file_name|><|fim▁begin|>use crate::sys::condvar as imp; use crate::sys::mutex as mutex_imp; use crate::sys_common::mutex::MovableMutex; use crate::time::Duration; mod check; type CondvarCheck = <mutex_imp::MovableMutex as check::CondvarCheck>::Check; /// An OS-based condition variable. pub struct Condvar { inner: imp::MovableCondvar, check: CondvarCheck, } impl Condvar { /// Creates a new condition variable for use.<|fim▁hole|> pub fn new() -> Self { let mut c = imp::MovableCondvar::from(imp::Condvar::new()); unsafe { c.init() }; Self { inner: c, check: CondvarCheck::new() } } /// Signals one waiter on this condition variable to wake up. #[inline] pub fn notify_one(&self) { unsafe { self.inner.notify_one() }; } /// Awakens all current waiters on this condition variable. #[inline] pub fn notify_all(&self) { unsafe { self.inner.notify_all() }; } /// Waits for a signal on the specified mutex. /// /// Behavior is undefined if the mutex is not locked by the current thread. /// /// May panic if used with more than one mutex. #[inline] pub unsafe fn wait(&self, mutex: &MovableMutex) { self.check.verify(mutex); self.inner.wait(mutex.raw()) } /// Waits for a signal on the specified mutex with a timeout duration /// specified by `dur` (a relative time into the future). /// /// Behavior is undefined if the mutex is not locked by the current thread. /// /// May panic if used with more than one mutex. #[inline] pub unsafe fn wait_timeout(&self, mutex: &MovableMutex, dur: Duration) -> bool { self.check.verify(mutex); self.inner.wait_timeout(mutex.raw(), dur) } } impl Drop for Condvar { fn drop(&mut self) { unsafe { self.inner.destroy() }; } }<|fim▁end|>
<|file_name|>window_update.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 ThetaSinner
//
// This file is part of Osmium.

// Osmium is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Osmium is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Osmium. If not, see <http://www.gnu.org/licenses/>.

// std
use std::vec::IntoIter;

// osmium
use super::CompressibleHttpFrame;
use super::FrameType;
use http2::error;

const WINDOW_SIZE_INCREMENT_BIT_MASK: u8 = 0x80;

#[derive(Debug)]
pub struct WindowUpdateFrameCompressModel {
    window_size_increment: u32
}

impl WindowUpdateFrameCompressModel {
    pub fn new(window_size_increment: u32) -> Self {
        WindowUpdateFrameCompressModel {
            window_size_increment: window_size_increment
        }
    }
}

impl CompressibleHttpFrame for WindowUpdateFrameCompressModel {
    fn get_length(&self) -> i32 {
        // 4 octets for the 32 bits in the window size increment
        4
    }

    fn get_frame_type(&self) -> FrameType {
        FrameType::WindowUpdate
    }

    fn get_flags(&self) -> u8 {
        // this frame doesn't define any flags
        0
    }

    fn get_payload(self: Box<Self>) -> Vec<u8> {
        let mut result = Vec::new();

        // include the window size increment
        let window_size_increment_first_octet = (self.window_size_increment >> 24) as u8;
        // TODO handle error
        assert_eq!(0, window_size_increment_first_octet & WINDOW_SIZE_INCREMENT_BIT_MASK);<|fim▁hole|>
        result.push((self.window_size_increment >> 16) as u8);
        result.push((self.window_size_increment >> 8) as u8);
        result.push(self.window_size_increment as u8);

        result
    }
}

pub struct WindowUpdateFrame {
    window_size_increment: u32
}

impl WindowUpdateFrame {
    pub fn new_conn(frame_header: &super::FrameHeader, frame: &mut IntoIter<u8>) -> Result<Self, error::HttpError> {
        if frame_header.length != 4 {
            return Err(error::HttpError::ConnectionError(
                error::ErrorCode::FrameSizeError,
                error::ErrorName::InvalidFrameLengthForConnectionWindowUpdateFrame
            ));
        }

        let window_size_increment_first_octet = frame.next().unwrap();
        assert_eq!(0, window_size_increment_first_octet & WINDOW_SIZE_INCREMENT_BIT_MASK);

        Ok(WindowUpdateFrame {
            window_size_increment:
                (((window_size_increment_first_octet & !WINDOW_SIZE_INCREMENT_BIT_MASK) as u32) << 24) +
                ((frame.next().unwrap() as u32) << 16) +
                ((frame.next().unwrap() as u32) << 8) +
                (frame.next().unwrap() as u32)
        })
    }

    pub fn new_stream(frame_header: &super::StreamFrameHeader, frame: &mut IntoIter<u8>) -> Result<Self, error::HttpError> {
        if frame_header.length != 4 {
            return Err(error::HttpError::ConnectionError(
                error::ErrorCode::FrameSizeError,
                error::ErrorName::InvalidFrameLengthForConnectionWindowUpdateFrame
            ));
        }

        let window_size_increment_first_octet = frame.next().unwrap();
        assert_eq!(0, window_size_increment_first_octet & WINDOW_SIZE_INCREMENT_BIT_MASK);

        Ok(WindowUpdateFrame {
            window_size_increment:
                (((window_size_increment_first_octet & !WINDOW_SIZE_INCREMENT_BIT_MASK) as u32) << 24) +
                ((frame.next().unwrap() as u32) << 16) +
                ((frame.next().unwrap() as u32) << 8) +
                (frame.next().unwrap() as u32)
        })
    }

    pub fn get_window_size_increment(&self) -> u32 {
        self.window_size_increment
    }
}<|fim▁end|>
result.push(window_size_increment_first_octet & !WINDOW_SIZE_INCREMENT_BIT_MASK);
<|file_name|>local_settings.docker.py<|end_file_name|><|fim▁begin|># Docker-specific local settings
import os

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db',
    }
}

# Make this unique, and don't share it with anybody.
SECRET_KEY = ''

TEMPLATE_DIRS = (
    '/srv/webldap/templates',
)

EMAIL_FROM = 'root@localhost'<|fim▁hole|>REQ_EXPIRE_HRS = 48
REQ_EXPIRE_STR = '48 heures'

LDAP_URI = 'ldap://{}:{}'.format(os.environ['LDAP_PORT_389_TCP_ADDR'],
                                 os.environ['LDAP_PORT_389_TCP_PORT'])
LDAP_STARTTLS = False
LDAP_CACERT = ''
LDAP_BASE = 'dc=example,dc=net'
LDAP_WEBLDAP_USER = 'cn=webldap,ou=service-users,dc=example,dc=net'
LDAP_WEBLDAP_PASSWD = 'secret'
LDAP_DEFAULT_GROUPS = []
LDAP_DEFAULT_ROLES = ['member']<|fim▁end|>
<|file_name|>full.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

DEPS = [
  'git',
  'recipe_engine/context',
  'recipe_engine/path',
  'recipe_engine/platform',
  'recipe_engine/properties',
  'recipe_engine/raw_io',
  'recipe_engine/step',
]


def RunSteps(api):
  url = 'https://chromium.googlesource.com/chromium/src.git'

  # git.checkout can optionally dump GIT_CURL_VERBOSE traces to a log file,
  # useful for debugging git access issues that are reproducible only on bots.
  curl_trace_file = None
  if api.properties.get('use_curl_trace'):
    curl_trace_file = api.path['start_dir'].join('curl_trace.log')

  submodule_update_force = api.properties.get('submodule_update_force', False)
  submodule_update_recursive = api.properties.get('submodule_update_recursive',
                                                  True)

  # You can use api.git.checkout to perform all the steps of a safe checkout.
  retVal = api.git.checkout(
      url,
      ref=api.properties.get('revision'),
      recursive=True,
      submodule_update_force=submodule_update_force,
      set_got_revision=api.properties.get('set_got_revision'),
      curl_trace_file=curl_trace_file,
      remote_name=api.properties.get('remote_name'),
      display_fetch_size=api.properties.get('display_fetch_size'),
      file_name=api.properties.get('checkout_file_name'),
      submodule_update_recursive=submodule_update_recursive,
      use_git_cache=api.properties.get('use_git_cache'))

  assert retVal == "deadbeef", (
    "expected retVal to be %r but was %r" % ("deadbeef", retVal))

  # count_objects shows number and size of objects in .git dir.
  api.git.count_objects(
      name='count-objects',
      can_fail_build=api.properties.get('count_objects_can_fail_build'),
      git_config_options={'foo': 'bar'})

  # Get the remote URL.
  api.git.get_remote_url(
      step_test_data=lambda: api.raw_io.test_api.stream_output('foo'))

  api.git.get_timestamp(test_data='foo')

  # You can use api.git.fetch_tags to fetch all tags from the remote
  api.git.fetch_tags(api.properties.get('remote_name'))

  # If you need to run more arbitrary git commands, you can use api.git itself,
  # which behaves like api.step(), but automatically sets the name of the step.
  with api.context(cwd=api.path['checkout']):
    api.git('status')

  api.git('status', name='git status can_fail_build', can_fail_build=True)

  api.git('status', name='git status cannot_fail_build', can_fail_build=False)

  # You should run git new-branch before you upload something with git cl.
  api.git.new_branch('refactor')  # Upstream is origin/master by default.
  # And use upstream kwarg to set up different upstream for tracking.
  api.git.new_branch('feature', upstream='refactor')

  # You can use api.git.rebase to rebase the current branch onto another one
  api.git.rebase(name_prefix='my repo', branch='origin/master',
                 dir_path=api.path['checkout'],
                 remote_name=api.properties.get('remote_name'))

  if api.properties.get('cat_file', None):
    step_result = api.git.cat_file_at_commit(api.properties['cat_file'],
                                             api.properties['revision'],
                                             stdout=api.raw_io.output())
    if 'TestOutput' in step_result.stdout:
      pass  # Success!

  # Bundle the repository.
  api.git.bundle_create(
      api.path['start_dir'].join('all.bundle'))


def GenTests(api):
  yield api.test('basic')

  yield api.test('basic_ref') + api.properties(revision='refs/foo/bar')

  yield api.test('basic_branch') + api.properties(revision='refs/heads/testing')

  yield api.test('basic_hash') + api.properties(
      revision='abcdef0123456789abcdef0123456789abcdef01')

  yield api.test('basic_file_name') + api.properties(checkout_file_name='DEPS')

  yield api.test('basic_submodule_update_force') + api.properties(
      submodule_update_force=True)

  yield api.test('platform_win') + api.platform.name('win')

  yield api.test('curl_trace_file') + api.properties(
      revision='refs/foo/bar', use_curl_trace=True)

  yield (
      api.test('can_fail_build') +
      api.step_data('git status can_fail_build', retcode=1)
  )

  yield (
      api.test('cannot_fail_build') +
      api.step_data('git status cannot_fail_build', retcode=1)
  )

  yield (
      api.test('set_got_revision') +
      api.properties(set_got_revision=True)
  )

  yield (
      api.test('rebase_failed') +
      api.step_data('my repo rebase', retcode=1)
  )

  yield api.test('remote_not_origin') + api.properties(remote_name='not_origin')

  yield (
      api.test('count-objects_delta') +
      api.properties(display_fetch_size=True))

  yield (
      api.test('count-objects_failed') +
      api.step_data('count-objects', retcode=1))<|fim▁hole|>
      api.step_data(
          'count-objects',
          stdout=api.raw_io.output(api.git.count_objects_output('xxx'))))

  yield (
      api.test('count-objects_with_bad_output_fails_build') +
      api.step_data(
          'count-objects',
          stdout=api.raw_io.output(api.git.count_objects_output('xxx'))) +
      api.properties(count_objects_can_fail_build=True))

  yield (
      api.test('cat-file_test') +
      api.step_data('git cat-file abcdef12345:TestFile',
                    stdout=api.raw_io.output('TestOutput')) +
      api.properties(revision='abcdef12345', cat_file='TestFile'))

  yield (
      api.test('git-cache-checkout') +
      api.properties(use_git_cache=True))<|fim▁end|>
yield (
      api.test('count-objects_with_bad_output') +
<|file_name|>badgesService.js<|end_file_name|><|fim▁begin|>(function() {
  'use strict';

  /**
   * @ngdoc function
   * @name app.service:badgesService
   * @description
   * # badgesService
   * Service of the app
   */
  angular
    .module('badges')
    .factory('BadgesService', Badges);

  // Inject your dependencies as .$inject = ['$http', 'someSevide'];
  // function Name ($http, someSevide) {...}
  Badges.$inject = ['$http', '$rootScope'];

  function Badges ($http, $rootScope) {
    return {
      getBadges:getBadges
    };

    function getBadges(vm) {
      var badges = [];
      var url = "https://raw.githubusercontent.com/ltouroumov/amt-g4mify/master/client/app/assets/images/";
      var req = {
        method: 'GET',
        url: 'http://localhost:8080/api/users/' + $rootScope.username +'/badges',
        headers: {
          'Content-Type': 'application/json',
          'Identity': '1:secret'
        }
      };

      $http(req).then(function(res){
        console.log("Badges: OK");
        for(var i = 0; i < res.data.length; i++){
          var badge = {
            level: res.data[i].level,
            name: res.data[i].type.name,
            image: url + res.data[i].type.image
          };
          console.log(badges);
          badges.push(badge);
        }
        vm.badges = badges;
      }, function(err){
        console.log("Badges: ERROR");
        vm.msg = "- An error occurred posting the event to the gamification platform";
        vm.success = false;
      });
    }<|fim▁hole|>})();<|fim▁end|>
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import os as _os

__path__.append(_os.path.join(__path__[0], '..', '..', 'gen', 'ortools', 'algorithms'))<|fim▁hole|>__path__.append(_os.path.join(__path__[0], '..', '..', '..', 'lib'))<|fim▁end|>