<|file_name|>Compare.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import org.janus.data.DataContext;

public class Compare extends WordImpl {
    public enum Comp {
        EQ, NEQ, LT, GT, LEQ, GEQ
    }

    Comp comp;

    public Compare(Comp comp) {
        super(-1);
        this.comp = comp;
    }

    @Override
    public void perform(DataContext context) {
        super.perform(context);
        MiniForthContext withStack = (MiniForthContext) context;
        Comparable a = (Comparable) withStack.pop();
        Comparable b = (Comparable) withStack.pop();
        boolean result = false;
        if (a.getClass().equals(b.getClass())) {
            result = compare(a, b);
        } else {
            throw new IllegalArgumentException("Comparison not possible");
        }
        withStack.pushBoolean(result);
    }

    private boolean compare(Comparable a, Comparable b) {
        int c = a.compareTo(b);
        if (c == 0) {
            if (comp.equals(Comp.NEQ)) {
                return false;
            } else {
                return comp.equals(Comp.EQ) || comp.equals(Comp.LEQ) || comp.equals(Comp.GEQ);
            }
        } else {
            if (c > 0) {
                return comp.equals(Comp.NEQ) || comp.equals(Comp.LEQ) || comp.equals(Comp.LT);
            } else {
                return comp.equals(Comp.NEQ) || comp.equals(Comp.GEQ) || comp.equals(Comp.GT);
            }
        }
    }
}<|fim▁end|>
package org.janus.miniforth;
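
A minimal Python sketch of the decision table implemented by compare() above (hypothetical helper, not part of the janus sources). Note the stack order: `a` is popped from the top, `b` was pushed first, so the word effectively evaluates "b comp a":

def forth_compare(stack, comp):
    # Pop in the same order as Compare.perform: 'a' is the top of the stack.
    a = stack.pop()
    b = stack.pop()
    if type(a) is not type(b):
        raise TypeError("Comparison not possible")
    c = (a > b) - (a < b)  # sign of compareTo(a, b)
    table = {
        0:  {"EQ", "LEQ", "GEQ"},   # a == b
        1:  {"NEQ", "LEQ", "LT"},   # a > b, i.e. b < a
        -1: {"NEQ", "GEQ", "GT"},   # a < b, i.e. b > a
    }
    stack.append(comp in table[c])

stack = [3, 5]              # push 3, then 5
forth_compare(stack, "LT")
print(stack)                # [True], since 3 < 5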
<|file_name|>dirtree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Utilities for getting directory tree."""
import os
from pathlib import Path

import crayons

from nuke.utils import parse_ignore_file


def fg(text, color):
    """Set text to foreground color."""
    return "\33[38;5;" + str(color) + "m" + text + "\33[0m"


def bg(text, color):
    """Set text to background color."""
    return "\33[48;5;" + str(color) + "m" + text + "\33[0m"


def get_colorized(path: Path):
    """Colorize path name based on type.

    Note: ``is_dir()`` and ``is_file()`` follow symlinks, so the
    ``is_mount()`` branch below is shadowed (mount points are directories)
    and ``is_symlink()`` is only reached for broken links.
    """
    name = path.name
    if path.is_dir():
        return crayons.blue(name)
    elif path.is_file():
        return crayons.green(name)
    elif path.is_mount():
        return crayons.red(name)<|fim▁hole|>
    elif path.is_symlink():
        return crayons.cyan(name)
    elif path.is_socket():
        return crayons.magenta(name)
    else:
        return crayons.white(name)


def get_dirtree(directory):
    """
    Get the directory tree of the `directory`.

    :param directory: The root directory from where to generate the directory tree.
    :return: The list of paths with appropriate indenting
    """
    element_list = []
    ignore_patterns = []
    file_link = fg("├── ", 241)       # u'\u251c\u2500\u2500 '
    last_file_link = fg("└── ", 241)  # u'\u2514\u2500\u2500 '
    tree_branch = fg("│   ", 241)     # u'\u2502   '
    # Get the list of all the files/dirs in the directory to nuke.
    # We traverse in a bottom up manner so that directory removal is trivial.
    for (dirpath_str, dirnames, filenames) in os.walk(directory, topdown=False):
        level = dirpath_str.replace(str(directory), "").count(os.sep)
        if level > 0:
            indent = tree_branch * (level - 1) + file_link
        else:
            indent = ""
        dirpath = Path(dirpath_str)
        # We record every element in the tree as a dict of the indented name (repr)
        # and the path so we can use the ignore methods on the paths and still
        # have the indented names for our tree
        # only add current directory as element to be nuked if no .nukeignore file is present
        if ".nukeignore" not in filenames:
            # Add the current directory
            element = {
                "repr": "{}{}/".format(indent, get_colorized(dirpath)),
                "path": dirpath,
            }
            element_list.append(element)
        subindent = tree_branch * (level)
        # Add the files in the directory
        for idx, fn in enumerate(filenames):
            if fn == ".nukeignore":
                ignore_patterns.extend(
                    parse_ignore_file((dirpath / fn), dirpath))
                continue
            # Check if it is the last element
            if idx == len(filenames) - 1:
                branch = subindent + last_file_link
            else:
                branch = subindent + file_link
            element = {
                "repr": "{}{}".format(branch, get_colorized(dirpath / fn)),
                "path": (dirpath / fn),
            }
            element_list.append(element)
    return element_list, ignore_patterns<|fim▁end|>
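
A hedged usage sketch for get_dirtree above (assumes the crayons package and the nuke.utils.parse_ignore_file helper are importable):

from pathlib import Path

elements, ignore_patterns = get_dirtree(Path('.'))
for element in elements:
    print(element['repr'])    # tree-indented, colorized name
# ignore_patterns accumulates patterns parsed from any .nukeignore files found.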
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#coding: utf-8
from __future__ import absolute_import, unicode_literals, print_function

import cProfile
from datetime import datetime
import re
import os
import codecs
import timeit

import pymorphy
from pymorphy.contrib.tokenizers import extract_words

DICT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'dicts', 'converted'))

def total_seconds(delta):
    return delta.days * 3600 * 24 + delta.seconds + delta.microseconds/1000000.0

def get_words(text):
#    return list(extract_words(text))
    r = re.compile('[\W+-]',re.U)
    return [word for word in r.split(text.upper()) if word]

def do_normalize(words, morph):
    for word in words:
        forms = morph.normalize(word)

def do_pluralize(words, morph):
    for word in words:
        forms = morph.pluralize_ru(word)

def do_all(words, morph):
    do_normalize(words, morph)
#    do_pluralize(words, morph)

def load_words(fn):
    filename = os.path.abspath(os.path.join('text', fn))
    with codecs.open(filename, encoding='utf-8') as f:
        text = f.read()
    return get_words(text)

def get_morph(backend, **kwargs):
    if backend == 'pickle':
        path = os.path.join(DICT_PATH, 'ru', 'morphs.pickle')
    else:
        path = os.path.join(DICT_PATH,'ru')
    return pymorphy.get_morph(path, backend, **kwargs)

def get_mem_usage():
    try:
        import psutil
        proc = psutil.Process(os.getpid())
        return proc.get_memory_info()
    except ImportError:
        from collections import namedtuple
        Info = namedtuple('Info', 'vms rss')
        return Info(0, 0)

def print_memory_usage(old=None):
    info = get_mem_usage()
    M = 1024*1024.0
    if old:
        print("RSS: %0.1fM (+%0.1fM), VMS: %0.1fM (+%0.1fM)" % (
            info.rss/M, (info.rss-old.rss)/M,
            info.vms/M, (info.vms-old.vms)/M),
        )
    else:
        print("RSS: %0.1fM, VMS: %0.1fM" % (info.rss/M, info.vms/M))

def bench(filename, backend='shelve', use_psyco=True, use_cache=True, profile=True):
    if profile:
        words = load_words(filename)
        print ('Text is loaded (%d words)' % len(words))
        usage = get_mem_usage()
        morph = get_morph(backend, cached=use_cache)
        prof = cProfile.Profile()
        prof = prof.runctx('do_all(words, morph)', globals = globals(), locals=locals())
        prof.print_stats(1)
        print_memory_usage(usage)
    else:
#        prep = """
#from bench import do_all, load_words, get_morph
#words = load_words('%s')
#morph = get_morph('%s', cached=%s)
#        """ % (file, backend, use_cache)
#        res = timeit.timeit('do_all(words, morph)', prep, number=1)
#        print '%s => %s (cache: %s) => %.2f sec' % (file, backend, use_cache, res)
        start = datetime.now()<|fim▁hole|>
        usage = get_mem_usage()
        morph = get_morph(backend, cached=use_cache)
        loaded = datetime.now()
        do_all(words, morph)
        parsed = datetime.now()
        load_time = total_seconds(loaded-start)
        parse_time = total_seconds(parsed-loaded)
        wps = len(words)/parse_time
        print ('%s => %s (cache: %s) => load: %.2f sec, parse: %0.2f sec (%d words/sec)' % (
            filename, backend, use_cache, load_time, parse_time, wps))
        print_memory_usage(usage)<|fim▁end|>
        words = load_words(filename)
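
A hedged invocation of the benchmark above (the module name `bench` is taken from the commented-out timeit snippet; the text file and the converted pymorphy dictionaries under DICT_PATH must exist):

from bench import bench

# Profile pass: cProfile stats plus memory usage.
bench('war-and-peace.txt', backend='shelve', profile=True)

# Timing pass: load/parse wall-clock times and words/sec.
bench('war-and-peace.txt', backend='shelve', use_cache=True, profile=False)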
<|file_name|>server.py<|end_file_name|><|fim▁begin|># coding: utf-8
from livereload import Server, shell

<|fim▁hole|><|fim▁end|>
server = Server()
server.watch('docs/*.rst', shell('make html'))
server.serve(root='docs/_build/html', open_url=True)
<|file_name|>paste.py<|end_file_name|><|fim▁begin|>"""
Contains a base Site class for pastebin-like sites. Each child class only
needs to specify a base url, the relative url to the public pastes archive,
and a lambda function to get the paste links out of the page.
"""
import logging
import re
from urllib.parse import urljoin

from bs4 import BeautifulSoup
import requests

LOGGER = logging.getLogger(__name__)


class Site(object):
    """ Base class for all paste sites to inherit from. """

    def __init__(self, url_base, url_archive, paste_tag, target_patterns, paste):
        self.url_base = url_base
        self.url_archive = url_archive
        self.paste_tag = paste_tag
        self.target_patterns = target_patterns
        self.headers = {'User-Agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36' \
                        '(KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36'}
        self.paste = paste

    def get(self):
        """ Gets the archive page for the paste site. Returns list of links to pastes. """
        req = requests.get(self.url_base+self.url_archive, headers=self.headers)
        LOGGER.debug('Got the response for the archive page')
        while req.status_code != 200:
            LOGGER.error('Response was not 200. Trying again...')
            req = requests.get(self.url_base+self.url_archive)
        soup = BeautifulSoup(req.text, 'lxml')
        links = soup.find_all(self.paste_tag)
        LOGGER.debug('Got %d links from page', len(links))
        return [self.paste(urljoin(self.url_base, link.a.get('href'))) for link in links]

    def get_paste(self, paste):
        """ Gets the supplied paste url. Returns list of tuples of matches in paste. """
        req = requests.get(paste.url, headers=self.headers)
        LOGGER.debug('Got response for paste')
        while req.status_code != 200:
            LOGGER.error('Response was not 200. Trying again...')
            req = requests.get(paste.url)
        found = []
        for name, pattern in self.target_patterns:
            LOGGER.debug('Trying pattern: %s', pattern)
            matches = re.findall(pattern, req.text)
            LOGGER.debug('Got %d matches', len(matches))
            if matches:
                found.append((name, len(matches)))
        return found


class Paste(object):
    """ Base class for pastes.
    Parses paste ID from supplied URL """

    def __init__(self, url):
        _id = url.split('/')[-1]
        self._id = _id


class PastebinPaste(Paste):
    """ Paste for Pastebin """
    def __init__(self, url):
        super().__init__(url)
        self.url = 'http://pastebin.com/raw.php?i={}'.format(self._id)


class PastiePaste(Paste):
    """ Paste for Pastie """
    def __init__(self, url):
        super().__init__(url)
        self.url = 'http://pastie.org/pastes/{}/text'.format(self._id)<|fim▁hole|>


class SlexyPaste(Paste):
    """ Paste for Slexy """
    def __init__(self, url):
        super().__init__(url)
        self.url = 'http://slexy.org/raw/{}'.format(self._id)


class Pastebin(Site):
    """ Pastebin class """
    def __init__(self, target_patterns):
        self.url_base = 'http://pastebin.com/'
        self.url_archive = '/archive'
        self.paste_tag = lambda tag: tag.name == 'td' and tag.a and \
            '/archive/' not in tag.a['href'] and tag.a['href'][1:]
        super().__init__(self.url_base, self.url_archive, self.paste_tag,
                         target_patterns, PastebinPaste)


class Pastie(Site):
    """ Pastie class """
    def __init__(self, target_patterns):
        self.url_base = 'http://pastie.org'
        self.url_archive = '/pastes'
        self.paste_tag = lambda tag: tag.name == 'p' and tag.a and self.url_base in tag.a['href']
        super().__init__(self.url_base, self.url_archive, self.paste_tag,
                         target_patterns, PastiePaste)


class Slexy(Site):
    """ Slexy site """
    def __init__(self, target_patterns):
        self.url_base = 'http://slexy.org'
        self.url_archive = '/recent'
        self.paste_tag = lambda tag: tag.name == 'td' and tag.a and '/view/' in tag.a['href']
        super().__init__(self.url_base, self.url_archive, self.paste_tag,
                         target_patterns, SlexyPaste)<|fim▁end|>
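
A hedged usage sketch for the Site hierarchy above (the regexes are illustrative, and the paste sites must be reachable for the requests calls to succeed):

import logging
logging.basicConfig(level=logging.INFO)

# Each target pattern pairs a label with a regex counted per paste.
target_patterns = [
    ('email', r'[\w.+-]+@[\w-]+\.[\w.-]+'),
    ('md5', r'\b[a-fA-F0-9]{32}\b'),
]

site = Pastebin(target_patterns)
for paste in site.get():              # Paste objects scraped from /archive
    matches = site.get_paste(paste)   # [(name, match_count), ...]
    if matches:
        print(paste.url, matches)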
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>// Copyright 2016-2021 Matthew D. Michelotti
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Standard definitions of `NoisyFloat`.
//!
//! Definitions in this module all use `debug_assert!`
//! to check for valid values, so there is no overhead
//! when running in an optimized build.

use core::marker::PhantomData;

use crate::{
    checkers::{FiniteChecker, NumChecker},
    NoisyFloat,
};

/// A floating point number behaving like `f32` that does not allow NaN.
///
/// The "N" in the name stands for "Number", since all values of this type
/// are "numbers", i.e. they are not "not-a-number".
pub type N32 = NoisyFloat<f32, NumChecker>;

/// A floating point number behaving like `f64` that does not allow NaN.
///
/// The "N" in the name stands for "Number", since all values of this type
/// are "numbers", i.e. they are not "not-a-number".
pub type N64 = NoisyFloat<f64, NumChecker>;

/// A floating point number behaving like `f32` that does not allow NaN or +/- Infinity.
///
/// The "R" in the name stands for "Real", since in Mathematics, the Real
/// numbers do not include NaN or +/- Infinity.
pub type R32 = NoisyFloat<f32, FiniteChecker>;

/// A floating point number behaving like `f64` that does not allow NaN or +/- Infinity.
///
/// The "R" in the name stands for "Real", since in Mathematics, the Real
/// numbers do not include NaN or +/- Infinity.
pub type R64 = NoisyFloat<f64, FiniteChecker>;

/// Shorthand for `N32::new(value)`.
#[inline]
pub fn n32(value: f32) -> N32 {
    N32::new(value)
}

/// Shorthand for `N64::new(value)`.
#[inline]
pub fn n64(value: f64) -> N64 {
    N64::new(value)
}

/// Shorthand for `R32::new(value)`.
#[inline]
pub fn r32(value: f32) -> R32 {
    R32::new(value)
}

/// Shorthand for `R64::new(value)`.
#[inline]
pub fn r64(value: f64) -> R64 {
    R64::new(value)
}

macro_rules! const_fns {
    ($type:ty, $raw:ty) => {
        impl $type {
            /// A const constructor that does not check whether `value` is valid.
            ///
            /// WARNING: This constructor does not panic even in debug mode.
            /// As always, it is the user's responsibility to ensure `value` is valid.
            /// Until Rust supports panics in const functions, this constructor
            /// is necessary to create a NoisyFloat in a const setting.
            pub const fn unchecked_new(value: $raw) -> Self {
                Self {
                    value,
                    checker: PhantomData,
                }
            }

            /// A const function that returns the underlying float value.<|fim▁hole|>
        }
    };
}

const_fns!(N32, f32);
const_fns!(N64, f64);
const_fns!(R32, f32);
const_fns!(R64, f64);<|fim▁end|>
            pub const fn const_raw(self) -> $raw {
                self.value
            }
<|file_name|>findCWs_plug.C<|end_file_name|><|fim▁begin|>// Copyright (C) 2006,2007,2008,2009, George Hobbs, Russell Edwards

/*
 * This file is part of TEMPO2.
 *
 * TEMPO2 is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * TEMPO2 is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with TEMPO2. If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * If you use TEMPO2 then please acknowledge it by citing
 * Hobbs, Edwards & Manchester (2006) MNRAS, Vol 369, Issue 2,
 * pp. 655-672 (bibtex: 2006MNRAS.369..655H)
 * or Edwards, Hobbs & Manchester (2006) MNRAS, VOl 372, Issue 4,
 * pp. 1549-1574 (bibtex: 2006MNRAS.372.1549E) when discussing the
 * timing model.
 */

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include "tempo2.h"
#include "TKspectrum.h"
#include <gsl/gsl_math.h>
#include <gsl/gsl_blas.h>
#include <gsl/gsl_vector.h>
#include <gsl/gsl_matrix.h>
#include <gsl/gsl_eigen.h>
#include <gsl/gsl_linalg.h>

using namespace std;

void help() /* Display help */
{
}

extern "C" int graphicalInterface(int argc, char *argv[], pulsar *psr, int *npsr)
{
    char parFile[MAX_PSR][MAX_FILELEN];
    char timFile[MAX_PSR][MAX_FILELEN];
    char covarFuncFile[128];
    int i, j;
    double globalParameter;
    const char *CVS_verNum = "$Revision: 1.1 $";
    FILE *fout;

    strcpy(covarFuncFile, "NULL");
    if (displayCVSversion == 1)
        CVSdisplayVersion((char *)"findCW.C", (char *)"plugin", CVS_verNum);
    *npsr = 0;

    printf("Graphical Interface: findCW\n");
    printf("Author:              X. Zhu, G. Hobbs\n");
    printf("CVS Version:         $Revision: 1.1 $\n");
    printf(" --- type 'h' for help information\n");

    /* Obtain all parameters from the command line */
    for (i = 2; i < argc; i++) {
        if (strcmp(argv[i], "-f") == 0) {
            strcpy(parFile[*npsr], argv[++i]);
            strcpy(timFile[*npsr], argv[++i]);
            (*npsr)++;
        }
        else if (strcmp(argv[i], "-dcf") == 0)
            strcpy(covarFuncFile, argv[++i]);
    }

    readParfile(psr, parFile, timFile, *npsr); /* Load the parameters */
    readTimfile(psr, timFile, *npsr);          /* Load the arrival times */
    preProcess(psr, *npsr, argc, argv);

    for (i = 0; i < 2; i++) /* Do two iterations for pre- and post-fit residuals */
    // i=0;
    {
        logdbg("Calling formBatsAll");
        formBatsAll(psr, *npsr);      /* Form the barycentric arrival times */
        logdbg("Calling formResiduals");
        formResiduals(psr, *npsr, 1); /* Form the residuals */
        logdbg("Calling doFit");
        if (i == 0)
            doFitAll(psr, *npsr, covarFuncFile); /* Do the fitting */
        else
            textOutput(psr, *npsr, globalParameter, 0, 0, 0, (char *)""); /* Display the output */
    }

    // Print A+ and Ax into a file
    fout = fopen("aplus_across.dat", "w");
    for (i = 0; i < psr[0].quad_ifuncN_p; i++) {
        fprintf(fout, "%.2lf %g %g %g %g\n", psr[0].quad_ifuncT_p[i],
                psr[0].quad_ifuncV_p[i], psr[0].quad_ifuncE_p[i],
                psr[0].quad_ifuncV_c[i], psr[0].quad_ifuncE_c[i]);
    }
    fclose(fout);
    // return 0;

    // Calculate the Detection Statistics as a function of Fourier frequencies
    {
        double freq[1024];
        double DS[1024];
        double dt, Tspan, fmin, fmax;
        int nSpec;    /* number of independent freq channels */
        int nSpecOS4; /* number of freq channels with an OverSampling factor of 4 */
        const int lp = psr[0].quad_ifuncN_p;    /* length of A+ or Ax */
        const int lpc = 2*psr[0].quad_ifuncN_p; /* length of the stacked data A+,x assuming A+&Ax have the same length */
        const int n_cst = 18;                   /* number of constraints set on A+&Ax */
        const int mlen = lpc-n_cst;             /* rank of the noise covariance matrix */

        // Assuming that the ifuncs are regularly sampled //
        dt = psr[0].quad_ifuncT_p[1]-psr[0].quad_ifuncT_p[0];
        Tspan = psr[0].quad_ifuncT_p[lp-1]-psr[0].quad_ifuncT_p[0];
        fmin = 1.0/Tspan/86400.0;
        fmax = 1.0/dt/86400.0/2;
        nSpec = floor (fmax/fmin);
        nSpecOS4 = 4*nSpec;
        /* somehow gsl can't handle f=fmax=36.5*fin, so f only goes up to 36.25*fmin */

        // check
        /*
        printf("%d\t%d\t%d\n", lp, lpc, mlen);
        printf("%g\t%g\t%d\n", fmin, fmax, nSpec);
        */

        gsl_vector *time = gsl_vector_alloc (lp);
        gsl_vector *Apn = gsl_vector_alloc (lp);
        gsl_vector *Acn = gsl_vector_alloc (lp);
        for (i = 0; i < lp; i++) {
            gsl_vector_set (time, i, psr[0].quad_ifuncT_p[i]);
            gsl_vector_set (Apn, i, psr[0].quad_ifuncV_p[i]);
            gsl_vector_set (Acn, i, psr[0].quad_ifuncV_c[i]);
        }

        gsl_matrix *Sigma_n = gsl_matrix_alloc (lpc, lpc);  /* noise convariance matrix */
        gsl_matrix *Sigma_n1 = gsl_matrix_alloc (lpc, lpc); /* inverse noise convariance matrix */
        for (i = 0; i < psr[0].globalNfit; i++) {
            for (j = 0; j < psr[0].globalNfit; j++) {
                if ((psr[0].fitParamI[i] == param_quad_ifunc_p || psr[0].fitParamI[i] == param_quad_ifunc_c)
                    && (psr[0].fitParamI[j] == param_quad_ifunc_p || psr[0].fitParamI[j] == param_quad_ifunc_c)) {
                    gsl_matrix_set (Sigma_n, i, j, psr[0].covar[i][j]);
                }
            }
        }

        // print the covariance matrix to screen
        /*
        for (j = 0; j < lpc; j++)
            printf ("Sigma_n(%d,%d) = %g\n", 0, j, gsl_matrix_get (Sigma_n, 0, j));
        */

        // eign-decomposition
        gsl_vector *eval = gsl_vector_alloc (lpc);
        gsl_matrix *evec = gsl_matrix_alloc (lpc, lpc);
        gsl_eigen_symmv_workspace * w = gsl_eigen_symmv_alloc (lpc);
        gsl_eigen_symmv (Sigma_n, eval, evec, w);
        gsl_eigen_symmv_free (w);
        gsl_eigen_symmv_sort (eval, evec, GSL_EIGEN_SORT_ABS_ASC);

        gsl_matrix *D1 = gsl_matrix_alloc (mlen, mlen);
        gsl_matrix *A1 = gsl_matrix_alloc (lpc, mlen);
        gsl_matrix_set_zero (D1);
        /* print the eign-values, checked that they agree with results from Matlab
        for (j = 0; j < lpc; j++)
            printf ("%d\t%g\n", j, gsl_vector_get (eval, j));
        */
        for (i = 0; i < mlen; i++) {
            double eval_i = gsl_vector_get (eval, i+n_cst);
            gsl_matrix_set (D1, i, i, 1.0/eval_i);
            gsl_vector *evec_i = gsl_vector_alloc (lpc);
            gsl_matrix_get_col (evec_i, evec, i+n_cst);
            gsl_matrix_set_col (A1, i, evec_i);
            gsl_vector_free (evec_i);
        }
        gsl_vector_free (eval);
        gsl_matrix_free (evec);

        gsl_matrix *AD1 = gsl_matrix_alloc (lpc, mlen);
        gsl_matrix_set_zero (AD1);
        gsl_matrix_set_zero (Sigma_n1);
        gsl_blas_dgemm (CblasNoTrans, CblasNoTrans, 1.0, A1, D1, 0.0, AD1);
        gsl_blas_dgemm (CblasNoTrans, CblasTrans, 1.0, AD1, A1, 0.0, Sigma_n1); /* Sigma_n1=A1*D1*A1' */

        // print the inverse covariance matrix to screen
        /* checked and agree with Matlab results
        for (j = 0; j < lpc; j++)
            printf ("Sigma_n1(%d,%d) = %g\n", 1, j+1, gsl_matrix_get (Sigma_n1, 0, j));
        */
        gsl_matrix_free (Sigma_n);
        gsl_matrix_free (D1);
        gsl_matrix_free (A1);
        gsl_matrix_free (AD1);

        /* something (unknown) wrong with the following approach of getting submatrix
        gsl_matrix_view Sb1 = gsl_matrix_submatrix (Sigma_n1, 0, 0, lp, lp);
        gsl_matrix_view Sb2 = gsl_matrix_submatrix (Sigma_n1, 0, lp, lp, lp);
        gsl_matrix_view Sb3 = gsl_matrix_submatrix (Sigma_n1, lp, 0, lp, lp);
        gsl_matrix_view Sb4 = gsl_matrix_submatrix (Sigma_n1, lp, lp, lp, lp);
        gsl_matrix *S11 = &Sb1.matrix;
        gsl_matrix *S12 = &Sb2.matrix;
        gsl_matrix *S21 = &Sb3.matrix;
        gsl_matrix *S22 = &Sb4.matrix;
        */
        gsl_matrix *S11 = gsl_matrix_alloc (lp, lp);
        gsl_matrix *S12 = gsl_matrix_alloc (lp, lp);
        gsl_matrix *S21 = gsl_matrix_alloc (lp, lp);
        gsl_matrix *S22 = gsl_matrix_alloc (lp, lp);
        for (i = 0; i < lp; i++) {
            for (j = 0; j < lp; j++) {
                gsl_matrix_set (S11, i, j, gsl_matrix_get (Sigma_n1, i, j));
                gsl_matrix_set (S12, i, j, gsl_matrix_get (Sigma_n1, i, (lp+j)));
                gsl_matrix_set (S21, i, j, gsl_matrix_get (Sigma_n1, (lp+i), j));
                gsl_matrix_set (S22, i, j, gsl_matrix_get (Sigma_n1, (lp+i), (lp+j)));
            }
        }
        gsl_matrix_free (Sigma_n1);
        /* print the submatrix
        for (j = 0; j < lp; j++)
            printf ("S11(%d,%d) = %g\n", 1, j+1, gsl_matrix_get (S11, 0, j));
        */

        // calculate the Detection Statistics for a set of frequencies
        for (i = 0; i < nSpecOS4; i++) {
            double fIndx = 0.5+0.25*i;
            double f = fmin*fIndx;
            double y11, y12, y21, y22, y31, y32, y41, y42;
            double r11, r12, r13, r14, r22, r23, r24, r33, r34, r44;
            double d1, d2, d3, d4, d5, d6, d7, d8;
            double y1, y2, y3, y4, r21, r31, r32, r41, r42, r43;

            gsl_vector *Sc = gsl_vector_alloc (lp);
            gsl_vector *Ss = gsl_vector_alloc (lp);
            for (j = 0; j < lp; j++) {
                gsl_vector_set (Sc, j, cos(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j]));
                gsl_vector_set (Ss, j, sin(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j]));
            }

            gsl_vector *tempy1 = gsl_vector_alloc (lp);
            gsl_vector *tempy2 = gsl_vector_alloc (lp);
            gsl_vector *tempy3 = gsl_vector_alloc (lp);
            gsl_vector *tempy4 = gsl_vector_alloc (lp);
            gsl_vector *tempy5 = gsl_vector_alloc (lp);
            gsl_vector *tempy6 = gsl_vector_alloc (lp);
            gsl_vector *tempy7 = gsl_vector_alloc (lp);
            gsl_vector *tempy8 = gsl_vector_alloc (lp);
            gsl_vector *tempr1 = gsl_vector_alloc (lp);
            gsl_vector *tempr2 = gsl_vector_alloc (lp);
            gsl_vector *tempr3 = gsl_vector_alloc (lp);
            gsl_vector *tempr4 = gsl_vector_alloc (lp);
            gsl_vector *tempr5 = gsl_vector_alloc (lp);
            gsl_vector *tempr6 = gsl_vector_alloc (lp);
            gsl_vector *tempr7 = gsl_vector_alloc (lp);
            gsl_vector *temp5 = gsl_vector_alloc (lp);
            gsl_vector *temp6 = gsl_vector_alloc (lp);
            gsl_vector *temp7 = gsl_vector_alloc (lp);
            gsl_vector *temp8 = gsl_vector_alloc (lp);

            gsl_blas_dgemv (CblasTrans, 1.0, S11, Apn, 0.0, tempy1);
            gsl_blas_dgemv (CblasTrans, 1.0, S21, Acn, 0.0, tempy2);
            gsl_blas_ddot (tempy1, Sc, &y11);
            gsl_blas_ddot (tempy2, Sc, &y12);
            y1 = y11 + y12;
            gsl_blas_ddot (tempy1, Ss, &y21);
            gsl_blas_ddot (tempy2, Ss, &y22);
            y2 = y21 + y22;
            gsl_blas_dgemv (CblasTrans, 1.0, S12, Apn, 0.0, tempy3);
            gsl_blas_dgemv (CblasTrans, 1.0, S22, Acn, 0.0, tempy4);
            gsl_blas_ddot (tempy3, Sc, &y31);
            gsl_blas_ddot (tempy4, Sc, &y32);
            y3 = y31 + y32;
            gsl_blas_ddot (tempy3, Ss, &y41);
            gsl_blas_ddot (tempy4, Ss, &y42);
            y4 = y41 + y42;

            gsl_blas_dgemv (CblasTrans, 1.0, S11, Sc, 0.0, tempr1);
            gsl_blas_dgemv (CblasTrans, 1.0, S12, Sc, 0.0, tempr2);
            gsl_blas_ddot (tempr1, Sc, &r11);
            gsl_blas_ddot (tempr1, Ss, &r12);
            gsl_blas_ddot (tempr2, Sc, &r13);
            gsl_blas_ddot (tempr2, Ss, &r14);
            gsl_blas_dgemv (CblasTrans, 1.0, S11, Ss, 0.0, tempr3);
            gsl_blas_dgemv (CblasTrans, 1.0, S21, Sc, 0.0, tempr4);
            gsl_blas_dgemv (CblasTrans, 1.0, S21, Ss, 0.0, tempr5);
            gsl_blas_ddot (tempr3, Ss, &r22);
            gsl_blas_ddot (tempr4, Ss, &r23);
            gsl_blas_ddot (tempr5, Ss, &r24);
            gsl_blas_dgemv (CblasTrans, 1.0, S22, Sc, 0.0, tempr6);
            gsl_blas_dgemv (CblasTrans, 1.0, S22, Ss, 0.0, tempr7);
            gsl_blas_ddot (tempr6, Sc, &r33);
            gsl_blas_ddot (tempr6, Ss, &r34);
            gsl_blas_ddot (tempr7, Ss, &r44);
            r21 = r12; r31 = r13; r32 = r23;
            r41 = r14; r42 = r24; r43 = r34;

            double a_data[] = { r11, r12, r13, r14,
                                r21, r22, r23, r24,
                                r31, r32, r33, r34,
                                r41, r42, r43, r44 };
            double b_data[] = { y1, y2, y3, y4 };
            gsl_matrix_view m = gsl_matrix_view_array (a_data, 4, 4);
            gsl_vector_view b = gsl_vector_view_array (b_data, 4);
            gsl_vector *Cc = gsl_vector_alloc (4);
            int s;
            gsl_permutation * p = gsl_permutation_alloc (4);
            gsl_linalg_LU_decomp (&m.matrix, p, &s);
            gsl_linalg_LU_solve (&m.matrix, p, &b.vector, Cc);
            gsl_permutation_free (p);
            /*
            for (j = 0; j < 4; j++)
                printf ("Cc(%d) = %g\n", j+1, gsl_vector_get (Cc, j));
            */<|fim▁hole|>
            double Cc3 = gsl_vector_get (Cc, 2);
            double Cc4 = gsl_vector_get (Cc, 3);
            gsl_vector_free (Cc);

            gsl_vector *Ap = gsl_vector_alloc (lp);
            gsl_vector *Ac = gsl_vector_alloc (lp);
            for (j = 0; j < lp; j++) {
                gsl_vector_set (Ap, j, Cc1*cos(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j])
                                       + Cc2*sin(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j]));
                gsl_vector_set (Ac, j, Cc3*cos(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j])
                                       + Cc4*sin(2.0*86400.0*M_PI*f*psr[0].quad_ifuncT_p[j]));
            }
            gsl_blas_dgemv (CblasTrans, 1.0, S11, Ap, 0.0, tempy5);
            gsl_blas_dgemv (CblasTrans, 1.0, S21, Ac, 0.0, tempy6);
            gsl_blas_dgemv (CblasTrans, 1.0, S12, Ap, 0.0, tempy7);
            gsl_blas_dgemv (CblasTrans, 1.0, S22, Ac, 0.0, tempy8);
            gsl_blas_dgemv (CblasTrans, 1.0, S11, Apn, 0.0, temp5);
            gsl_blas_dgemv (CblasTrans, 1.0, S21, Acn, 0.0, temp6);
            gsl_blas_dgemv (CblasTrans, 1.0, S12, Apn, 0.0, temp7);
            gsl_blas_dgemv (CblasTrans, 1.0, S22, Acn, 0.0, temp8);
            gsl_blas_ddot (tempy5, Ap, &d1);
            gsl_blas_ddot (tempy6, Ap, &d2);
            gsl_blas_ddot (tempy7, Ac, &d3);
            gsl_blas_ddot (tempy8, Ac, &d4);
            gsl_blas_ddot (temp5, Ap, &d5);
            gsl_blas_ddot (temp6, Ap, &d6);
            gsl_blas_ddot (temp7, Ac, &d7);
            gsl_blas_ddot (temp8, Ac, &d8);

            freq[i] = f;
            DS[i] = 2.0*(d5+d6+d7+d8)-(d1+d2+d3+d4);

            gsl_vector_free (tempy1);
            gsl_vector_free (tempy2);
            gsl_vector_free (tempy3);
            gsl_vector_free (tempy4);
            gsl_vector_free (tempy5);
            gsl_vector_free (tempy6);
            gsl_vector_free (tempy7);
            gsl_vector_free (tempy8);
            gsl_vector_free (temp5);
            gsl_vector_free (temp6);
            gsl_vector_free (temp7);
            gsl_vector_free (temp8);
            gsl_vector_free (tempr1);
            gsl_vector_free (tempr2);
            gsl_vector_free (tempr3);
            gsl_vector_free (tempr4);
            gsl_vector_free (tempr5);
            gsl_vector_free (tempr6);
            gsl_vector_free (tempr7);
            gsl_vector_free (Sc);
            gsl_vector_free (Ss);
            gsl_vector_free (Ac);
            gsl_vector_free (Ap);
        }

        gsl_matrix_free (S11);
        gsl_matrix_free (S12);
        gsl_matrix_free (S21);
        gsl_matrix_free (S22);
        gsl_vector_free (time);
        gsl_vector_free (Apn);
        gsl_vector_free (Acn);

        fout = fopen("DectionSts.dat", "w");
        for (i = 0; i < nSpecOS4; i++)
            fprintf(fout, "%d %g %g\n", i+1, freq[i], DS[i]);
        fclose(fout);
    }
    return 0;
}
// char * plugVersionCheck = TEMPO2_h_VER;<|fim▁end|>
            double Cc1 = gsl_vector_get (Cc, 0);
            double Cc2 = gsl_vector_get (Cc, 1);
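
A hedged reading of the statistic above: with the stacked data x = (A+, Ax), the per-frequency template s built from the fitted amplitudes Cc1..Cc4, and the pseudo-inverse noise covariance assembled from the S11/S12/S21/S22 blocks, the loop appears to compute the standard Gaussian log-likelihood-ratio detection statistic DS(f) = 2 x^T Sigma^-1 s - s^T Sigma^-1 s, where d5..d8 are the data-times-template terms and d1..d4 the template-times-template terms.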
<|file_name|>util.py<|end_file_name|><|fim▁begin|>#
# GdbLib - A Gdb python library.
# Copyright (C) 2012 Fernando Castillo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,<|fim▁hole|>
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
import os

def change(values):
    pass

def removeFile(path):
    # Despite the name, this does not delete anything: it strips the final
    # path component and returns the containing directory portion of `path`.
    index = path.rfind(os.sep)
    return path[:index]<|fim▁end|>
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
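
A quick illustration of removeFile's behavior (POSIX os.sep assumed):

print(removeFile('/home/user/project/main.c'))   # -> '/home/user/project'
print(removeFile('src/main.py'))                 # -> 'src'
# Edge case: with no separator, rfind returns -1, so 'main.c' -> 'main.'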
<|file_name|>balancer_test.go<|end_file_name|><|fim▁begin|>package cluster_test

import (
	"fmt"
	"testing"

	"github.com/influxdb/influxdb/cluster"
	"github.com/influxdb/influxdb/services/meta"
)

func NewNodes() []meta.NodeInfo {
	var nodes []meta.NodeInfo
	for i := 1; i <= 2; i++ {
		nodes = append(nodes, meta.NodeInfo{
			ID:   uint64(i),
			Host: fmt.Sprintf("localhost:999%d", i),
		})
	}
	return nodes<|fim▁hole|>

func TestBalancerEmptyNodes(t *testing.T) {
	b := cluster.NewNodeBalancer([]meta.NodeInfo{})
	got := b.Next()
	if got != nil {
		t.Errorf("expected nil, got %v", got)
	}
}

func TestBalancerUp(t *testing.T) {
	nodes := NewNodes()
	b := cluster.NewNodeBalancer(nodes)

	// First node in randomized round-robin order
	first := b.Next()
	if first == nil {
		t.Errorf("expected datanode, got %v", first)
	}

	// Second node in randomized round-robin order
	second := b.Next()
	if second == nil {
		t.Errorf("expected datanode, got %v", second)
	}

	// Should never get the same node in order twice
	if first.ID == second.ID {
		t.Errorf("expected first != second. got %v = %v", first.ID, second.ID)
	}
}

/*
func TestBalancerDown(t *testing.T) {
	nodes := NewNodes()
	b := cluster.NewNodeBalancer(nodes)

	nodes[0].Down()

	// First node in randomized round-robin order
	first := b.Next()
	if first == nil {
		t.Errorf("expected datanode, got %v", first)
	}

	// Second node should rollover to the first up node
	second := b.Next()
	if second == nil {
		t.Errorf("expected datanode, got %v", second)
	}

	// Health node should be returned each time
	if first.ID != 2 && first.ID != second.ID {
		t.Errorf("expected first != second. got %v = %v", first.ID, second.ID)
	}
}
*/

/*
func TestBalancerBackUp(t *testing.T) {
	nodes := newDataNodes()
	b := cluster.NewNodeBalancer(nodes)

	nodes[0].Down()

	for i := 0; i < 3; i++ {
		got := b.Next()
		if got == nil {
			t.Errorf("expected datanode, got %v", got)
		}
		if exp := uint64(2); got.ID != exp {
			t.Errorf("wrong node id: exp %v, got %v", exp, got.ID)
		}
	}

	nodes[0].Up()

	// First node in randomized round-robin order
	first := b.Next()
	if first == nil {
		t.Errorf("expected datanode, got %v", first)
	}

	// Second node should rollover to the first up node
	second := b.Next()
	if second == nil {
		t.Errorf("expected datanode, got %v", second)
	}

	// Should get both nodes returned
	if first.ID == second.ID {
		t.Errorf("expected first != second. got %v = %v", first.ID, second.ID)
	}
}
*/<|fim▁end|>
}
<|file_name|>host.rs<|end_file_name|><|fim▁begin|>use std::convert::TryFrom;
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::str::FromStr;

use super::EndpointError;
use crate::ZmqError;

/// Represents a host address. Does not include the port, and may be either an
/// ip address or a domain name
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum Host {
    /// An IPv4 address
    Ipv4(Ipv4Addr),
    /// An Ipv6 address
    Ipv6(Ipv6Addr),
    /// A domain name, such as `example.com` in `tcp://example.com:4567`.
    Domain(String),
}

impl fmt::Display for Host {
    fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        match self {
            Host::Ipv4(addr) => write!(f, "{}", addr),
            Host::Ipv6(addr) => write!(f, "{}", addr),
            Host::Domain(name) => write!(f, "{}", name),
        }
    }
}

impl TryFrom<Host> for IpAddr {
    type Error = ZmqError;

    fn try_from(h: Host) -> Result<Self, Self::Error> {
        match h {
            Host::Ipv4(a) => Ok(IpAddr::V4(a)),
            Host::Ipv6(a) => Ok(IpAddr::V6(a)),
            Host::Domain(_) => Err(ZmqError::Other("Host was neither Ipv4 nor Ipv6")),
        }
    }
}

impl From<IpAddr> for Host {
    fn from(a: IpAddr) -> Self {
        match a {
            IpAddr::V4(a) => Host::Ipv4(a),
            IpAddr::V6(a) => Host::Ipv6(a),
        }
    }
}

impl TryFrom<String> for Host {
    type Error = EndpointError;

    /// An Ipv6 address must be enclosed by `[` and `]`.
    fn try_from(s: String) -> Result<Self, Self::Error> {
        if s.is_empty() {
            return Err(EndpointError::Syntax("Host string should not be empty"));
        }

        if let Ok(addr) = s.parse::<Ipv4Addr>() {
            return Ok(Host::Ipv4(addr));
        }

        // Attempt to parse ipv6 from either ::1 or [::1] using ascii
        let ipv6_substr = if s.starts_with('[')
            && s.len() >= 4
            && *s.as_bytes().last().unwrap() == b']'
        {
            let substr = &s[1..s.len() - 1];
            debug_assert_eq!(substr.len(), s.len() - 2);
            substr
        } else {
            &s
        };
        if let Ok(addr) = ipv6_substr.parse::<Ipv6Addr>() {
            return Ok(Host::Ipv6(addr));
        }

        Ok(Host::Domain(s))
    }
}

impl FromStr for Host {
    type Err = EndpointError;

    /// Equivalent to [`TryFrom<String>`]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.to_string();
        Self::try_from(s)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // These two tests on std are more for reference than any real test of
    // functionality
    #[test]
    fn std_ipv6_parse() {
        assert_eq!(Ipv6Addr::LOCALHOST, "::1".parse::<Ipv6Addr>().unwrap());
        assert!("[::1]".parse::<Ipv6Addr>().is_err());
    }

    #[test]
    fn std_ipv6_display() {
        assert_eq!("::1", &Ipv6Addr::LOCALHOST.to_string());
    }

    #[test]
    fn parse_and_display_nobracket_ipv6_same_as_std() {
        let valid_addr_strs = vec![
            "::1",
            "::",
            "2001:db8:a::123",
            "2001:db8:0:0:0:0:2:1",
            "2001:db8::2:1",
        ];
        let invalid_addr_strs = vec!["", "[]", "[:]", ":"];

        for valid in valid_addr_strs {
            let parsed_std = valid.parse::<Ipv6Addr>().unwrap();
            let parsed_host = valid.parse::<Host>().unwrap();
            if let Host::Ipv6(parsed_host) = &parsed_host {
                // Check that both are structurally the same
                assert_eq!(&parsed_std, parsed_host);
            } else {
                panic!("Did not parse as IPV6!");
            }
            // Check that both display as the same
            assert_eq!(parsed_std.to_string(), parsed_host.to_string());
        }
        for invalid in invalid_addr_strs {
            invalid.parse::<Ipv6Addr>().unwrap_err();
            let parsed_host = invalid.parse::<Host>();
            if parsed_host.is_err() {
                continue;
            }
            let parsed_host = parsed_host.unwrap();<|fim▁hole|>
                continue;
            }
            panic!(
                "Expected that \"{}\" would not parse as Ipv6 or Ipv4, but instead it parsed as {:?}",
                invalid, parsed_host
            );
        }
    }

    #[test]
    fn parse_and_display_bracket_ipv6() {
        let addr_strs = vec![
            "[::1]",
            "[::]",
            "[2001:db8:a::123]",
            "[2001:db8:0:0:0:0:2:1]",
            "[2001:db8::2:1]",
        ];

        fn remove_brackets(s: &str) -> &str {
            assert!(s.starts_with('['));
            assert!(s.ends_with(']'));
            let result = &s[1..s.len() - 1];
            assert_eq!(result.len(), s.len() - 2);
            result
        }

        for addr_str in addr_strs {
            let parsed_host: Host = addr_str.parse().unwrap();
            assert!(addr_str.parse::<Ipv6Addr>().is_err());
            if let Host::Ipv6(host_ipv6) = parsed_host {
                assert_eq!(
                    host_ipv6,
                    remove_brackets(addr_str).parse::<Ipv6Addr>().unwrap()
                );
                assert_eq!(parsed_host.to_string(), host_ipv6.to_string());
            } else {
                panic!(
                    "Expected host to parse as Ipv6, but instead got {:?}",
                    parsed_host
                );
            }
        }
    }
}<|fim▁end|>
            if let Host::Domain(_) = parsed_host {
<|file_name|>CardContent.test.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { assert } from 'chai';
import { createShallow, getClasses } from '../test-utils';
import CardContent from './CardContent';

describe('<CardContent />', () => {
  let shallow;
  let classes;

  before(() => {
    shallow = createShallow({ untilSelector: 'CardContent' });
    classes = getClasses(<CardContent />);
  });

  it('should render a div with the root class', () => {
    const wrapper = shallow(<CardContent />);
    assert.strictEqual(wrapper.name(), 'div');<|fim▁hole|><|fim▁end|>
    assert.strictEqual(wrapper.hasClass(classes.root), true);
  });
});
<|file_name|>convert_shader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python

import sys

def convert_str(infile, outfile):
    f = open(infile, 'r')
    lines = f.readlines()
    f.close()
    f = open(outfile, 'w')
    f.writelines(['"%s\\n"\n' % i.rstrip() for i in lines])
    f.close()

def main():
    convert_str('fountain.vert', 'fountain.vert.inc')
    convert_str('fountain.frag', 'fountain.frag.inc')

<|fim▁hole|>if __name__ == '__main__':
    main()<|fim▁end|>
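
convert_str wraps each shader source line in a quoted C string literal so the .inc file can be #include'd as a concatenated string. A small self-contained demonstration (the shader file name and contents here are hypothetical):

with open('demo.vert', 'w') as f:
    f.write('void main() {\n    gl_Position = vec4(0.0);\n}\n')

convert_str('demo.vert', 'demo.vert.inc')

print(open('demo.vert.inc').read())
# "void main() {\n"
# "    gl_Position = vec4(0.0);\n"
# "}\n"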
<|file_name|>client.js<|end_file_name|><|fim▁begin|>/* -------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. * ------------------------------------------------------------------------------------------ */ 'use strict'; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } Object.defineProperty(exports, "__esModule", { value: true }); const vscode_1 = require("vscode"); const vscode_languageserver_protocol_1 = require("vscode-languageserver-protocol"); const c2p = require("./codeConverter"); const p2c = require("./protocolConverter"); const Is = require("./utils/is"); const async_1 = require("./utils/async"); const UUID = require("./utils/uuid"); const progressPart_1 = require("./progressPart"); __export(require("vscode-languageserver-protocol")); class ConsoleLogger { error(message) { console.error(message); } warn(message) { console.warn(message); } info(message) { console.info(message); } log(message) { console.log(message); } <|fim▁hole|> connection.onError((data) => { errorHandler(data[0], data[1], data[2]); }); connection.onClose(closeHandler); let result = { listen: () => connection.listen(), sendRequest: (type, ...params) => connection.sendRequest(Is.string(type) ? type : type.method, ...params), onRequest: (type, handler) => connection.onRequest(Is.string(type) ? type : type.method, handler), sendNotification: (type, params) => connection.sendNotification(Is.string(type) ? type : type.method, params), onNotification: (type, handler) => connection.onNotification(Is.string(type) ? type : type.method, handler), onProgress: connection.onProgress, sendProgress: connection.sendProgress, trace: (value, tracer, sendNotificationOrTraceOptions) => { const defaultTraceOptions = { sendNotification: false, traceFormat: vscode_languageserver_protocol_1.TraceFormat.Text }; if (sendNotificationOrTraceOptions === void 0) { connection.trace(value, tracer, defaultTraceOptions); } else if (Is.boolean(sendNotificationOrTraceOptions)) { connection.trace(value, tracer, sendNotificationOrTraceOptions); } else { connection.trace(value, tracer, sendNotificationOrTraceOptions); } }, initialize: (params) => connection.sendRequest(vscode_languageserver_protocol_1.InitializeRequest.type, params), shutdown: () => connection.sendRequest(vscode_languageserver_protocol_1.ShutdownRequest.type, undefined), exit: () => connection.sendNotification(vscode_languageserver_protocol_1.ExitNotification.type), onLogMessage: (handler) => connection.onNotification(vscode_languageserver_protocol_1.LogMessageNotification.type, handler), onShowMessage: (handler) => connection.onNotification(vscode_languageserver_protocol_1.ShowMessageNotification.type, handler), onTelemetry: (handler) => connection.onNotification(vscode_languageserver_protocol_1.TelemetryEventNotification.type, handler), didChangeConfiguration: (params) => connection.sendNotification(vscode_languageserver_protocol_1.DidChangeConfigurationNotification.type, params), didChangeWatchedFiles: (params) => connection.sendNotification(vscode_languageserver_protocol_1.DidChangeWatchedFilesNotification.type, params), didOpenTextDocument: (params) => connection.sendNotification(vscode_languageserver_protocol_1.DidOpenTextDocumentNotification.type, params), didChangeTextDocument: (params) => 
connection.sendNotification(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type, params), didCloseTextDocument: (params) => connection.sendNotification(vscode_languageserver_protocol_1.DidCloseTextDocumentNotification.type, params), didSaveTextDocument: (params) => connection.sendNotification(vscode_languageserver_protocol_1.DidSaveTextDocumentNotification.type, params), onDiagnostics: (handler) => connection.onNotification(vscode_languageserver_protocol_1.PublishDiagnosticsNotification.type, handler), dispose: () => connection.dispose() }; return result; } /** * An action to be performed when the connection is producing errors. */ var ErrorAction; (function (ErrorAction) { /** * Continue running the server. */ ErrorAction[ErrorAction["Continue"] = 1] = "Continue"; /** * Shutdown the server. */ ErrorAction[ErrorAction["Shutdown"] = 2] = "Shutdown"; })(ErrorAction = exports.ErrorAction || (exports.ErrorAction = {})); /** * An action to be performed when the connection to a server got closed. */ var CloseAction; (function (CloseAction) { /** * Don't restart the server. The connection stays closed. */ CloseAction[CloseAction["DoNotRestart"] = 1] = "DoNotRestart"; /** * Restart the server. */ CloseAction[CloseAction["Restart"] = 2] = "Restart"; })(CloseAction = exports.CloseAction || (exports.CloseAction = {})); class DefaultErrorHandler { constructor(name) { this.name = name; this.restarts = []; } error(_error, _message, count) { if (count && count <= 3) { return ErrorAction.Continue; } return ErrorAction.Shutdown; } closed() { this.restarts.push(Date.now()); if (this.restarts.length < 5) { return CloseAction.Restart; } else { let diff = this.restarts[this.restarts.length - 1] - this.restarts[0]; if (diff <= 3 * 60 * 1000) { vscode_1.window.showErrorMessage(`The ${this.name} server crashed 5 times in the last 3 minutes. 
The server will not be restarted.`); return CloseAction.DoNotRestart; } else { this.restarts.shift(); return CloseAction.Restart; } } } } var RevealOutputChannelOn; (function (RevealOutputChannelOn) { RevealOutputChannelOn[RevealOutputChannelOn["Info"] = 1] = "Info"; RevealOutputChannelOn[RevealOutputChannelOn["Warn"] = 2] = "Warn"; RevealOutputChannelOn[RevealOutputChannelOn["Error"] = 3] = "Error"; RevealOutputChannelOn[RevealOutputChannelOn["Never"] = 4] = "Never"; })(RevealOutputChannelOn = exports.RevealOutputChannelOn || (exports.RevealOutputChannelOn = {})); var State; (function (State) { State[State["Stopped"] = 1] = "Stopped"; State[State["Starting"] = 3] = "Starting"; State[State["Running"] = 2] = "Running"; })(State = exports.State || (exports.State = {})); var ClientState; (function (ClientState) { ClientState[ClientState["Initial"] = 0] = "Initial"; ClientState[ClientState["Starting"] = 1] = "Starting"; ClientState[ClientState["StartFailed"] = 2] = "StartFailed"; ClientState[ClientState["Running"] = 3] = "Running"; ClientState[ClientState["Stopping"] = 4] = "Stopping"; ClientState[ClientState["Stopped"] = 5] = "Stopped"; })(ClientState || (ClientState = {})); const SupportedSymbolKinds = [ vscode_languageserver_protocol_1.SymbolKind.File, vscode_languageserver_protocol_1.SymbolKind.Module, vscode_languageserver_protocol_1.SymbolKind.Namespace, vscode_languageserver_protocol_1.SymbolKind.Package, vscode_languageserver_protocol_1.SymbolKind.Class, vscode_languageserver_protocol_1.SymbolKind.Method, vscode_languageserver_protocol_1.SymbolKind.Property, vscode_languageserver_protocol_1.SymbolKind.Field, vscode_languageserver_protocol_1.SymbolKind.Constructor, vscode_languageserver_protocol_1.SymbolKind.Enum, vscode_languageserver_protocol_1.SymbolKind.Interface, vscode_languageserver_protocol_1.SymbolKind.Function, vscode_languageserver_protocol_1.SymbolKind.Variable, vscode_languageserver_protocol_1.SymbolKind.Constant, vscode_languageserver_protocol_1.SymbolKind.String, vscode_languageserver_protocol_1.SymbolKind.Number, vscode_languageserver_protocol_1.SymbolKind.Boolean, vscode_languageserver_protocol_1.SymbolKind.Array, vscode_languageserver_protocol_1.SymbolKind.Object, vscode_languageserver_protocol_1.SymbolKind.Key, vscode_languageserver_protocol_1.SymbolKind.Null, vscode_languageserver_protocol_1.SymbolKind.EnumMember, vscode_languageserver_protocol_1.SymbolKind.Struct, vscode_languageserver_protocol_1.SymbolKind.Event, vscode_languageserver_protocol_1.SymbolKind.Operator, vscode_languageserver_protocol_1.SymbolKind.TypeParameter ]; const SupportedCompletionItemKinds = [ vscode_languageserver_protocol_1.CompletionItemKind.Text, vscode_languageserver_protocol_1.CompletionItemKind.Method, vscode_languageserver_protocol_1.CompletionItemKind.Function, vscode_languageserver_protocol_1.CompletionItemKind.Constructor, vscode_languageserver_protocol_1.CompletionItemKind.Field, vscode_languageserver_protocol_1.CompletionItemKind.Variable, vscode_languageserver_protocol_1.CompletionItemKind.Class, vscode_languageserver_protocol_1.CompletionItemKind.Interface, vscode_languageserver_protocol_1.CompletionItemKind.Module, vscode_languageserver_protocol_1.CompletionItemKind.Property, vscode_languageserver_protocol_1.CompletionItemKind.Unit, vscode_languageserver_protocol_1.CompletionItemKind.Value, vscode_languageserver_protocol_1.CompletionItemKind.Enum, vscode_languageserver_protocol_1.CompletionItemKind.Keyword, vscode_languageserver_protocol_1.CompletionItemKind.Snippet, 
vscode_languageserver_protocol_1.CompletionItemKind.Color, vscode_languageserver_protocol_1.CompletionItemKind.File, vscode_languageserver_protocol_1.CompletionItemKind.Reference, vscode_languageserver_protocol_1.CompletionItemKind.Folder, vscode_languageserver_protocol_1.CompletionItemKind.EnumMember, vscode_languageserver_protocol_1.CompletionItemKind.Constant, vscode_languageserver_protocol_1.CompletionItemKind.Struct, vscode_languageserver_protocol_1.CompletionItemKind.Event, vscode_languageserver_protocol_1.CompletionItemKind.Operator, vscode_languageserver_protocol_1.CompletionItemKind.TypeParameter ]; function ensure(target, key) { if (target[key] === void 0) { target[key] = {}; } return target[key]; } var DynamicFeature; (function (DynamicFeature) { function is(value) { let candidate = value; return candidate && Is.func(candidate.register) && Is.func(candidate.unregister) && Is.func(candidate.dispose) && candidate.messages !== void 0; } DynamicFeature.is = is; })(DynamicFeature || (DynamicFeature = {})); class DocumentNotifiactions { constructor(_client, _event, _type, _middleware, _createParams, _selectorFilter) { this._client = _client; this._event = _event; this._type = _type; this._middleware = _middleware; this._createParams = _createParams; this._selectorFilter = _selectorFilter; this._selectors = new Map(); } static textDocumentFilter(selectors, textDocument) { for (const selector of selectors) { if (vscode_1.languages.match(selector, textDocument)) { return true; } } return false; } register(_message, data) { if (!data.registerOptions.documentSelector) { return; } if (!this._listener) { this._listener = this._event(this.callback, this); } this._selectors.set(data.id, data.registerOptions.documentSelector); } callback(data) { if (!this._selectorFilter || this._selectorFilter(this._selectors.values(), data)) { if (this._middleware) { this._middleware(data, (data) => this._client.sendNotification(this._type, this._createParams(data))); } else { this._client.sendNotification(this._type, this._createParams(data)); } this.notificationSent(data); } } notificationSent(_data) { } unregister(id) { this._selectors.delete(id); if (this._selectors.size === 0 && this._listener) { this._listener.dispose(); this._listener = undefined; } } dispose() { this._selectors.clear(); if (this._listener) { this._listener.dispose(); this._listener = undefined; } } getProvider(document) { for (const selector of this._selectors.values()) { if (vscode_1.languages.match(selector, document)) { return { send: (data) => { this.callback(data); } }; } } throw new Error(`No provider available for the given text document`); } } class DidOpenTextDocumentFeature extends DocumentNotifiactions { constructor(client, _syncedDocuments) { super(client, vscode_1.workspace.onDidOpenTextDocument, vscode_languageserver_protocol_1.DidOpenTextDocumentNotification.type, client.clientOptions.middleware.didOpen, (textDocument) => client.code2ProtocolConverter.asOpenTextDocumentParams(textDocument), DocumentNotifiactions.textDocumentFilter); this._syncedDocuments = _syncedDocuments; } get messages() { return vscode_languageserver_protocol_1.DidOpenTextDocumentNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'textDocument'), 'synchronization').dynamicRegistration = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.openClose) { 
this.register(this.messages, { id: UUID.generateUuid(), registerOptions: { documentSelector: documentSelector } }); } } register(message, data) { super.register(message, data); if (!data.registerOptions.documentSelector) { return; } let documentSelector = data.registerOptions.documentSelector; vscode_1.workspace.textDocuments.forEach((textDocument) => { let uri = textDocument.uri.toString(); if (this._syncedDocuments.has(uri)) { return; } if (vscode_1.languages.match(documentSelector, textDocument)) { let middleware = this._client.clientOptions.middleware; let didOpen = (textDocument) => { this._client.sendNotification(this._type, this._createParams(textDocument)); }; if (middleware.didOpen) { middleware.didOpen(textDocument, didOpen); } else { didOpen(textDocument); } this._syncedDocuments.set(uri, textDocument); } }); } notificationSent(textDocument) { super.notificationSent(textDocument); this._syncedDocuments.set(textDocument.uri.toString(), textDocument); } } class DidCloseTextDocumentFeature extends DocumentNotifiactions { constructor(client, _syncedDocuments) { super(client, vscode_1.workspace.onDidCloseTextDocument, vscode_languageserver_protocol_1.DidCloseTextDocumentNotification.type, client.clientOptions.middleware.didClose, (textDocument) => client.code2ProtocolConverter.asCloseTextDocumentParams(textDocument), DocumentNotifiactions.textDocumentFilter); this._syncedDocuments = _syncedDocuments; } get messages() { return vscode_languageserver_protocol_1.DidCloseTextDocumentNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'textDocument'), 'synchronization').dynamicRegistration = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.openClose) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: { documentSelector: documentSelector } }); } } notificationSent(textDocument) { super.notificationSent(textDocument); this._syncedDocuments.delete(textDocument.uri.toString()); } unregister(id) { let selector = this._selectors.get(id); // The super call removed the selector from the map // of selectors. 
super.unregister(id); let selectors = this._selectors.values(); this._syncedDocuments.forEach((textDocument) => { if (vscode_1.languages.match(selector, textDocument) && !this._selectorFilter(selectors, textDocument)) { let middleware = this._client.clientOptions.middleware; let didClose = (textDocument) => { this._client.sendNotification(this._type, this._createParams(textDocument)); }; this._syncedDocuments.delete(textDocument.uri.toString()); if (middleware.didClose) { middleware.didClose(textDocument, didClose); } else { didClose(textDocument); } } }); } } class DidChangeTextDocumentFeature { constructor(_client) { this._client = _client; this._changeData = new Map(); this._forcingDelivery = false; } get messages() { return vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'textDocument'), 'synchronization').dynamicRegistration = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.change !== void 0 && textDocumentSyncOptions.change !== vscode_languageserver_protocol_1.TextDocumentSyncKind.None) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: Object.assign({}, { documentSelector: documentSelector }, { syncKind: textDocumentSyncOptions.change }) }); } } register(_message, data) { if (!data.registerOptions.documentSelector) { return; } if (!this._listener) { this._listener = vscode_1.workspace.onDidChangeTextDocument(this.callback, this); } this._changeData.set(data.id, { documentSelector: data.registerOptions.documentSelector, syncKind: data.registerOptions.syncKind }); } callback(event) { // Text document changes are send for dirty changes as well. We don't // have dirty / undirty events in the LSP so we ignore content changes // with length zero. if (event.contentChanges.length === 0) { return; } for (const changeData of this._changeData.values()) { if (vscode_1.languages.match(changeData.documentSelector, event.document)) { let middleware = this._client.clientOptions.middleware; if (changeData.syncKind === vscode_languageserver_protocol_1.TextDocumentSyncKind.Incremental) { let params = this._client.code2ProtocolConverter.asChangeTextDocumentParams(event); if (middleware.didChange) { middleware.didChange(event, () => this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type, params)); } else { this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type, params); } } else if (changeData.syncKind === vscode_languageserver_protocol_1.TextDocumentSyncKind.Full) { let didChange = (event) => { if (this._changeDelayer) { if (this._changeDelayer.uri !== event.document.uri.toString()) { // Use this force delivery to track boolean state. Otherwise we might call two times. 
this.forceDelivery(); this._changeDelayer.uri = event.document.uri.toString(); } this._changeDelayer.delayer.trigger(() => { this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type, this._client.code2ProtocolConverter.asChangeTextDocumentParams(event.document)); }); } else { this._changeDelayer = { uri: event.document.uri.toString(), delayer: new async_1.Delayer(200) }; this._changeDelayer.delayer.trigger(() => { this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type, this._client.code2ProtocolConverter.asChangeTextDocumentParams(event.document)); }, -1); } }; if (middleware.didChange) { middleware.didChange(event, didChange); } else { didChange(event); } } } } } unregister(id) { this._changeData.delete(id); if (this._changeData.size === 0 && this._listener) { this._listener.dispose(); this._listener = undefined; } } dispose() { this._changeDelayer = undefined; this._forcingDelivery = false; this._changeData.clear(); if (this._listener) { this._listener.dispose(); this._listener = undefined; } } forceDelivery() { if (this._forcingDelivery || !this._changeDelayer) { return; } try { this._forcingDelivery = true; this._changeDelayer.delayer.forceDelivery(); } finally { this._forcingDelivery = false; } } getProvider(document) { for (const changeData of this._changeData.values()) { if (vscode_1.languages.match(changeData.documentSelector, document)) { return { send: (event) => { this.callback(event); } }; } } throw new Error(`No provider available for the given text document`); } } class WillSaveFeature extends DocumentNotifiactions { constructor(client) { super(client, vscode_1.workspace.onWillSaveTextDocument, vscode_languageserver_protocol_1.WillSaveTextDocumentNotification.type, client.clientOptions.middleware.willSave, (willSaveEvent) => client.code2ProtocolConverter.asWillSaveTextDocumentParams(willSaveEvent), (selectors, willSaveEvent) => DocumentNotifiactions.textDocumentFilter(selectors, willSaveEvent.document)); } get messages() { return vscode_languageserver_protocol_1.WillSaveTextDocumentNotification.type; } fillClientCapabilities(capabilities) { let value = ensure(ensure(capabilities, 'textDocument'), 'synchronization'); value.willSave = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.willSave) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: { documentSelector: documentSelector } }); } } } class WillSaveWaitUntilFeature { constructor(_client) { this._client = _client; this._selectors = new Map(); } get messages() { return vscode_languageserver_protocol_1.WillSaveTextDocumentWaitUntilRequest.type; } fillClientCapabilities(capabilities) { let value = ensure(ensure(capabilities, 'textDocument'), 'synchronization'); value.willSaveWaitUntil = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.willSaveWaitUntil) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: { documentSelector: documentSelector } }); } } register(_message, data) { if (!data.registerOptions.documentSelector) { return; } if (!this._listener) { this._listener = vscode_1.workspace.onWillSaveTextDocument(this.callback, this); } this._selectors.set(data.id, 
data.registerOptions.documentSelector); } callback(event) { if (DocumentNotifiactions.textDocumentFilter(this._selectors.values(), event.document)) { let middleware = this._client.clientOptions.middleware; let willSaveWaitUntil = (event) => { return this._client.sendRequest(vscode_languageserver_protocol_1.WillSaveTextDocumentWaitUntilRequest.type, this._client.code2ProtocolConverter.asWillSaveTextDocumentParams(event)).then((edits) => { let vEdits = this._client.protocol2CodeConverter.asTextEdits(edits); return vEdits === void 0 ? [] : vEdits; }); }; event.waitUntil(middleware.willSaveWaitUntil ? middleware.willSaveWaitUntil(event, willSaveWaitUntil) : willSaveWaitUntil(event)); } } unregister(id) { this._selectors.delete(id); if (this._selectors.size === 0 && this._listener) { this._listener.dispose(); this._listener = undefined; } } dispose() { this._selectors.clear(); if (this._listener) { this._listener.dispose(); this._listener = undefined; } } } class DidSaveTextDocumentFeature extends DocumentNotifiactions { constructor(client) { super(client, vscode_1.workspace.onDidSaveTextDocument, vscode_languageserver_protocol_1.DidSaveTextDocumentNotification.type, client.clientOptions.middleware.didSave, (textDocument) => client.code2ProtocolConverter.asSaveTextDocumentParams(textDocument, this._includeText), DocumentNotifiactions.textDocumentFilter); } get messages() { return vscode_languageserver_protocol_1.DidSaveTextDocumentNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'textDocument'), 'synchronization').didSave = true; } initialize(capabilities, documentSelector) { let textDocumentSyncOptions = capabilities.resolvedTextDocumentSync; if (documentSelector && textDocumentSyncOptions && textDocumentSyncOptions.save) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: Object.assign({}, { documentSelector: documentSelector }, { includeText: !!textDocumentSyncOptions.save.includeText }) }); } } register(method, data) { this._includeText = !!data.registerOptions.includeText; super.register(method, data); } } class FileSystemWatcherFeature { constructor(_client, _notifyFileEvent) { this._client = _client; this._notifyFileEvent = _notifyFileEvent; this._watchers = new Map(); } get messages() { return vscode_languageserver_protocol_1.DidChangeWatchedFilesNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'workspace'), 'didChangeWatchedFiles').dynamicRegistration = true; } initialize(_capabilities, _documentSelector) { } register(_method, data) { if (!Array.isArray(data.registerOptions.watchers)) { return; } let disposeables = []; for (let watcher of data.registerOptions.watchers) { if (!Is.string(watcher.globPattern)) { continue; } let watchCreate = true, watchChange = true, watchDelete = true; if (watcher.kind !== void 0 && watcher.kind !== null) { watchCreate = (watcher.kind & vscode_languageserver_protocol_1.WatchKind.Create) !== 0; watchChange = (watcher.kind & vscode_languageserver_protocol_1.WatchKind.Change) !== 0; watchDelete = (watcher.kind & vscode_languageserver_protocol_1.WatchKind.Delete) !== 0; } let fileSystemWatcher = vscode_1.workspace.createFileSystemWatcher(watcher.globPattern, !watchCreate, !watchChange, !watchDelete); this.hookListeners(fileSystemWatcher, watchCreate, watchChange, watchDelete); disposeables.push(fileSystemWatcher); } this._watchers.set(data.id, disposeables); } registerRaw(id, fileSystemWatchers) { let disposeables = []; for (let fileSystemWatcher of 
fileSystemWatchers) { this.hookListeners(fileSystemWatcher, true, true, true, disposeables); } this._watchers.set(id, disposeables); } hookListeners(fileSystemWatcher, watchCreate, watchChange, watchDelete, listeners) { if (watchCreate) { fileSystemWatcher.onDidCreate((resource) => this._notifyFileEvent({ uri: this._client.code2ProtocolConverter.asUri(resource), type: vscode_languageserver_protocol_1.FileChangeType.Created }), null, listeners); } if (watchChange) { fileSystemWatcher.onDidChange((resource) => this._notifyFileEvent({ uri: this._client.code2ProtocolConverter.asUri(resource), type: vscode_languageserver_protocol_1.FileChangeType.Changed }), null, listeners); } if (watchDelete) { fileSystemWatcher.onDidDelete((resource) => this._notifyFileEvent({ uri: this._client.code2ProtocolConverter.asUri(resource), type: vscode_languageserver_protocol_1.FileChangeType.Deleted }), null, listeners); } } unregister(id) { let disposeables = this._watchers.get(id); if (disposeables) { for (let disposable of disposeables) { disposable.dispose(); } } } dispose() { this._watchers.forEach((disposeables) => { for (let disposable of disposeables) { disposable.dispose(); } }); this._watchers.clear(); } } class TextDocumentFeature { constructor(_client, _message) { this._client = _client; this._message = _message; this._registrations = new Map(); } get messages() { return this._message; } register(message, data) { if (message.method !== this.messages.method) { throw new Error(`Register called on wrong feature. Requested ${message.method} but reached feature ${this.messages.method}`); } if (!data.registerOptions.documentSelector) { return; } let registration = this.registerLanguageProvider(data.registerOptions); this._registrations.set(data.id, { disposable: registration[0], data, provider: registration[1] }); } unregister(id) { let registration = this._registrations.get(id); if (registration !== undefined) { registration.disposable.dispose(); } } dispose() { this._registrations.forEach((value) => { value.disposable.dispose(); }); this._registrations.clear(); } getRegistration(documentSelector, capability) { if (!capability) { return [undefined, undefined]; } else if (vscode_languageserver_protocol_1.TextDocumentRegistrationOptions.is(capability)) { const id = vscode_languageserver_protocol_1.StaticRegistrationOptions.hasId(capability) ? capability.id : UUID.generateUuid(); const selector = capability.documentSelector || documentSelector; if (selector) { return [id, Object.assign({}, capability, { documentSelector: selector })]; } } else if (Is.boolean(capability) && capability === true || vscode_languageserver_protocol_1.WorkDoneProgressOptions.is(capability)) { if (!documentSelector) { return [undefined, undefined]; } let options = (Is.boolean(capability) && capability === true ? { documentSelector } : Object.assign({}, capability, { documentSelector })); return [UUID.generateUuid(), options]; } return [undefined, undefined]; } getRegistrationOptions(documentSelector, capability) { if (!documentSelector || !capability) { return undefined; } return (Is.boolean(capability) && capability === true ? 
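// --- Illustrative sketch (not part of this module) --------------------------
// getRegistration above maps the three capability shapes a server may send to
// (id, options) pairs: full TextDocumentRegistrationOptions (reusing a static
// id when present), a bare boolean, or WorkDoneProgressOptions; the latter two
// borrow the client's documentSelector. Hypothetical capability values:
//
//   hoverProvider: true                                         // boolean form
//   hoverProvider: { workDoneProgress: true }                   // WorkDoneProgressOptions
//   hoverProvider: { documentSelector: [{ language: 'foo' }] }  // full registration options
// -----------------------------------------------------------------------------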
{ documentSelector } : Object.assign({}, capability, { documentSelector })); } getProvider(textDocument) { for (const registration of this._registrations.values()) { let selector = registration.data.registerOptions.documentSelector; if (selector !== null && vscode_1.languages.match(selector, textDocument)) { return registration.provider; } } throw new Error(`The feature has no registration for the provided text document ${textDocument.uri.toString()}`); } } exports.TextDocumentFeature = TextDocumentFeature; class WorkspaceFeature { constructor(_client, _message) { this._client = _client; this._message = _message; this._registrations = new Map(); } get messages() { return this._message; } register(message, data) { if (message.method !== this.messages.method) { throw new Error(`Register called on wrong feature. Requested ${message.method} but reached feature ${this.messages.method}`); } const registration = this.registerLanguageProvider(data.registerOptions); this._registrations.set(data.id, { disposable: registration[0], provider: registration[1] }); } unregister(id) { let registration = this._registrations.get(id); if (registration !== undefined) { registration.disposable.dispose(); } } dispose() { this._registrations.forEach((registration) => { registration.disposable.dispose(); }); this._registrations.clear(); } getProviders() { const result = []; for (const registration of this._registrations.values()) { result.push(registration.provider); } return result; } } class CompletionItemFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.CompletionRequest.type); } fillClientCapabilities(capabilites) { let completion = ensure(ensure(capabilites, 'textDocument'), 'completion'); completion.dynamicRegistration = true; completion.contextSupport = true; completion.completionItem = { snippetSupport: true, commitCharactersSupport: true, documentationFormat: [vscode_languageserver_protocol_1.MarkupKind.Markdown, vscode_languageserver_protocol_1.MarkupKind.PlainText], deprecatedSupport: true, preselectSupport: true, tagSupport: { valueSet: [vscode_languageserver_protocol_1.CompletionItemTag.Deprecated] } }; completion.completionItemKind = { valueSet: SupportedCompletionItemKinds }; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.completionProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const triggerCharacters = options.triggerCharacters || []; const provider = { provideCompletionItems: (document, position, token, context) => { const client = this._client; const middleware = this._client.clientOptions.middleware; const provideCompletionItems = (document, position, context, token) => { return client.sendRequest(vscode_languageserver_protocol_1.CompletionRequest.type, client.code2ProtocolConverter.asCompletionParams(document, position, context), token).then(client.protocol2CodeConverter.asCompletionResult, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.CompletionRequest.type, error); return Promise.resolve([]); }); }; return middleware.provideCompletionItem ? middleware.provideCompletionItem(document, position, context, token, provideCompletionItems) : provideCompletionItems(document, position, context, token); }, resolveCompletionItem: options.resolveProvider ?
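// --- Illustrative sketch (not part of this module) --------------------------
// The completion provider above forwards textDocument/completion through an
// optional `provideCompletionItem` middleware hook; resolveCompletionItem is
// only wired up when the server announced resolveProvider. A consumer could
// post-process results like this (async/await form assumed for brevity):
//
//   middleware: {
//       provideCompletionItem: async (document, position, context, token, next) => {
//           const result = await next(document, position, context, token);
//           // e.g. inspect or filter items before VS Code sees them
//           return result;
//       }
//   }
// -----------------------------------------------------------------------------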
(item, token) => { const client = this._client; const middleware = this._client.clientOptions.middleware; const resolveCompletionItem = (item, token) => { return client.sendRequest(vscode_languageserver_protocol_1.CompletionResolveRequest.type, client.code2ProtocolConverter.asCompletionItem(item), token).then(client.protocol2CodeConverter.asCompletionItem, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.CompletionResolveRequest.type, error); return Promise.resolve(item); }); }; return middleware.resolveCompletionItem ? middleware.resolveCompletionItem(item, token, resolveCompletionItem) : resolveCompletionItem(item, token); } : undefined }; return [vscode_1.languages.registerCompletionItemProvider(options.documentSelector, provider, ...triggerCharacters), provider]; } } class HoverFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.HoverRequest.type); } fillClientCapabilities(capabilites) { const hoverCapability = (ensure(ensure(capabilites, 'textDocument'), 'hover')); hoverCapability.dynamicRegistration = true; hoverCapability.contentFormat = [vscode_languageserver_protocol_1.MarkupKind.Markdown, vscode_languageserver_protocol_1.MarkupKind.PlainText]; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.hoverProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideHover: (document, position, token) => { const client = this._client; const provideHover = (document, position, token) => { return client.sendRequest(vscode_languageserver_protocol_1.HoverRequest.type, client.code2ProtocolConverter.asTextDocumentPositionParams(document, position), token).then(client.protocol2CodeConverter.asHover, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.HoverRequest.type, error); return Promise.resolve(null); }); }; const middleware = client.clientOptions.middleware; return middleware.provideHover ? 
middleware.provideHover(document, position, token, provideHover) : provideHover(document, position, token); } }; return [vscode_1.languages.registerHoverProvider(options.documentSelector, provider), provider]; } } class SignatureHelpFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.SignatureHelpRequest.type); } fillClientCapabilities(capabilites) { let config = ensure(ensure(capabilites, 'textDocument'), 'signatureHelp'); config.dynamicRegistration = true; config.signatureInformation = { documentationFormat: [vscode_languageserver_protocol_1.MarkupKind.Markdown, vscode_languageserver_protocol_1.MarkupKind.PlainText] }; config.signatureInformation.parameterInformation = { labelOffsetSupport: true }; config.contextSupport = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.signatureHelpProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideSignatureHelp: (document, position, token, context) => { const client = this._client; const providerSignatureHelp = (document, position, context, token) => { return client.sendRequest(vscode_languageserver_protocol_1.SignatureHelpRequest.type, client.code2ProtocolConverter.asSignatureHelpParams(document, position, context), token).then(client.protocol2CodeConverter.asSignatureHelp, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.SignatureHelpRequest.type, error); return Promise.resolve(null); }); }; const middleware = client.clientOptions.middleware; return middleware.provideSignatureHelp ? middleware.provideSignatureHelp(document, position, context, token, providerSignatureHelp) : providerSignatureHelp(document, position, context, token); } }; let disposable; if (options.retriggerCharacters === undefined) { const triggerCharacters = options.triggerCharacters || []; disposable = vscode_1.languages.registerSignatureHelpProvider(options.documentSelector, provider, ...triggerCharacters); } else { const metaData = { triggerCharacters: options.triggerCharacters || [], retriggerCharacters: options.retriggerCharacters || [] }; disposable = vscode_1.languages.registerSignatureHelpProvider(options.documentSelector, provider, metaData); } return [disposable, provider]; } } class DefinitionFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DefinitionRequest.type); } fillClientCapabilities(capabilites) { let definitionSupport = ensure(ensure(capabilites, 'textDocument'), 'definition'); definitionSupport.dynamicRegistration = true; definitionSupport.linkSupport = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.definitionProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDefinition: (document, position, token) => { const client = this._client; const provideDefinition = (document, position, token) => { return client.sendRequest(vscode_languageserver_protocol_1.DefinitionRequest.type, client.code2ProtocolConverter.asTextDocumentPositionParams(document, position), token).then(client.protocol2CodeConverter.asDefinitionResult, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DefinitionRequest.type, error); 
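// --- Illustrative sketch (not part of this module) --------------------------
// The signature-help registration above picks between two vscode overloads:
// plain trigger characters when the server omits retriggerCharacters, and a
// SignatureHelpProviderMetadata object when it does not. A hypothetical server
// capability exercising the metadata branch:
//
//   signatureHelpProvider: { triggerCharacters: ['('], retriggerCharacters: [','] }
// -----------------------------------------------------------------------------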
return Promise.resolve(null); }); }; const middleware = client.clientOptions.middleware; return middleware.provideDefinition ? middleware.provideDefinition(document, position, token, provideDefinition) : provideDefinition(document, position, token); } }; return [vscode_1.languages.registerDefinitionProvider(options.documentSelector, provider), provider]; } } class ReferencesFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.ReferencesRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'references').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.referencesProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideReferences: (document, position, options, token) => { const client = this._client; const _providerReferences = (document, position, options, token) => { return client.sendRequest(vscode_languageserver_protocol_1.ReferencesRequest.type, client.code2ProtocolConverter.asReferenceParams(document, position, options), token).then(client.protocol2CodeConverter.asReferences, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.ReferencesRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideReferences ? middleware.provideReferences(document, position, options, token, _providerReferences) : _providerReferences(document, position, options, token); } }; return [vscode_1.languages.registerReferenceProvider(options.documentSelector, provider), provider]; } } class DocumentHighlightFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentHighlightRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'documentHighlight').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentHighlightProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDocumentHighlights: (document, position, token) => { const client = this._client; const _provideDocumentHighlights = (document, position, token) => { return client.sendRequest(vscode_languageserver_protocol_1.DocumentHighlightRequest.type, client.code2ProtocolConverter.asTextDocumentPositionParams(document, position), token).then(client.protocol2CodeConverter.asDocumentHighlights, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentHighlightRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideDocumentHighlights ? 
middleware.provideDocumentHighlights(document, position, token, _provideDocumentHighlights) : _provideDocumentHighlights(document, position, token); } }; return [vscode_1.languages.registerDocumentHighlightProvider(options.documentSelector, provider), provider]; } } class DocumentSymbolFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentSymbolRequest.type); } fillClientCapabilities(capabilites) { let symbolCapabilities = ensure(ensure(capabilites, 'textDocument'), 'documentSymbol'); symbolCapabilities.dynamicRegistration = true; symbolCapabilities.symbolKind = { valueSet: SupportedSymbolKinds }; symbolCapabilities.hierarchicalDocumentSymbolSupport = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentSymbolProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDocumentSymbols: (document, token) => { const client = this._client; const _provideDocumentSymbols = (document, token) => { return client.sendRequest(vscode_languageserver_protocol_1.DocumentSymbolRequest.type, client.code2ProtocolConverter.asDocumentSymbolParams(document), token).then((data) => { if (data === null) { return undefined; } if (data.length === 0) { return []; } else { let element = data[0]; if (vscode_languageserver_protocol_1.DocumentSymbol.is(element)) { return client.protocol2CodeConverter.asDocumentSymbols(data); } else { return client.protocol2CodeConverter.asSymbolInformations(data); } } }, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentSymbolRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideDocumentSymbols ? middleware.provideDocumentSymbols(document, token, _provideDocumentSymbols) : _provideDocumentSymbols(document, token); } }; return [vscode_1.languages.registerDocumentSymbolProvider(options.documentSelector, provider), provider]; } } class WorkspaceSymbolFeature extends WorkspaceFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.WorkspaceSymbolRequest.type); } fillClientCapabilities(capabilites) { let symbolCapabilities = ensure(ensure(capabilites, 'workspace'), 'symbol'); symbolCapabilities.dynamicRegistration = true; symbolCapabilities.symbolKind = { valueSet: SupportedSymbolKinds }; } initialize(capabilities) { if (!capabilities.workspaceSymbolProvider) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: capabilities.workspaceSymbolProvider === true ? { workDoneProgress: false } : capabilities.workspaceSymbolProvider }); } registerLanguageProvider(_options) { const provider = { provideWorkspaceSymbols: (query, token) => { const client = this._client; const provideWorkspaceSymbols = (query, token) => { return client.sendRequest(vscode_languageserver_protocol_1.WorkspaceSymbolRequest.type, { query }, token).then(client.protocol2CodeConverter.asSymbolInformations, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.WorkspaceSymbolRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideWorkspaceSymbols ? 
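// --- Illustrative sketch (not part of this module) --------------------------
// The document-symbol handler above sniffs the first result element to pick a
// converter: hierarchical DocumentSymbol values (range/selectionRange, optional
// children) go through asDocumentSymbols, flat SymbolInformation values
// (location) through asSymbolInformations. Hypothetical server responses:
//
//   [{ name: 'C', kind: 5, range, selectionRange, children: [] }]  // hierarchical
//   [{ name: 'C', kind: 5, location: { uri, range } }]             // flat
// -----------------------------------------------------------------------------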
middleware.provideWorkspaceSymbols(query, token, provideWorkspaceSymbols) : provideWorkspaceSymbols(query, token); } }; return [vscode_1.languages.registerWorkspaceSymbolProvider(provider), provider]; } } class CodeActionFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.CodeActionRequest.type); } fillClientCapabilities(capabilites) { const cap = ensure(ensure(capabilites, 'textDocument'), 'codeAction'); cap.dynamicRegistration = true; cap.isPreferredSupport = true; cap.codeActionLiteralSupport = { codeActionKind: { valueSet: [ vscode_languageserver_protocol_1.CodeActionKind.Empty, vscode_languageserver_protocol_1.CodeActionKind.QuickFix, vscode_languageserver_protocol_1.CodeActionKind.Refactor, vscode_languageserver_protocol_1.CodeActionKind.RefactorExtract, vscode_languageserver_protocol_1.CodeActionKind.RefactorInline, vscode_languageserver_protocol_1.CodeActionKind.RefactorRewrite, vscode_languageserver_protocol_1.CodeActionKind.Source, vscode_languageserver_protocol_1.CodeActionKind.SourceOrganizeImports ] } }; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.codeActionProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideCodeActions: (document, range, context, token) => { const client = this._client; const _provideCodeActions = (document, range, context, token) => { const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), range: client.code2ProtocolConverter.asRange(range), context: client.code2ProtocolConverter.asCodeActionContext(context) }; return client.sendRequest(vscode_languageserver_protocol_1.CodeActionRequest.type, params, token).then((values) => { if (values === null) { return undefined; } const result = []; for (let item of values) { if (vscode_languageserver_protocol_1.Command.is(item)) { result.push(client.protocol2CodeConverter.asCommand(item)); } else { result.push(client.protocol2CodeConverter.asCodeAction(item)); } } return result; }, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.CodeActionRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideCodeActions ? middleware.provideCodeActions(document, range, context, token, _provideCodeActions) : _provideCodeActions(document, range, context, token); } }; return [vscode_1.languages.registerCodeActionsProvider(options.documentSelector, provider, (options.codeActionKinds ? 
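// --- Illustrative sketch (not part of this module) --------------------------
// When the server advertises codeActionKinds, the registration here passes
// them on to vscode as providedCodeActionKinds so the editor can filter
// lightbulb requests by kind. A hypothetical capability:
//
//   codeActionProvider: { codeActionKinds: ['quickfix', 'refactor.extract'] }
// -----------------------------------------------------------------------------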
{ providedCodeActionKinds: this._client.protocol2CodeConverter.asCodeActionKinds(options.codeActionKinds) } : undefined)), provider]; } } class CodeLensFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.CodeLensRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'codeLens').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.codeLensProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideCodeLenses: (document, token) => { const client = this._client; const provideCodeLenses = (document, token) => { return client.sendRequest(vscode_languageserver_protocol_1.CodeLensRequest.type, client.code2ProtocolConverter.asCodeLensParams(document), token).then(client.protocol2CodeConverter.asCodeLenses, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.CodeLensRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideCodeLenses ? middleware.provideCodeLenses(document, token, provideCodeLenses) : provideCodeLenses(document, token); }, resolveCodeLens: (options.resolveProvider) ? (codeLens, token) => { const client = this._client; const resolveCodeLens = (codeLens, token) => { return client.sendRequest(vscode_languageserver_protocol_1.CodeLensResolveRequest.type, client.code2ProtocolConverter.asCodeLens(codeLens), token).then(client.protocol2CodeConverter.asCodeLens, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.CodeLensResolveRequest.type, error); return codeLens; }); }; const middleware = client.clientOptions.middleware; return middleware.resolveCodeLens ? middleware.resolveCodeLens(codeLens, token, resolveCodeLens) : resolveCodeLens(codeLens, token); } : undefined }; return [vscode_1.languages.registerCodeLensProvider(options.documentSelector, provider), provider]; } } class DocumentFormattingFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentFormattingRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'formatting').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentFormattingProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDocumentFormattingEdits: (document, options, token) => { const client = this._client; const provideDocumentFormattingEdits = (document, options, token) => { const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), options: client.code2ProtocolConverter.asFormattingOptions(options) }; return client.sendRequest(vscode_languageserver_protocol_1.DocumentFormattingRequest.type, params, token).then(client.protocol2CodeConverter.asTextEdits, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentFormattingRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideDocumentFormattingEdits ? 
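// --- Illustrative sketch (not part of this module) --------------------------
// CodeLensFeature above implements the two-phase protocol: provideCodeLenses
// returns (possibly unresolved) lenses quickly, and resolveCodeLens, wired
// only when the server announced resolveProvider, fills in the command lazily
// and falls back to the unresolved lens on failure. Capability:
//
//   codeLensProvider: { resolveProvider: true }
// -----------------------------------------------------------------------------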
middleware.provideDocumentFormattingEdits(document, options, token, provideDocumentFormattingEdits) : provideDocumentFormattingEdits(document, options, token); } }; return [vscode_1.languages.registerDocumentFormattingEditProvider(options.documentSelector, provider), provider]; } } class DocumentRangeFormattingFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentRangeFormattingRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'rangeFormatting').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentRangeFormattingProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDocumentRangeFormattingEdits: (document, range, options, token) => { const client = this._client; const provideDocumentRangeFormattingEdits = (document, range, options, token) => { let params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), range: client.code2ProtocolConverter.asRange(range), options: client.code2ProtocolConverter.asFormattingOptions(options) }; return client.sendRequest(vscode_languageserver_protocol_1.DocumentRangeFormattingRequest.type, params, token).then(client.protocol2CodeConverter.asTextEdits, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentRangeFormattingRequest.type, error); return Promise.resolve([]); }); }; let middleware = client.clientOptions.middleware; return middleware.provideDocumentRangeFormattingEdits ? middleware.provideDocumentRangeFormattingEdits(document, range, options, token, provideDocumentRangeFormattingEdits) : provideDocumentRangeFormattingEdits(document, range, options, token); } }; return [vscode_1.languages.registerDocumentRangeFormattingEditProvider(options.documentSelector, provider), provider]; } } class DocumentOnTypeFormattingFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentOnTypeFormattingRequest.type); } fillClientCapabilities(capabilites) { ensure(ensure(capabilites, 'textDocument'), 'onTypeFormatting').dynamicRegistration = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentOnTypeFormattingProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideOnTypeFormattingEdits: (document, position, ch, options, token) => { const client = this._client; const provideOnTypeFormattingEdits = (document, position, ch, options, token) => { let params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), position: client.code2ProtocolConverter.asPosition(position), ch: ch, options: client.code2ProtocolConverter.asFormattingOptions(options) }; return client.sendRequest(vscode_languageserver_protocol_1.DocumentOnTypeFormattingRequest.type, params, token).then(client.protocol2CodeConverter.asTextEdits, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentOnTypeFormattingRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideOnTypeFormattingEdits ? 
middleware.provideOnTypeFormattingEdits(document, position, ch, options, token, provideOnTypeFormattingEdits) : provideOnTypeFormattingEdits(document, position, ch, options, token); } }; const moreTriggerCharacter = options.moreTriggerCharacter || []; return [vscode_1.languages.registerOnTypeFormattingEditProvider(options.documentSelector, provider, options.firstTriggerCharacter, ...moreTriggerCharacter), provider]; } } class RenameFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.RenameRequest.type); } fillClientCapabilities(capabilites) { let rename = ensure(ensure(capabilites, 'textDocument'), 'rename'); rename.dynamicRegistration = true; rename.prepareSupport = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.renameProvider); if (!options) { return; } if (Is.boolean(capabilities.renameProvider)) { options.prepareProvider = false; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideRenameEdits: (document, position, newName, token) => { const client = this._client; const provideRenameEdits = (document, position, newName, token) => { let params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), position: client.code2ProtocolConverter.asPosition(position), newName: newName }; return client.sendRequest(vscode_languageserver_protocol_1.RenameRequest.type, params, token).then(client.protocol2CodeConverter.asWorkspaceEdit, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.RenameRequest.type, error); return Promise.reject(new Error(error.message)); }); }; const middleware = client.clientOptions.middleware; return middleware.provideRenameEdits ? middleware.provideRenameEdits(document, position, newName, token, provideRenameEdits) : provideRenameEdits(document, position, newName, token); }, prepareRename: options.prepareProvider ? (document, position, token) => { const client = this._client; const prepareRename = (document, position, token) => { let params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), position: client.code2ProtocolConverter.asPosition(position), }; return client.sendRequest(vscode_languageserver_protocol_1.PrepareRenameRequest.type, params, token).then((result) => { if (vscode_languageserver_protocol_1.Range.is(result)) { return client.protocol2CodeConverter.asRange(result); } else if (result && vscode_languageserver_protocol_1.Range.is(result.range)) { return { range: client.protocol2CodeConverter.asRange(result.range), placeholder: result.placeholder }; } // To cancel the rename vscode API expects a rejected promise.
return Promise.reject(new Error(`The element can't be renamed.`)); }, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.PrepareRenameRequest.type, error); return Promise.reject(new Error(error.message)); }); }; const middleware = client.clientOptions.middleware; return middleware.prepareRename ?
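// --- Illustrative sketch (not part of this module) --------------------------
// prepareRename above accepts either a bare Range or a { range, placeholder }
// literal from the server; any other result becomes a rejected promise because
// that is how the vscode rename API is cancelled. Hypothetical responses:
//
//   { start: { line: 1, character: 4 }, end: { line: 1, character: 9 } }
//   { range: /* as above */, placeholder: 'newName' }
// -----------------------------------------------------------------------------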
middleware.prepareRename(document, position, token, prepareRename) : prepareRename(document, position, token); } : undefined }; return [vscode_1.languages.registerRenameProvider(options.documentSelector, provider), provider]; } } class DocumentLinkFeature extends TextDocumentFeature { constructor(client) { super(client, vscode_languageserver_protocol_1.DocumentLinkRequest.type); } fillClientCapabilities(capabilites) { const documentLinkCapabilities = ensure(ensure(capabilites, 'textDocument'), 'documentLink'); documentLinkCapabilities.dynamicRegistration = true; documentLinkCapabilities.tooltipSupport = true; } initialize(capabilities, documentSelector) { const options = this.getRegistrationOptions(documentSelector, capabilities.documentLinkProvider); if (!options) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: options }); } registerLanguageProvider(options) { const provider = { provideDocumentLinks: (document, token) => { const client = this._client; const provideDocumentLinks = (document, token) => { return client.sendRequest(vscode_languageserver_protocol_1.DocumentLinkRequest.type, client.code2ProtocolConverter.asDocumentLinkParams(document), token).then(client.protocol2CodeConverter.asDocumentLinks, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentLinkRequest.type, error); return Promise.resolve([]); }); }; const middleware = client.clientOptions.middleware; return middleware.provideDocumentLinks ? middleware.provideDocumentLinks(document, token, provideDocumentLinks) : provideDocumentLinks(document, token); }, resolveDocumentLink: options.resolveProvider ? (link, token) => { const client = this._client; let resolveDocumentLink = (link, token) => { return client.sendRequest(vscode_languageserver_protocol_1.DocumentLinkResolveRequest.type, client.code2ProtocolConverter.asDocumentLink(link), token).then(client.protocol2CodeConverter.asDocumentLink, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.DocumentLinkResolveRequest.type, error); return Promise.resolve(link); }); }; const middleware = client.clientOptions.middleware; return middleware.resolveDocumentLink ? 
middleware.resolveDocumentLink(link, token, resolveDocumentLink) : resolveDocumentLink(link, token); } : undefined }; return [vscode_1.languages.registerDocumentLinkProvider(options.documentSelector, provider), provider]; } } class ConfigurationFeature { constructor(_client) { this._client = _client; this._listeners = new Map(); } get messages() { return vscode_languageserver_protocol_1.DidChangeConfigurationNotification.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'workspace'), 'didChangeConfiguration').dynamicRegistration = true; } initialize() { let section = this._client.clientOptions.synchronize.configurationSection; if (section !== void 0) { this.register(this.messages, { id: UUID.generateUuid(), registerOptions: { section: section } }); } } register(_message, data) { let disposable = vscode_1.workspace.onDidChangeConfiguration((event) => { this.onDidChangeConfiguration(data.registerOptions.section, event); }); this._listeners.set(data.id, disposable); if (data.registerOptions.section !== void 0) { this.onDidChangeConfiguration(data.registerOptions.section, undefined); } } unregister(id) { let disposable = this._listeners.get(id); if (disposable) { this._listeners.delete(id); disposable.dispose(); } } dispose() { for (let disposable of this._listeners.values()) { disposable.dispose(); } this._listeners.clear(); } onDidChangeConfiguration(configurationSection, event) { let sections; if (Is.string(configurationSection)) { sections = [configurationSection]; } else { sections = configurationSection; } if (sections !== void 0 && event !== void 0) { let affected = sections.some((section) => event.affectsConfiguration(section)); if (!affected) { return; } } let didChangeConfiguration = (sections) => { if (sections === void 0) { this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeConfigurationNotification.type, { settings: null }); return; } this._client.sendNotification(vscode_languageserver_protocol_1.DidChangeConfigurationNotification.type, { settings: this.extractSettingsInformation(sections) }); }; let middleware = this.getMiddleware(); middleware ? middleware(sections, didChangeConfiguration) : didChangeConfiguration(sections); } extractSettingsInformation(keys) { function ensurePath(config, path) { let current = config; for (let i = 0; i < path.length - 1; i++) { let obj = current[path[i]]; if (!obj) { obj = Object.create(null); current[path[i]] = obj; } current = obj; } return current; } let resource = this._client.clientOptions.workspaceFolder ? 
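// --- Illustrative sketch (not part of this module) --------------------------
// extractSettingsInformation (continuing below) splits each configured key on
// its first dot: 'editor.tabSize' becomes workspace.getConfiguration('editor')
// plus .get('tabSize'), and the value lands under settings.editor.tabSize in
// the workspace/didChangeConfiguration payload. Consumer-side setup (section
// name 'myExt' is hypothetical):
//
//   synchronize: { configurationSection: ['myExt', 'editor.tabSize'] }
// -----------------------------------------------------------------------------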
this._client.clientOptions.workspaceFolder.uri : undefined; let result = Object.create(null); for (let i = 0; i < keys.length; i++) { let key = keys[i]; let index = key.indexOf('.'); let config = null; if (index >= 0) { config = vscode_1.workspace.getConfiguration(key.substr(0, index), resource).get(key.substr(index + 1)); } else { config = vscode_1.workspace.getConfiguration(key, resource); } if (config) { let path = keys[i].split('.'); ensurePath(result, path)[path[path.length - 1]] = config; } } return result; } getMiddleware() { let middleware = this._client.clientOptions.middleware; if (middleware.workspace && middleware.workspace.didChangeConfiguration) { return middleware.workspace.didChangeConfiguration; } else { return undefined; } } } class ExecuteCommandFeature { constructor(_client) { this._client = _client; this._commands = new Map(); } get messages() { return vscode_languageserver_protocol_1.ExecuteCommandRequest.type; } fillClientCapabilities(capabilities) { ensure(ensure(capabilities, 'workspace'), 'executeCommand').dynamicRegistration = true; } initialize(capabilities) { if (!capabilities.executeCommandProvider) { return; } this.register(this.messages, { id: UUID.generateUuid(), registerOptions: Object.assign({}, capabilities.executeCommandProvider) }); } register(_message, data) { const client = this._client; const middleware = client.clientOptions.middleware; const executeCommand = (command, args) => { let params = { command, arguments: args }; return client.sendRequest(vscode_languageserver_protocol_1.ExecuteCommandRequest.type, params).then(undefined, (error) => { client.logFailedRequest(vscode_languageserver_protocol_1.ExecuteCommandRequest.type, error); }); }; if (data.registerOptions.commands) { const disposeables = []; for (const command of data.registerOptions.commands) { disposeables.push(vscode_1.commands.registerCommand(command, (...args) => { return middleware.executeCommand ? 
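// --- Illustrative sketch (not part of this module) --------------------------
// For a server announcing executeCommandProvider: { commands: ['demo.fix'] }
// (command name hypothetical), the loop above registers a matching vscode
// command that forwards to workspace/executeCommand. Middleware can intercept
// the round trip:
//
//   middleware: { executeCommand: (command, args, next) => next(command, args) }
// -----------------------------------------------------------------------------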
middleware.executeCommand(command, args, executeCommand) : executeCommand(command, args); })); } this._commands.set(data.id, disposeables); } } unregister(id) { let disposeables = this._commands.get(id); if (disposeables) { disposeables.forEach(disposable => disposable.dispose()); } } dispose() { this._commands.forEach((value) => { value.forEach(disposable => disposable.dispose()); }); this._commands.clear(); } } var MessageTransports; (function (MessageTransports) { function is(value) { let candidate = value; return candidate && vscode_languageserver_protocol_1.MessageReader.is(value.reader) && vscode_languageserver_protocol_1.MessageWriter.is(value.writer); } MessageTransports.is = is; })(MessageTransports = exports.MessageTransports || (exports.MessageTransports = {})); class OnReady { constructor(_resolve, _reject) { this._resolve = _resolve; this._reject = _reject; this._used = false; } get isUsed() { return this._used; } resolve() { this._used = true; this._resolve(); } reject(error) { this._used = true; this._reject(error); } } class BaseLanguageClient { constructor(id, name, clientOptions) { this._traceFormat = vscode_languageserver_protocol_1.TraceFormat.Text; this._features = []; this._method2Message = new Map(); this._dynamicFeatures = new Map(); this._id = id; this._name = name; clientOptions = clientOptions || {}; this._clientOptions = { documentSelector: clientOptions.documentSelector || [], synchronize: clientOptions.synchronize || {}, diagnosticCollectionName: clientOptions.diagnosticCollectionName, outputChannelName: clientOptions.outputChannelName || this._name, revealOutputChannelOn: clientOptions.revealOutputChannelOn || RevealOutputChannelOn.Error, stdioEncoding: clientOptions.stdioEncoding || 'utf8', initializationOptions: clientOptions.initializationOptions, initializationFailedHandler: clientOptions.initializationFailedHandler, progressOnInitialization: !!clientOptions.progressOnInitialization, errorHandler: clientOptions.errorHandler || new DefaultErrorHandler(this._name), middleware: clientOptions.middleware || {}, uriConverters: clientOptions.uriConverters, workspaceFolder: clientOptions.workspaceFolder }; this._clientOptions.synchronize = this._clientOptions.synchronize || {}; this.state = ClientState.Initial; this._connectionPromise = undefined; this._resolvedConnection = undefined; this._initializeResult = undefined; if (clientOptions.outputChannel) { this._outputChannel = clientOptions.outputChannel; this._disposeOutputChannel = false; } else { this._outputChannel = undefined; this._disposeOutputChannel = true; } this._traceOutputChannel = clientOptions.traceOutputChannel; this._listeners = undefined; this._providers = undefined; this._diagnostics = undefined; this._fileEvents = []; this._fileEventDelayer = new async_1.Delayer(250); this._onReady = new Promise((resolve, reject) => { this._onReadyCallbacks = new OnReady(resolve, reject); }); this._onStop = undefined; this._telemetryEmitter = new vscode_languageserver_protocol_1.Emitter(); this._stateChangeEmitter = new vscode_languageserver_protocol_1.Emitter(); this._tracer = { log: (messageOrDataObject, data) => { if (Is.string(messageOrDataObject)) { this.logTrace(messageOrDataObject, data); } else { this.logObjectTrace(messageOrDataObject); } }, }; this._c2p = c2p.createConverter(clientOptions.uriConverters ? clientOptions.uriConverters.code2Protocol : undefined); this._p2c = p2c.createConverter(clientOptions.uriConverters ? 
clientOptions.uriConverters.protocol2Code : undefined); this._syncedDocuments = new Map(); this.registerBuiltinFeatures(); } get state() { return this._state; } set state(value) { let oldState = this.getPublicState(); this._state = value; let newState = this.getPublicState(); if (newState !== oldState) { this._stateChangeEmitter.fire({ oldState, newState }); } } getPublicState() { if (this.state === ClientState.Running) { return State.Running; } else if (this.state === ClientState.Starting) { return State.Starting; } else { return State.Stopped; } } get initializeResult() { return this._initializeResult; } sendRequest(type, ...params) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } this.forceDocumentSync(); try { return this._resolvedConnection.sendRequest(type, ...params); } catch (error) { this.error(`Sending request ${Is.string(type) ? type : type.method} failed.`, error); throw error; } } onRequest(type, handler) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } try { this._resolvedConnection.onRequest(type, handler); } catch (error) { this.error(`Registering request handler ${Is.string(type) ? type : type.method} failed.`, error); throw error; } } sendNotification(type, params) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } this.forceDocumentSync(); try { this._resolvedConnection.sendNotification(type, params); } catch (error) { this.error(`Sending notification ${Is.string(type) ? type : type.method} failed.`, error); throw error; } } onNotification(type, handler) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } try { this._resolvedConnection.onNotification(type, handler); } catch (error) { this.error(`Registering notification handler ${Is.string(type) ? type : type.method} failed.`, error); throw error; } } onProgress(type, token, handler) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } try { return this._resolvedConnection.onProgress(type, token, handler); } catch (error) { this.error(`Registering progress handler for token ${token} failed.`, error); throw error; } } sendProgress(type, token, value) { if (!this.isConnectionActive()) { throw new Error('Language client is not ready yet'); } this.forceDocumentSync(); try { this._resolvedConnection.sendProgress(type, token, value); } catch (error) { this.error(`Sending progress for token ${token} failed.`, error); throw error; } } get clientOptions() { return this._clientOptions; } get protocol2CodeConverter() { return this._p2c; } get code2ProtocolConverter() { return this._c2p; } get onTelemetry() { return this._telemetryEmitter.event; } get onDidChangeState() { return this._stateChangeEmitter.event; } get outputChannel() { if (!this._outputChannel) { this._outputChannel = vscode_1.window.createOutputChannel(this._clientOptions.outputChannelName ? 
this._clientOptions.outputChannelName : this._name); } return this._outputChannel; } get traceOutputChannel() { if (this._traceOutputChannel) { return this._traceOutputChannel; } return this.outputChannel; } get diagnostics() { return this._diagnostics; } createDefaultErrorHandler() { return new DefaultErrorHandler(this._name); } set trace(value) { this._trace = value; this.onReady().then(() => { this.resolveConnection().then((connection) => { connection.trace(this._trace, this._tracer, { sendNotification: false, traceFormat: this._traceFormat }); }); }, () => { }); } data2String(data) { if (data instanceof vscode_languageserver_protocol_1.ResponseError) { const responseError = data; return ` Message: ${responseError.message}\n Code: ${responseError.code} ${responseError.data ? '\n' + responseError.data.toString() : ''}`; } if (data instanceof Error) { if (Is.string(data.stack)) { return data.stack; } return data.message; } if (Is.string(data)) { return data; } return data.toString(); } info(message, data) { this.outputChannel.appendLine(`[Info - ${(new Date().toLocaleTimeString())}] ${message}`); if (data) { this.outputChannel.appendLine(this.data2String(data)); } if (this._clientOptions.revealOutputChannelOn <= RevealOutputChannelOn.Info) { this.showNotificationMessage(); } } warn(message, data) { this.outputChannel.appendLine(`[Warn - ${(new Date().toLocaleTimeString())}] ${message}`); if (data) { this.outputChannel.appendLine(this.data2String(data)); } if (this._clientOptions.revealOutputChannelOn <= RevealOutputChannelOn.Warn) { this.showNotificationMessage(); } } error(message, data) { this.outputChannel.appendLine(`[Error - ${(new Date().toLocaleTimeString())}] ${message}`); if (data) { this.outputChannel.appendLine(this.data2String(data)); } if (this._clientOptions.revealOutputChannelOn <= RevealOutputChannelOn.Error) { this.showNotificationMessage(); } } showNotificationMessage() { vscode_1.window.showInformationMessage('A request has failed. See the output for more information.', 'Go to output').then(() => { this.outputChannel.show(true); }); } logTrace(message, data) { this.traceOutputChannel.appendLine(`[Trace - ${(new Date().toLocaleTimeString())}] ${message}`); if (data) { this.traceOutputChannel.appendLine(this.data2String(data)); } } logObjectTrace(data) { if (data.isLSPMessage && data.type) { this.traceOutputChannel.append(`[LSP - ${(new Date().toLocaleTimeString())}] `); } else { this.traceOutputChannel.append(`[Trace - ${(new Date().toLocaleTimeString())}] `); } if (data) { this.traceOutputChannel.appendLine(`${JSON.stringify(data)}`); } } needsStart() { return this.state === ClientState.Initial || this.state === ClientState.Stopping || this.state === ClientState.Stopped; } needsStop() { return this.state === ClientState.Starting || this.state === ClientState.Running; } onReady() { return this._onReady; } isConnectionActive() { return this.state === ClientState.Running && !!this._resolvedConnection; } start() { if (this._onReadyCallbacks.isUsed) { this._onReady = new Promise((resolve, reject) => { this._onReadyCallbacks = new OnReady(resolve, reject); }); } this._listeners = []; this._providers = []; // If we restart then the diagnostics collection is reused.
if (!this._diagnostics) { this._diagnostics = this._clientOptions.diagnosticCollectionName ?
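// --- Illustrative sketch (not part of this module) --------------------------
// Typical lifecycle from an extension's activate(), assuming a LanguageClient
// subclass of BaseLanguageClient and a `context` ExtensionContext:
//
//   const client = new LanguageClient('demo', 'Demo Server', serverOptions, clientOptions);
//   context.subscriptions.push(client.start()); // Disposable: stops the client
//   client.onReady().then(() => {
//       // safe to sendRequest/sendNotification from here on
//   });
// -----------------------------------------------------------------------------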
vscode_1.languages.createDiagnosticCollection(this._clientOptions.diagnosticCollectionName) : vscode_1.languages.createDiagnosticCollection(); } this.state = ClientState.Starting; this.resolveConnection().then((connection) => { connection.onLogMessage((message) => { switch (message.type) { case vscode_languageserver_protocol_1.MessageType.Error: this.error(message.message); break; case vscode_languageserver_protocol_1.MessageType.Warning: this.warn(message.message); break; case vscode_languageserver_protocol_1.MessageType.Info: this.info(message.message); break; default: this.outputChannel.appendLine(message.message); } }); connection.onShowMessage((message) => { switch (message.type) { case vscode_languageserver_protocol_1.MessageType.Error: vscode_1.window.showErrorMessage(message.message); break; case vscode_languageserver_protocol_1.MessageType.Warning: vscode_1.window.showWarningMessage(message.message); break; case vscode_languageserver_protocol_1.MessageType.Info: vscode_1.window.showInformationMessage(message.message); break; default: vscode_1.window.showInformationMessage(message.message); } }); connection.onRequest(vscode_languageserver_protocol_1.ShowMessageRequest.type, (params) => { let messageFunc; switch (params.type) { case vscode_languageserver_protocol_1.MessageType.Error: messageFunc = vscode_1.window.showErrorMessage; break; case vscode_languageserver_protocol_1.MessageType.Warning: messageFunc = vscode_1.window.showWarningMessage; break; case vscode_languageserver_protocol_1.MessageType.Info: messageFunc = vscode_1.window.showInformationMessage; break; default: messageFunc = vscode_1.window.showInformationMessage; } let actions = params.actions || []; return messageFunc(params.message, ...actions); }); connection.onTelemetry((data) => { this._telemetryEmitter.fire(data); }); connection.listen(); // Error is handled in the initialize call.
return this.initialize(connection); }).then(undefined, (error) => { this.state = ClientState.StartFailed; this._onReadyCallbacks.reject(error); this.error('Starting client failed', error); vscode_1.window.showErrorMessage(`Couldn't start client ${this._name}`); }); return new vscode_1.Disposable(() => { if (this.needsStop()) { this.stop(); } }); } resolveConnection() { if (!this._connectionPromise) { this._connectionPromise = this.createConnection(); } return this._connectionPromise; } initialize(connection) { this.refreshTrace(connection, false); let initOption = this._clientOptions.initializationOptions; let rootPath = this._clientOptions.workspaceFolder ? this._clientOptions.workspaceFolder.uri.fsPath : this._clientGetRootPath(); let initParams = { processId: process.pid, clientInfo: { name: 'vscode', version: vscode_1.version }, rootPath: rootPath ? rootPath : null, rootUri: rootPath ? this._c2p.asUri(vscode_1.Uri.file(rootPath)) : null, capabilities: this.computeClientCapabilities(), initializationOptions: Is.func(initOption) ?
initOption() : initOption, trace: vscode_languageserver_protocol_1.Trace.toString(this._trace), workspaceFolders: null }; this.fillInitializeParams(initParams); if (this._clientOptions.progressOnInitialization) { const token = UUID.generateUuid(); const part = new progressPart_1.ProgressPart(connection, token); initParams.workDoneToken = token; return this.doInitialize(connection, initParams).then((result) => { part.done(); return result; }, (error) => { part.cancel(); throw error; }); } else { return this.doInitialize(connection, initParams); } } doInitialize(connection, initParams) { return connection.initialize(initParams).then((result) => { this._resolvedConnection = connection; this._initializeResult = result; this.state = ClientState.Running; let textDocumentSyncOptions = undefined; if (Is.number(result.capabilities.textDocumentSync)) { if (result.capabilities.textDocumentSync === vscode_languageserver_protocol_1.TextDocumentSyncKind.None) { textDocumentSyncOptions = { openClose: false, change: vscode_languageserver_protocol_1.TextDocumentSyncKind.None, save: undefined }; } else { textDocumentSyncOptions = { openClose: true, change: result.capabilities.textDocumentSync, save: { includeText: false } }; } } else if (result.capabilities.textDocumentSync !== void 0 && result.capabilities.textDocumentSync !== null) { textDocumentSyncOptions = result.capabilities.textDocumentSync; } this._capabilities = Object.assign({}, result.capabilities, { resolvedTextDocumentSync: textDocumentSyncOptions }); connection.onDiagnostics(params => this.handleDiagnostics(params)); connection.onRequest(vscode_languageserver_protocol_1.RegistrationRequest.type, params => this.handleRegistrationRequest(params)); // See https://github.com/Microsoft/vscode-languageserver-node/issues/199
connection.onRequest('client/registerFeature', params => this.handleRegistrationRequest(params)); connection.onRequest(vscode_languageserver_protocol_1.UnregistrationRequest.type, params => this.handleUnregistrationRequest(params)); // See https://github.com/Microsoft/vscode-languageserver-node/issues/199
connection.onRequest('client/unregisterFeature', params => this.handleUnregistrationRequest(params)); connection.onRequest(vscode_languageserver_protocol_1.ApplyWorkspaceEditRequest.type, params => this.handleApplyWorkspaceEdit(params)); connection.sendNotification(vscode_languageserver_protocol_1.InitializedNotification.type, {}); this.hookFileEvents(connection); this.hookConfigurationChanged(connection); this.initializeFeatures(connection); this._onReadyCallbacks.resolve(); return result; }).then(undefined, (error) => { if (this._clientOptions.initializationFailedHandler) { if (this._clientOptions.initializationFailedHandler(error)) { this.initialize(connection); } else { this.stop(); this._onReadyCallbacks.reject(error); } } else if (error instanceof vscode_languageserver_protocol_1.ResponseError && error.data && error.data.retry) { vscode_1.window.showErrorMessage(error.message, { title: 'Retry', id: 'retry' }).then(item => { if (item && item.id === 'retry') { this.initialize(connection); } else { this.stop(); this._onReadyCallbacks.reject(error); } }); } else { if (error && error.message) { vscode_1.window.showErrorMessage(error.message); } this.error('Server initialization failed.', error); this.stop(); this._onReadyCallbacks.reject(error); } throw error; }); } _clientGetRootPath() { let folders = vscode_1.workspace.workspaceFolders; if (!folders || folders.length === 0) { return undefined; } let folder = folders[0]; if
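// --- Illustrative sketch (not part of this module) --------------------------
// The error path above consults clientOptions.initializationFailedHandler
// first: returning true re-runs initialize, returning false stops the client.
// Without a handler, a ResponseError carrying data.retry offers the user a
// Retry prompt. A minimal handler capping retries (counter assumed):
//
//   let attempts = 0;
//   const clientOptions = {
//       initializationFailedHandler: (error) => ++attempts < 3
//   };
// -----------------------------------------------------------------------------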
(folder.uri.scheme === 'file') { return folder.uri.fsPath; } return undefined; } stop() { this._initializeResult = undefined; if (!this._connectionPromise) { this.state = ClientState.Stopped; return Promise.resolve(); } if (this.state === ClientState.Stopping && this._onStop) { return this._onStop; } this.state = ClientState.Stopping; this.cleanUp(false); // unhook listeners
return this._onStop = this.resolveConnection().then(connection => { return connection.shutdown().then(() => { connection.exit(); connection.dispose(); this.state = ClientState.Stopped; this.cleanUpChannel(); this._onStop = undefined; this._connectionPromise = undefined; this._resolvedConnection = undefined; }); }); } cleanUp(channel = true, diagnostics = true) { if (this._listeners) { this._listeners.forEach(listener => listener.dispose()); this._listeners = undefined; } if (this._providers) { this._providers.forEach(provider => provider.dispose()); this._providers = undefined; } if (this._syncedDocuments) { this._syncedDocuments.clear(); } for (let handler of this._dynamicFeatures.values()) { handler.dispose(); } if (channel) { this.cleanUpChannel(); } if (diagnostics && this._diagnostics) { this._diagnostics.dispose(); this._diagnostics = undefined; } } cleanUpChannel() { if (this._outputChannel && this._disposeOutputChannel) { this._outputChannel.dispose(); this._outputChannel = undefined; } } notifyFileEvent(event) { var _a; const client = this; function didChangeWatchedFile(event) { client._fileEvents.push(event); client._fileEventDelayer.trigger(() => { client.onReady().then(() => { client.resolveConnection().then(connection => { if (client.isConnectionActive()) { client.forceDocumentSync(); connection.didChangeWatchedFiles({ changes: client._fileEvents }); } client._fileEvents = []; }); }, (error) => { client.error(`Notify file events failed.`, error); }); }); } const workSpaceMiddleware = (_a = this.clientOptions.middleware) === null || _a === void 0 ? void 0 : _a.workspace; (workSpaceMiddleware === null || workSpaceMiddleware === void 0 ? void 0 : workSpaceMiddleware.didChangeWatchedFile) ? workSpaceMiddleware.didChangeWatchedFile(event, didChangeWatchedFile) : didChangeWatchedFile(event); } forceDocumentSync() { this._dynamicFeatures.get(vscode_languageserver_protocol_1.DidChangeTextDocumentNotification.type.method).forceDelivery(); } handleDiagnostics(params) { if (!this._diagnostics) { return; } let uri = this._p2c.asUri(params.uri); let diagnostics = this._p2c.asDiagnostics(params.diagnostics); let middleware = this.clientOptions.middleware; if (middleware.handleDiagnostics) { middleware.handleDiagnostics(uri, diagnostics, (uri, diagnostics) => this.setDiagnostics(uri, diagnostics)); } else { this.setDiagnostics(uri, diagnostics); } } setDiagnostics(uri, diagnostics) { if (!this._diagnostics) { return; } this._diagnostics.set(uri, diagnostics); } createConnection() { let errorHandler = (error, message, count) => { this.handleConnectionError(error, message, count); }; let closeHandler = () => { this.handleConnectionClosed(); }; return this.createMessageTransports(this._clientOptions.stdioEncoding || 'utf8').then((transports) => { return createConnection(transports.reader, transports.writer, errorHandler, closeHandler); }); } handleConnectionClosed() { // Check whether this is a normal shutdown in progress or the client stopped normally.
if (this.state === ClientState.Stopping || this.state === ClientState.Stopped) { return; } try { if (this._resolvedConnection) { this._resolvedConnection.dispose(); } } catch (error) { // Disposing a connection could fail if error cases. } let action = CloseAction.DoNotRestart; try { action = this._clientOptions.errorHandler.closed(); } catch (error) { // Ignore errors coming from the error handler. } this._connectionPromise = undefined; this._resolvedConnection = undefined; if (action === CloseAction.DoNotRestart) { this.error('Connection to server got closed. Server will not be restarted.'); this.state = ClientState.Stopped; this.cleanUp(false, true); } else if (action === CloseAction.Restart) { this.info('Connection to server got closed. Server will restart.'); this.cleanUp(false, false); this.state = ClientState.Initial; this.start(); } } handleConnectionError(error, message, count) { let action = this._clientOptions.errorHandler.error(error, message, count); if (action === ErrorAction.Shutdown) { this.error('Connection to server is erroring. Shutting down server.'); this.stop(); } } hookConfigurationChanged(connection) { vscode_1.workspace.onDidChangeConfiguration(() => { this.refreshTrace(connection, true); }); } refreshTrace(connection, sendNotification = false) { let config = vscode_1.workspace.getConfiguration(this._id); let trace = vscode_languageserver_protocol_1.Trace.Off; let traceFormat = vscode_languageserver_protocol_1.TraceFormat.Text; if (config) { const traceConfig = config.get('trace.server', 'off'); if (typeof traceConfig === 'string') { trace = vscode_languageserver_protocol_1.Trace.fromString(traceConfig); } else { trace = vscode_languageserver_protocol_1.Trace.fromString(config.get('trace.server.verbosity', 'off')); traceFormat = vscode_languageserver_protocol_1.TraceFormat.fromString(config.get('trace.server.format', 'text')); } } this._trace = trace; this._traceFormat = traceFormat; connection.trace(this._trace, this._tracer, { sendNotification, traceFormat: this._traceFormat }); } hookFileEvents(_connection) { let fileEvents = this._clientOptions.synchronize.fileEvents; if (!fileEvents) { return; } let watchers; if (Is.array(fileEvents)) { watchers = fileEvents; } else { watchers = [fileEvents]; } if (!watchers) { return; } this._dynamicFeatures.get(vscode_languageserver_protocol_1.DidChangeWatchedFilesNotification.type.method).registerRaw(UUID.generateUuid(), watchers); } registerFeatures(features) { for (let feature of features) { this.registerFeature(feature); } } registerFeature(feature) { this._features.push(feature); if (DynamicFeature.is(feature)) { let messages = feature.messages; if (Array.isArray(messages)) { for (let message of messages) { this._method2Message.set(message.method, message); this._dynamicFeatures.set(message.method, feature); } } else { this._method2Message.set(messages.method, messages); this._dynamicFeatures.set(messages.method, feature); } } } getFeature(request) { return this._dynamicFeatures.get(request); } registerBuiltinFeatures() { this.registerFeature(new ConfigurationFeature(this)); this.registerFeature(new DidOpenTextDocumentFeature(this, this._syncedDocuments)); this.registerFeature(new DidChangeTextDocumentFeature(this)); this.registerFeature(new WillSaveFeature(this)); this.registerFeature(new WillSaveWaitUntilFeature(this)); this.registerFeature(new DidSaveTextDocumentFeature(this)); this.registerFeature(new DidCloseTextDocumentFeature(this, this._syncedDocuments)); this.registerFeature(new FileSystemWatcherFeature(this, 
(event) => this.notifyFileEvent(event))); this.registerFeature(new CompletionItemFeature(this)); this.registerFeature(new HoverFeature(this)); this.registerFeature(new SignatureHelpFeature(this)); this.registerFeature(new DefinitionFeature(this)); this.registerFeature(new ReferencesFeature(this)); this.registerFeature(new DocumentHighlightFeature(this)); this.registerFeature(new DocumentSymbolFeature(this)); this.registerFeature(new WorkspaceSymbolFeature(this)); this.registerFeature(new CodeActionFeature(this)); this.registerFeature(new CodeLensFeature(this)); this.registerFeature(new DocumentFormattingFeature(this)); this.registerFeature(new DocumentRangeFormattingFeature(this)); this.registerFeature(new DocumentOnTypeFormattingFeature(this)); this.registerFeature(new RenameFeature(this)); this.registerFeature(new DocumentLinkFeature(this)); this.registerFeature(new ExecuteCommandFeature(this)); } fillInitializeParams(params) { for (let feature of this._features) { if (Is.func(feature.fillInitializeParams)) { feature.fillInitializeParams(params); } } } computeClientCapabilities() { let result = {}; ensure(result, 'workspace').applyEdit = true; let workspaceEdit = ensure(ensure(result, 'workspace'), 'workspaceEdit'); workspaceEdit.documentChanges = true; workspaceEdit.resourceOperations = [vscode_languageserver_protocol_1.ResourceOperationKind.Create, vscode_languageserver_protocol_1.ResourceOperationKind.Rename, vscode_languageserver_protocol_1.ResourceOperationKind.Delete]; workspaceEdit.failureHandling = vscode_languageserver_protocol_1.FailureHandlingKind.TextOnlyTransactional; let diagnostics = ensure(ensure(result, 'textDocument'), 'publishDiagnostics'); diagnostics.relatedInformation = true; diagnostics.versionSupport = false; diagnostics.tagSupport = { valueSet: [vscode_languageserver_protocol_1.DiagnosticTag.Unnecessary, vscode_languageserver_protocol_1.DiagnosticTag.Deprecated] }; for (let feature of this._features) { feature.fillClientCapabilities(result); } return result; } initializeFeatures(_connection) { let documentSelector = this._clientOptions.documentSelector; for (let feature of this._features) { feature.initialize(this._capabilities, documentSelector); } } handleRegistrationRequest(params) { return new Promise((resolve, reject) => { for (let registration of params.registrations) { const feature = this._dynamicFeatures.get(registration.method); if (!feature) { reject(new Error(`No feature implementation for ${registration.method} found. Registration failed.`)); return; } const options = registration.registerOptions || {}; options.documentSelector = options.documentSelector || this._clientOptions.documentSelector; const data = { id: registration.id, registerOptions: options }; feature.register(this._method2Message.get(registration.method), data); } resolve(); }); } handleUnregistrationRequest(params) { return new Promise((resolve, reject) => { for (let unregistration of params.unregisterations) { const feature = this._dynamicFeatures.get(unregistration.method); if (!feature) { reject(new Error(`No feature implementation for ${unregistration.method} found. Unregistration failed.`)); return; } feature.unregister(unregistration.id); } resolve(); }); } handleApplyWorkspaceEdit(params) { // This is some sort of workaround since the version check should be done by VS Code in the Workspace.applyEdit. // However doing it here adds some safety since the server can lag more behind then an extension. 
let workspaceEdit = params.edit; let openTextDocuments = new Map(); vscode_1.workspace.textDocuments.forEach((document) => openTextDocuments.set(document.uri.toString(), document)); let versionMismatch = false; if (workspaceEdit.documentChanges) { for (const change of workspaceEdit.documentChanges) { if (vscode_languageserver_protocol_1.TextDocumentEdit.is(change) && change.textDocument.version && change.textDocument.version >= 0) { let textDocument = openTextDocuments.get(change.textDocument.uri); if (textDocument && textDocument.version !== change.textDocument.version) { versionMismatch = true; break; } } } } if (versionMismatch) { return Promise.resolve({ applied: false }); } return Is.asPromise(vscode_1.workspace.applyEdit(this._p2c.asWorkspaceEdit(params.edit)).then((value) => { return { applied: value }; })); } logFailedRequest(type, error) { // If we get a request cancel or a content modified don't log anything. if (error instanceof vscode_languageserver_protocol_1.ResponseError && (error.code === vscode_languageserver_protocol_1.ErrorCodes.RequestCancelled || error.code === vscode_languageserver_protocol_1.ErrorCodes.ContentModified)) { return; } this.error(`Request ${type.method} failed.`, error); } } exports.BaseLanguageClient = BaseLanguageClient;<|fim▁end|>
} function createConnection(input, output, errorHandler, closeHandler) { let logger = new ConsoleLogger(); let connection = vscode_languageserver_protocol_1.createProtocolConnection(input, output, logger);
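The completion above supplies the createConnection helper that wires a reader/writer transport pair into a protocol connection. A minimal Python sketch of the initialize-retry pattern that doInitialize() in this row implements (every name below is an illustrative stand-in, not part of the vscode-languageclient API):

class ResponseError(Exception):
    def __init__(self, message, data=None):
        super().__init__(message)
        self.data = data or {}

def do_initialize(client, connection, params):
    try:
        result = connection.initialize(params)
        client.state = "running"
        return result
    except ResponseError as err:
        if err.data.get("retry"):
            return do_initialize(client, connection, params)  # server flagged the error as retryable
        client.stop()  # otherwise give up and surface the failure
        raise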
<|file_name|>addition.cc<|end_file_name|><|fim▁begin|>/* addition.cc * * KNOWN BUGS: will fail if entered with a tsp with identical pos_t's. * * nearest addition and farthest insertion, with initial convex hull and * maximal angle selections for euclidean. * * orig ceh 12-91 * * c++ conversion ceh 12-93 * added farthest insertion functionality 9-94 #define PRINTIT */ #define MAX_SUM_COST MAX_COST /* #define MAX_SUM_COST MAX_SUM */ #include "addition.h" #include <assert.h> inline void AdditionHeuristic :: internal_add(const city_id_t to, city_id_t &from) { city_id_t i; assert(to != NO_ID); if (from == NO_ID) { if (cities_traveled == degree) i = last_traveled = to; else i = traveled[last_traveled]; from = last_traveled; } else i = traveled[from]; #ifdef PRINTIT dump.form("%6d put %d<-%d<-%d\n", (int)(matrix->val(to,traveled[from]) +matrix->val(from,to)-matrix->val(from,traveled[from])), (int)traveled[from],(int)to,(int)from); #endif last_traveled = traveled[from] = to; traveled[to] = i;<|fim▁hole|> const pos_t *p3) { return (p1->x-p2->x)*(p3->x-p2->x) + (p1->y-p2->y)*(p3->y-p2->y); } inline void AdditionHeuristic :: farthest_internal_select(city_id_t &to, city_id_t &from) { sum_t the_lowest, save_cost; city_id_t *t, j; cost_t *outcost; /* pos_t *p1; */ the_lowest = COST_MAX; to = farthestcity; /* p1 = matrix->pos+to; */ for (t = traveled; t<traveled_end; t++) { /*2*/ if (*t == NO_ID) continue; outcost = matrix->cost[(city_id_t)(t-traveled)]; j = *t; /* save_cost = dot_product(matrix->pos+(t-traveled), p1, matrix->pos+j) / (outcost[to] * matrix->val(to,j)); */ save_cost = matrix->val(to,j) + outcost[to] - outcost[j]; if (save_cost < the_lowest) { the_lowest = save_cost; from = (city_id_t)(t-traveled); } } assert(the_lowest != COST_MAX); } // travel "to_travel" after city "from" in the tour so far void AdditionHeuristic :: farthest_add(city_id_t to_travel) { cost_t farthestcost; cost_t *fcost = nearcost, *outcost; city_id_t *t, from = NO_ID; if (to_travel == NO_ID) farthest_internal_select(to_travel, from); internal_add(to_travel, from); if (--cities_traveled == 0) return; farthestcost = MIN_COST; farthestcity = NO_ID; outcost = matrix->cost[to_travel]; for (t = traveled; t<traveled_end; t++, fcost++) { if (*t != NO_ID) continue; if (*fcost > outcost[(city_id_t)(t-traveled)]) *fcost = outcost[(city_id_t)(t-traveled)]; if (farthestcost < *fcost) { farthestcost = *fcost; farthestcity = (city_id_t)(t-traveled); } } assert(nearestcity != NO_ID); assert(farthestcity != NO_ID); } inline void AdditionHeuristic :: addition_internal_select(city_id_t &to, city_id_t &from) { to = nearestcity; from = farthestcity; } void AdditionHeuristic :: addition_add(city_id_t to_travel) { cost_t *ncost = nearcost, nearestcost; cost_t back_cost, back_cost2, *outcost, *jcost, *outcost2, necost; city_id_t *t, from = NO_ID, to_to, j, *fcity = farcity; /* pos_t *p1, *p2, *p3, *p4; */ if (to_travel == NO_ID) addition_internal_select(to_travel, from); internal_add(to_travel, from); if (--cities_traveled == 0) return; assert(from != NO_ID); nearestcost = MAX_SUM_COST; nearestcity = NO_ID; to_to = traveled[to_travel]; outcost = matrix->cost[to_travel]; outcost2 = matrix->cost[from]; /* p1 = matrix->pos+from; p3 = matrix->pos+to_travel; p4 = matrix->pos+to_to; */ if (to_travel == to_to) back_cost = 0; else back_cost = outcost[to_to]; if (to_travel == from) back_cost2 = 0; else back_cost2 = outcost2[to_travel]; for (t = traveled; t<traveled_end; t++, ncost++, fcity++) { if (*t != NO_ID) continue; j = (city_id_t)(t-traveled); /* 
p2 = matrix->pos+j; */ jcost = matrix->cost[j]; if (*fcity == from) { city_id_t *tt; for (*ncost = MAX_SUM_COST, tt = traveled; tt<traveled_end; tt++) { if (*tt == NO_ID) continue; outcost = matrix->cost[(city_id_t)(tt-traveled)]; /* p1 = matrix->pos+(tt-traveled); p3 = matrix->pos+*tt; necost = dot_product(p1, p2, p3)/(outcost[j] * jcost[*tt]); */ necost = outcost[j] + jcost[*tt] - outcost[*tt]; if (*ncost > necost) { *ncost = necost; *fcity = (city_id_t)(tt-traveled); } } outcost = matrix->cost[to_travel]; /* p1 = matrix->pos+from; p3 = matrix->pos+to_travel; */ } else { /* necost = dot_product(p3, p2, p4) / (outcost[j]*jcost[to_to]); */ necost = outcost[j] + jcost[to_to] - back_cost; if (*ncost > necost) { *ncost = necost; *fcity = to_travel; } /* necost = dot_product(p1, p2, p3) / (outcost2[j]*jcost[to_travel]); */ necost = outcost2[j] + jcost[to_travel] - back_cost2; if (*ncost > necost) { *ncost = necost; *fcity = from; } } if (nearestcost > *ncost) { nearestcost = *ncost; nearestcity = j; farthestcity = *fcity; } } assert(nearestcity != NO_ID); } /* Point *1*: find nearest (farthest) cities from tour cities. * * Point *2*: find best insertion of nearest (farthest) city */ int AdditionHeuristic :: run() { city_id_t k, l; assert(cities_traveled != degree); if (type == SPLIT_ADDITION) { city_id_t *t, *tt, *fcity = farcity, j; sum_t necost, nearestcost; cost_t *ncost = nearcost, *outcost, *jcost; while (cities_traveled > 3*degree/4) (this->*to_add)(NO_ID); nearestcost = MAX_SUM_COST; for (t = traveled; t<traveled_end; t++, ncost++, fcity++) { if (*t != NO_ID) continue; j = (city_id_t)(t-traveled); jcost = matrix->cost[j]; for (*ncost = MAX_SUM_COST, tt = traveled; tt<traveled_end; tt++) { if (*tt == NO_ID) continue; outcost = matrix->cost[(city_id_t)(tt-traveled)]; necost = outcost[j] + jcost[*tt] - outcost[*tt]; if (*ncost > necost) { *fcity = (city_id_t)(tt-traveled); if (nearestcost > (*ncost = necost)) { nearestcost = necost; nearestcity = j; farthestcity = *fcity; } } } } to_add = &AdditionHeuristic::addition_add; } while (cities_traveled) (this->*to_add)(NO_ID); for (k = l = 0; l<degree; l++) tour->travel(k = traveled[k]); return 0; } /* Define a struct to hold positions and sites to be sorted by qsort */ class poses_t { public: inline poses_t() {}; pos_t pos; city_id_t site; }; /* compare_poses * * the compare function to pass to qsort to sort the list of all edges */ static int compare_poses(const void *p1, const void *p2) { double t = (((poses_t *)p1)->pos.x - ((poses_t *)p2)->pos.x); if (t > 0.0) return 1; if (t < 0.0) return -1; return 0; } /* LINE_T * * a line structure to be a part of a convex hull, with slope and * y-intercept. */ class line_t { public: inline line_t() {}; double slope; double y_intercept; poses_t *last_pose; city_id_t site; }; #define MAX_LINES 100 /* HULL_T * * a convex hull structure, 4 of which are created to drape around the * set of points at the four corners.. 
*/ class hull_t { public: inline hull_t() {}; line_t lines[MAX_LINES]; short line; poses_t *last_pose; void add_line_to_hull(poses_t *pose); void climb_under_hull(poses_t *end_pose, int inc); void climb_over_hull(poses_t *end_pose, int inc); }; /* add_line_to_hull * * simply add a line to the end of the hull, that will connect the new point * to the last point in the hull */ void hull_t :: add_line_to_hull(poses_t *pose) { line_t *h_line = lines+line; double save; h_line->site = pose->site; if ((save = pose->pos.x-last_pose->pos.x) == 0.0) { h_line->slope = FLOAT_MAX; h_line->y_intercept = FLOAT_MAX; } else { h_line->slope = (pose->pos.y-last_pose->pos.y) / save; h_line->y_intercept = pose->pos.y - (h_line->slope * pose->pos.x); } lines[line++].last_pose = last_pose; assert (line <= MAX_LINES); last_pose = pose; } /* Macros to determine if a line is higher/lower than a point * * Note the '>' and '<' are warped because pixels are upside down from * a mathematical graph... */ #define line_lower(line,point) \ ((point)->pos.y > \ (((line)->slope)*((point)->pos.x)) + (line)->y_intercept) #define line_higher(line,point) \ ((point)->pos.y < \ (((line)->slope)*((point)->pos.x)) + (line)->y_intercept) /* climb_hull() * * here we try to add lines to the hull until we reach the maximum point. * * end_pose * we are given a hull with a single line in it that *_2_* * represents the lower edge, we then insert convex \1 * lines until we reach the upper edge..... pose* * \0 * climb_over_hull() will climb over the edges assuming you * * start from a side and want to convex up. climb_under_hull() * will climb under, if you wish to cover the underside of points * once started from the side-most point. */ void hull_t :: climb_over_hull(poses_t *end_pose, int inc) { short here_line = (short)(line-1); poses_t *pose = last_pose, *last_pose = pose; /* HI stuff ONLY */ while (pose != end_pose) { pose += inc; if (pose->pos.y < last_pose->pos.y) /* if point is lower than last line */ continue; /* then the point isn't worth it */ /* find a line in hull which will enable a convex line */ if (here_line >= 0) { while (line_lower(lines+here_line, pose)) { if (--here_line<0) break; } } if (line != ++here_line) { last_pose = lines[here_line].last_pose; line = here_line; } add_line_to_hull(pose); last_pose = pose; } } /* the only difference between the function below (climb_under_hull) and the * function above is the line where "line_lower" is replaced by "line_higher" * */ void hull_t :: climb_under_hull(poses_t *end_pose, int inc) { short here_line = (short)(line-1); poses_t *pose = last_pose, *last_pose = pose; /* HI stuff ONLY */ while (pose != end_pose) { pose += inc; if (pose->pos.y > last_pose->pos.y) /* if point is higher than last line */ continue; /* then the point isn't worth it */ /* find a line in hull which will enable a convex line */ if (here_line >= 0) { while (line_higher(lines+here_line, pose)) { if (--here_line<0) break; } } if (line != ++here_line) { last_pose = lines[here_line].last_pose; line = here_line; } add_line_to_hull(pose); last_pose = pose; } } int AdditionHeuristic :: can_run(const Matrix *) const { return 1; } AdditionHeuristic :: ~AdditionHeuristic() { delete traveled; delete nearcost; } #define HILEFT (Hulls[0]) #define LOLEFT (Hulls[1]) #define HIRIGHT (Hulls[2]) #define LORIGHT (Hulls[3]) // hull_t HILEFT, LOLEFT, HIRIGHT, LORIGHT; AdditionHeuristic :: AdditionHeuristic(const Matrix *m, int ty) : TourFinder(m) { cost_t *ncost; city_id_t x, x_high, x_low, *t; poses_t *y_highest = NULL, *y_lowest = 
NULL, *poses; double highest_y = 0., lowest_y = COST_MAX; double save; short line_right; switch (ty&TYPEMASK_ADDITION) { case SPLIT_ADDITION: case FARTHEST_ADDITION: to_add = &AdditionHeuristic::farthest_add; break; case ANGLE_ADDITION: /* if (m->is_geometric_2d()) to_add = angle_add; else to_add = addition_add; break; */ default: case NORMAL_ADDITION: to_add = &AdditionHeuristic::addition_add; break; } type = ty; ncost = nearcost = new cost_t[degree]; traveled = new city_id_t[degree*2]; farcity = traveled+degree; traveled_end = traveled+degree; for (t = traveled; t<traveled_end; t++, ncost++) { *t = NO_ID; *ncost = MAX_SUM_COST; } cities_traveled = degree; farthestcity = NO_ID; if (!m->is_geometric_2d() || degree < 4) { city_id_t init; init = param.initial_choice; if (init == NO_ID) init = 0; (this->*to_add)(init); return; } poses = new poses_t[degree]; for (x = 0; x<degree; x++) { poses[x].site = x; poses[x].pos = matrix->pos[x]; } qsort(poses, (size_t)degree, (size_t)sizeof(poses_t), compare_poses); for (x = 0; x<degree; x++) { save = poses[x].pos.y; if (save > highest_y) { highest_y = save; y_highest = poses+x; } if (save < lowest_y) { lowest_y = save; y_lowest = poses+x; } #ifdef PRINTIT dump.form("%d at (%lf,%lf) %lx\n", (int)poses[x].site, poses[x].pos.x, save, (long)(poses+x)); #endif } assert(y_lowest != NULL && y_highest != NULL); /* Initialize the four hulls that will drape the points... * HIRIGHT is the upper right corner, LOLEFT is the lower left corner * the LORIGHT is in mathematical quadrant I. */ hull_t *Hulls = new hull_t[4]; HIRIGHT.line = HILEFT.line = LORIGHT.line = LOLEFT.line = 0; HIRIGHT.last_pose = poses+(degree-1); LORIGHT.last_pose = poses+(degree-1); HILEFT.last_pose = poses; LOLEFT.last_pose = poses; x_high = (poses+(degree-1))->site; x_low = (poses)->site; HIRIGHT.climb_over_hull(y_highest, -1); HILEFT.climb_over_hull(y_highest, 1); LORIGHT.climb_under_hull(y_lowest, -1); LOLEFT.climb_under_hull(y_lowest, 1); delete poses; line_right = 0; if (HIRIGHT.line>0) { /* Only do if there is >1 line(s) */ (this->*to_add)(x_high); while (line_right < HIRIGHT.line-1) { (this->*to_add)(HIRIGHT.lines[line_right].site); line_right++; } } line_right = (short)(HILEFT.line-1); while (line_right >= 0) { (this->*to_add)(HILEFT.lines[line_right].site); line_right--; } line_right = 0; if (LOLEFT.line>0) { /* Only do if there is >1 line(s) */ (this->*to_add)(x_low); while (line_right < LOLEFT.line-1) { (this->*to_add)(LOLEFT.lines[line_right].site); line_right++; } } line_right = (short)(LORIGHT.line-1); while (line_right >= 0) { (this->*to_add)(LORIGHT.lines[line_right].site); line_right--; } delete Hulls; }<|fim▁end|>
} inline double AdditionHeuristic :: dot_product(const pos_t *p1, const pos_t *p2,
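The completion bridges the hole with the dot_product helper referenced by the commented-out geometric variants above. For orientation, a compact Python sketch of the farthest-addition strategy that farthest_internal_select()/internal_add() implement; dist is assumed to be a symmetric cost matrix, and the C++ version additionally seeds the tour with a convex hull:

def farthest_addition(dist, start=0):
    n = len(dist)
    tour = [start]
    remaining = set(range(n)) - {start}
    while remaining:
        # unvisited city farthest from the tour so far
        c = max(remaining, key=lambda j: min(dist[i][j] for i in tour))
        # cheapest splice point: minimise dist(f, c) + dist(c, t) - dist(f, t)
        k = min(range(len(tour)),
                key=lambda p: dist[tour[p]][c]
                            + dist[c][tour[(p + 1) % len(tour)]]
                            - dist[tour[p]][tour[(p + 1) % len(tour)]])
        tour.insert(k + 1, c)
        remaining.discard(c)
    return tour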
<|file_name|>callbacks.py<|end_file_name|><|fim▁begin|>import logging <|fim▁hole|> logger = logging.getLogger(__name__) def run_api_endpoint_callbacks(api_endpoint): responses = [] for api_callback in api_endpoint.callbacks.all(): logger.debug("Make callback: %s", api_callback) response = make_request(method=api_callback.method, url=api_callback.url, params=api_callback.get_params(), headers=api_callback.get_headers(), timeout=settings.DEFAULT_CALLBACK_REQUEST_TIMEOUT) if response: logger.debug("Callback response status code: %s", response.status_code) responses.append(response) return responses<|fim▁end|>
from django.conf import settings from mock_api.http_utils import make_request
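A hypothetical call site for run_api_endpoint_callbacks(); api_endpoint stands in for the Django model instance implied by the prompt, and make_request is assumed to return requests-style response objects:

responses = run_api_endpoint_callbacks(api_endpoint)
for response in responses:
    logger.info("callback returned %s", response.status_code)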
<|file_name|>genera_html_su_urbano.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- import cgi import sqlite3, re, string, codecs, os def cercaurbano(cNominativo): c = sqlite3.connect('./data/catasto.db') cur = c.cursor() cSele = "select distinct (id_i.foglio || '-' || id_i.numero ||'-'|| id_i.subalterno), \ '<a href=\"http://nominatim.openstreetmap.org/search/' \ || top.decodifica || ' ' || ind.indirizzo || ' ' || ltrim(ind.civico1, '0') || ',16011 Arenzano\" target=\"_blank\">W_osm </a>', \ '<a href=\"../osmlocation/dettaglio-mappa.htm?location=' \ || top.decodifica || ' ' || ind.indirizzo || ' ' || ltrim(ind.civico1, '0') || ',16011 Arenzano\" target=\"_blank\"> L_osm </a>', \ id_i.foglio, id_i.numero, id_i.subalterno, id_i.progr, \ ui.categoria, ui.classe, ui.renditaEuro, (top.decodifica || ' ' || ind.indirizzo || ' ' || ind.civico1), \ giu.denominazione, per.cognome, per.nome, per.DataNascita \ from identificativi_immobiliari as id_i \ left join indirizzi as ind On id_i.idImmobile = ind.idImmobile \ left join titolarita as tit On id_i.idImmobile = tit.idImmobile \ left join persona_fisica as per On tit.idSoggetto = per.idSoggetto \ left join persona_giuridica as giu On tit.idSoggetto = giu.idSoggetto \ left join unita_immobiliari as ui on tit.idImmobile = ui.idImmobile \ left join cod_toponimo as top on ind.toponimo = top.codice \ where trim(per.cognome) || ' ' || trim(per.nome) like '%" + cNominativo + "%' or giu.denominazione like '%" + cNominativo + "%' group by id_i.foglio, id_i.numero, id_i.subalterno order by id_i.foglio, id_i.numero, id_i.subalterno, id_i.progr desc" #print cSele cur.execute(cSele) retrows = cur.fetchall() table = "<table>" table += "<tr>" table += "<th>fog-map-sub</th><th>nominatim</th><th>loc_via_norm</th>" table += "<th>fog</th><th>map</th><th>sub</th><th>progr</th><th>cat</th>" table += "<th>cla</th><th>rend</th><th>Indirizzo</th><th>Cognome</th><th>Nome</th><th>data_nascita</th>" table += "</tr>" for row in retrows: totcol=len(row) table += "<tr>" for col in range(0,totcol): table += "<td>" + str(row[col]) + "</td>" table += "</tr>" table += "</table>" print table return "" def main(): parametri = cgi.FieldStorage() print "Content-Type: text/html" # HTML is following print # blank line, end of headers print '<html>' print '<head>' print '<style>' print 'body {background-color: #ccff66;font-family: Arial, Verdana, sans-serif;font-size: 12px;color: #000000;}' print 'table {background-color: #ccff66;font-family: Arial, Verdana, sans-serif;font-size: 14px;color: #000000;}' print 'table {border-collapse: collapse;}' print 'table, th, td { border: 1px solid gray; }' print '</style>' print '</head>' print '<body>' glofile='./data/catasto.db' mess='' if not os.path.exists(glofile): mess+="Manca il file -- " + glofile + '<br>' glofile='./data/catasto_cart_4326.sqlite' if not os.path.exists(glofile): mess+="Manca il file -- " + glofile if len(mess)>0: print mess + '<br>' print '<a href=https://github.com/marcobra/opencatamap/wiki/OpenCataMap>Maggiori dettagli circa i files dati necessari</a>' return if (len(parametri) < 1): print "uso:<br> http://127.0.0.1:8080/cgi-bin/genera_html_su_urbano.py?N=Dam" print 'Ricerca per parametri -> ' for key in parametri.keys():<|fim▁hole|> cercaurbano(parametri["n"].value) if __name__ == "__main__": main()<|fim▁end|>
print "%s = %s" % (key, parametri[key].value)
<|file_name|>grid.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS handling for the computed value of //! [grids](https://drafts.csswg.org/css-grid/) use cssparser::{Parser, Token, BasicParseError}; use parser::{Parse, ParserContext}; use std::ascii::AsciiExt; use std::mem; use style_traits::{ParseError, StyleParseError}; use values::{CSSFloat, CustomIdent}; use values::generics::grid::{GridTemplateComponent, RepeatCount, TrackBreadth, TrackKeyword, TrackRepeat}; use values::generics::grid::{LineNameList, TrackSize, TrackList, TrackListType}; use values::specified::LengthOrPercentage; /// Parse a single flexible length. pub fn parse_flex<'i, 't>(input: &mut Parser<'i, 't>) -> Result<CSSFloat, ParseError<'i>> { match *input.next()? { Token::Dimension { value, ref unit, .. } if unit.eq_ignore_ascii_case("fr") && value.is_sign_positive() => Ok(value), ref t => Err(BasicParseError::UnexpectedToken(t.clone()).into()), } } impl Parse for TrackBreadth<LengthOrPercentage> { fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { if let Ok(lop) = input.try(|i| LengthOrPercentage::parse_non_negative(context, i)) { return Ok(TrackBreadth::Breadth(lop)) } if let Ok(f) = input.try(parse_flex) { return Ok(TrackBreadth::Flex(f)) } TrackKeyword::parse(input).map(TrackBreadth::Keyword) } } impl Parse for TrackSize<LengthOrPercentage> { fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { if let Ok(b) = input.try(|i| TrackBreadth::parse(context, i)) { return Ok(TrackSize::Breadth(b))<|fim▁hole|> } if input.try(|i| i.expect_function_matching("minmax")).is_ok() { return input.parse_nested_block(|input| { let inflexible_breadth = match input.try(|i| LengthOrPercentage::parse_non_negative(context, i)) { Ok(lop) => TrackBreadth::Breadth(lop), Err(..) => { let keyword = TrackKeyword::parse(input)?; TrackBreadth::Keyword(keyword) } }; input.expect_comma()?; Ok(TrackSize::Minmax(inflexible_breadth, TrackBreadth::parse(context, input)?)) }); } input.expect_function_matching("fit-content")?; let lop = input.parse_nested_block(|i| LengthOrPercentage::parse_non_negative(context, i))?; Ok(TrackSize::FitContent(lop)) } } /// Parse the grid line names into a vector of owned strings. /// /// https://drafts.csswg.org/css-grid/#typedef-line-names pub fn parse_line_names<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Box<[CustomIdent]>, ParseError<'i>> { input.expect_square_bracket_block()?; input.parse_nested_block(|input| { let mut values = vec![]; while let Ok(ident) = input.try(|i| i.expect_ident_cloned()) { let ident = CustomIdent::from_ident(&ident, &["span"])?; values.push(ident); } Ok(values.into_boxed_slice()) }) } /// The type of `repeat` function (only used in parsing). 
/// /// https://drafts.csswg.org/css-grid/#typedef-track-repeat #[derive(Clone, Copy, Debug, PartialEq)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] enum RepeatType { /// [`<auto-repeat>`](https://drafts.csswg.org/css-grid/#typedef-auto-repeat) Auto, /// [`<track-repeat>`](https://drafts.csswg.org/css-grid/#typedef-track-repeat) Normal, /// [`<fixed-repeat>`](https://drafts.csswg.org/css-grid/#typedef-fixed-repeat) Fixed, } impl TrackRepeat<LengthOrPercentage> { fn parse_with_repeat_type<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<(TrackRepeat<LengthOrPercentage>, RepeatType), ParseError<'i>> { input.try(|i| i.expect_function_matching("repeat").map_err(|e| e.into())).and_then(|_| { input.parse_nested_block(|input| { let count = RepeatCount::parse(context, input)?; input.expect_comma()?; let is_auto = count == RepeatCount::AutoFit || count == RepeatCount::AutoFill; let mut repeat_type = if is_auto { RepeatType::Auto } else { // <fixed-size> is a subset of <track_size>, so it should work for both RepeatType::Fixed }; let mut names = vec![]; let mut values = vec![]; let mut current_names; loop { current_names = input.try(parse_line_names).unwrap_or(vec![].into_boxed_slice()); if let Ok(track_size) = input.try(|i| TrackSize::parse(context, i)) { if !track_size.is_fixed() { if is_auto { // should be <fixed-size> for <auto-repeat> return Err(StyleParseError::UnspecifiedError.into()) } if repeat_type == RepeatType::Fixed { repeat_type = RepeatType::Normal // <track-size> for sure } } values.push(track_size); names.push(current_names); if is_auto { // FIXME: In the older version of the spec // (https://www.w3.org/TR/2015/WD-css-grid-1-20150917/#typedef-auto-repeat), // if the repeat type is `<auto-repeat>` we shouldn't try to parse more than // one `TrackSize`. But in current version of the spec, this is deprecated // but we are adding this for gecko parity. We should remove this when // gecko implements new spec. names.push(input.try(parse_line_names).unwrap_or(vec![].into_boxed_slice())); break } } else { if values.is_empty() { // expecting at least one <track-size> return Err(StyleParseError::UnspecifiedError.into()) } names.push(current_names); // final `<line-names>` break // no more <track-size>, breaking } } let repeat = TrackRepeat { count: count, track_sizes: values, line_names: names.into_boxed_slice(), }; Ok((repeat, repeat_type)) }) }) } } impl Parse for TrackList<LengthOrPercentage> { fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { // Merge the line names while parsing values. The resulting values will // all be bunch of `<track-size>` and one <auto-repeat>. // FIXME: We need to decide which way is better for repeat function in // https://bugzilla.mozilla.org/show_bug.cgi?id=1382369. // // For example, // `[a b] 100px [c d] repeat(1, 30px [g]) [h]` will be merged as `[a b] 100px [c d] 30px [g h]` // whereas, `[a b] repeat(2, [c] 50px [d]) [e f] repeat(auto-fill, [g] 12px) 10px [h]` will be merged as // `[a b c] 50px [d c] 50px [d e f] repeat(auto-fill, [g] 12px) 10px [h]`, with the `<auto-repeat>` value // set in the `auto_repeat` field, and the `idx` in TrackListType::Auto pointing to the values after // `<auto-repeat>` (in this case, `10px [h]`). let mut current_names = vec![]; let mut names = vec![]; let mut values = vec![]; let mut list_type = TrackListType::Explicit; // assume it's the simplest case // holds <auto-repeat> value. It can only be only one in a TrackList. 
let mut auto_repeat = None; // assume that everything is <fixed-size>. This flag is useful when we encounter <auto-repeat> let mut atleast_one_not_fixed = false; loop { current_names.extend_from_slice(&mut input.try(parse_line_names).unwrap_or(vec![].into_boxed_slice())); if let Ok(track_size) = input.try(|i| TrackSize::parse(context, i)) { if !track_size.is_fixed() { atleast_one_not_fixed = true; if auto_repeat.is_some() { // <auto-track-list> only accepts <fixed-size> and <fixed-repeat> return Err(StyleParseError::UnspecifiedError.into()) } } let vec = mem::replace(&mut current_names, vec![]); names.push(vec.into_boxed_slice()); values.push(track_size); } else if let Ok((repeat, type_)) = input.try(|i| TrackRepeat::parse_with_repeat_type(context, i)) { if list_type == TrackListType::Explicit { list_type = TrackListType::Normal; // <explicit-track-list> doesn't contain repeat() } match type_ { RepeatType::Normal => { atleast_one_not_fixed = true; if auto_repeat.is_some() { // only <fixed-repeat> return Err(StyleParseError::UnspecifiedError.into()) } }, RepeatType::Auto => { if auto_repeat.is_some() || atleast_one_not_fixed { // We've either seen <auto-repeat> earlier, or there's at least one non-fixed value return Err(StyleParseError::UnspecifiedError.into()) } list_type = TrackListType::Auto(values.len() as u16); auto_repeat = Some(repeat); let vec = mem::replace(&mut current_names, vec![]); names.push(vec.into_boxed_slice()); continue }, RepeatType::Fixed => (), } // If the repeat count is numeric, we axpand and merge the values. let mut repeat = repeat.expand(); let mut repeat_names_iter = repeat.line_names.iter(); for (size, repeat_names) in repeat.track_sizes.drain(..).zip(&mut repeat_names_iter) { current_names.extend_from_slice(&repeat_names); let vec = mem::replace(&mut current_names, vec![]); names.push(vec.into_boxed_slice()); values.push(size); } if let Some(names) = repeat_names_iter.next() { current_names.extend_from_slice(&names); } } else { if values.is_empty() && auto_repeat.is_none() { return Err(StyleParseError::UnspecifiedError.into()) } names.push(current_names.into_boxed_slice()); break } } Ok(TrackList { list_type: list_type, values: values, line_names: names.into_boxed_slice(), auto_repeat: auto_repeat, }) } } impl Parse for GridTemplateComponent<LengthOrPercentage> { // FIXME: Derive Parse (probably with None_) fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { if input.try(|i| i.expect_ident_matching("none")).is_ok() { return Ok(GridTemplateComponent::None) } Self::parse_without_none(context, input) } } impl GridTemplateComponent<LengthOrPercentage> { /// Parses a `GridTemplateComponent<LengthOrPercentage>` except `none` keyword. pub fn parse_without_none<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { if let Ok(t) = input.try(|i| TrackList::parse(context, i)) { return Ok(GridTemplateComponent::TrackList(t)) } LineNameList::parse(context, input).map(GridTemplateComponent::Subgrid) } }<|fim▁end|>
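When TrackList::parse above meets a fixed repeat(), it expands it and merges line names across the repeat boundary, e.g. "[a] repeat(2, [b] 10px [c]) [d]" yields the name groups [a b], [c b], [c d] around two 10px tracks. The merging step, restated as a small Python sketch:

def expand_repeat(count, sizes, names):
    # names carries len(sizes) + 1 groups: one before each track plus a trailing group
    out_sizes, out_names, carry = [], [], []
    for _ in range(count):
        for size, group in zip(sizes, names):
            out_names.append(carry + group)
            out_sizes.append(size)
            carry = []
        carry = list(names[len(sizes)])  # trailing names flow into the next repetition
    return out_sizes, out_names, carry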
<|file_name|>ipa_log_manager.py<|end_file_name|><|fim▁begin|># Authors: John Dennis <[email protected]> # # Copyright (C) 2011 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #------------------------------------------------------------------------------- # Module exports __all__ = ['log_mgr', 'root_logger', 'standard_logging_setup', 'IPA_ROOT_LOGGER_NAME', 'ISO8601_UTC_DATETIME_FMT', 'LOGGING_FORMAT_STDERR', 'LOGGING_FORMAT_STDOUT', 'LOGGING_FORMAT_FILE'] #------------------------------------------------------------------------------- import sys import re import copy from log_manager import LogManager, parse_log_level #------------------------------------------------------------------------------- # Our root logger, all loggers will be descendents of this. IPA_ROOT_LOGGER_NAME = 'ipa' # Format string for time.strftime() to produce a ISO 8601 date time # formatted string in the UTC time zone. ISO8601_UTC_DATETIME_FMT = '%Y-%m-%dT%H:%M:%SZ' # Logging format string for use with logging stderr handlers LOGGING_FORMAT_STDERR = 'ipa: %(levelname)s: %(message)s' # Logging format string for use with logging stdout handlers LOGGING_FORMAT_STDOUT = '[%(asctime)s %(name)s] <%(levelname)s>: %(message)s' # Logging format string for use with logging file handlers LOGGING_FORMAT_FILE = '\t'.join([ '%(asctime)s', '%(process)d', '%(threadName)s', '%(name)s', '%(levelname)s', '%(message)s', ]) # Used by standard_logging_setup() for console message LOGGING_FORMAT_STANDARD_CONSOLE = '%(name)-12s: %(levelname)-8s %(message)s' # Used by standard_logging_setup() for file message LOGGING_FORMAT_STANDARD_FILE = '%(asctime)s %(levelname)s %(message)s' #------------------------------------------------------------------------------- class IPALogManager(LogManager): ''' Subclass the LogManager to enforce some IPA specfic logging conventions. * Default to timestamps in UTC. * Default to ISO 8601 timestamp format. * Default the message format. ''' log_logger_level_config_re = re.compile(r'^log_logger_level_(debug|info|warn|warning|error|critical|\d+)$') def __init__(self, configure_state=None): ''' :parameters: configure_state Used by clients of the log manager to track the configuration state, may be any object. ''' super(IPALogManager, self).__init__(IPA_ROOT_LOGGER_NAME, configure_state) def configure_from_env(self, env, configure_state=None): ''' Read the loggger configuration from the Env config. The following items may be configured: Logger Levels *log_logger_XXX = comma separated list of regexps* Logger levels can be explicitly specified for specific loggers as opposed to a global logging level. Specific loggers are indiciated by a list of regular expressions bound to a level. If a logger's name matches the regexp then it is assigned that level. 
The keys in the Env config must begin with "log_logger_level\_" and then be followed by a symbolic or numeric log level, for example:: log_logger_level_debug = ipapython\.dn\..* log_logger_level_35 = ipalib\.plugins\.dogtag The first line says any logger belonging to the ipapython.dn module will have it's level configured to debug. The second line say the ipa.plugins.dogtag logger will be configured to level 35. Note: logger names are a dot ('.') separated list forming a path in the logger tree. The dot character is also a regular expression metacharacter (matches any character) therefore you will usually need to escape the dot in the logger names by preceeding it with a backslash. The return value of this function is a dict with the following format: logger_regexps List of (regexp, level) tuples :parameters: env Env object configuration values are read from. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' logger_regexps = []<|fim▁hole|> value = getattr(env, attr, None) if value is not None: config[attr] = value for attr in list(env): # Get logger level configuration match = IPALogManager.log_logger_level_config_re.search(attr) if match: value = match.group(1) level = parse_log_level(value) value = getattr(env, attr) regexps = re.split('\s*,\s*', value) # Add the regexp, it maps to the configured level for regexp in regexps: logger_regexps.append((regexp, level)) continue self.configure(config, configure_state) return config def create_log_handlers(self, configs, logger=None, configure_state=None): 'Enforce some IPA specific configurations' configs = copy.copy(configs) for cfg in configs: if not 'time_zone_converter' in cfg: cfg['time_zone_converter'] = 'utc' if not 'datefmt' in cfg: cfg['datefmt'] = ISO8601_UTC_DATETIME_FMT if not 'format' in cfg: cfg['format'] = LOGGING_FORMAT_STDOUT return super(IPALogManager, self).create_log_handlers(configs, logger, configure_state) #------------------------------------------------------------------------------- def standard_logging_setup(filename=None, verbose=False, debug=False, filemode='w', console_format=LOGGING_FORMAT_STANDARD_CONSOLE): handlers = [] # File output is always logged at debug level if filename is not None: file_handler = dict(name='file', filename=filename, filemode=filemode, permission=0o600, level='debug', format=LOGGING_FORMAT_STANDARD_FILE) handlers.append(file_handler) if log_mgr.handlers.has_key('console'): log_mgr.remove_handler('console') level = 'error' if verbose: level = 'info' if debug: level = 'debug' console_handler = dict(name='console', stream=sys.stderr, level=level, format=console_format) handlers.append(console_handler) # default_level must be debug becuase we want the file handler to # always log at the debug level. log_mgr.configure(dict(default_level='debug', handlers=handlers), configure_state='standard') return log_mgr.root_logger #------------------------------------------------------------------------------- # Single shared instance of log manager # # By default always starts with stderr console handler at error level # so messages generated before logging is fully configured have some # place to got and won't get lost. log_mgr = IPALogManager() log_mgr.configure(dict(default_level='error', handlers=[dict(name='console', stream=sys.stderr)]), configure_state='default') root_logger = log_mgr.root_logger<|fim▁end|>
config = {'logger_regexps' : logger_regexps, } for attr in ('debug', 'verbose'):
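Schematic example of the Env entries consumed by configure_from_env() above and the structure they produce (the real Env object comes from ipalib and is both iterable and attribute-addressable, so this is only an approximation):

# log_logger_level_debug = ipapython\.dn\..*
# log_logger_level_35    = ipalib\.plugins\.dogtag
#
# config = log_mgr.configure_from_env(env)
# config['logger_regexps'] == [(r'ipapython\.dn\..*', parse_log_level('debug')),
#                              (r'ipalib\.plugins\.dogtag', 35)]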
<|file_name|>gc.go<|end_file_name|><|fim▁begin|>package cli import ( "fmt" "github.com/dailymuse/git-fit/config" "github.com/dailymuse/git-fit/transport" "github.com/dailymuse/git-fit/util" "io/ioutil" "os" ) func Gc(schema *config.Config, trans transport.Transport, args []string) { savedFiles := make(map[string]bool, len(schema.Files)*2) for _, hash := range schema.Files { savedFiles[hash] = true } <|fim▁hole|> if err != nil { util.Fatal("Could not read .git/fit: %s\n", err.Error()) } for _, file := range allFiles { _, ok := savedFiles[file.Name()] if !ok { path := fmt.Sprintf(".git/fit/%s", file.Name()) err = os.Remove(path) if err != nil { util.Error("Could not delete cached file %s: %s\n", path, err.Error()) } } } }<|fim▁end|>
allFiles, err := ioutil.ReadDir(".git/fit")
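The completion reads the cache directory listing that the sweep then filters against the referenced hashes. The same idea in a few lines of Python (a sketch, not part of git-fit):

import os

def gc(saved_hashes, cache_dir='.git/fit'):
    # drop every cached blob whose name is no longer referenced by the schema
    for name in os.listdir(cache_dir):
        if name not in saved_hashes:
            os.remove(os.path.join(cache_dir, name))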
<|file_name|>artist.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi import colander import deform import hashlib from functools import reduce from zope.interface import implementer from substanced.schema import NameSchemaNode from substanced.content import content from substanced.util import get_oid from dace.descriptors import ( CompositeUniqueProperty, SharedUniqueProperty, SharedMultipleProperty) from dace.util import getSite from pontus.core import VisualisableElementSchema from pontus.widget import ( FileWidget ) from pontus.file import ObjectData from pontus.form import FileUploadTempStore from lac import _ from lac.core import ( VisualisableElement, SearchableEntity, SearchableEntitySchema, DuplicableEntity, ParticipativeEntity) from lac.content.interface import IArtistInformationSheet from lac.file import Image from lac.views.widget import RichTextWidget from lac.utilities.duplicates_utility import ( find_duplicates_artist) @colander.deferred def picture_widget(node, kw): context = node.bindings['context'] request = node.bindings['request'] tmpstore = FileUploadTempStore(request) source = None root = getSite() if context is not root: if context.picture: source = context.picture return FileWidget( tmpstore=tmpstore, source=source, file_type=['image'] ) def context_is_a_artist(context, request): return request.registry.content.istype(context, 'artist') class ArtistInformationSheetSchema(VisualisableElementSchema, SearchableEntitySchema): """Schema for artist""" name = NameSchemaNode( editing=context_is_a_artist, ) id = colander.SchemaNode( colander.String(), widget=deform.widget.HiddenWidget(), title=_('Id'), missing="" ) title = colander.SchemaNode( colander.String(), widget=deform.widget.HiddenWidget(), title=_('Title') ) description = colander.SchemaNode( colander.String(), widget=deform.widget.TextAreaWidget(rows=4, cols=60), title=_('Description'), missing="" ) biography = colander.SchemaNode( colander.String(), widget=RichTextWidget(), title=_("Biography"), missing="" ) picture = colander.SchemaNode( ObjectData(Image), widget=picture_widget, title=_('Picture'), required=False, missing=None, ) is_director = colander.SchemaNode( colander.Boolean(), widget=deform.widget.CheckboxWidget(), label=_('Is a director'), title='', default=False, missing=False ) origin_oid = colander.SchemaNode( colander.Int(), widget=deform.widget.HiddenWidget(), title=_('OID'), missing=0 ) def get_artist_data(artists, artist_schema): result = [] for artist in artists: artist_data = artist.get_data(artist_schema) if artist_data['picture']: picture = artist_data['picture'] artist_data['picture'] = picture.get_data(None) result.append(artist_data) return result @content( 'artist', icon='glyphicon glyphicon-align-left', ) @implementer(IArtistInformationSheet) class ArtistInformationSheet(VisualisableElement, DuplicableEntity, ParticipativeEntity, SearchableEntity): """Artist information sheet class""" type_title = _('Artist information sheet') icon = 'glyphicon glyphicon-star' templates = {'default': 'lac:views/templates/artist_result.pt', 'bloc': 'lac:views/templates/artist_result.pt', 'diff': 'lac:views/templates/diff_artist_template.pt', 'duplicates': 'lac:views/templates/artist_duplicates.pt'} picture = CompositeUniqueProperty('picture') author = SharedUniqueProperty('author', 'contents') creations = SharedMultipleProperty('creations', 'artists') productions = 
SharedMultipleProperty('productions', 'artists') def __init__(self, **kwargs): super(ArtistInformationSheet, self).__init__(**kwargs) self.hash_picture = None self.hash_artist = None<|fim▁hole|> def id(self): return self.get_id() def hash_picture_fp(self): if self.picture: m = hashlib.md5() picture_r = self.picture.fp.readall() self.picture.fp.seek(0) m.update(picture_r) self.hash_picture = m.digest() else: self.hash_picture = None @property def related_contents(self): result = list(self.creations) result.extend(list(self.productions)) return result @property def improved_artist(self): original = getattr(self, 'original', None) return original if original is not self else None def get_id(self): return str(get_oid(self, 0)) def replace_by(self, source): if self is not source: creations = source.creations productions = source.productions connections_to = source.connections_to for creation in self.creations: if creation not in creations: source.addtoproperty('creations', creation) creation.reindex() self.setproperty('creations', []) for production in self.productions: if production not in productions: source.addtoproperty('productions', production) production.reindex() self.setproperty('productions', []) for connection in self.connections_to: if connection not in connections_to: source.addtoproperty('connections_to', connection) self.setproperty('connections_to', []) for branch in self.branches: source.addtoproperty('branches', branch) original = self.original if original and original is not source: source.setproperty('original', original) self.setproperty('original', None) source.add_contributors(self.contributors) self.setproperty('branches', []) return True return False def reject(self): original = self.original if original: self.replace_by(original) def hash_artist_data(self): result = self.title result += getattr(self, 'description', '') result += getattr(self, 'biography', '') result += str(getattr(self, 'is_director', False)) result += str(self.hash_picture) result = result.replace(' ', '').strip() m = hashlib.md5() m.update(result.encode()) self.hash_artist = m.digest() def eq(self, other): hash_artist = getattr(self, 'hash_artist', None) other_hash_artist = getattr(other, 'hash_artist', None) if hash_artist != other_hash_artist: return False return True def get_more_contents_criteria(self): "return specific query, filter values" artists = reduce(lambda result, x: result + getattr(x, 'artists', []), filter(lambda x: 'published' in x.state, self.creations), []) artists = filter(lambda x: 'published' in x.state, artists) return None, {'objects': set(artists)} def get_duplicates(self, states=('published', )): return find_duplicates_artist(self, states)<|fim▁end|>
self.hash_picture_fp() self.hash_artist_data() @property
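The completion calls hash_picture_fp() and hash_artist_data() at the end of __init__, so eq() reduces to comparing md5 digests. The fingerprint, restated as a standalone Python sketch:

import hashlib

def artist_digest(title, description='', biography='', is_director=False, hash_picture=None):
    blob = title + description + biography + str(is_director) + str(hash_picture)
    blob = blob.replace(' ', '').strip()
    return hashlib.md5(blob.encode()).digest()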
<|file_name|>redirect.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.urlresolvers import reverse from django.utils.translation import ugettext as _ from django.utils.http import urlencode from django.contrib.auth import authenticate from django.http import ( HttpResponse, HttpResponseBadRequest, HttpResponseForbidden) from django.core.exceptions import ValidationError from django.views.decorators.http import require_http_methods from urlparse import urlunsplit, urlsplit, parse_qsl from astakos.im.util import restrict_next from astakos.im.user_utils import login as auth_login, logout from astakos.im.views.decorators import cookie_fix import astakos.im.messages as astakos_messages from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES import logging logger = logging.getLogger(__name__) @require_http_methods(["GET"]) @cookie_fix def login(request): """ If there is no ``next`` request parameter redirects to astakos index page displaying an error message. If the request user is authenticated and has signed the approval terms, redirects to `next` request parameter. If not, redirects to approval terms in order to return back here after agreeing with the terms. Otherwise, redirects to login in order to return back here after successful login. 
""" next = request.GET.get('next') if not next: return HttpResponseBadRequest('Missing next parameter') if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES): return HttpResponseForbidden(_( astakos_messages.NOT_ALLOWED_NEXT_PARAM)) force = request.GET.get('force', None) response = HttpResponse() if force == '' and request.user.is_authenticated(): logout(request) if request.user.is_authenticated(): # if user has not signed the approval terms # redirect to approval terms with next the request path if not request.user.signed_terms: # first build next parameter parts = list(urlsplit(request.build_absolute_uri())) params = dict(parse_qsl(parts[3], keep_blank_values=True)) parts[3] = urlencode(params) next = urlunsplit(parts) # build url location parts[2] = reverse('latest_terms') params = {'next': next} parts[3] = urlencode(params) url = urlunsplit(parts) response['Location'] = url response.status_code = 302 return response renew = request.GET.get('renew', None) if renew == '': request.user.renew_token( flush_sessions=True, current_key=request.session.session_key ) try: request.user.save() except ValidationError, e: return HttpResponseBadRequest(e) # authenticate before login user = authenticate( username=request.user.username, auth_token=request.user.auth_token ) auth_login(request, user) logger.info('Token reset for %s' % user.username) parts = list(urlsplit(next)) parts[3] = urlencode({ 'uuid': request.user.uuid, 'token': request.user.auth_token }) url = urlunsplit(parts) response['Location'] = url response.status_code = 302 return response else: # redirect to login with next the request path # first build next parameter parts = list(urlsplit(request.build_absolute_uri())) params = dict(parse_qsl(parts[3], keep_blank_values=True)) # delete force parameter if 'force' in params: del params['force'] parts[3] = urlencode(params) next = urlunsplit(parts) # build url location parts[2] = reverse('login') params = {'next': next} parts[3] = urlencode(params) url = urlunsplit(parts) response['Location'] = url response.status_code = 302<|fim▁hole|><|fim▁end|>
return response
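The view above leans on restrict_next() to reject disallowed redirect targets before echoing them back. A minimal stand-in with the same shape (the real astakos helper also honours configured allowed domains):

try:
    from urllib.parse import urlsplit   # Python 3
except ImportError:
    from urlparse import urlsplit       # Python 2, as in the module above

def restrict_next(next_url, allowed_schemes=('http', 'https')):
    # relative URLs pass; absolute URLs must use a whitelisted scheme
    scheme = urlsplit(next_url).scheme
    return (not scheme) or (scheme in allowed_schemes)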
<|file_name|>home.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { CommonModule } from '@angular/common'; import { SharedModule } from '../shared/shared.module'; import { HomeComponent } from './home.component'; import { NameListService } from '../shared/name-list/index'; <|fim▁hole|> exports: [HomeComponent], providers: [NameListService] }) export class HomeModule { }<|fim▁end|>
@NgModule({ imports: [CommonModule, SharedModule], declarations: [HomeComponent],
<|file_name|>disk_usage_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Debugger Wrapper Session Consisting of a Local Curses-based CLI.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import tempfile from tensorflow.python.client import session from tensorflow.python.debug.wrappers import dumping_wrapper from tensorflow.python.debug.wrappers import hooks from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import state_ops from tensorflow.python.ops import variables from tensorflow.python.platform import googletest from tensorflow.python.training import monitored_session @test_util.run_deprecated_v1 class DumpingDebugWrapperDiskUsageLimitTest(test_util.TensorFlowTestCase): @classmethod def setUpClass(cls): # For efficient testing, set the disk usage bytes limit to a small # number (10). os.environ["TFDBG_DISK_BYTES_LIMIT"] = "10" def setUp(self): self.session_root = tempfile.mkdtemp() self.v = variables.Variable(10.0, dtype=dtypes.float32, name="v") self.delta = constant_op.constant(1.0, dtype=dtypes.float32, name="delta") self.eta = constant_op.constant(-1.4, dtype=dtypes.float32, name="eta") self.inc_v = state_ops.assign_add(self.v, self.delta, name="inc_v") self.dec_v = state_ops.assign_add(self.v, self.eta, name="dec_v") self.sess = session.Session() self.sess.run(self.v.initializer)<|fim▁hole|> return "DebugIdentity", r"(.*delta.*|.*inc_v.*)", r".*" sess = dumping_wrapper.DumpingDebugWrapperSession( self.sess, session_root=self.session_root, watch_fn=_watch_fn, log_usage=False) sess.run(self.inc_v) def testWrapperSessionExceedingLimit(self): def _watch_fn(fetches, feeds): del fetches, feeds return "DebugIdentity", r".*delta.*", r".*" sess = dumping_wrapper.DumpingDebugWrapperSession( self.sess, session_root=self.session_root, watch_fn=_watch_fn, log_usage=False) # Due to the watch function, each run should dump only 1 tensor, # which has a size of 4 bytes, which corresponds to the dumped 'delta:0' # tensor of scalar shape and float32 dtype. # 1st run should pass, after which the disk usage is at 4 bytes. sess.run(self.inc_v) # 2nd run should also pass, after which 8 bytes are used. 
sess.run(self.inc_v) # 3rd run should fail, because the total byte count (12) exceeds the # limit (10) with self.assertRaises(ValueError): sess.run(self.inc_v) def testHookNotExceedingLimit(self): def _watch_fn(fetches, feeds): del fetches, feeds return "DebugIdentity", r".*delta.*", r".*" dumping_hook = hooks.DumpingDebugHook( self.session_root, watch_fn=_watch_fn, log_usage=False) mon_sess = monitored_session._HookedSession(self.sess, [dumping_hook]) mon_sess.run(self.inc_v) def testHookExceedingLimit(self): def _watch_fn(fetches, feeds): del fetches, feeds return "DebugIdentity", r".*delta.*", r".*" dumping_hook = hooks.DumpingDebugHook( self.session_root, watch_fn=_watch_fn, log_usage=False) mon_sess = monitored_session._HookedSession(self.sess, [dumping_hook]) # Like in `testWrapperSessionExceedingLimit`, the first two calls # should be within the byte limit, but the third one should error # out due to exceeding the limit. mon_sess.run(self.inc_v) mon_sess.run(self.inc_v) with self.assertRaises(ValueError): mon_sess.run(self.inc_v) if __name__ == "__main__": googletest.main()<|fim▁end|>
def testWrapperSessionNotExceedingLimit(self): def _watch_fn(fetches, feeds): del fetches, feeds
<|file_name|>cclib2openbabel.py<|end_file_name|><|fim▁begin|># This file is part of cclib (http://cclib.github.io), a library for parsing # and interpreting the results of computational chemistry packages. # # Copyright (C) 2006, the cclib development team # # The library is free software, distributed under the terms of # the GNU Lesser General Public version 2.1 or later. You should have # received a copy of the license along with cclib. You can also access # the full license online at http://www.gnu.org/copyleft/lgpl.html. <|fim▁hole|> def makeopenbabel(atomcoords, atomnos, charge=0, mult=1): """Create an Open Babel molecule. >>> import numpy, openbabel >>> atomnos = numpy.array([1,8,1],"i") >>> coords = numpy.array([[-1.,1.,0.],[0.,0.,0.],[1.,1.,0.]]) >>> obmol = makeopenbabel(coords, atomnos) >>> obconversion = openbabel.OBConversion() >>> formatok = obconversion.SetOutFormat("inchi") >>> print obconversion.WriteString(obmol).strip() InChI=1/H2O/h1H2 """ obmol = ob.OBMol() for i in range(len(atomnos)): # Note that list(atomcoords[i]) is not equivalent!!! coords = atomcoords[i].tolist() atomno = int(atomnos[i]) obatom = ob.OBAtom() obatom.SetAtomicNum(atomno) obatom.SetVector(*coords) obmol.AddAtom(obatom) obmol.ConnectTheDots() obmol.PerceiveBondOrders() obmol.SetTotalSpinMultiplicity(mult) obmol.SetTotalCharge(charge) return obmol if __name__ == "__main__": import doctest doctest.testmod()<|fim▁end|>
"""Bridge for using cclib data in openbabel (http://openbabel.org).""" import openbabel as ob
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import { autocompleteChannels, AutocompleteSuggestion, autocompleteUsersInChannel, Channel, COMMAND_SUGGESTION_CHANNEL, COMMAND_SUGGESTION_USER, DispatchFunc, GlobalState, UserAutocomplete, UserProfile, } from '@components/autocomplete/slash_suggestion/app_command_parser/app_command_parser_dependencies'; <|fim▁hole|>} export async function inTextMentionSuggestions(pretext: string, store: Store, channelID: string, teamID: string, delimiter = ''): Promise<AutocompleteSuggestion[] | null> { const separatedWords = pretext.split(' '); const incompleteLessLastWord = separatedWords.slice(0, -1).join(' '); const lastWord = separatedWords[separatedWords.length - 1]; if (lastWord.startsWith('@')) { const {data} = await store.dispatch(autocompleteUsersInChannel(lastWord.substring(1), channelID)); const users = await getUserSuggestions(data); users.forEach((u) => { let complete = incompleteLessLastWord ? incompleteLessLastWord + ' ' + u.Complete : u.Complete; if (delimiter) { complete = delimiter + complete; } u.Complete = complete; }); return users; } if (lastWord.startsWith('~') && !lastWord.startsWith('~~')) { const {data} = await store.dispatch(autocompleteChannels(teamID, lastWord.substring(1))); const channels = await getChannelSuggestions(data); channels.forEach((c) => { let complete = incompleteLessLastWord ? incompleteLessLastWord + ' ' + c.Complete : c.Complete; if (delimiter) { complete = delimiter + complete; } c.Complete = complete; }); return channels; } return null; } export async function getUserSuggestions(usersAutocomplete?: UserAutocomplete): Promise<AutocompleteSuggestion[]> { const notFoundSuggestions = [{ Complete: '', Suggestion: '', Description: 'No user found', Hint: '', IconData: '', }]; if (!usersAutocomplete) { return notFoundSuggestions; } if (!usersAutocomplete.users.length && !usersAutocomplete.out_of_channel?.length) { return notFoundSuggestions; } const items: AutocompleteSuggestion[] = []; usersAutocomplete.users.forEach((u) => { items.push(getUserSuggestion(u)); }); usersAutocomplete.out_of_channel?.forEach((u) => { items.push(getUserSuggestion(u)); }); return items; } export async function getChannelSuggestions(channels?: Channel[]): Promise<AutocompleteSuggestion[]> { const notFoundSuggestion = [{ Complete: '', Suggestion: '', Description: 'No channel found', Hint: '', IconData: '', }]; if (!channels) { return notFoundSuggestion; } if (!channels.length) { return notFoundSuggestion; } const items = channels.map((c) => { return { Complete: '~' + c.name, Suggestion: '', Description: '', Hint: '', IconData: '', type: COMMAND_SUGGESTION_CHANNEL, item: c.id, }; }); return items; } function getUserSuggestion(u: UserProfile) { return { Complete: '@' + u.username, Suggestion: '', Description: '', Hint: '', IconData: '', type: COMMAND_SUGGESTION_USER, item: u.id, }; }<|fim▁end|>
interface Store { dispatch: DispatchFunc; getState: () => GlobalState;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 3 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # <pep8 compliant> bl_info = { "name": "Rigacar (Generates Car Rig)", "author": "David Gayerie", "version": (7, 0), "blender": (2, 83, 0), "location": "View3D > Add > Armature", "description": "Adds a deformation rig for vehicles, generates the animation rig and bakes wheels animation.", "wiki_url": "http://digicreatures.net/articles/rigacar.html", "tracker_url": "https://github.com/digicreatures/rigacar/issues", "category": "Rigging"} if "bpy" in locals(): import importlib if "bake_operators" in locals(): importlib.reload(bake_operators) if "car_rig" in locals(): importlib.reload(car_rig) if "widgets" in locals(): importlib.reload(widgets) else: import bpy from . import bake_operators from . import car_rig def enumerate_ground_sensors(bones): bone = bones.get('GroundSensor.Axle.Ft') if bone is not None: yield bone for bone in bones: if bone.name.startswith('GroundSensor.Ft'): yield bone bone = bones.get('GroundSensor.Axle.Bk') if bone is not None: yield bone for bone in bones: if bone.name.startswith('GroundSensor.Bk'): yield bone class RIGACAR_PT_mixin: def __init__(self): self.layout.use_property_split = True self.layout.use_property_decorate = False @classmethod def is_car_rig(cls, context): return context.object is not None and context.object.data is not None and 'Car Rig' in context.object.data @classmethod def is_car_rig_generated(cls, context): return cls.is_car_rig(context) and context.object.data['Car Rig'] def display_generate_section(self, context): self.layout.operator(car_rig.POSE_OT_carAnimationRigGenerate.bl_idname, text='Generate') def display_bake_section(self, context): self.layout.operator(bake_operators.ANIM_OT_carSteeringBake.bl_idname) self.layout.operator(bake_operators.ANIM_OT_carWheelsRotationBake.bl_idname) self.layout.operator(bake_operators.ANIM_OT_carClearSteeringWheelsRotation.bl_idname) def display_rig_props_section(self, context): layout = self.layout.column() layout.prop(context.object, '["wheels_on_y_axis"]', text="Wheels on Y axis") layout.prop(context.object, '["suspension_factor"]', text="Pitch factor") layout.prop(context.object, '["suspension_rolling_factor"]', text="Roll factor") def display_ground_sensors_section(self, context):<|fim▁hole|> ground_projection_constraint = ground_sensor.constraints.get('Ground projection') self.layout.label(text=ground_sensor.name, icon='BONE_DATA') if ground_projection_constraint is not None: self.layout.prop(ground_projection_constraint, 'target', text='Ground') if ground_projection_constraint.target is not None: self.layout.prop(ground_projection_constraint, 'shrinkwrap_type') if ground_projection_constraint.shrinkwrap_type == 'PROJECT': self.layout.prop(ground_projection_constraint, 
'project_limit') self.layout.prop(ground_projection_constraint, 'influence') ground_projection_limit_constraint = ground_sensor.constraints.get('Ground projection limitation') if ground_projection_limit_constraint is not None: self.layout.prop(ground_projection_limit_constraint, 'min_z', text='Min local Z') self.layout.prop(ground_projection_limit_constraint, 'max_z', text='Max local Z') self.layout.separator() class RIGACAR_PT_rigProperties(bpy.types.Panel, RIGACAR_PT_mixin): bl_label = "Rigacar" bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_context = "data" bl_options = {'DEFAULT_CLOSED'} @classmethod def poll(cls, context): return RIGACAR_PT_mixin.is_car_rig(context) def draw(self, context): if RIGACAR_PT_mixin.is_car_rig_generated(context): self.display_rig_props_section(context) self.layout.separator() self.display_bake_section(context) else: self.display_generate_section(context) class RIGACAR_PT_groundSensorsProperties(bpy.types.Panel, RIGACAR_PT_mixin): bl_label = "Ground Sensors" bl_parent_id = "RIGACAR_PT_rigProperties" bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_context = "data" bl_options = {'DEFAULT_CLOSED'} @classmethod def poll(cls, context): return RIGACAR_PT_mixin.is_car_rig_generated(context) def draw(self, context): self.display_ground_sensors_section(context) class RIGACAR_PT_animationRigView(bpy.types.Panel, RIGACAR_PT_mixin): bl_category = "Rigacar" bl_label = "Animation Rig" bl_space_type = "VIEW_3D" bl_region_type = "UI" @classmethod def poll(cls, context): return RIGACAR_PT_mixin.is_car_rig(context) def draw(self, context): if RIGACAR_PT_mixin.is_car_rig_generated(context): self.display_rig_props_section(context) else: self.display_generate_section(context) class RIGACAR_PT_wheelsAnimationView(bpy.types.Panel, RIGACAR_PT_mixin): bl_category = "Rigacar" bl_label = "Wheels animation" bl_space_type = "VIEW_3D" bl_region_type = "UI" @classmethod def poll(cls, context): return RIGACAR_PT_mixin.is_car_rig_generated(context) def draw(self, context): self.display_bake_section(context) class RIGACAR_PT_groundSensorsView(bpy.types.Panel, RIGACAR_PT_mixin): bl_category = "Rigacar" bl_label = "Ground Sensors" bl_space_type = "VIEW_3D" bl_region_type = "UI" bl_options = {'DEFAULT_CLOSED'} @classmethod def poll(cls, context): return RIGACAR_PT_mixin.is_car_rig_generated(context) def draw(self, context): self.display_ground_sensors_section(context) def menu_entries(menu, context): menu.layout.operator(car_rig.OBJECT_OT_armatureCarDeformationRig.bl_idname, text="Car (deformation rig)", icon='AUTO') classes = ( RIGACAR_PT_rigProperties, RIGACAR_PT_groundSensorsProperties, RIGACAR_PT_animationRigView, RIGACAR_PT_wheelsAnimationView, RIGACAR_PT_groundSensorsView, ) def register(): bpy.types.VIEW3D_MT_armature_add.append(menu_entries) for c in classes: bpy.utils.register_class(c) car_rig.register() bake_operators.register() def unregister(): bake_operators.unregister() car_rig.unregister() for c in classes: bpy.utils.unregister_class(c) bpy.types.VIEW3D_MT_armature_add.remove(menu_entries) if __name__ == "__main__": register()<|fim▁end|>
for ground_sensor in enumerate_ground_sensors(context.object.pose.bones):
<|file_name|>message_log.rs<|end_file_name|><|fim▁begin|>use std::{env, process::exit, time::Duration}; use assign::assign; use ruma::{ api::client::{filter::FilterDefinition, sync::sync_events}, events::{ room::message::{MessageType, RoomMessageEventContent, TextMessageEventContent}, AnySyncMessageLikeEvent, AnySyncRoomEvent, SyncMessageLikeEvent, }, presence::PresenceState, }; use tokio_stream::StreamExt as _; type HttpClient = ruma::client::http_client::HyperNativeTls; async fn log_messages( homeserver_url: String, username: &str, password: &str, ) -> anyhow::Result<()> { let client = ruma::Client::builder().homeserver_url(homeserver_url).build::<HttpClient>().await?; client.log_in(username, password, None, None).await?; let filter = FilterDefinition::ignore_all().into(); let initial_sync_response = client .send_request(assign!(sync_events::v3::Request::new(), { filter: Some(&filter), })) .await?; let mut sync_stream = Box::pin(client.sync( None, initial_sync_response.next_batch, &PresenceState::Online, Some(Duration::from_secs(30)), )); while let Some(res) = sync_stream.try_next().await? { // Only look at rooms the user hasn't left yet for (room_id, room) in res.rooms.join { for event in room.timeline.events.into_iter().flat_map(|r| r.deserialize()) { // Filter out the text messages if let AnySyncRoomEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage( SyncMessageLikeEvent { content: RoomMessageEventContent { msgtype: MessageType::Text(TextMessageEventContent { body: msg_body, .. }), .. }, sender, .. }, )) = event {<|fim▁hole|> println!("{:?} in {:?}: {}", sender, room_id, msg_body); } } } } Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> anyhow::Result<()> { let (homeserver_url, username, password) = match (env::args().nth(1), env::args().nth(2), env::args().nth(3)) { (Some(a), Some(b), Some(c)) => (a, b, c), _ => { eprintln!( "Usage: {} <homeserver_url> <username> <password>", env::args().next().unwrap() ); exit(1) } }; log_messages(homeserver_url, &username, &password).await }<|fim▁end|>
<|file_name|>variables_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tf.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function <|fim▁hole|>import functools import operator from absl.testing import parameterized import numpy as np from tensorflow.python.eager import context from tensorflow.python.eager import function from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors_impl from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gen_state_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import gradient_descent from tensorflow.python.util import compat class VariablesTestCase(test.TestCase, parameterized.TestCase): @test_util.run_deprecated_v1 def testDistributeStrategy(self): v = variables.VariableV1(0.0) self.assertIsNone(v._distribute_strategy) @test_util.run_v1_only("b/120545219") def testInitialization(self): with self.cached_session(): var0 = variables.VariableV1(0.0) self.assertEqual("Variable:0", var0.name) self.assertEqual("Variable", var0._shared_name) self.assertEqual([], var0.get_shape()) self.assertEqual([], var0.get_shape()) self.assertEqual([], var0.shape) var1 = variables.VariableV1(1.1) self.assertEqual("Variable_1:0", var1.name) self.assertEqual("Variable_1", var1._shared_name) self.assertEqual([], var1.get_shape()) self.assertEqual([], var1.get_shape()) self.assertEqual([], var1.shape) with self.assertRaisesOpError("Attempting to use uninitialized value"): self.evaluate(var0) with self.assertRaisesOpError("Attempting to use uninitialized value"): self.evaluate(var1) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(0.0, self.evaluate(var0)) self.assertAllClose(1.1, self.evaluate(var1)) @test_util.run_v1_only("b/120545219") def testInitializationOrder(self): with self.cached_session(): rnd = variables.Variable(random_ops.random_uniform([3, 6]), name="rnd") self.assertEqual("rnd:0", rnd.name) self.assertEqual([3, 6], rnd.get_shape()) self.assertEqual([3, 6], rnd.get_shape()) self.assertEqual([3, 6], rnd.shape) dep = variables.Variable(rnd.initialized_value(), name="dep") self.assertEqual("dep:0", dep.name) self.assertEqual([3, 6], dep.get_shape()) self.assertEqual([3, 6], dep.get_shape()) self.assertEqual([3, 6], dep.shape) # Currently have to set the shape manually for Add. 
added_val = rnd.initialized_value() + dep.initialized_value() + 2.0 added_val.set_shape(rnd.get_shape()) depdep = variables.Variable(added_val, name="depdep") self.assertEqual("depdep:0", depdep.name) self.assertEqual([3, 6], depdep.get_shape()) self.assertEqual([3, 6], depdep.get_shape()) self.assertEqual([3, 6], depdep.shape) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(self.evaluate(rnd), self.evaluate(dep)) self.assertAllClose( self.evaluate(rnd) + self.evaluate(dep) + 2.0, self.evaluate(depdep)) @test_util.run_deprecated_v1 def testCyclicInitializer(self): with self.cached_session(): cyclic = control_flow_ops.while_loop( cond=lambda i: i < 10, body=lambda i: i + 1, loop_vars=(constant_op.constant(0),)) initial_value = variables._try_guard_against_uninitialized_dependencies( "test", cyclic) self.assertIs(initial_value, cyclic) def testIterable(self): with self.assertRaisesRegex(TypeError, "not iterable"): for _ in variables.Variable(0.0): pass with self.assertRaisesRegex(TypeError, "not iterable"): for _ in variables.Variable([0.0, 1.0]): pass @test_util.run_deprecated_v1 def testAssignments(self): with self.cached_session(): var = variables.Variable(0.0) plus_one = var.assign_add(1.0) minus_one = var.assign_sub(2.0) four = var.assign(4.0) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(0.0, self.evaluate(var)) self.assertAllClose(1.0, self.evaluate(plus_one)) self.assertAllClose(1.0, self.evaluate(var)) self.assertAllClose(-1.0, self.evaluate(minus_one)) self.assertAllClose(-1.0, self.evaluate(var)) self.assertAllClose(4.0, self.evaluate(four)) self.assertAllClose(4.0, self.evaluate(var)) @test_util.run_deprecated_v1 def testResourceAssignments(self): with self.session(): var = resource_variable_ops.ResourceVariable(0.0) plus_one = var.assign_add(1.0) minus_one = var.assign_sub(2.0) four = var.assign(4.0) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(0.0, self.evaluate(var)) self.evaluate(plus_one) self.assertAllClose(1.0, self.evaluate(var)) self.evaluate(minus_one) self.assertAllClose(-1.0, self.evaluate(var)) self.evaluate(four) self.assertAllClose(4.0, self.evaluate(var)) def testAssignDifferentShapesEagerNotAllowed(self): with context.eager_mode(): var = variables.Variable(np.zeros(shape=[1, 1])) with self.assertRaisesRegex(ValueError, "shape.*and.*are incompatible"): var.assign(np.zeros(shape=[2, 2])) @test_util.run_in_graph_and_eager_modes def testAssignDifferentShapesAllowed(self): var = variables.Variable(np.zeros(shape=[1, 1]), shape=tensor_shape.TensorShape(None)) self.evaluate(variables.global_variables_initializer()) self.assertAllEqual(np.zeros(shape=[1, 1]), var.read_value()) self.evaluate(var.assign(np.zeros(shape=[2, 2]))) self.assertAllEqual(np.zeros(shape=[2, 2]), var.read_value()) def testZeroSizeStringAssign(self): with self.cached_session() as sess: array = variables.VariableV1( initial_value=array_ops.zeros((0,), dtype=dtypes.string), name="foo", trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES]) self.evaluate(variables.local_variables_initializer()) old_value = array.value() copy_op = array.assign(old_value) self.assertEqual([], list(self.evaluate(copy_op))) def _countUpToTest(self, dtype): with self.cached_session(): zero = constant_op.constant(0, dtype=dtype) var = variables.Variable(zero) count_up_to = var.count_up_to(3) self.evaluate(variables.global_variables_initializer()) self.assertEqual(0, self.evaluate(var)) self.assertEqual(0, 
self.evaluate(count_up_to)) self.assertEqual(1, self.evaluate(var)) self.assertEqual(1, self.evaluate(count_up_to)) self.assertEqual(2, self.evaluate(var)) self.assertEqual(2, self.evaluate(count_up_to)) self.assertEqual(3, self.evaluate(var)) with self.assertRaisesOpError("Reached limit of 3"): self.evaluate(count_up_to) self.assertEqual(3, self.evaluate(var)) with self.assertRaisesOpError("Reached limit of 3"): self.evaluate(count_up_to) self.assertEqual(3, self.evaluate(var)) @test_util.run_deprecated_v1 def testCountUpToInt32(self): self._countUpToTest(dtypes.int32) @test_util.run_deprecated_v1 def testCountUpToInt64(self): self._countUpToTest(dtypes.int64) @test_util.run_v1_only("b/120545219") def testControlDepsNone(self): with self.cached_session(): c = constant_op.constant(1.0) with ops.control_dependencies([c]): # d get the control dep. d = constant_op.constant(2.0) # variables do not. var_x = variables.VariableV1(2.0) self.assertEqual([c.op], d.op.control_inputs) self.assertEqual([], var_x.initializer.control_inputs) self.assertEqual([], var_x.value().op.control_inputs) self.assertEqual([], var_x._ref().op.control_inputs) # pylint: disable=protected-access @test_util.run_v1_only("b/120545219") def testControlFlow(self): with self.cached_session() as sess: v0 = variables.Variable(0, name="v0") var_dict = {} # Call get_variable in each of the cond clauses. def var_in_then_clause(): v1 = variables.Variable(1, name="v1") var_dict["v1"] = v1 return v1 + v0 def var_in_else_clause(): v2 = variables.Variable(2, name="v2") var_dict["v2"] = v2 return v2 + v0 add = control_flow_ops.cond( math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause) v1 = var_dict["v1"] v2 = var_dict["v2"] # We should be able to initialize and run v1 and v2 without initializing # v0, even if the variable was created with a control dep on v0. self.evaluate(v1.initializer) self.assertEqual([1], self.evaluate(v1)) self.evaluate(v2.initializer) self.assertEqual([2], self.evaluate(v2)) # v0 should still be uninitialized. with self.assertRaisesRegex(errors_impl.OpError, "uninitialized"): self.evaluate(v0) # We should not be able to run 'add' yet. with self.assertRaisesRegex(errors_impl.OpError, "uninitialized"): self.evaluate(add) # If we initialize v0 we should be able to run 'add'. 
self.evaluate(v0.initializer) self.evaluate(add) @test_util.run_v1_only("b/120545219") def testControlFlowInitialization(self): """Expects an error if an initializer is in a control-flow scope.""" def cond(i, _): return i < 10 def body(i, _): zero = array_ops.zeros([], dtype=dtypes.int32) v = variables.Variable(initial_value=zero) return (i + 1, v.read_value()) with self.assertRaisesRegex(ValueError, "inside a control-flow"): control_flow_ops.while_loop(cond, body, [0, 0]) @test_util.run_deprecated_v1 def testUseVariableAsTensor(self): with self.cached_session(): var_x = variables.Variable(2.0) var_y = variables.Variable(3.0) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(2.0, self.evaluate(var_x)) self.assertAllClose(3.0, self.evaluate(var_y)) self.assertAllClose(5.0, self.evaluate(math_ops.add(var_x, var_y))) @test_util.run_deprecated_v1 def testZeroSizeVarSameAsConst(self): with self.cached_session(): zero_size_var = variables.Variable(array_ops.zeros([0, 2])) zero_size_const = array_ops.ones([2, 0]) variable_mul = math_ops.matmul(zero_size_const, zero_size_var) const_mul = math_ops.matmul( zero_size_const, zero_size_const, transpose_b=True) self.evaluate(variables.global_variables_initializer()) variable_output = self.evaluate(variable_mul) self.assertAllClose(self.evaluate(const_mul), variable_output) self.assertAllClose([[0., 0.], [0., 0.]], variable_output) @test_util.run_deprecated_v1 def testCachingDevice(self): with self.cached_session(): var = variables.Variable(2.0) self.assertEqual(var.device, var.initialized_value().device) var_cached = variables.Variable(2.0, caching_device="/job:foo") self.assertFalse(var_cached.device.startswith("/job:foo")) self.assertTrue(var_cached.value().device.startswith("/job:foo")) @test_util.run_deprecated_v1 def testCollections(self): with self.cached_session(): var_x = variables.VariableV1(2.0) var_y = variables.VariableV1(2.0, trainable=False) var_z = variables.VariableV1(2.0, trainable=True) var_t = variables.VariableV1( 2.0, trainable=True, collections=[ ops.GraphKeys.TRAINABLE_VARIABLES, ops.GraphKeys.GLOBAL_VARIABLES ]) self.assertEqual([var_x, var_y, var_z, var_t], variables.global_variables()) self.assertEqual([var_x, var_z, var_t], variables.trainable_variables()) @test_util.run_deprecated_v1 def testCollectionsWithScope(self): with self.cached_session(): with ops.name_scope("scope_1"): var_x = variables.VariableV1(2.0) with ops.name_scope("scope_2"): var_y = variables.VariableV1(2.0) self.assertEqual([var_x, var_y], variables.global_variables()) self.assertEqual([var_x], variables.global_variables("scope_1")) self.assertEqual([var_y], variables.global_variables("scope_2")) self.assertEqual([var_x, var_y], variables.trainable_variables()) self.assertEqual([var_x], variables.trainable_variables("scope_1")) self.assertEqual([var_y], variables.trainable_variables("scope_2")) def testOperatorWrapping(self): for attr in functools.WRAPPER_ASSIGNMENTS: self.assertEqual( getattr(variables.Variable.__add__, attr), getattr(ops.Tensor.__add__, attr)) @test_util.run_deprecated_v1 def testOperators(self): with self.cached_session(): var_f = variables.Variable([2.0]) add = var_f + 0.0 radd = 1.0 + var_f sub = var_f - 1.0 rsub = 1.0 - var_f mul = var_f * 10.0 rmul = 10.0 * var_f div = var_f / 10.0 rdiv = 10.0 / var_f lt = var_f < 3.0 rlt = 3.0 < var_f le = var_f <= 2.0 rle = 2.0 <= var_f gt = var_f > 3.0 rgt = 3.0 > var_f ge = var_f >= 2.0 rge = 2.0 >= var_f neg = -var_f abs_v = abs(var_f) var_i = variables.Variable([20]) 
mod = var_i % 7 rmod = 103 % var_i var_b = variables.Variable([True, False]) and_v = operator.and_(var_b, [True, True]) or_v = operator.or_(var_b, [False, True]) xor_v = operator.xor(var_b, [False, False]) invert_v = ~var_b rnd = np.random.rand(4, 4).astype("f") var_t = variables.Variable(rnd) slice_v = var_t[2, 0:0] var_m = variables.Variable([[2.0, 3.0]]) matmul = var_m.__matmul__([[10.0], [20.0]]) rmatmul = var_m.__rmatmul__([[10.0], [20.0]]) self.evaluate(variables.global_variables_initializer()) self.assertAllClose([2.0], self.evaluate(add)) self.assertAllClose([3.0], self.evaluate(radd)) self.assertAllClose([1.0], self.evaluate(sub)) self.assertAllClose([-1.0], self.evaluate(rsub)) self.assertAllClose([20.0], self.evaluate(mul)) self.assertAllClose([20.0], self.evaluate(rmul)) self.assertAllClose([0.2], self.evaluate(div)) self.assertAllClose([5.0], self.evaluate(rdiv)) self.assertAllClose([-2.0], self.evaluate(neg)) self.assertAllClose([2.0], self.evaluate(abs_v)) self.assertAllClose([True], self.evaluate(lt)) self.assertAllClose([False], self.evaluate(rlt)) self.assertAllClose([True], self.evaluate(le)) self.assertAllClose([True], self.evaluate(rle)) self.assertAllClose([False], self.evaluate(gt)) self.assertAllClose([True], self.evaluate(rgt)) self.assertAllClose([True], self.evaluate(ge)) self.assertAllClose([True], self.evaluate(rge)) self.assertAllClose([6], self.evaluate(mod)) self.assertAllClose([3], self.evaluate(rmod)) self.assertAllClose([True, False], self.evaluate(and_v)) self.assertAllClose([True, True], self.evaluate(or_v)) self.assertAllClose([True, False], self.evaluate(xor_v)) self.assertAllClose([False, True], self.evaluate(invert_v)) self.assertAllClose(rnd[2, 0:0], self.evaluate(slice_v)) self.assertAllClose([[80.0]], self.evaluate(matmul)) self.assertAllClose([[20.0, 30.0], [40.0, 60.0]], self.evaluate(rmatmul)) @test_util.run_deprecated_v1 def testSession(self): with self.cached_session() as sess: var = variables.Variable([1, 12]) self.evaluate(variables.global_variables_initializer()) self.assertAllClose([1, 12], self.evaluate(var)) @test_util.run_v1_only("b/120545219") def testColocation(self): with ops.device("/job:ps"): var = variables.VariableV1(0, name="v") with ops.device("/job:worker/task:7"): assign_op = var.assign(1) self.assertDeviceEqual("/job:ps", assign_op.device) self.assertEqual([b"loc:@v"], assign_op.op.colocation_groups()) @test_util.run_v1_only("b/120545219") def testInitializerFunction(self): value = [[-42], [133.7]] shape = [2, 1] with self.cached_session(): initializer = lambda: constant_op.constant(value) v1 = variables.Variable(initializer, dtype=dtypes.float32) self.assertEqual(shape, v1.get_shape()) self.assertEqual(shape, v1.shape) self.assertAllClose(value, self.evaluate(v1.initial_value)) with self.assertRaises(errors_impl.FailedPreconditionError): self.evaluate(v1) v2 = variables.Variable( math_ops.negative(v1.initialized_value()), dtype=dtypes.float32) self.assertEqual(v1.get_shape(), v2.get_shape()) self.assertEqual(v1.shape, v2.shape) self.assertAllClose(np.negative(value), self.evaluate(v2.initial_value)) with self.assertRaises(errors_impl.FailedPreconditionError): self.evaluate(v2) self.evaluate(variables.global_variables_initializer()) self.assertAllClose(np.negative(value), self.evaluate(v2)) def testConstraintArg(self): constraint = lambda x: x v = variables.Variable( lambda: constant_op.constant(1.), constraint=constraint) self.assertEqual(v.constraint, constraint) constraint = 0 with self.assertRaises(ValueError): v = 
variables.Variable( lambda: constant_op.constant(1.), constraint=constraint) @test_util.run_v1_only("b/120545219") def testNoRefDataRace(self): with self.cached_session(): a = variables.Variable([1, 2, 3], dtype=dtypes.float32) b = variables.Variable(a.initialized_value() + 2) c = variables.Variable(b.initialized_value() + 2) self.evaluate(variables.global_variables_initializer()) self.assertAllEqual(self.evaluate(a), [1, 2, 3]) self.assertAllEqual(self.evaluate(b), [3, 4, 5]) self.assertAllEqual(self.evaluate(c), [5, 6, 7]) @test_util.run_deprecated_v1 def testInitializerFunctionDevicePlacement(self): with self.cached_session(): initializer = lambda: constant_op.constant(42.0) with ops.device("/cpu:100"): v1 = variables.Variable(initializer, dtype=dtypes.float32, name="v1") expected_device = "/device:CPU:100" expected_group_v1 = [b"loc:@v1"] self.assertEqual(expected_device, v1.op.device) self.assertEqual(expected_group_v1, v1.op.colocation_groups()) for i in v1.initializer.inputs: self.assertEqual(expected_group_v1, i.op.colocation_groups()) v2 = variables.Variable(initializer, dtype=dtypes.float32, name="v2") expected_group_v2 = [b"loc:@v2"] self.assertEqual(expected_group_v2, v2.op.colocation_groups()) for i in v2.initializer.inputs: self.assertEqual(expected_group_v2, i.op.colocation_groups()) @test_util.run_v1_only("b/120545219") def testVariableDefInitializedInstances(self): with ops.Graph().as_default(), self.cached_session() as sess: v_def = variables.Variable( initial_value=constant_op.constant(3.0)).to_proto() with ops.Graph().as_default(), self.cached_session() as sess: # v describes a VariableDef-based variable without an initial value. v = variables.Variable(variable_def=v_def) self.assertEqual(3.0, self.evaluate(v.initialized_value())) # initialized_value should not rerun the initializer_op if the variable # has already been initialized elsewhere. self.evaluate(v.assign(1.0)) self.assertEqual(1.0, self.evaluate(v.initialized_value())) v_def.ClearField("initial_value_name") with ops.Graph().as_default(), self.cached_session() as sess: # Restoring a legacy VariableDef proto that does not have # initial_value_name set should still work. v = variables.Variable(variable_def=v_def) # We should also be able to re-export the variable to a new meta graph. self.assertProtoEquals(v_def, v.to_proto()) # But attempts to use initialized_value will result in errors. 
with self.assertRaises(ValueError): self.evaluate(v.initialized_value()) def testTrainableInProto(self): with ops.Graph().as_default(): non_trainable_variable = variables.Variable( trainable=False, initial_value=constant_op.constant(10.0)) self.assertEqual( False, variables.Variable(variable_def=non_trainable_variable.to_proto()) .trainable) trainable_variable = variables.Variable( trainable=True, initial_value=constant_op.constant(10.0)) self.assertEqual( True, variables.Variable(variable_def=trainable_variable.to_proto()) .trainable) def testSynchronizationAndAggregationSaved(self): with ops.Graph().as_default(): original_variable = variables.Variable( initial_value=constant_op.constant(10.0), synchronization=variables.VariableSynchronization.NONE, aggregation=variables.VariableAggregationV2.ONLY_FIRST_REPLICA) self.assertEqual(variables.VariableSynchronization.NONE, original_variable.synchronization) self.assertEqual(variables.VariableAggregation.ONLY_FIRST_REPLICA, original_variable.aggregation) laundered = variables.Variable( variable_def=original_variable.to_proto()) self.assertEqual( variables.VariableSynchronization.NONE, laundered.synchronization) self.assertEqual(variables.VariableAggregationV2.ONLY_FIRST_REPLICA, laundered.aggregation) @test_util.run_deprecated_v1 def testLoad(self): with self.cached_session(): var = variables.Variable(np.zeros((5, 5), np.float32)) self.evaluate(variables.global_variables_initializer()) var.load(np.ones((5, 5), np.float32)) self.assertAllClose(np.ones((5, 5), np.float32), self.evaluate(var)) @test_util.run_v1_only("b/120545219") def testRepr(self): var = variables.VariableV1(np.zeros((5, 5), np.float32), name="noop") self.assertEqual( "<tf.Variable 'noop:0' shape=(5, 5) dtype=float32_ref>", repr(var)) def testVariableNamesPreserveNameScopesWithDefun(self): @function.defun def create_variable(): with ops.name_scope("foo"): v = variables.Variable(0.0, name="bar") self.assertEqual(v.name, "foo/bar:0") with ops.get_default_graph().as_default(): create_variable() @parameterized.parameters(variables.VariableV1, variables.Variable) def testTrainableVariable(self, cls): v1 = cls(1.0) self.assertEqual(True, v1.trainable) v2 = cls(1.0, synchronization=variables.VariableSynchronization.ON_READ) self.assertEqual(False, v2.trainable) v3 = cls(1.0, synchronization=variables.VariableSynchronization.ON_READ, trainable=True) self.assertEqual(True, v3.trainable) v4 = cls(1.0, synchronization=variables.VariableSynchronization.ON_READ, trainable=False) self.assertEqual(False, v4.trainable) class IsInitializedTest(test.TestCase): def testNoVars(self): with ops.Graph().as_default(), self.cached_session() as sess: uninited = variables.report_uninitialized_variables() self.assertEqual(0, self.evaluate(uninited).size) def testAssertVariablesInitialized(self): with ops.Graph().as_default(), self.cached_session() as sess: v = variables.Variable([1, 2], name="v") w = variables.Variable([3, 4], name="w") _ = v, w uninited = variables.report_uninitialized_variables() self.assertAllEqual(np.array([b"v", b"w"]), self.evaluate(uninited)) self.evaluate(variables.global_variables_initializer()) self.assertEqual(0, self.evaluate(uninited).size) @test_util.run_v1_only("b/120545219") def testVariableList(self): with ops.Graph().as_default(), self.cached_session() as sess: v = variables.VariableV1([1, 2], name="v") w = variables.VariableV1([3, 4], name="w") uninited = variables.report_uninitialized_variables() self.assertAllEqual(np.array([b"v", b"w"]), self.evaluate(uninited)) 
self.evaluate(w.initializer) self.assertAllEqual(np.array([b"v"]), self.evaluate(uninited)) self.evaluate(v.initializer) self.assertEqual(0, self.evaluate(uninited).size) def testZeroSizeVarInitialized(self): with ops.Graph().as_default(), self.cached_session() as sess: v = variables.Variable(array_ops.zeros([0, 2]), name="v") uninited = variables.report_uninitialized_variables() self.evaluate(v.initializer) # not strictly necessary self.assertEqual(0, self.evaluate(uninited).size) def testTrainingWithZeroSizeVar(self): with ops.Graph().as_default(), self.cached_session() as sess: a = variables.Variable(array_ops.zeros([0, 2])) b = variables.Variable(array_ops.ones([2, 2])) objective = math_ops.reduce_sum(b + math_ops.matmul( a, a, transpose_a=True)) self.evaluate(variables.global_variables_initializer()) do_opt = gradient_descent.GradientDescentOptimizer(0.1).minimize( objective) self.evaluate([do_opt]) self.assertAllClose([[0.9, 0.9], [0.9, 0.9]], self.evaluate(b)) @test_util.run_v1_only("b/120545219") class ObsoleteIsInitializedTest(test.TestCase): def testNoVars(self): with ops.Graph().as_default(): self.assertEqual(None, variables.assert_variables_initialized()) def testVariables(self): with ops.Graph().as_default(), self.cached_session() as sess: v = variables.VariableV1([1, 2]) w = variables.VariableV1([3, 4]) _ = v, w inited = variables.assert_variables_initialized() with self.assertRaisesOpError("Attempting to use uninitialized value"): self.evaluate(inited) self.evaluate(variables.global_variables_initializer()) self.evaluate(inited) def testVariableList(self): with ops.Graph().as_default(), self.cached_session() as sess: v = variables.VariableV1([1, 2]) w = variables.VariableV1([3, 4]) inited = variables.assert_variables_initialized([v]) with self.assertRaisesOpError("Attempting to use uninitialized value"): inited.op.run() self.evaluate(w.initializer) with self.assertRaisesOpError("Attempting to use uninitialized value"): inited.op.run() self.evaluate(v.initializer) inited.op.run() class PartitionedVariableTest(test.TestCase): def testPartitionedVariable(self): with ops.Graph().as_default(): v0 = variables.Variable([0]) v1 = variables.Variable([1]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) v1._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1])) partitions = [2] # Pass variable_list as [v1, v0] to ensure they are properly # re-sorted to [v0, v1] based on their slice info offsets. 
partitioned_variable = variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v1, v0], partitions=partitions) concatenated = ops.convert_to_tensor(partitioned_variable) num_partitions = len(partitioned_variable) iterated_partitions = list(partitioned_variable) self.assertEqual(2, num_partitions) self.assertEqual([v0, v1], iterated_partitions) self.assertEqual([2], partitioned_variable.get_shape()) self.assertEqual([2], partitioned_variable.shape) self.assertEqual([2], concatenated.get_shape()) self.assertEqual([2], concatenated.shape) def testPartitionedVariableFailures(self): with ops.Graph().as_default(): with self.assertRaisesRegex(ValueError, "empty"): variables.PartitionedVariable( name="fail", shape=2, dtype=dtypes.int32, variable_list=[], partitions=[]) with self.assertRaisesRegex(ValueError, "must have a save_slice_info"): v0 = variables.Variable([0]) partitions = [1] variables.PartitionedVariable( name="two_vars", shape=[1], dtype=v0.dtype, variable_list=[v0], partitions=partitions) with self.assertRaisesRegex(ValueError, "full shapes must match"): v0 = variables.Variable([0]) v1 = variables.Variable([1]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) v1._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1])) partitions = [2] variables.PartitionedVariable( name="two_vars", shape=[3], dtype=v0.dtype, variable_list=[v1, v0], partitions=partitions) with self.assertRaisesRegex(ValueError, "must be positive"): v0 = variables.Variable([0]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) partitions = [0] variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v0], partitions=partitions) def testPartitionedVariableAssignments(self): with ops.Graph().as_default(), self.cached_session(): v0 = variables.Variable(initial_value=[0.0]) v1 = variables.Variable(initial_value=[1.0]) v2 = variables.Variable(initial_value=[20.0]) v3 = variables.Variable(initial_value=[30.0]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) v1._set_save_slice_info( variables.Variable.SaveSliceInfo(v1.name, [2], [1], [1])) v2._set_save_slice_info( variables.Variable.SaveSliceInfo(v2.name, [2], [0], [1])) v3._set_save_slice_info( variables.Variable.SaveSliceInfo(v3.name, [2], [1], [1])) partitions = [2] # Pass variable_list as [v1, v0] to ensure they are properly # re-sorted to [v0, v1] based on their slice info offsets. 
pv_0 = variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v0, v1], partitions=partitions) pv_1 = variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v2, v3], partitions=partitions) deltas_a = constant_op.constant([1.0, 2.0]) deltas_b = constant_op.constant([3.0, 4.0]) ones = array_ops.ones([2]) plus_delta = pv_0.assign_add(deltas_a) minus_delta = pv_0.assign_sub(deltas_b) assign_ones = pv_0.assign(ones) c_0 = constant_op.constant([2.0]) c_1 = constant_op.constant([3.0]) assign_list = pv_1.assign([c_0, c_1]) assign_part_value = pv_1.assign_add(assign_ones) assign_part_var = pv_1.assign_sub(pv_0) self.evaluate(variables.global_variables_initializer()) self.assertEqual([1.0], self.evaluate(plus_delta[0])) self.assertEqual([1.0], self.evaluate(v0)) self.assertEqual([3.0], self.evaluate(plus_delta[1])) self.assertEqual([3.0], self.evaluate(v1)) self.assertEqual([-2.0], self.evaluate(minus_delta[0])) self.assertEqual([-2.0], self.evaluate(v0)) self.assertEqual([-1.0], self.evaluate(minus_delta[1])) self.assertEqual([-1.0], self.evaluate(v1)) self.assertEqual([1.0], self.evaluate(assign_ones[0])) self.assertEqual([1.0], self.evaluate(v0)) self.assertEqual([1.0], self.evaluate(assign_ones[1])) self.assertEqual([1.0], self.evaluate(v1)) self.assertEqual([2.0], self.evaluate(assign_list[0])) self.assertEqual([2.0], self.evaluate(v2)) self.assertEqual([3.0], self.evaluate(assign_list[1])) self.assertEqual([3.0], self.evaluate(v3)) self.assertEqual([3.0], self.evaluate(assign_part_value[0])) self.assertEqual([3.0], self.evaluate(v2)) self.assertEqual([4.0], self.evaluate(assign_part_value[1])) self.assertEqual([4.0], self.evaluate(v3)) self.assertEqual([2.0], self.evaluate(assign_part_var[0])) self.assertEqual([2.0], self.evaluate(v2)) self.assertEqual([3.0], self.evaluate(assign_part_var[1])) self.assertEqual([3.0], self.evaluate(v3)) class VariableContainerTest(test.TestCase): def testContainer(self): with ops.Graph().as_default(): v0 = variables.Variable([0]) with ops.container("l1"): v1 = variables.Variable([1]) with ops.container("l2"): v2 = variables.Variable([2]) special_v = gen_state_ops.variable( shape=[1], dtype=dtypes.float32, name="VariableInL3", container="l3", shared_name="") v3 = variables.Variable([3]) v4 = variables.Variable([4]) self.assertEqual(compat.as_bytes(""), v0.op.get_attr("container")) self.assertEqual(compat.as_bytes("l1"), v1.op.get_attr("container")) self.assertEqual(compat.as_bytes("l2"), v2.op.get_attr("container")) self.assertEqual(compat.as_bytes("l3"), special_v.op.get_attr("container")) self.assertEqual(compat.as_bytes("l1"), v3.op.get_attr("container")) self.assertEqual(compat.as_bytes(""), v4.op.get_attr("container")) class AggregationModesTest(test.TestCase): def testV1V2Equal(self): v1 = variables.VariableAggregation v2 = variables.VariableAggregationV2 self.assertEqual(v1.NONE, v2.NONE) self.assertEqual(v1.SUM, v2.SUM) self.assertEqual(v1.MEAN, v2.MEAN) self.assertEqual(v1.ONLY_FIRST_REPLICA, v2.ONLY_FIRST_REPLICA) self.assertEqual(v1.ONLY_FIRST_TOWER, v2.ONLY_FIRST_REPLICA) self.assertEqual(v2.NONE, v1.NONE) self.assertEqual(v2.SUM, v1.SUM) self.assertEqual(v2.MEAN, v1.MEAN) self.assertEqual(v2.ONLY_FIRST_REPLICA, v1.ONLY_FIRST_REPLICA) self.assertEqual(v2.ONLY_FIRST_REPLICA, v1.ONLY_FIRST_TOWER) self.assertEqual(hash(v1.NONE), hash(v2.NONE)) self.assertEqual(hash(v1.SUM), hash(v2.SUM)) self.assertEqual(hash(v1.MEAN), hash(v2.MEAN)) self.assertEqual(hash(v1.ONLY_FIRST_REPLICA), 
hash(v2.ONLY_FIRST_REPLICA)) self.assertEqual(hash(v1.ONLY_FIRST_TOWER), hash(v2.ONLY_FIRST_REPLICA)) if __name__ == "__main__": test.main()<|fim▁end|>
<|file_name|>extern-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|> // pretty-expanded FIXME #23616 extern fn f() { } pub fn main() { }<|fim▁end|>
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.
<|file_name|>DatarouterWebappInstanceLogDao.java<|end_file_name|><|fim▁begin|>/* * Copyright © 2009 HotPads ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.datarouter.webappinstance.storage.webappinstancelog; import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; import io.datarouter.model.databean.FieldlessIndexEntry; import io.datarouter.scanner.Scanner; import io.datarouter.storage.Datarouter; import io.datarouter.storage.client.ClientId; import io.datarouter.storage.dao.BaseDao; import io.datarouter.storage.dao.BaseRedundantDaoParams; import io.datarouter.storage.node.factory.IndexingNodeFactory; import io.datarouter.storage.node.factory.NodeFactory; import io.datarouter.storage.node.op.combo.IndexedSortedMapStorage.IndexedSortedMapStorageNode; import io.datarouter.storage.node.op.index.IndexReader; import io.datarouter.storage.tag.Tag; import io.datarouter.util.tuple.Range; import io.datarouter.virtualnode.redundant.RedundantIndexedSortedMapStorageNode; import io.datarouter.webappinstance.storage.webappinstancelog.WebappInstanceLog.WebappInstanceLogFielder; @Singleton public class DatarouterWebappInstanceLogDao extends BaseDao{ public static class DatarouterWebappInstanceLogDaoParams extends BaseRedundantDaoParams{ public DatarouterWebappInstanceLogDaoParams(List<ClientId> clientIds){ super(clientIds); } } private final IndexedSortedMapStorageNode<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogFielder> node; private final IndexReader<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogByBuildInstantKey, FieldlessIndexEntry<WebappInstanceLogByBuildInstantKey,WebappInstanceLogKey,WebappInstanceLog>> byBuildInstant; @Inject public DatarouterWebappInstanceLogDao( Datarouter datarouter, NodeFactory nodeFactory, IndexingNodeFactory indexingNodeFactory, DatarouterWebappInstanceLogDaoParams params){ super(datarouter); node = Scanner.of(params.clientIds) .map(clientId -> { IndexedSortedMapStorageNode<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogFielder> node = nodeFactory.create(clientId, WebappInstanceLog::new, WebappInstanceLogFielder::new) .withTag(Tag.DATAROUTER) .build(); return node; }) .listTo(RedundantIndexedSortedMapStorageNode::makeIfMulti); byBuildInstant = indexingNodeFactory.createKeyOnlyManagedIndex(WebappInstanceLogByBuildInstantKey::new, node)<|fim▁hole|> public void put(WebappInstanceLog log){ node.put(log); } public Scanner<WebappInstanceLog> scan(){ return node.scan(); } public Scanner<WebappInstanceLog> scanWithPrefix(WebappInstanceLogKey key){ return node.scanWithPrefix(key); } public Scanner<WebappInstanceLog> scanDatabeans(Range<WebappInstanceLogByBuildInstantKey> range){ return byBuildInstant.scanDatabeans(range); } }<|fim▁end|>
.build(); datarouter.register(node); }
<|file_name|>test_mysql.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os from .. import OratorTestCase from . import IntegrationTestCase class MySQLIntegrationTestCase(IntegrationTestCase, OratorTestCase): @classmethod def get_manager_config(cls): ci = os.environ.get("CI", False) if ci: database = "orator_test" user = "root" password = ""<|fim▁hole|> password = "orator" return { "default": "mysql", "mysql": { "driver": "mysql", "database": database, "user": user, "password": password, }, } def get_marker(self): return "%s"<|fim▁end|>
else: database = "orator_test" user = "orator"
<|file_name|>syntax-extension-minor.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // xfail-test // this now fails (correctly, I claim) because hygiene prevents // the assembled identifier from being a reference to the binding. pub fn main() { let asdf_fdsa = ~"<.<"; assert_eq!(concat_idents!(asd, f_f, dsa), ~"<.<"); assert!(stringify!(use_mention_distinction) == "use_mention_distinction"); }<|fim▁end|>
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Configuration for fargo. # You can override Fargo default settings here # Fargo is a Django application: for the full list of settings and their # values, see https://docs.djangoproject.com/en/1.7/ref/settings/ # For more information on settings see # https://docs.djangoproject.com/en/1.7/topics/settings/ # WARNING! Quick-start development settings unsuitable for production! # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # This file is sourced by "execfile" from /usr/lib/fargo/debian_config.py # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False #ADMINS = ( # # ('User 1', '[email protected]'), # # ('User 2', '[email protected]'), #) # ALLOWED_HOSTS must be correct in production! # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = [ '*', ] # Databases # Default: a local database named "fargo" # https://docs.djangoproject.com/en/1.7/ref/settings/#databases # Warning: don't change ENGINE # DATABASES['default']['NAME'] = 'fargo' # DATABASES['default']['USER'] = 'fargo' # DATABASES['default']['PASSWORD'] = '******' # DATABASES['default']['HOST'] = 'localhost' # DATABASES['default']['PORT'] = '5432' LANGUAGE_CODE = 'fr-fr' TIME_ZONE = 'Europe/Paris' # Email configuration # EMAIL_SUBJECT_PREFIX = '[fargo] ' # SERVER_EMAIL = '[email protected]' # DEFAULT_FROM_EMAIL = '[email protected]'<|fim▁hole|># EMAIL_HOST_USER = '' # EMAIL_HOST_PASSWORD = '' # EMAIL_PORT = 25 # HTTPS Security # CSRF_COOKIE_SECURE = True # SESSION_COOKIE_SECURE = True<|fim▁end|>
# SMTP configuration # EMAIL_HOST = 'localhost'
<|file_name|>Result.java<|end_file_name|><|fim▁begin|>package com.zuoqing.demo.entity; /** * The outermost wrapper object returned by an HTTP request. */ public class Result<T> { private Integer code; private String msg; private T data; public Integer getCode() { return code; } public void setCode(Integer code) { this.code = code; } <|fim▁hole|>
public String getMsg() {
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar) # All Rights Reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'author': 'ADHOC SA', 'category': 'Accounting & Finance', 'demo_xml': [], 'depends': ['account'], 'description': ''' Account Invoice Prices Update ============================= ''', 'installable': True,<|fim▁hole|> 'name': 'Account Invoice Prices Update', 'test': [], 'data': [ 'wizard/update_prices_wizard_view.xml', 'views/invoice_view.xml', ], 'version': '8.0.0.0.1', 'website': 'www.adhoc.com.ar', 'license': 'AGPL-3'} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
<|file_name|>geonetwork.py<|end_file_name|><|fim▁begin|>######################################################################### # # Copyright (C) 2012 OpenPlans #<|fim▁hole|># the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ######################################################################### from geonode.catalogue.backends.generic import CatalogueBackend \ as GenericCatalogueBackend class CatalogueBackend(GenericCatalogueBackend): """GeoNetwork CSW Backend""" def __init__(self, *args, **kwargs): super(CatalogueBackend, self).__init__(*args, **kwargs) self.catalogue.formats = ['Dublin Core', 'ISO']<|fim▁end|>
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>import datetime

from django.contrib import admin
from django.db.models import Q
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

from models import Invoice, InvoiceItem


class InvoiceItemInline(admin.TabularInline):
    fieldsets = (
        (
            None,
            {
                'fields': ('title', 'quantity', 'unit', 'unit_price', 'tax_rate', 'weight')
            }
        ),
    )
    model = InvoiceItem
    extra = 0


class OverdueFilter(admin.SimpleListFilter):
    title = _('overdue')
    parameter_name = 'overdue'

    def lookups(self, request, model_admin):
        return (
            ('no', _('no')),
            ('yes', _('yes')),
        )

    def queryset(self, request, queryset):
        if self.value() == 'no':
            return queryset.filter(Q(date_due__gt=datetime.datetime.combine(now().date(), datetime.time.max))|Q(status=Invoice.STATUS.PAID))
        if self.value() == 'yes':
            return queryset.filter(date_due__lt=datetime.datetime.combine(now().date(), datetime.time.max)).exclude(status=Invoice.STATUS.PAID)


class InvoiceAdmin(admin.ModelAdmin):
    date_hierarchy = 'date_issue'
    list_display = ['pk', 'type', 'full_number', 'status', 'customer_name', 'customer_country',
                    'subtotal', 'vat', 'total', 'currency', 'date_issue', 'payment_term',
                    'is_overdue_boolean', 'is_paid']
    list_editable = ['status']
    list_filter = ['type', 'status', 'payment_method', OverdueFilter,
        #'language',
        'currency'
    ]
    search_fields = ['number', 'subtitle', 'note', 'supplier_name', 'customer_name', 'shipping_name']
    inlines = (InvoiceItemInline, )
    fieldsets = (
        (_(u'General information'), {
            'fields': (
                'type', 'number', 'full_number', 'status', 'subtitle', 'language', 'note',
                'date_issue', 'date_tax_point', 'date_due', 'date_sent'
            )
        }),
        (_(u'Contact details'), {
            'fields': (
                'issuer_name', 'issuer_email', 'issuer_phone'
            )
        }),
        (_(u'Payment details'), {
            'fields': (
                'currency', 'discount', 'credit',
                #'already_paid',
                'payment_method', 'constant_symbol', 'variable_symbol', 'specific_symbol', 'reference',
                'bank_name', 'bank_country', 'bank_city', 'bank_street', 'bank_zip',
                'bank_iban', 'bank_swift_bic'
            )
        }),
        (_(u'Supplier details'), {
            'fields': (
                'supplier_name', 'supplier_street', 'supplier_zip', 'supplier_city', 'supplier_country',
                'supplier_registration_id', 'supplier_tax_id', 'supplier_vat_id', 'supplier_additional_info'
            )
        }),
        (_(u'Customer details'), {
            'fields': (
                'customer_name', 'customer_street', 'customer_zip', 'customer_city', 'customer_country',
                'customer_registration_id', 'customer_tax_id', 'customer_vat_id', 'customer_additional_info',
            )
        }),
        (_(u'Shipping details'), {
            'fields': (
                'shipping_name', 'shipping_street', 'shipping_zip', 'shipping_city', 'shipping_country'
            )
        })<|fim▁hole|>

    def is_overdue_boolean(self, invoice):
        return invoice.is_overdue
    is_overdue_boolean.boolean = True
    is_overdue_boolean.short_description = _(u'Is overdue')

    def is_paid(self, invoice):
        return invoice.status == Invoice.STATUS.PAID
    is_paid.boolean = True
    is_paid.short_description = _(u'Is paid')

admin.site.register(Invoice, InvoiceAdmin)<|fim▁end|>
    )
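The OverdueFilter in the admin.py record above treats an invoice as overdue when its due date falls before the end of the current day and it is not paid. The non-obvious part is the "end of today" boundary built with datetime.time.max; a tiny standalone sketch of that idiom (ignoring Django's timezone-aware now() for brevity):

import datetime

# Combine today's date with time.max to get 23:59:59.999999 of the current
# day -- the same boundary OverdueFilter compares date_due against.
end_of_today = datetime.datetime.combine(datetime.date.today(), datetime.time.max)

due = datetime.datetime(2020, 1, 1, 12, 0)
print(due < end_of_today)  # True: an unpaid invoice due in the past is overdue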
<|file_name|>cavity-rank.py<|end_file_name|><|fim▁begin|>def cavity(l, n):
    # Mark every interior cell that is strictly deeper than its four
    # neighbours with 'X', skipping cells whose neighbours are already marked.
    # Marking in place is safe here: two adjacent cells can never both be
    # cavities, since each would have to be strictly greater than the other.
    for i in xrange(1, n - 1):
        for j in xrange(1, n - 1):
            if l[i-1][j] != 'X' and l[i][j-1] != 'X' \
                    and l[i+1][j] != 'X' and l[i][j+1] != 'X' \
                    and l[i][j] > l[i-1][j] and l[i][j] > l[i+1][j] \
                    and l[i][j] > l[i][j-1] and l[i][j] > l[i][j+1]:
                l[i][j] = 'X'


if __name__ == '__main__':
    n = input()
    p = []
    for _ in xrange(n):
        line = list(raw_input())
        p.append(line)
    cavity(p, n)<|fim▁hole|>
    for line in p:
        print ''.join(line)<|fim▁end|>
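The cavity-rank.py record above appears to be the classic "cavity map" exercise. A worked example of what cavity() does to a small grid, as a runnable sketch:

# Worked example for the cavity() routine above (depth grid as strings).
# The centre cell 9 is strictly deeper than its four neighbours 2, 8, 4, 6,
# so it is replaced by 'X':
#
#   123        123
#   894  --->  8X4
#   765        765
grid = [list(row) for row in ["123", "894", "765"]]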
<|file_name|>sync.js<|end_file_name|><|fim▁begin|>var request = require('supertest');
var should = require('should');
var express = require('express');

var expressRouter = require('../index.js');

var mockPath = 'mock.js';

describe('register routes', function(){
    var app;

    before(function(){
        app = express();
        expressRouter.sync(app, mockPath);
    });

    it('should have register: GET /api/collection', function(done){
        request(app)
            .get('/api/collection')
            .expect(200)
            .expect('OK', done);
    });

    it('should have register: GET /api/entity/:id', function(done){
        request(app)
            .get('/api/entity/1')
            .expect(200)
            .expect('OK', done);
    });

    it('should have register: POST /api/test', function(done){
        request(app)<|fim▁hole|>
            .expect(201)
            .expect('Created', done);
    });
});<|fim▁end|>
            .post('/api/test')
<|file_name|>metadata.js<|end_file_name|><|fim▁begin|>module.exports = { /*"installedPackages":{ "xmlName":"InstalledPackage", "children":{ } },*/ "labels":{ "xmlName":"CustomLabels", "children":{ "CustomLabels":"CustomLabel" } }, "staticresources":{ "xmlName":"StaticResource", "children":{ } }, "scontrols":{ "xmlName":"Scontrol", "children":{ } }, "components":{ "xmlName":"ApexComponent", "children":{ } }, "customMetadata":{ "xmlName":"CustomMetadata", "children":{ } }, "globalValueSets":{ "xmlName":"GlobalValueSet", "children":{ } }, "globalValueSetTranslations":{ "xmlName":"GlobalValueSetTranslation", "children":{ } }, "standardValueSets":{ "xmlName":"StandardValueSet", "children":{ <|fim▁hole|> "pages":{ "xmlName":"ApexPage", "children":{ } }, "queues":{ "xmlName":"Queue", "children":{ } }, "objects":{ "xmlName":"CustomObject", "children":{ "actionOverrides":{"typeName":"ActionOverride","name":"actionName"}, "fields":{"typeName":"CustomField","name":"fullName"}, "businessProcesses":{"typeName":"BusinessProcess","name":"fullName"}, "recordTypes":{"typeName":"RecordType","name":"fullName"}, "webLinks":{"typeName":"WebLink","name":"fullName"}, "validationRules":{"typeName":"ValidationRule","name":"fullName"}, "namedFilters":{"typeName":"NamedFilter","name":"fullName"}, "sharingReasons":{"typeName":"SharingReason","name":"fullName"}, "listViews":{"typeName":"ListView","name":"fullName"}, "fieldSets":{"typeName":"FieldSet","name":"fullName"}, "compactLayouts":{"typeName":"CompactLayout","name":"fullName"} } }, "reportTypes":{ "xmlName":"ReportType", "children":{ } }, "reports":{ "xmlName":"Report", "children":{ } }, "dashboards":{ "xmlName":"Dashboard", "children":{ } }, "analyticSnapshots":{ "xmlName":"AnalyticSnapshot", "children":{ } }, "layouts":{ "xmlName":"Layout", "children":{ } }, "portals":{ "xmlName":"Portal", "children":{ } }, "documents":{ "xmlName":"Document", "children":{ } }, "weblinks":{ "xmlName":"CustomPageWebLink", "children":{ } }, "quickActions":{ "xmlName":"QuickAction", "children":{ } }, "flexipages":{ "xmlName":"FlexiPage", "children":{ } }, "tabs":{ "xmlName":"CustomTab", "children":{ } }, "customApplicationComponents":{ "xmlName":"CustomApplicationComponent", "children":{ } }, "applications":{ "xmlName":"CustomApplication", "children":{ } }, "letterhead":{ "xmlName":"Letterhead", "children":{ } }, "email":{ "xmlName":"EmailTemplate", "children":{ } }, "workflows":{ "xmlName":"Workflow", "children":{ "alerts":{"typeName":"WorkflowAlert","name":"fullName"}, "tasks":{"typeName" : "WorkflowTask", "name" : "fullName"}, "outboundMessages":{"typeName" : "WorkflowOutboundMessage","name" : "fullName"}, "fieldUpdates":{"typeName" : "WorkflowFieldUpdate", "name" : "fullName"}, "rules":{"typeName" : "WorkflowRule", "name" : "fullName"}, "emailRecipients":{"typeName" : "WorkflowEmailRecipient", "name" : "fullName"}, "timeTriggers":{"typeName" : "WorkflowTimeTrigger", "name" : "fullName"}, "actionReferences":{"typeName" : "WorkflowActionReference", "name" : "fullName"} } }, "assignmentRules":{ "xmlName":"AssignmentRules", "children":{ } }, "autoResponseRules":{ "xmlName":"AutoResponseRules", "children":{ } }, "escalationRules":{ "xmlName":"EscalationRules", "children":{ } }, "roles":{ "xmlName":"Role", "children":{ } }, "groups":{ "xmlName":"Group", "children":{ } }, "postTemplates":{ "xmlName":"PostTemplate", "children":{ } }, "approvalProcesses":{ "xmlName":"ApprovalProcess", "children":{ } }, "homePageComponents":{ "xmlName":"HomePageComponent", "children":{ } }, 
"homePageLayouts":{ "xmlName":"HomePageLayout", "children":{ } }, "objectTranslations":{ "xmlName":"CustomObjectTranslation", "children":{ } }, "flows":{ "xmlName":"Flow", "children":{ } }, "classes":{ "xmlName":"ApexClass", "children":{ } }, "triggers":{ "xmlName":"ApexTrigger", "children":{ } }, "profiles":{ "xmlName":"Profile", "children":{ } }, "permissionsets":{ "xmlName":"PermissionSet", "children":{ } }, "datacategorygroups":{ "xmlName":"DataCategoryGroup", "children":{ } }, "remoteSiteSettings":{ "xmlName":"RemoteSiteSetting", "children":{ } }, "authproviders":{ "xmlName":"AuthProvider", "children":{ } }, "leadSharingRules":{ "xmlName":"LeadSharingRules", "children":{ } }, "campaignSharingRules":{ "xmlName":"CampaignSharingRules", "children":{ } }, "caseSharingRules":{ "xmlName":"CaseSharingRules", "children":{ } }, "contactSharingRules":{ "xmlName":"ContactSharingRules", "children":{ } }, "opportunitySharingRules":{ "xmlName":"OpportunitySharingRules", "children":{ } }, "accountSharingRules":{ "xmlName":"AccountSharingRules", "children":{ } }, "customObjectSharingRules":{ "xmlName":"CustomObjectSharingRules", "children":{ } }, "communities":{ "xmlName":"Community", "children":{ } }, "callCenters":{ "xmlName":"CallCenter", "children":{ } }, "connectedApps":{ "xmlName":"ConnectedApp", "children":{ } }, "samlssoconfigs":{ "xmlName":"SamlSsoConfig", "children":{ } }, "synonymDictionaries":{ "xmlName":"SynonymDictionary", "children":{ } }, "settings":{ "xmlName":"Settings", "children":{ } }, "aura":{ "xmlName":"AuraDefinitionBundle", "children":{ } }, "sharingRules":{ "xmlName":"SharingRules", "children":{ "sharingTerritoryRules":"SharingTerritoryRule", "sharingOwnerRules":"SharingOwnerRule", "sharingCriteriaRules":"SharingCriteriaRule" } }, "contentassets":{ "xmlName":"ContentAsset", "children":{ } }, "networks":{ "xmlName":"Network", "children":{ } }, "siteDotComSites":{ "xmlName":"SiteDotCom", "children":{ } }, "flowDefinitions":{ "xmlName":"FlowDefinition", "children":{ } }, "matchingRules":{ "xmlName":"MatchingRules", "children":{ } } };<|fim▁end|>
} },
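The metadata.js record above is a lookup table mapping Salesforce source folders to their Metadata API type (xmlName) and, for compound types, per-child element descriptors. One plausible way such a table gets consumed, sketched in Python over the same structure, assuming it has been exported to JSON; the helper names here are hypothetical, not part of the original repo:

import json

# Hypothetical consumer of the folder -> metadata-type table above.
with open("metadata.json") as fh:
    metadata = json.load(fh)

def xml_name(folder):
    """Resolve a source folder like 'objects' to its API type ('CustomObject')."""
    return metadata[folder]["xmlName"]

def child_type(folder, child):
    """Resolve a child element, e.g. ('objects', 'fields') -> 'CustomField'."""
    entry = metadata[folder]["children"][child]
    # Children are either plain strings or {typeName, name} descriptors.
    return entry if isinstance(entry, str) else entry["typeName"]

assert xml_name("objects") == "CustomObject"
assert child_type("objects", "fields") == "CustomField"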
<|file_name|>service.js<|end_file_name|><|fim▁begin|>const fs = require('fs');
const path = require('path');

class Service {
    constructor() {
        this.getFileRecursevly = this.getFileRecursevly.bind(this);
        this.getFiles = this.getFiles.bind(this);
    }

    getFileRecursevly(folderPath, shortPath = '') {
        var files = [];
        var folder = fs.readdirSync(path.resolve(__dirname, folderPath));
        var x = folder.forEach(file => {<|fim▁hole|>
                folder: file,
                files: this.getFileRecursevly(filePath, file)
            })
        } else {
            files.push({
                file: file,
                folder: shortPath
            });
        }
        })
        return files;
    }

    getFiles(path) {
        return new Promise((resolve, reject) => {
            var files = this.getFileRecursevly(path)
            resolve(files)
        })
    }
}

module.exports = Service;<|fim▁end|>
            var filePath = path.resolve(folderPath, file);
            if (fs.lstatSync(filePath).isDirectory()) {
                files.push({
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {CommonModule} from '@angular/common';
import {NgModule} from '@angular/core';
import {FormsModule} from '@angular/forms';
import {MatCardModule} from '@angular/material/card';
import {MatCheckboxModule} from '@angular/material/checkbox';
import {MatRadioModule} from '@angular/material/radio';
import {CheckboxConfigurableExample} from './checkbox-configurable/checkbox-configurable-example';
import {CheckboxOverviewExample} from './checkbox-overview/checkbox-overview-example';
import {CheckboxHarnessExample} from './checkbox-harness/checkbox-harness-example';

export {
  CheckboxConfigurableExample,
  CheckboxOverviewExample,
  CheckboxHarnessExample,
};

const EXAMPLES = [
  CheckboxConfigurableExample,
  CheckboxOverviewExample,
  CheckboxHarnessExample,
];

@NgModule({
  imports: [
    CommonModule,
    MatCardModule,
    MatCheckboxModule,
    MatRadioModule,
    FormsModule,
  ],<|fim▁hole|>
  exports: EXAMPLES,
  entryComponents: EXAMPLES,
})
export class CheckboxExamplesModule {
}<|fim▁end|>
  declarations: EXAMPLES,
<|file_name|>minsession_test.go<|end_file_name|><|fim▁begin|>package authentication import ( "github.com/stretchr/testify/assert" // "github.com/stuphlabs/pullcord" "net/http" "testing" ) // TestMinSessionHandlerFirstPass tests if a MinSessionHandler will even give an // initial cookie. // // Steps: // 1. Create a new MinSessionHandler to test. // 2. Run the cookie mask with an empty list for the input cookies. // 3. Verify that we received a cookie. func TestMinSessionHandlerFirstPass(t *testing.T) { /* setup */ /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh, err := handler.GetSession() assert.NoError(t, err) fwd, stc, err := sesh.CookieMask(nil) /* check */ assert.NoError(t, err) assert.Nil(t, fwd) assert.NotNil(t, stc) if stc != nil { assert.Equal(t, 1, len(stc)) } assert.NotNil(t, sesh) /* if sesh != nil { assert.Equal(t, 0, len(sesh)) } */ } // TestMinSessionHandlerReuseCookie tests if a MinSessionHandler will accept a // cookie it just gave us. // // Steps: // 1. Create a new MinSessionHandler to test. // 2. Run the cookie mask with an empty list for the input cookies. // 3. Run the cookie mask again, this time including the cookie we just // received in the input cookie list. // 4. Verify that we did not receive another cookie. func TestMinSessionHandlerReuseCookie(t *testing.T) { /* setup */ var localCookies []*http.Cookie /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh1, err := handler.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(nil) for _, cookie := range stc1 { localCookies = append(localCookies, cookie) } sesh2, err := handler.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(localCookies) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.Nil(t, fwd1) assert.Nil(t, fwd2) assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.Nil(t, stc2) assert.NotNil(t, sesh1) /* if sesh1 != nil { assert.Equal(t, 0, len(sesh1)) } */ assert.NotNil(t, sesh2) /* if sesh2 != nil { assert.Equal(t, 0, len(sesh2)) } */ } // TestMinSessionHandlerSessionDataPreservation tests if a MinSessionHandler // preserves session data between requests. // // Steps: // 1. Create a MinSessionHandler to test. // 2. Run the cookie mask to get a new cookie and session. // 3. Insert a new entry into the session data. // 4. Run the cookie mask again with the same cookie we received. // 5. Verify that the new session contains the same data we added to the // previous session. 
func TestMinSessionHandlerSessionDataPreservation(t *testing.T) { /* setup */ var localCookies []*http.Cookie expectedData := make(map[string]interface{}) expectedKey := "test key" /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh1, err := handler.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(nil) _, present1 := sesh1.GetValue(expectedKey) for _, cookie := range stc1 { localCookies = append(localCookies, cookie) /* intermediate check */ _, present1 = sesh1.GetValue(expectedKey) assert.Error(t, present1) assert.Equal(t, present1, NoSuchSessionValueError) expectedString := "saving data into " + cookie.Name + " cookie" expectedData[expectedKey] = expectedString err = sesh1.SetValue(expectedKey, expectedString) assert.NoError(t, err) } sesh2, err := handler.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(localCookies) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.Nil(t, fwd1) assert.Nil(t, fwd2) assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.Nil(t, stc2) assert.NotNil(t, sesh1) assert.NotNil(t, sesh2) /* if sesh2 != nil { assert.Equal(t, 1, len(sesh2)) } */ assert.Error(t, present1) assert.Equal(t, present1, NoSuchSessionValueError) actualData2, present2 := sesh2.GetValue(expectedKey) assert.NoError(t, present2) assert.NotEqual(t, present2, NoSuchSessionValueError) assert.Equal(t, expectedData[expectedKey], actualData2) } // TestMinSessionHandlerBadCookie tests if a MinSessionHandler recognizes a bad // cookie. // // Steps: // 1. Create a new MinSessionHandler to test. // 2. Run the cookie mask in order to get a good cookie. // 3. Tamper with the cookie. // 4. Run the cookie mask with the tampered cookie. // 5. Verify that the bad cookie was rejected and replaced by another good // cookie. func TestMinSessionHandlerBadCookie(t *testing.T) { /* setup */ var localCookies []*http.Cookie var badCookie http.Cookie /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh1, err := handler.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(nil) for _, cookie := range stc1 { cookie.Value = cookie.Value + "bad" badCookie.Name = cookie.Name badCookie.Value = cookie.Value localCookies = append(localCookies, cookie) } sesh2, err := handler.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(localCookies) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.Nil(t, fwd1) assert.Nil(t, fwd2) assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.NotNil(t, stc2) if stc2 != nil { assert.Equal(t, 2, len(stc2)) } badCookieDeleted := false for _, cookie := range stc2 { if cookie.Name == badCookie.Name { assert.Equal(t, badCookie.Value, cookie.Value) assert.Equal(t, -1, cookie.MaxAge) badCookieDeleted = true } } assert.True(t, badCookieDeleted) assert.NotNil(t, sesh1) /* if sesh1 != nil { assert.Equal(t, 0, len(sesh1)) } */ assert.NotNil(t, sesh2) /* if sesh2 != nil { assert.Equal(t, 0, len(sesh2)) } */ } // TestMinSessionHandlerInvalidCookie tests if a MinSessionHandler rejects a // cookie that it did not create. // // Steps: // 1. Forge a cookie that would match the MinSessionHandler's regular // expression. // 2. Create a new MinSessionHandler to test that will create cookies with // the same naming mechanism as our foged cookie. // 3. Run the cookie mask with the forged cookie. // 4. Verify that the forged cookie was rejected and replaced by another // cookie. 
func TestMinSessionHandlerInvalidCookie(t *testing.T) { /* setup */ var invalidCookie http.Cookie var localCookies []*http.Cookie invalidCookie.Name = "testHandler-" for i := 0; i < minSessionCookieNameRandSize; i++ { invalidCookie.Name += "ff" } invalidCookie.Value = "foo" localCookies = append(localCookies, &invalidCookie) /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh, err := handler.GetSession() assert.NoError(t, err) fwd, stc, err := sesh.CookieMask(localCookies) /* check */ assert.NoError(t, err) assert.Nil(t, fwd) assert.NotNil(t, stc) if stc != nil { assert.Equal(t, 2, len(stc)) } badCookieDeleted := false for _, cookie := range stc { if cookie.Name == invalidCookie.Name { assert.Equal(t, invalidCookie.Value, cookie.Value) assert.Equal(t, -1, cookie.MaxAge) badCookieDeleted = true } } assert.True(t, badCookieDeleted) assert.NotNil(t, sesh) /* if sesh != nil { assert.Equal(t, 0, len(sesh)) } */ } // TestMinSessionHandlerMultiSession tests if a MinSessionHandler can correctly // track multiple sessions. // // Steps: // 1. Create a new MinSessionHandler to test. // 2. Run the cookie mask with an empty cookie list. // 3. Save the cookie that we just received into cookie list 1. // 4. Set a value in the session we just received. // 5. Run the cookie mask with another empty cookie list. // 6. Save the cookie that we just received into cookie list 2. // 7. Set a value in the session we just received. // 8. Run the cookie mask with cookie list 2. // 9. Record what value was in the session we just received. // 10. Set a new value in the session we just received. // 11. Run the cookie mask with cookie list 1. // 12. Record what value was in the session we just received. // 13. Set a new value in the session we just received. // 14. Run the cookie mask with cookie list 2. // 15. Record what value was in the session we just received. // 16. Verify that session data was not present initially. // 17. Verify that the session data was what was expected for subsequent // accesses with the same cookie. 
func TestMinSessionHandlerMultiSession(t *testing.T) { /* setup */ var ( localCookies1 []*http.Cookie localCookies2 []*http.Cookie seshKey = "test key" expectedPresent1 = NoSuchSessionValueError actualPresent1 error expectedPresent2 = NoSuchSessionValueError actualPresent2 error expectedPresent3 = error(nil) actualPresent3 error expectedPresent4 = error(nil) actualPresent4 error expectedPresent5 = error(nil) actualPresent5 error expectedValue3 = "test 3" actualValue3 interface{} expectedValue4 = "test 4" actualValue4 interface{} expectedValue5 = "test 5" actualValue5 interface{} saveValue1 = expectedValue4 saveValue2 = expectedValue3 saveValue3 = expectedValue5 ) /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh1, err := handler.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(localCookies1) for _, cookie := range stc1 { localCookies1 = append(localCookies1, cookie) } _, actualPresent1 = sesh1.GetValue(seshKey) err = sesh1.SetValue(seshKey, saveValue1) assert.NoError(t, err) sesh2, err := handler.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(localCookies2) for _, cookie := range stc2 { localCookies2 = append(localCookies2, cookie) } _, actualPresent2 = sesh2.GetValue(seshKey) err = sesh2.SetValue(seshKey, saveValue2) assert.NoError(t, err) sesh3, err := handler.GetSession() assert.NoError(t, err) fwd3, stc3, err3 := sesh3.CookieMask(localCookies2) actualValue3, actualPresent3 = sesh3.GetValue(seshKey) err = sesh3.SetValue(seshKey, saveValue3) assert.NoError(t, err) sesh4, err := handler.GetSession() assert.NoError(t, err) fwd4, stc4, err4 := sesh4.CookieMask(localCookies1) actualValue4, actualPresent4 = sesh4.GetValue(seshKey) sesh5, err := handler.GetSession() assert.NoError(t, err) fwd5, stc5, err5 := sesh5.CookieMask(localCookies2) actualValue5, actualPresent5 = sesh5.GetValue(seshKey) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.NoError(t, err3) assert.NoError(t, err4) assert.NoError(t, err5) assert.Nil(t, fwd1) assert.Nil(t, fwd2) assert.Nil(t, fwd3) assert.Nil(t, fwd4) assert.Nil(t, fwd5) assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.NotNil(t, stc2) if stc2 != nil { assert.Equal(t, 1, len(stc2)) } assert.Nil(t, stc3) assert.Nil(t, stc4) assert.Nil(t, stc5) assert.Equal(t, expectedPresent1, actualPresent1) assert.Equal(t, expectedPresent2, actualPresent2) assert.Equal(t, expectedPresent3, actualPresent3) assert.Equal(t, expectedPresent4, actualPresent4) assert.Equal(t, expectedPresent5, actualPresent5) assert.Equal(t, expectedValue3, actualValue3) assert.Equal(t, expectedValue4, actualValue4) assert.Equal(t, expectedValue5, actualValue5) } // TestMinSessionHandlerBadCookieDestroysSession tests if a MinSessionHandler // destroys a session after a bad cookie. // // Steps: // 1. Create a MinSessionHandler to test. // 2. Run the cookie mask with an empty cookie list. // 3. Save the cookie we received into the good cookie list. // 4. Tamper with a copy of the cookie we received, and save this bad // cookie into the bad cookie list. // 5. Set a value in the session we just received. // 6. Run the cookie mask with the bad cookie list. // 7. Run the cookie mask with the good cookie list. // 8. Verify that the subsequent sessions we received did not contain the // value we had previously set. // 9. Verify that each time the provided cookie was rejected and we // received a replacement cookie. 
func TestMinSessionHandlerBadCookieDestroysSession(t *testing.T) { /* setup */ var ( goodCookies []*http.Cookie badCookies []*http.Cookie badCookie http.Cookie seshKey = "test key" expectedSeshPresent1 = NoSuchSessionValueError actualSeshPresent1 error expectedSeshPresent2 = NoSuchSessionValueError actualSeshPresent2 error expectedSeshPresent3 = NoSuchSessionValueError actualSeshPresent3 error expectedCookiePresent2 = true actualCookiePresent2 bool expectedCookiePresent3 = true actualCookiePresent3 bool saveValue = "foo" ) /* run */ handler := NewMinSessionHandler("testHandler", "/", "example.com") sesh1, err := handler.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(nil) for _, goodCookie := range stc1 { goodCookies = append(goodCookies, goodCookie) badCookie.Name = goodCookie.Name badCookie.Value = goodCookie.Value + " bar" badCookies = append(badCookies, &badCookie) } _, actualSeshPresent1 = sesh1.GetValue(seshKey) err = sesh1.SetValue(seshKey, saveValue) assert.NoError(t, err) sesh2, err := handler.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(badCookies) actualCookiePresent2 = false for _, cookie := range stc2 { if cookie.Name == badCookie.Name { actualCookiePresent2 = true } } _, actualSeshPresent2 = sesh2.GetValue(seshKey) sesh3, err := handler.GetSession() assert.NoError(t, err) fwd3, stc3, err3 := sesh3.CookieMask(goodCookies) actualCookiePresent3 = false for _, cookie := range stc3 { if cookie.Name == badCookie.Name { actualCookiePresent3 = true } } _, actualSeshPresent3 = sesh3.GetValue(seshKey) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.NoError(t, err3) assert.Nil(t, fwd1) assert.Nil(t, fwd2) assert.Nil(t, fwd3) assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.NotNil(t, stc2) if stc2 != nil { assert.Equal(t, 2, len(stc2)) } assert.NotNil(t, stc3) if stc3 != nil { assert.Equal(t, 2, len(stc3)) } assert.Equal(t, expectedSeshPresent1, actualSeshPresent1) assert.Equal(t, expectedSeshPresent2, actualSeshPresent2) assert.Equal(t, expectedSeshPresent3, actualSeshPresent3) assert.Equal(t, expectedCookiePresent2, actualCookiePresent2) assert.Equal(t, expectedCookiePresent3, actualCookiePresent3) } // TestMinSessionHandlerNonInterfering tests if two MinSessionHandlers interfere // with one another. // // Steps: // 1. Create two MinSessionHandlers to test with. // 2. Run the cookie mask of the first MinSessionHendler with an empty // cookie list. // 3. Save the cookie we just received into cookie list 1. // 4. Tamper with a copy of the cookie we just received and place it into // cookie list 2. // 5. Set a value in the session from this first MinSessionHandler. // 6. Run the cookie mask of the second MinSessionHandler with cookie list // 1. // 7. Record what cookies are being forwarded. // 8. Add the cookie we just received from the second MinSessionHandler // into cookie lists 1 and 2. // 9. Set a value in the session from the second MinSessionHandler. // 10. Run the cookie mask of the second MinSessionHandler with cookie list // 2. // 11. Record what cookies are being forwarded. // 12. Record the value in the session we just received. // 13. Run the cookie mask of the first MinSessionHandler with cookie list // 2. // 14. Record what cookies are being forwarded. // 15. Look for the value in the session we just received. // 16. Verify that the cookies from each MinSessionHandler were being // properly forwarded by the opposite MinSessionHandler. // 17. 
Verify that the correct cookie was accepted by the second // MinSessionHandler. // 18. Verify that the session data was preserved by the second // MinSessionHandler. // 19. Verify that the tampered cookie was rejected by the first // MinSessionHandler. // 20. Verify that the session data was destroyed by the first // MinSessionHandler. func TestMinSessionHandlerNonInterfering(t *testing.T) { /* setup */ var ( localCookies1 []*http.Cookie localCookies2 []*http.Cookie seshKey = "test key" expectedPresent1 = NoSuchSessionValueError actualPresent1 error expectedPresent2 = NoSuchSessionValueError actualPresent2 error expectedPresent3 = error(nil) actualPresent3 error expectedPresent4 = NoSuchSessionValueError actualPresent4 error saveValue1 = "saved 1" saveValue2 = "saved 2" expectedValue3 = saveValue2 actualValue3 interface{} ) /* run */ handler1 := NewMinSessionHandler("testHandler1", "/", "example.com") handler2 := NewMinSessionHandler("testHandler2", "/", "example.com") sesh1, err := handler1.GetSession() assert.NoError(t, err) fwd1, stc1, err1 := sesh1.CookieMask(localCookies1) for _, cookie := range stc1 { var badCookie http.Cookie localCookies1 = append(localCookies1, cookie) badCookie.Name = cookie.Name badCookie.Value = cookie.Value + " bar" localCookies2 = append(localCookies2, &badCookie) } _, actualPresent1 = sesh1.GetValue(seshKey) err = sesh1.SetValue(seshKey, saveValue1) assert.NoError(t, err) sesh2, err := handler2.GetSession() assert.NoError(t, err) fwd2, stc2, err2 := sesh2.CookieMask(localCookies1) for _, cookie := range stc2 { localCookies1 = append(localCookies1, cookie) localCookies2 = append(localCookies2, cookie) } _, actualPresent2 = sesh2.GetValue(seshKey) err = sesh2.SetValue(seshKey, saveValue2) assert.NoError(t, err) sesh3, err := handler2.GetSession() assert.NoError(t, err) fwd3, stc3, err3 := sesh3.CookieMask(localCookies2) actualValue3, actualPresent3 = sesh3.GetValue(seshKey) sesh4, err := handler1.GetSession() assert.NoError(t, err) fwd4, stc4, err4 := sesh4.CookieMask(localCookies2) _, actualPresent4 = sesh4.GetValue(seshKey) /* check */ assert.NoError(t, err1) assert.NoError(t, err2) assert.NoError(t, err3) assert.NoError(t, err4) assert.Nil(t, fwd1) assert.NotNil(t, fwd2) if fwd2 != nil { assert.Equal(t, 1, len(fwd2)) } assert.NotNil(t, fwd3)<|fim▁hole|> if fwd4 != nil { assert.Equal(t, 1, len(fwd4)) } assert.NotNil(t, stc1) if stc1 != nil { assert.Equal(t, 1, len(stc1)) } assert.NotNil(t, stc2) if stc2 != nil { assert.Equal(t, 1, len(stc2)) } assert.Nil(t, stc3) assert.NotNil(t, stc4) if stc4 != nil { assert.Equal(t, 2, len(stc4)) } assert.Equal(t, expectedPresent1, actualPresent1) assert.Equal(t, expectedPresent2, actualPresent2) assert.Equal(t, expectedPresent3, actualPresent3) assert.Equal(t, expectedPresent4, actualPresent4) assert.Equal(t, expectedValue3, actualValue3) }<|fim▁end|>
	if fwd3 != nil {
		assert.Equal(t, 1, len(fwd3))
	}
	assert.NotNil(t, fwd4)
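The Go tests above pin down a small contract for a cookie-masking session handler: CookieMask forwards cookies it does not own, accepts its own valid cookie, and on a missing, tampered, or foreign-but-matching cookie deletes it and issues a fresh one with an empty session. A compact Python sketch of that contract, an illustration of the behaviour under test rather than pullcord's implementation (it also omits the destroy-the-old-session bookkeeping the tests verify):

import secrets

class MinSessionHandler:
    """Toy cookie-masking session store mirroring the behaviour under test."""

    def __init__(self, name):
        self.name = name
        self.sessions = {}  # token -> session-data dict

    def cookie_mask(self, cookies):
        forwarded, set_cookies, session = {}, {}, None
        for key, value in cookies.items():
            if not key.startswith(self.name + "-"):
                forwarded[key] = value   # another handler's cookie: pass it through
            elif value in self.sessions:
                session = self.sessions[value]
            else:
                set_cookies[key] = None  # tampered/unknown cookie: mark for deletion
        if session is None:              # no valid cookie seen: start a fresh session
            token = secrets.token_hex(16)
            self.sessions[token] = session = {}
            set_cookies[self.name + "-" + secrets.token_hex(8)] = token
        return forwarded, set_cookies, session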
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Functions for filtering images. mod median; pub use self::median::median_filter; mod sharpen; pub use self::sharpen::*; use image::{GenericImage, GenericImageView, GrayImage, ImageBuffer, Luma, Pixel, Primitive}; use crate::definitions::{Clamp, Image}; use crate::integral_image::{column_running_sum, row_running_sum}; use crate::map::{ChannelMap, WithChannel}; use num::{abs, pow, Num}; use crate::math::cast; use conv::ValueInto; use std::cmp::{max, min}; use std::f32; /// Denoise 8-bit grayscale image using bilateral filtering. /// /// # Arguments /// /// * `image` - Grayscale image to be filtered. /// * `window_size` - Window size for filtering. /// * `sigma_color` - Standard deviation for grayscale distance. A larger value results /// in averaging of pixels with larger grayscale differences. /// * `sigma_spatial` - Standard deviation for range distance. A larger value results in<|fim▁hole|>/// averaging of pixels separated by larger distances. /// /// This is a denoising filter designed to preserve edges. It averages pixels based on their spatial /// closeness and radiometric similarity [1]. Spatial closeness is measured by the Gaussian function /// of the Euclidean distance between two pixels with user-specified standard deviation /// (`sigma_spatial`). Radiometric similarity is measured by the Gaussian function of the difference /// between two grayscale values with user-specified standard deviation (`sigma_color`). /// /// # References /// /// [1] C. Tomasi and R. Manduchi. "Bilateral Filtering for Gray and Color /// Images." IEEE International Conference on Computer Vision (1998) /// 839-846. DOI: 10.1109/ICCV.1998.710815 /// /// # Examples /// /// ``` /// use imageproc::filter::bilateral_filter; /// use imageproc::utils::gray_bench_image; /// let image = gray_bench_image(500, 500); /// let filtered = bilateral_filter(&image, 10, 10., 3.); /// ``` pub fn bilateral_filter( image: &GrayImage, window_size: u32, sigma_color: f32, sigma_spatial: f32, ) -> Image<Luma<u8>> { /// Un-normalized Gaussian weights for look-up tables. fn gaussian_weight(x: f32, sigma_squared: f32) -> f32 { return (-0.5 * x.powi(2) / sigma_squared).exp(); } /// Effectively a meshgrid command with flattened outputs. fn window_coords(window_size: u32) -> (Vec<i32>, Vec<i32>) { let window_start = (-(window_size as f32) / 2.0).floor() as i32; let window_end = (window_size as f32 / 2.0).floor() as i32 + 1; let window_range = window_start..window_end; let v = window_range.collect::<Vec<i32>>(); let cc: Vec<i32> = v .iter() .cycle() .take(v.len() * v.len()) .into_iter() .cloned() .collect(); let mut rr = Vec::new(); let window_range = window_start..window_end; for i in window_range { rr.append(&mut vec![i; (window_size + 1) as usize]); } return (rr, cc); } /// Create look-up table of Gaussian weights for color dimension. fn compute_color_lut(bins: u32, sigma: f32, max_value: f32) -> Vec<f32> { let v = (0..bins as i32).collect::<Vec<i32>>(); let step_size = max_value / bins as f32; let vals = v.iter().map(|&x| x as f32 * step_size).collect::<Vec<_>>(); let sigma_squared = sigma.powi(2); let gauss_weights = vals .iter() .map(|&x| gaussian_weight(x, sigma_squared)) .collect::<Vec<_>>(); gauss_weights } /// Create look-up table of weights corresponding to flattened 2-D Gaussian kernel. 
fn compute_spatial_lut(window_size: u32, sigma: f32) -> Vec<f32> { let (rr, cc) = window_coords(window_size); let mut gauss_weights = Vec::new(); let it = rr.iter().zip(cc.iter()); let sigma_squared = sigma.powi(2); for (r, c) in it { let dist = f32::sqrt(pow(*r as f32, 2) + pow(*c as f32, 2)); gauss_weights.push(gaussian_weight(dist, sigma_squared)); } gauss_weights } let (width, height) = image.dimensions(); let mut out = ImageBuffer::new(width, height); let max_value = *image.iter().max().unwrap() as f32; let n_bins: u32 = 255; // for color or > 8-bit, make n_bins a user input for tuning accuracy. let color_lut = compute_color_lut(n_bins, sigma_color, max_value); let color_dist_scale = n_bins as f32 / max_value; let max_color_bin = (n_bins - 1) as usize; let range_lut = compute_spatial_lut(window_size, sigma_spatial); let window_size = window_size as i32; let window_extent = (window_size - 1) / 2; let height = height as i32; let width = width as i32; for row in 0..height { for col in 0..width { let mut total_val: f32 = 0.; let mut total_weight: f32 = 0.; let window_center_val = image.get_pixel(col as u32, row as u32)[0] as i32; for window_row in -window_extent..window_extent + 1 { let window_row_abs: i32 = row + window_row; let window_row_abs: i32 = min(height - 1, max(0, window_row_abs)); // Wrap to edge. let kr: i32 = window_row + window_extent; for window_col in -window_extent..window_extent + 1 { let window_col_abs: i32 = col + window_col; let window_col_abs: i32 = min(width - 1, max(0, window_col_abs)); // Wrap to edge. let kc: i32 = window_col + window_extent; let range_bin = (kr * window_size + kc) as usize; let range_weight: f32 = range_lut[range_bin]; let val: i32 = image.get_pixel(window_col_abs as u32, window_row_abs as u32)[0] as i32; let color_dist: i32 = abs(window_center_val - val); let color_bin = (color_dist as f32 * color_dist_scale) as usize; let color_bin: usize = min(color_bin, max_color_bin); let color_weight: f32 = color_lut[color_bin]; let weight: f32 = range_weight * color_weight; total_val += val as f32 * weight; total_weight += weight; } } let new_val = (total_val / total_weight).round() as u8; out.put_pixel(col as u32, row as u32, Luma([new_val])); } } out } /// Convolves an 8bpp grayscale image with a kernel of width (2 * `x_radius` + 1) /// and height (2 * `y_radius` + 1) whose entries are equal and /// sum to one. i.e. each output pixel is the unweighted mean of /// a rectangular region surrounding its corresponding input pixel. /// We handle locations where the kernel would extend past the image's /// boundary by treating the image as if its boundary pixels were /// repeated indefinitely. // TODO: for small kernels we probably want to do the convolution // TODO: directly instead of using an integral image. // TODO: more formats! pub fn box_filter(image: &GrayImage, x_radius: u32, y_radius: u32) -> Image<Luma<u8>> { let (width, height) = image.dimensions(); let mut out = ImageBuffer::new(width, height); if width == 0 || height == 0 { return out; } let kernel_width = 2 * x_radius + 1; let kernel_height = 2 * y_radius + 1; let mut row_buffer = vec![0; (width + 2 * x_radius) as usize]; for y in 0..height { row_running_sum(image, y, &mut row_buffer, x_radius); let val = row_buffer[(2 * x_radius) as usize] / kernel_width; unsafe { out.unsafe_put_pixel(0, y, Luma([val as u8])); } for x in 1..width { // TODO: This way we pay rounding errors for each of the // TODO: x and y convolutions. Is there a better way? 
let u = (x + 2 * x_radius) as usize; let l = (x - 1) as usize; let val = (row_buffer[u] - row_buffer[l]) / kernel_width; unsafe { out.unsafe_put_pixel(x, y, Luma([val as u8])); } } } let mut col_buffer = vec![0; (height + 2 * y_radius) as usize]; for x in 0..width { column_running_sum(&out, x, &mut col_buffer, y_radius); let val = col_buffer[(2 * y_radius) as usize] / kernel_height; unsafe { out.unsafe_put_pixel(x, 0, Luma([val as u8])); } for y in 1..height { let u = (y + 2 * y_radius) as usize; let l = (y - 1) as usize; let val = (col_buffer[u] - col_buffer[l]) / kernel_height; unsafe { out.unsafe_put_pixel(x, y, Luma([val as u8])); } } } out } /// A 2D kernel, used to filter images via convolution. pub struct Kernel<'a, K> { data: &'a [K], width: u32, height: u32, } impl<'a, K: Num + Copy + 'a> Kernel<'a, K> { /// Construct a kernel from a slice and its dimensions. The input slice is /// in row-major form. pub fn new(data: &'a [K], width: u32, height: u32) -> Kernel<'a, K> { assert!(width > 0 && height > 0, "width and height must be non-zero"); assert!( width * height == data.len() as u32, "Invalid kernel len: expecting {}, found {}", width * height, data.len() ); Kernel { data, width, height, } } /// Returns 2d correlation of an image. Intermediate calculations are performed /// at type K, and the results converted to pixel Q via f. Pads by continuity. pub fn filter<P, F, Q>(&self, image: &Image<P>, mut f: F) -> Image<Q> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<K>, Q: Pixel + 'static, F: FnMut(&mut Q::Subpixel, K), { let (width, height) = image.dimensions(); let mut out = Image::<Q>::new(width, height); let num_channels = P::CHANNEL_COUNT as usize; let zero = K::zero(); let mut acc = vec![zero; num_channels]; let (k_width, k_height) = (self.width as i64, self.height as i64); let (width, height) = (width as i64, height as i64); for y in 0..height { for x in 0..width { for k_y in 0..k_height { let y_p = min(height - 1, max(0, y + k_y - k_height / 2)) as u32; for k_x in 0..k_width { let x_p = min(width - 1, max(0, x + k_x - k_width / 2)) as u32; accumulate( &mut acc, unsafe { &image.unsafe_get_pixel(x_p, y_p) }, unsafe { *self.data.get_unchecked((k_y * k_width + k_x) as usize) }, ); } } let out_channels = out.get_pixel_mut(x as u32, y as u32).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { f(c, *a); *a = zero; } } } out } } #[inline] fn gaussian(x: f32, r: f32) -> f32 { ((2.0 * f32::consts::PI).sqrt() * r).recip() * (-x.powi(2) / (2.0 * r.powi(2))).exp() } /// Construct a one dimensional float-valued kernel for performing a Gaussian blur /// with standard deviation sigma. fn gaussian_kernel_f32(sigma: f32) -> Vec<f32> { let kernel_radius = (2.0 * sigma).ceil() as usize; let mut kernel_data = vec![0.0; 2 * kernel_radius + 1]; for i in 0..kernel_radius + 1 { let value = gaussian(i as f32, sigma); kernel_data[kernel_radius + i] = value; kernel_data[kernel_radius - i] = value; } kernel_data } /// Blurs an image using a Gaussian of standard deviation sigma. /// The kernel used has type f32 and all intermediate calculations are performed /// at this type. /// /// # Panics /// /// Panics if `sigma <= 0.0`. // TODO: Integer type kernel, approximations via repeated box filter. 
pub fn gaussian_blur_f32<P>(image: &Image<P>, sigma: f32) -> Image<P> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<f32> + Clamp<f32>, { assert!(sigma > 0.0, "sigma must be > 0.0"); let kernel = gaussian_kernel_f32(sigma); separable_filter_equal(image, &kernel) } /// Returns 2d correlation of view with the outer product of the 1d /// kernels `h_kernel` and `v_kernel`. pub fn separable_filter<P, K>(image: &Image<P>, h_kernel: &[K], v_kernel: &[K]) -> Image<P> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>, K: Num + Copy, { let h = horizontal_filter(image, h_kernel); vertical_filter(&h, v_kernel) } /// Returns 2d correlation of an image with the outer product of the 1d /// kernel filter with itself. pub fn separable_filter_equal<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>, K: Num + Copy, { separable_filter(image, kernel, kernel) } /// Returns 2d correlation of an image with a 3x3 row-major kernel. Intermediate calculations are /// performed at type K, and the results clamped to subpixel type S. Pads by continuity. pub fn filter3x3<P, K, S>(image: &Image<P>, kernel: &[K]) -> Image<ChannelMap<P, S>> where P::Subpixel: ValueInto<K>, S: Clamp<K> + Primitive + 'static, P: WithChannel<S> + 'static, K: Num + Copy, { let kernel = Kernel::new(kernel, 3, 3); kernel.filter(image, |channel, acc| *channel = S::clamp(acc)) } /// Returns horizontal correlations between an image and a 1d kernel. /// Pads by continuity. Intermediate calculations are performed at /// type K. pub fn horizontal_filter<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>, K: Num + Copy, { // Don't replace this with a call to Kernel::filter without // checking the benchmark results. At the time of writing this // specialised implementation is faster. let (width, height) = image.dimensions(); let mut out = Image::<P>::new(width, height); let zero = K::zero(); let mut acc = vec![zero; P::CHANNEL_COUNT as usize]; let k_width = kernel.len() as i32; // Typically the image side will be much larger than the kernel length. // In that case we can remove a lot of bounds checks for most pixels. 
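    // When the kernel is at least as wide as the image, every tap must be
    // clamped (the branch just below). Otherwise the row is split into a
    // left margin, an unchecked interior run, and a right margin, so only
    // the margins pay for the min/max clamping that pads by continuity.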
if k_width >= width as i32 { for y in 0..height { for x in 0..width { for (i, k) in kernel.iter().enumerate() { let x_unchecked = (x as i32) + i as i32 - k_width / 2; let x_p = max(0, min(x_unchecked, width as i32 - 1)) as u32; let p = unsafe { image.unsafe_get_pixel(x_p, y) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x, y).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } return out; } let half_k = k_width / 2; for y in 0..height { // Left margin - need to check lower bound only for x in 0..half_k { for (i, k) in kernel.iter().enumerate() { let x_unchecked = (x as i32) + i as i32 - k_width / 2; let x_p = max(0, x_unchecked) as u32; let p = unsafe { image.unsafe_get_pixel(x_p, y) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x as u32, y).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } // Neither margin - don't need bounds check on either side for x in half_k..(width as i32 - half_k) { for (i, k) in kernel.iter().enumerate() { let x_unchecked = (x as i32) + i as i32 - k_width / 2; let x_p = x_unchecked as u32; let p = unsafe { image.unsafe_get_pixel(x_p, y) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x as u32, y).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } // Right margin - need to check upper bound only for x in (width as i32 - half_k)..(width as i32) { for (i, k) in kernel.iter().enumerate() { let x_unchecked = (x as i32) + i as i32 - k_width / 2; let x_p = min(x_unchecked, width as i32 - 1) as u32; let p = unsafe { image.unsafe_get_pixel(x_p, y) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x as u32, y).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } out } /// Returns horizontal correlations between an image and a 1d kernel. /// Pads by continuity. pub fn vertical_filter<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P> where P: Pixel + 'static, <P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>, K: Num + Copy, { // Don't replace this with a call to Kernel::filter without // checking the benchmark results. At the time of writing this // specialised implementation is faster. let (width, height) = image.dimensions(); let mut out = Image::<P>::new(width, height); let zero = K::zero(); let mut acc = vec![zero; P::CHANNEL_COUNT as usize]; let k_height = kernel.len() as i32; // Typically the image side will be much larger than the kernel length. // In that case we can remove a lot of bounds checks for most pixels. 
if k_height >= height as i32 { for y in 0..height { for x in 0..width { for (i, k) in kernel.iter().enumerate() { let y_unchecked = (y as i32) + i as i32 - k_height / 2; let y_p = max(0, min(y_unchecked, height as i32 - 1)) as u32; let p = unsafe { image.unsafe_get_pixel(x, y_p) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x, y).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } return out; } let half_k = k_height / 2; // Top margin - need to check lower bound only for y in 0..half_k { for x in 0..width { for (i, k) in kernel.iter().enumerate() { let y_unchecked = (y as i32) + i as i32 - k_height / 2; let y_p = max(0, y_unchecked) as u32; let p = unsafe { image.unsafe_get_pixel(x, y_p) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x, y as u32).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } // Neither margin - don't need bounds check on either side for y in half_k..(height as i32 - half_k) { for x in 0..width { for (i, k) in kernel.iter().enumerate() { let y_unchecked = (y as i32) + i as i32 - k_height / 2; let y_p = y_unchecked as u32; let p = unsafe { image.unsafe_get_pixel(x, y_p) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x, y as u32).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } // Right margin - need to check upper bound only for y in (height as i32 - half_k)..(height as i32) { for x in 0..width { for (i, k) in kernel.iter().enumerate() { let y_unchecked = (y as i32) + i as i32 - k_height / 2; let y_p = min(y_unchecked, height as i32 - 1) as u32; let p = unsafe { image.unsafe_get_pixel(x, y_p) }; accumulate(&mut acc, &p, *k); } let out_channels = out.get_pixel_mut(x, y as u32).channels_mut(); for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) { *c = <P as Pixel>::Subpixel::clamp(*a); *a = zero; } } } out } fn accumulate<P, K>(acc: &mut [K], pixel: &P, weight: K) where P: Pixel, <P as Pixel>::Subpixel: ValueInto<K>, K: Num + Copy, { for i in 0..(P::CHANNEL_COUNT as usize) { acc[i as usize] = acc[i as usize] + cast(pixel.channels()[i]) * weight; } } #[cfg(test)] mod tests { use super::*; use crate::definitions::{Clamp, Image}; use crate::utils::{gray_bench_image, rgb_bench_image}; use image::imageops::blur; use image::{GenericImage, GrayImage, ImageBuffer, Luma, Rgb}; use std::cmp::{max, min}; use test::{black_box, Bencher}; #[bench] fn bench_bilateral_filter(b: &mut Bencher) { let image = gray_bench_image(500, 500); b.iter(|| { let filtered = bilateral_filter(&image, 10, 10., 3.); black_box(filtered); }); } #[test] fn test_box_filter_handles_empty_images() { let _ = box_filter(&GrayImage::new(0, 0), 3, 3); let _ = box_filter(&GrayImage::new(1, 0), 3, 3); let _ = box_filter(&GrayImage::new(0, 1), 3, 3); } #[test] fn test_box_filter() { let image = gray_image!( 1, 2, 3; 4, 5, 6; 7, 8, 9); // For this image we get the same answer from the two 1d // convolutions as from doing the 2d convolution in one step // (but we needn't in general, as in the former case we're // clipping to an integer value twice). 
let expected = gray_image!( 2, 3, 3; 4, 5, 5; 6, 7, 7); assert_pixels_eq!(box_filter(&image, 1, 1), expected); } #[bench] fn bench_box_filter(b: &mut Bencher) { let image = gray_bench_image(500, 500); b.iter(|| { let filtered = box_filter(&image, 7, 7); black_box(filtered); }); } #[test] fn test_separable_filter() { let image = gray_image!( 1, 2, 3; 4, 5, 6; 7, 8, 9); // Lazily copying the box_filter test case let expected = gray_image!( 2, 3, 3; 4, 5, 5; 6, 7, 7); let kernel = vec![1f32 / 3f32; 3]; let filtered = separable_filter_equal(&image, &kernel); assert_pixels_eq!(filtered, expected); } #[test] fn test_separable_filter_integer_kernel() { let image = gray_image!( 1, 2, 3; 4, 5, 6; 7, 8, 9); let expected = gray_image!( 21, 27, 33; 39, 45, 51; 57, 63, 69); let kernel = vec![1i32; 3]; let filtered = separable_filter_equal(&image, &kernel); assert_pixels_eq!(filtered, expected); } #[bench] fn bench_separable_filter(b: &mut Bencher) { let image = gray_bench_image(300, 300); let h_kernel = vec![1f32 / 5f32; 5]; let v_kernel = vec![0.1f32, 0.4f32, 0.3f32, 0.1f32, 0.1f32]; b.iter(|| { let filtered = separable_filter(&image, &h_kernel, &v_kernel); black_box(filtered); }); } /// Reference implementation of horizontal_filter. Used to validate /// the (presumably faster) actual implementation. fn horizontal_filter_reference(image: &GrayImage, kernel: &[f32]) -> GrayImage { let (width, height) = image.dimensions(); let mut out = GrayImage::new(width, height); for y in 0..height { for x in 0..width { let mut acc = 0f32; for k in 0..kernel.len() { let mut x_unchecked = x as i32 + k as i32 - (kernel.len() / 2) as i32; x_unchecked = max(0, x_unchecked); x_unchecked = min(x_unchecked, width as i32 - 1); let x_checked = x_unchecked as u32; let color = image.get_pixel(x_checked, y)[0]; let weight = kernel[k]; acc += color as f32 * weight; } let clamped = <u8 as Clamp<f32>>::clamp(acc); out.put_pixel(x, y, Luma([clamped])); } } out } /// Reference implementation of vertical_filter. Used to validate /// the (presumably faster) actual implementation. fn vertical_filter_reference(image: &GrayImage, kernel: &[f32]) -> GrayImage { let (width, height) = image.dimensions(); let mut out = GrayImage::new(width, height); for y in 0..height { for x in 0..width { let mut acc = 0f32; for k in 0..kernel.len() { let mut y_unchecked = y as i32 + k as i32 - (kernel.len() / 2) as i32; y_unchecked = max(0, y_unchecked); y_unchecked = min(y_unchecked, height as i32 - 1); let y_checked = y_unchecked as u32; let color = image.get_pixel(x, y_checked)[0]; let weight = kernel[k]; acc += color as f32 * weight; } let clamped = <u8 as Clamp<f32>>::clamp(acc); out.put_pixel(x, y, Luma([clamped])); } } out } macro_rules! test_against_reference_implementation { ($test_name:ident, $under_test:ident, $reference_impl:ident) => { #[test] fn $test_name() { // I think the interesting edge cases here are determined entirely // by the relative sizes of the kernel and the image side length, so // I'm just enumerating over small values instead of generating random // examples via quickcheck. 
for height in 0..5 { for width in 0..5 { for kernel_length in 0..15 { let image = gray_bench_image(width, height); let kernel: Vec<f32> = (0..kernel_length).map(|i| i as f32 % 1.35).collect(); let expected = $reference_impl(&image, &kernel); let actual = $under_test(&image, &kernel); assert_pixels_eq!(actual, expected); } } } } }; } test_against_reference_implementation!( test_horizontal_filter_matches_reference_implementation, horizontal_filter, horizontal_filter_reference ); test_against_reference_implementation!( test_vertical_filter_matches_reference_implementation, vertical_filter, vertical_filter_reference ); #[test] fn test_horizontal_filter() { let image = gray_image!( 1, 4, 1; 4, 7, 4; 1, 4, 1); let expected = gray_image!( 2, 2, 2; 5, 5, 5; 2, 2, 2); let kernel = vec![1f32 / 3f32; 3]; let filtered = horizontal_filter(&image, &kernel); assert_pixels_eq!(filtered, expected); } #[test] fn test_horizontal_filter_with_kernel_wider_than_image_does_not_panic() { let image = gray_image!( 1, 4, 1; 4, 7, 4; 1, 4, 1); let kernel = vec![1f32 / 10f32; 10]; black_box(horizontal_filter(&image, &kernel)); } #[bench] fn bench_horizontal_filter(b: &mut Bencher) { let image = gray_bench_image(500, 500); let kernel = vec![1f32 / 5f32; 5]; b.iter(|| { let filtered = horizontal_filter(&image, &kernel); black_box(filtered); }); } #[test] fn test_vertical_filter() { let image = gray_image!( 1, 4, 1; 4, 7, 4; 1, 4, 1); let expected = gray_image!( 2, 5, 2; 2, 5, 2; 2, 5, 2); let kernel = vec![1f32 / 3f32; 3]; let filtered = vertical_filter(&image, &kernel); assert_pixels_eq!(filtered, expected); } #[test] fn test_vertical_filter_with_kernel_taller_than_image_does_not_panic() { let image = gray_image!( 1, 4, 1; 4, 7, 4; 1, 4, 1); let kernel = vec![1f32 / 10f32; 10]; black_box(vertical_filter(&image, &kernel)); } #[bench] fn bench_vertical_filter(b: &mut Bencher) { let image = gray_bench_image(500, 500); let kernel = vec![1f32 / 5f32; 5]; b.iter(|| { let filtered = vertical_filter(&image, &kernel); black_box(filtered); }); } #[test] fn test_filter3x3_with_results_outside_input_channel_range() { #[rustfmt::skip] let kernel: Vec<i32> = vec![ -1, 0, 1, -2, 0, 2, -1, 0, 1 ]; let image = gray_image!( 3, 2, 1; 6, 5, 4; 9, 8, 7); let expected = gray_image!(type: i16, -4, -8, -4; -4, -8, -4; -4, -8, -4 ); let filtered = filter3x3(&image, &kernel); assert_pixels_eq!(filtered, expected); } #[test] #[should_panic] fn test_kernel_must_be_nonempty() { let k: Vec<u8> = Vec::new(); let _ = Kernel::new(&k, 0, 0); } #[test] fn test_kernel_filter_with_even_kernel_side() { let image = gray_image!( 3, 2; 4, 1); let k = vec![1u8, 2u8]; let kernel = Kernel::new(&k, 2, 1); let filtered = kernel.filter(&image, |c, a| *c = a); let expected = gray_image!( 9, 7; 12, 6); assert_pixels_eq!(filtered, expected); } #[test] fn test_kernel_filter_with_empty_image() { let image = gray_image!(); let k = vec![2u8]; let kernel = Kernel::new(&k, 1, 1); let filtered = kernel.filter(&image, |c, a| *c = a); let expected = gray_image!(); assert_pixels_eq!(filtered, expected); } #[test] fn test_kernel_filter_with_kernel_dimensions_larger_than_image() { let image = gray_image!( 9, 4; 8, 1); #[rustfmt::skip] let k: Vec<f32> = vec![ 0.1, 0.2, 0.1, 0.2, 0.4, 0.2, 0.1, 0.2, 0.1 ]; let kernel = Kernel::new(&k, 3, 3); let filtered: Image<Luma<u8>> = kernel.filter(&image, |c, a| *c = <u8 as Clamp<f32>>::clamp(a)); let expected = gray_image!( 11, 7; 10, 5); assert_pixels_eq!(filtered, expected); } #[bench] fn bench_filter3x3_i32_filter(b: &mut Bencher) { let 
image = gray_bench_image(500, 500); #[rustfmt::skip] let kernel: Vec<i32> = vec![ -1, 0, 1, -2, 0, 2, -1, 0, 1 ]; b.iter(|| { let filtered: ImageBuffer<Luma<i16>, Vec<i16>> = filter3x3::<_, _, i16>(&image, &kernel); black_box(filtered); }); } /// Baseline implementation of Gaussian blur is that provided by image::imageops. /// We can also use this to validate correctnes of any implementations we add here. fn gaussian_baseline_rgb<I>(image: &I, stdev: f32) -> Image<Rgb<u8>> where I: GenericImage<Pixel = Rgb<u8>> + 'static, { blur(image, stdev) } #[bench] #[ignore] // Gives a baseline performance using code from another library fn bench_baseline_gaussian_stdev_1(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_baseline_rgb(&image, 1f32); black_box(blurred); }); } #[bench] #[ignore] // Gives a baseline performance using code from another library fn bench_baseline_gaussian_stdev_3(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_baseline_rgb(&image, 3f32); black_box(blurred); }); } #[bench] #[ignore] // Gives a baseline performance using code from another library fn bench_baseline_gaussian_stdev_10(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_baseline_rgb(&image, 10f32); black_box(blurred); }); } #[bench] fn bench_gaussian_f32_stdev_1(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_blur_f32(&image, 1f32); black_box(blurred); }); } #[bench] fn bench_gaussian_f32_stdev_3(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_blur_f32(&image, 3f32); black_box(blurred); }); } #[bench] fn bench_gaussian_f32_stdev_10(b: &mut Bencher) { let image = rgb_bench_image(100, 100); b.iter(|| { let blurred = gaussian_blur_f32(&image, 10f32); black_box(blurred); }); } #[test] #[should_panic] fn test_gaussian_blur_f32_rejects_zero_sigma() { let image = gray_image!( 1, 2, 3; 4, 5, 6; 7, 8, 9 ); let _ = gaussian_blur_f32(&image, 0.0); } #[test] #[should_panic] fn test_gaussian_blur_f32_rejects_negative_sigma() { let image = gray_image!( 1, 2, 3; 4, 5, 6; 7, 8, 9 ); let _ = gaussian_blur_f32(&image, -0.5); } }<|fim▁end|>
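The bilateral filter documented in the mod.rs record above weights each neighbour by the product of a spatial Gaussian (of window distance) and a "color" Gaussian (of grayscale difference). A minimal NumPy sketch of that weighting for one pixel of a 2-D float array; this illustrates the formula from the doc comment, not the crate's implementation, which additionally quantizes both Gaussians into lookup tables:

import numpy as np

def bilateral_pixel(img, r, c, radius, sigma_spatial, sigma_color):
    """Filter a single pixel, clamping window coordinates at the borders."""
    h, w = img.shape
    total, weight_sum = 0.0, 0.0
    center = img[r, c]
    for dr in range(-radius, radius + 1):
        for dc in range(-radius, radius + 1):
            rr = min(max(r + dr, 0), h - 1)  # pad by continuity, as in the Rust code
            cc = min(max(c + dc, 0), w - 1)
            val = img[rr, cc]
            # Spatial closeness: Gaussian of the Euclidean window distance.
            w_s = np.exp(-0.5 * (dr * dr + dc * dc) / sigma_spatial ** 2)
            # Radiometric similarity: Gaussian of the grayscale difference.
            w_c = np.exp(-0.5 * (center - val) ** 2 / sigma_color ** 2)
            total += val * w_s * w_c
            weight_sum += w_s * w_c
    return total / weight_sum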
<|file_name|>ConsulServiceTest.java<|end_file_name|><|fim▁begin|>package com.rebuy.consul; import com.ecwid.consul.v1.ConsulClient; import com.ecwid.consul.v1.QueryParams; import com.ecwid.consul.v1.Response; import com.ecwid.consul.v1.agent.model.NewService; import com.ecwid.consul.v1.catalog.model.CatalogService; import com.rebuy.consul.exceptions.NoServiceFoundException; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ConsulServiceTest { private ConsulClient clientMock; private ConsulService service; private NewService serviceMock; @Before public void before() { clientMock = mock(ConsulClient.class); serviceMock = mock(NewService.class); when(serviceMock.getId()).thenReturn("42"); service = new ConsulService(clientMock, serviceMock); } @Test public void register_should_invoke_client()<|fim▁hole|> service.register(); Mockito.verify(clientMock).agentServiceRegister(serviceMock); } @Test public void unregister_should_invoke_client() { service.unregister(); Mockito.verify(clientMock).agentServiceSetMaintenance("42", true); } @Test(expected = NoServiceFoundException.class) public void findService_should_throw_exception_if_no_services_are_found() { Response<List<CatalogService>> response = new Response<>(new ArrayList<>(), 1L, true, 1L); when(clientMock.getCatalogService(Mockito.anyString(), Mockito.any(QueryParams.class))).thenReturn(response); service.getRandomService("service"); Mockito.verify(clientMock).getCatalogService("service", Mockito.any(QueryParams.class)); } @Test public void findService_should_invoke_client() { List<CatalogService> services = new ArrayList<>(); services.add(mock(CatalogService.class)); Response<List<CatalogService>> response = new Response<>(services, 1L, true, 1L); when(clientMock.getCatalogService(Mockito.anyString(), Mockito.any(QueryParams.class))).thenReturn(response); service.getRandomService("service"); Mockito.verify(clientMock).getCatalogService(Mockito.eq("service"), Mockito.any(QueryParams.class)); } @Test public void findService_should_return_one_service() { List<CatalogService> services = new ArrayList<>(); CatalogService service1 = mock(CatalogService.class); when(service1.getAddress()).thenReturn("192.168.0.1"); services.add(service1); CatalogService service2 = mock(CatalogService.class); when(service2.getAddress()).thenReturn("192.168.0.2"); services.add(service2); CatalogService service3 = mock(CatalogService.class); when(service3.getAddress()).thenReturn("192.168.0.3"); services.add(service3); Response<List<CatalogService>> response = new Response<>(services, 1L, true, 1L); when(clientMock.getCatalogService(Mockito.anyString(), Mockito.any(QueryParams.class))).thenReturn(response); Service catalogService = service.getRandomService("service"); boolean foundMatch = false; for (CatalogService service : services) { if (service.getAddress().equals(catalogService.getHostname()) && service.getServicePort() == catalogService.getPort()) { foundMatch = true; } } assertTrue(foundMatch); Mockito.verify(clientMock).getCatalogService(Mockito.eq("service"), Mockito.any(QueryParams.class)); } @Test public void findService_should_return_one_service_with_tag() { List<CatalogService> services = new ArrayList<>(); CatalogService service1 = 
mock(CatalogService.class); when(service1.getAddress()).thenReturn("192.168.0.1"); services.add(service1); CatalogService service2 = mock(CatalogService.class); when(service2.getAddress()).thenReturn("192.168.0.2"); services.add(service2); CatalogService service3 = mock(CatalogService.class); when(service3.getAddress()).thenReturn("192.168.0.3"); services.add(service3); Response<List<CatalogService>> response = new Response<>(services, 1L, true, 1L); when(clientMock.getCatalogService(Mockito.anyString(), Mockito.anyString(), Mockito.any(QueryParams.class))).thenReturn(response); Service catalogService = service.getRandomService("service", "my-tag"); boolean foundMatch = false; for (CatalogService service : services) { if (service.getAddress().equals(catalogService.getHostname()) && service.getServicePort() == catalogService.getPort()) { foundMatch = true; } } assertTrue(foundMatch); Mockito.verify(clientMock).getCatalogService(Mockito.eq("service"), Mockito.eq("my-tag"), Mockito.any(QueryParams.class)); } }<|fim▁end|>
{ when(clientMock.agentServiceRegister(Mockito.any())).thenReturn(null);
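The random-selection tests above only assert that the returned service is one of the registered catalog entries. A sketch of the selection strategy they imply (hypothetical names, not the Consul client's API):

import random

def get_random_service(instances):
    # Naive client-side balancing: any registered instance may come back,
    # which is why the tests only assert membership, not a specific host.
    if not instances:
        raise LookupError('no service found')
    return random.choice(instances)

instances = [('192.168.0.1', 80), ('192.168.0.2', 80), ('192.168.0.3', 80)]
assert get_random_service(instances) in instances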
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sufwebapp1.settings") from django.core.management import execute_from_command_line <|fim▁hole|><|fim▁end|>
execute_from_command_line(sys.argv)
<|file_name|>html_completions.py<|end_file_name|><|fim▁begin|>import sublime, sublime_plugin import re def match(rex, str): m = rex.match(str) if m: return m.group(0) else: return None # This responds to on_query_completions, but conceptually it's expanding # expressions, rather than completing words. # # It expands these simple expressions: # tag.class # tag#id class HtmlCompletions(sublime_plugin.EventListener): def on_query_completions(self, view, prefix, locations): # Only trigger within HTML if not view.match_selector(locations[0], "text.html - source - meta.tag, punctuation.definition.tag.begin"): return [] # Get the contents of each line, from the beginning of the line to # each point lines = [view.substr(sublime.Region(view.line(l).a, l)) for l in locations] # Reverse the contents of each line, to simulate having the regex # match backwards lines = [l[::-1] for l in lines] # Check the first location looks like an expression rex = re.compile("([\w-]+)([.#])(\w+)") expr = match(rex, lines[0]) if not expr: return [] # Ensure that all other lines have identical expressions for i in xrange(1, len(lines)): ex = match(rex, lines[i]) if ex != expr: return [] # Return the completions arg, op, tag = rex.match(expr).groups() arg = arg[::-1] tag = tag[::-1] expr = expr[::-1] if op == '.': snippet = "<{0} class=\"{1}\">$1</{0}>$0".format(tag, arg) else: snippet = "<{0} id=\"{1}\">$1</{0}>$0".format(tag, arg) return [(expr, snippet)] # Provide completions that match just after typing an opening angle bracket class TagCompletions(sublime_plugin.EventListener): def on_query_completions(self, view, prefix, locations): # Only trigger within HTML if not view.match_selector(locations[0],<|fim▁hole|> pt = locations[0] - len(prefix) - 1 ch = view.substr(sublime.Region(pt, pt + 1)) if ch != '<': return [] return ([ ("a\tTag", "a href=\"$1\">$2</a>"), ("abbr\tTag", "abbr>$1</abbr>"), ("acronym\tTag", "acronym>$1</acronym>"), ("address\tTag", "address>$1</address>"), ("applet\tTag", "applet>$1</applet>"), ("area\tTag", "area>$1</area>"), ("b\tTag", "b>$1</b>"), ("base\tTag", "base>$1</base>"), ("big\tTag", "big>$1</big>"), ("blockquote\tTag", "blockquote>$1</blockquote>"), ("body\tTag", "body>$1</body>"), ("button\tTag", "button>$1</button>"), ("center\tTag", "center>$1</center>"), ("caption\tTag", "caption>$1</caption>"), ("cdata\tTag", "cdata>$1</cdata>"), ("cite\tTag", "cite>$1</cite>"), ("col\tTag", "col>$1</col>"), ("colgroup\tTag", "colgroup>$1</colgroup>"), ("code\tTag", "code>$1</code>"), ("div\tTag", "div>$1</div>"), ("dd\tTag", "dd>$1</dd>"), ("del\tTag", "del>$1</del>"), ("dfn\tTag", "dfn>$1</dfn>"), ("dl\tTag", "dl>$1</dl>"), ("dt\tTag", "dt>$1</dt>"), ("em\tTag", "em>$1</em>"), ("fieldset\tTag", "fieldset>$1</fieldset>"), ("font\tTag", "font>$1</font>"), ("form\tTag", "form>$1</form>"), ("frame\tTag", "frame>$1</frame>"), ("frameset\tTag", "frameset>$1</frameset>"), ("head\tTag", "head>$1</head>"), ("h1\tTag", "h1>$1</h1>"), ("h2\tTag", "h2>$1</h2>"), ("h3\tTag", "h3>$1</h3>"), ("h4\tTag", "h4>$1</h4>"), ("h5\tTag", "h5>$1</h5>"), ("h6\tTag", "h6>$1</h6>"), ("i\tTag", "i>$1</i>"), ("iframe\tTag", "iframe src=\"$1\"></iframe>"), ("ins\tTag", "ins>$1</ins>"), ("kbd\tTag", "kbd>$1</kbd>"), ("li\tTag", "li>$1</li>"), ("label\tTag", "label>$1</label>"), ("legend\tTag", "legend>$1</legend>"), ("link\tTag", "link rel=\"stylesheet\" type=\"text/css\" href=\"$1\">"), ("map\tTag", "map>$1</map>"), ("noframes\tTag", "noframes>$1</noframes>"), ("object\tTag", "object>$1</object>"), ("ol\tTag", 
"ol>$1</ol>"), ("optgroup\tTag", "optgroup>$1</optgroup>"), ("option\tTag", "option>$0</option>"), ("p\tTag", "p>$1</p>"), ("pre\tTag", "pre>$1</pre>"), ("span\tTag", "span>$1</span>"), ("samp\tTag", "samp>$1</samp>"), ("script\tTag", "script type=\"${1:text/javascript}\">$0</script>"), ("style\tTag", "style type=\"${1:text/css}\">$0</style>"), ("select\tTag", "select>$1</select>"), ("small\tTag", "small>$1</small>"), ("strong\tTag", "strong>$1</strong>"), ("sub\tTag", "sub>$1</sub>"), ("sup\tTag", "sup>$1</sup>"), ("table\tTag", "table>$1</table>"), ("tbody\tTag", "tbody>$1</tbody>"), ("td\tTag", "td>$1</td>"), ("textarea\tTag", "textarea>$1</textarea>"), ("tfoot\tTag", "tfoot>$1</tfoot>"), ("th\tTag", "th>$1</th>"), ("thead\tTag", "thead>$1</thead>"), ("title\tTag", "title>$1</title>"), ("tr\tTag", "tr>$1</tr>"), ("tt\tTag", "tt>$1</tt>"), ("u\tTag", "u>$1</u>"), ("ul\tTag", "ul>$1</ul>"), ("var\tTag", "var>$1</var>"), ("br\tTag", "br>"), ("embed\tTag", "embed>"), ("hr\tTag", "hr>"), ("img\tTag", "img src=\"$1\">"), ("input\tTag", "input>"), ("meta\tTag", "meta>"), ("param\tTag", "param name=\"$1\" value=\"$2\">"), ("article\tTag", "article>$1</article>"), ("aside\tTag", "aside>$1</aside>"), ("audio\tTag", "audio>$1</audio>"), ("canvas\tTag", "canvas>$1</canvas>"), ("footer\tTag", "footer>$1</footer>"), ("header\tTag", "header>$1</header>"), ("nav\tTag", "nav>$1</nav>"), ("section\tTag", "section>$1</section>"), ("video\tTag", "video>$1</video>"), ("A\tTag", "A HREF=\"$1\">$2</A>"), ("ABBR\tTag", "ABBR>$1</ABBR>"), ("ACRONYM\tTag", "ACRONYM>$1</ACRONYM>"), ("ADDRESS\tTag", "ADDRESS>$1</ADDRESS>"), ("APPLET\tTag", "APPLET>$1</APPLET>"), ("AREA\tTag", "AREA>$1</AREA>"), ("B\tTag", "B>$1</B>"), ("BASE\tTag", "BASE>$1</BASE>"), ("BIG\tTag", "BIG>$1</BIG>"), ("BLOCKQUOTE\tTag", "BLOCKQUOTE>$1</BLOCKQUOTE>"), ("BODY\tTag", "BODY>$1</BODY>"), ("BUTTON\tTag", "BUTTON>$1</BUTTON>"), ("CENTER\tTag", "CENTER>$1</CENTER>"), ("CAPTION\tTag", "CAPTION>$1</CAPTION>"), ("CDATA\tTag", "CDATA>$1</CDATA>"), ("CITE\tTag", "CITE>$1</CITE>"), ("COL\tTag", "COL>$1</COL>"), ("COLGROUP\tTag", "COLGROUP>$1</COLGROUP>"), ("CODE\tTag", "CODE>$1</CODE>"), ("DIV\tTag", "DIV>$1</DIV>"), ("DD\tTag", "DD>$1</DD>"), ("DEL\tTag", "DEL>$1</DEL>"), ("DFN\tTag", "DFN>$1</DFN>"), ("DL\tTag", "DL>$1</DL>"), ("DT\tTag", "DT>$1</DT>"), ("EM\tTag", "EM>$1</EM>"), ("FIELDSET\tTag", "FIELDSET>$1</FIELDSET>"), ("FONT\tTag", "FONT>$1</FONT>"), ("FORM\tTag", "FORM>$1</FORM>"), ("FRAME\tTag", "FRAME>$1</FRAME>"), ("FRAMESET\tTag", "FRAMESET>$1</FRAMESET>"), ("HEAD\tTag", "HEAD>$1</HEAD>"), ("H1\tTag", "H1>$1</H1>"), ("H2\tTag", "H2>$1</H2>"), ("H3\tTag", "H3>$1</H3>"), ("H4\tTag", "H4>$1</H4>"), ("H5\tTag", "H5>$1</H5>"), ("H6\tTag", "H6>$1</H6>"), ("I\tTag", "I>$1</I>"), ("IFRAME\tTag", "IFRAME src=\"$1\"></IFRAME>"), ("INS\tTag", "INS>$1</INS>"), ("KBD\tTag", "KBD>$1</KBD>"), ("LI\tTag", "LI>$1</LI>"), ("LABEL\tTag", "LABEL>$1</LABEL>"), ("LEGEND\tTag", "LEGEND>$1</LEGEND>"), ("LINK\tTag", "LINK>$1</LINK>"), ("MAP\tTag", "MAP>$1</MAP>"), ("NOFRAMES\tTag", "NOFRAMES>$1</NOFRAMES>"), ("OBJECT\tTag", "OBJECT>$1</OBJECT>"), ("OL\tTag", "OL>$1</OL>"), ("OPTGROUP\tTag", "OPTGROUP>$1</OPTGROUP>"), ("OPTION\tTag", "OPTION>$1</OPTION>"), ("P\tTag", "P>$1</P>"), ("PRE\tTag", "PRE>$1</PRE>"), ("SPAN\tTag", "SPAN>$1</SPAN>"), ("SAMP\tTag", "SAMP>$1</SAMP>"), ("SCRIPT\tTag", "SCRIPT TYPE=\"${1:text/javascript}\">$0</SCRIPT>"), ("STYLE\tTag", "STYLE TYPE=\"${1:text/css}\">$0</STYLE>"), ("SELECT\tTag", "SELECT>$1</SELECT>"), ("SMALL\tTag", 
"SMALL>$1</SMALL>"), ("STRONG\tTag", "STRONG>$1</STRONG>"), ("SUB\tTag", "SUB>$1</SUB>"), ("SUP\tTag", "SUP>$1</SUP>"), ("TABLE\tTag", "TABLE>$1</TABLE>"), ("TBODY\tTag", "TBODY>$1</TBODY>"), ("TD\tTag", "TD>$1</TD>"), ("TEXTAREA\tTag", "TEXTAREA>$1</TEXTAREA>"), ("TFOOT\tTag", "TFOOT>$1</TFOOT>"), ("TH\tTag", "TH>$1</TH>"), ("THEAD\tTag", "THEAD>$1</THEAD>"), ("TITLE\tTag", "TITLE>$1</TITLE>"), ("TR\tTag", "TR>$1</TR>"), ("TT\tTag", "TT>$1</TT>"), ("U\tTag", "U>$1</U>"), ("UL\tTag", "UL>$1</UL>"), ("VAR\tTag", "VAR>$1</VAR>"), ("BR\tTag", "BR>"), ("EMBED\tTag", "EMBED>"), ("HR\tTag", "HR>"), ("IMG\tTag", "IMG SRC=\"$1\">"), ("INPUT\tTag", "INPUT>"), ("META\tTag", "META>"), ("PARAM\tTag", "PARAM NAME=\"$1\" VALUE=\"$2\">)"), ("ARTICLE\tTag", "ARTICLE>$1</ARTICLE>"), ("ASIDE\tTag", "ASIDE>$1</ASIDE>"), ("AUDIO\tTag", "AUDIO>$1</AUDIO>"), ("CANVAS\tTag", "CANVAS>$1</CANVAS>"), ("FOOTER\tTag", "FOOTER>$1</FOOTER>"), ("HEADER\tTag", "HEADER>$1</HEADER>"), ("NAV\tTag", "NAV>$1</NAV>"), ("SECTION\tTag", "SECTION>$1</SECTION>"), ("VIDEO\tTag", "VIDEO>$1</VIDEO>") ], sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)<|fim▁end|>
"text.html - source"): return []
<|file_name|>_scripted_fields.js<|end_file_name|><|fim▁begin|>import _ from 'lodash'; import 'ui/paginated_table'; import popularityHtml from 'plugins/kibana/management/sections/indices/_field_popularity.html'; import controlsHtml from 'plugins/kibana/management/sections/indices/_field_controls.html'; import dateScripts from 'plugins/kibana/management/sections/indices/_date_scripts'; import uiModules from 'ui/modules'; import scriptedFieldsTemplate from 'plugins/kibana/management/sections/indices/_scripted_fields.html'; import { getSupportedScriptingLangs } from 'ui/scripting_langs'; import { scriptedFields as docLinks } from 'ui/documentation_links/documentation_links'; uiModules.get('apps/management') .directive('scriptedFields', function (kbnUrl, Notifier, $filter, confirmModal) { const rowScopes = []; // track row scopes, so they can be destroyed as needed const filter = $filter('filter'); const notify = new Notifier(); return { restrict: 'E', template: scriptedFieldsTemplate, scope: true, link: function ($scope) { const fieldCreatorPath = '/management/kibana/indices/{{ indexPattern }}/scriptedField'; const fieldEditorPath = fieldCreatorPath + '/{{ fieldName }}'; $scope.docLinks = docLinks; $scope.perPage = 25; $scope.columns = [ { title: 'name' }, { title: 'lang' }, { title: 'script' }, { title: 'format' }, { title: 'controls', sortable: false } ]; $scope.$watchMulti(['[]indexPattern.fields', 'fieldFilter'], refreshRows); function refreshRows() { _.invoke(rowScopes, '$destroy'); rowScopes.length = 0; const fields = filter($scope.indexPattern.getScriptedFields(), { name: $scope.fieldFilter }); _.find($scope.editSections, { index: 'scriptedFields' }).count = fields.length; // Update the tab count $scope.rows = fields.map(function (field) { const rowScope = $scope.$new(); rowScope.field = field; rowScopes.push(rowScope);<|fim▁hole|> return [ _.escape(field.name), _.escape(field.lang), _.escape(field.script), _.get($scope.indexPattern, ['fieldFormatMap', field.name, 'type', 'title']), { markup: controlsHtml, scope: rowScope } ]; }); } $scope.addDateScripts = function () { const conflictFields = []; let fieldsAdded = 0; _.each(dateScripts($scope.indexPattern), function (script, field) { try { $scope.indexPattern.addScriptedField(field, script, 'number'); fieldsAdded++; } catch (e) { conflictFields.push(field); } }); if (fieldsAdded > 0) { notify.info(fieldsAdded + ' script fields created'); } if (conflictFields.length > 0) { notify.info('Not adding ' + conflictFields.length + ' duplicate fields: ' + conflictFields.join(', ')); } }; $scope.create = function () { const params = { indexPattern: $scope.indexPattern.id }; kbnUrl.change(fieldCreatorPath, params); }; $scope.edit = function (field) { const params = { indexPattern: $scope.indexPattern.id, fieldName: field.name }; kbnUrl.change(fieldEditorPath, params); }; $scope.remove = function (field) { const confirmModalOptions = { confirmButtonText: 'Delete field', onConfirm: () => { $scope.indexPattern.removeScriptedField(field.name); } }; confirmModal(`Are you sure want to delete ${field.name}? This action is irreversible!`, confirmModalOptions); }; $scope.getDeprecatedLanguagesInUse = function () { const fields = $scope.indexPattern.getScriptedFields(); const langsInUse = _.uniq(_.map(fields, 'lang')); return _.difference(langsInUse, getSupportedScriptingLangs()); }; } }; });<|fim▁end|>
<|file_name|>fig0621.cpp<|end_file_name|><|fim▁begin|>// File: fig0621.cpp // Computer Systems, Fourth Edition<|fim▁hole|> #include <iostream> using namespace std; int numPts; int value; int j; void printBar (int n) { int k; for (k = 1; k <= n; k++) { cout << '*'; } cout << endl; } int main () { cin >> numPts; for (j = 1; j <= numPts; j++) { cin >> value; printBar (value); } return 0; }<|fim▁end|>
// Figure 6.21
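The C++ program above reads a point count and then one value per bar. A quick sketch of the expected output for a hypothetical input of three values:

# Equivalent check of the bar-chart logic, for a hypothetical stdin of
# "3 5 2 7" (count 3, then the three bar lengths).
for value in [5, 2, 7]:
    print('*' * value)
# *****
# **
# *******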
<|file_name|>elastic.py<|end_file_name|><|fim▁begin|>import ast import json import arrow import elasticsearch from bson import ObjectId from flask import request from eve.utils import config from eve.io.base import DataLayer try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse def parse_date(date_str): """Parse elastic datetime string.""" try: date = arrow.get(date_str) except TypeError: date = arrow.get(date_str[0]) return date.datetime def get_dates(schema): """Return list of datetime fields for given schema.""" dates = [config.LAST_UPDATED, config.DATE_CREATED] for field, field_schema in schema.items(): if field_schema['type'] == 'datetime': dates.append(field) return dates def format_doc(hit, schema, dates): """Format given doc to match given schema.""" doc = hit.get('_source', {}) doc.setdefault(config.ID_FIELD, hit.get('_id')) doc.setdefault('_type', hit.get('_type')) for key in dates: if key in doc: doc[key] = parse_date(doc[key]) return doc def noop(): pass def is_elastic(datasource): """Detect if given resource uses elastic.""" return datasource.get('backend') == 'elastic' or datasource.get('search_backend') == 'elastic' class ElasticJSONSerializer(elasticsearch.JSONSerializer): """Customize the JSON serializer used in Elastic""" def default(self, value): """Convert mongo.ObjectId.""" if isinstance(value, ObjectId): return str(value) return super(ElasticJSONSerializer, self).default(value) class ElasticCursor(object): """Search results cursor.""" no_hits = {'hits': {'total': 0, 'hits': []}} def __init__(self, hits=None, docs=None): """Parse hits into docs.""" self.hits = hits if hits else self.no_hits self.docs = docs if docs else [] def __getitem__(self, key): return self.docs[key] def first(self): """Get first doc.""" return self.docs[0] if self.docs else None def count(self, **kwargs): """Get hits count.""" return int(self.hits['hits']['total']) def extra(self, response): """Add extra info to response.""" if 'facets' in self.hits: response['_facets'] = self.hits['facets'] if 'aggregations' in self.hits: response['_aggregations'] = self.hits['aggregations'] def set_filters(query, base_filters): """Put together all filters we have and set them as 'and' filter within filtered query. :param query: elastic query being constructed :param base_filters: all filters set outside of query (eg. 
resource config, sub_resource_lookup) """ filters = [f for f in base_filters if f is not None] query_filter = query['query']['filtered'].get('filter', None) if query_filter is not None: if 'and' in query_filter: filters.extend(query_filter['and']) else: filters.append(query_filter) if filters: query['query']['filtered']['filter'] = {'and': filters} def set_sort(query, sort): query['sort'] = [] for (key, sortdir) in sort: sort_dict = dict([(key, 'asc' if sortdir > 0 else 'desc')]) query['sort'].append(sort_dict) def get_es(url): o = urlparse(url) es = elasticsearch.Elasticsearch(hosts=[{'host': o.hostname, 'port': o.port}]) es.transport.serializer = ElasticJSONSerializer() return es def get_indices(es): return elasticsearch.client.IndicesClient(es) class Elastic(DataLayer): """ElasticSearch data layer.""" serializers = { 'integer': int, 'datetime': parse_date, 'objectid': ObjectId, } def init_app(self, app): app.config.setdefault('ELASTICSEARCH_URL', 'http://localhost:9200/') app.config.setdefault('ELASTICSEARCH_INDEX', 'eve') self.index = app.config['ELASTICSEARCH_INDEX'] self.es = get_es(app.config['ELASTICSEARCH_URL']) self.create_index(self.index) self.put_mapping(app) def _get_field_mapping(self, schema): """Get mapping for given field schema.""" if 'mapping' in schema: return schema['mapping'] elif schema['type'] == 'datetime': return {'type': 'date'} elif schema['type'] == 'string' and schema.get('unique'): return {'type': 'string', 'index': 'not_analyzed'} def create_index(self, index=None): if index is None: index = self.index try: get_indices(self.es).create(self.index) except elasticsearch.TransportError: pass def put_mapping(self, app): """Put mapping for elasticsearch for current schema. It's not called automatically now, but rather left for user to call it whenever it makes sense. 
""" indices = get_indices(self.es) for resource, resource_config in app.config['DOMAIN'].items(): datasource = resource_config.get('datasource', {}) if not is_elastic(datasource): continue if datasource.get('source', resource) != resource: # only put mapping for core types continue properties = {} properties[config.DATE_CREATED] = self._get_field_mapping({'type': 'datetime'}) properties[config.LAST_UPDATED] = self._get_field_mapping({'type': 'datetime'}) for field, schema in resource_config['schema'].items(): field_mapping = self._get_field_mapping(schema) if field_mapping: properties[field] = field_mapping mapping = {'properties': properties} indices.put_mapping(index=self.index, doc_type=resource, body=mapping, ignore_conflicts=True) def find(self, resource, req, sub_resource_lookup): args = getattr(req, 'args', request.args if request else {}) source_config = config.SOURCES[resource] if args.get('source'): query = json.loads(args.get('source')) if 'filtered' not in query.get('query', {}): _query = query.get('query') query['query'] = {'filtered': {}} if _query: query['query']['filtered']['query'] = _query else: query = {'query': {'filtered': {}}} if args.get('q', None): query['query']['filtered']['query'] = _build_query_string(args.get('q'), default_field=args.get('df', '_all')) if 'sort' not in query: if req.sort: sort = ast.literal_eval(req.sort) set_sort(query, sort) elif self._default_sort(resource) and 'query' not in query['query']['filtered']: set_sort(query, self._default_sort(resource)) if req.max_results: query.setdefault('size', req.max_results) if req.page > 1: query.setdefault('from', (req.page - 1) * req.max_results) filters = [] filters.append(source_config.get('elastic_filter')) filters.append(source_config.get('elastic_filter_callback', noop)()) filters.append({'term': sub_resource_lookup} if sub_resource_lookup else None) filters.append(json.loads(args.get('filter')) if 'filter' in args else None) set_filters(query, filters) if 'facets' in source_config: query['facets'] = source_config['facets'] if 'aggregations' in source_config: query['aggs'] = source_config['aggregations'] args = self._es_args(resource) hits = self.es.search(body=query, **args) return self._parse_hits(hits, resource) def find_one(self, resource, req, **lookup): def is_found(hit): if 'exists' in hit: hit['found'] = hit['exists'] return hit.get('found', False) args = self._es_args(resource) if config.ID_FIELD in lookup: try: hit = self.es.get(id=lookup[config.ID_FIELD], **args) except elasticsearch.NotFoundError: return if not is_found(hit): return docs = self._parse_hits({'hits': {'hits': [hit]}}, resource) return docs.first() else: query = { 'query': { 'term': lookup } } try: args['size'] = 1 hits = self.es.search(body=query, **args) docs = self._parse_hits(hits, resource) return docs.first() except elasticsearch.NotFoundError: return def find_one_raw(self, resource, _id): args = self._es_args(resource) hit = self.es.get(id=_id, **args) return self._parse_hits({'hits': {'hits': [hit]}}, resource).first() def find_list_of_ids(self, resource, ids, client_projection=None): args = self._es_args(resource) return self._parse_hits(self.es.multi_get(ids, **args), resource) def insert(self, resource, doc_or_docs, **kwargs): ids = [] kwargs.update(self._es_args(resource)) for doc in doc_or_docs: doc.update(self.es.index(body=doc, id=doc.get('_id'), **kwargs)) ids.append(doc['_id']) get_indices(self.es).refresh(self.index) return ids def update(self, resource, id_, updates): args = self._es_args(resource, 
refresh=True) return self.es.update(id=id_, body={'doc': updates}, **args) def replace(self, resource, id_, document): args = self._es_args(resource, refresh=True) return self.es.index(body=document, id=id_, **args) def remove(self, resource, lookup=None): args = self._es_args(resource) if lookup: try: return self.es.delete(id=lookup.get('_id'), refresh=True, **args) except elasticsearch.NotFoundError: return else: query = {'query': {'match_all': {}}} return self.es.delete_by_query(body=query, **args) def is_empty(self, resource): args = self._es_args(resource) res = self.es.count(body={'query': {'match_all': {}}}, **args) return res.get('count', 0) == 0 def get_mapping(self, index, doc_type=None):<|fim▁hole|> def _parse_hits(self, hits, resource): """Parse hits response into documents.""" datasource = self._datasource(resource) schema = config.DOMAIN[datasource[0]]['schema'] dates = get_dates(schema) docs = [] for hit in hits.get('hits', {}).get('hits', []): docs.append(format_doc(hit, schema, dates)) return ElasticCursor(hits, docs) def _es_args(self, resource, refresh=None): """Get index and doctype args.""" datasource = self._datasource(resource) args = { 'index': self.index, 'doc_type': datasource[0], } if refresh: args['refresh'] = refresh return args def _fields(self, resource): """Get projection fields for given resource.""" datasource = self._datasource(resource) keys = datasource[2].keys() return ','.join(keys) + ','.join([config.LAST_UPDATED, config.DATE_CREATED]) def _default_sort(self, resource): datasource = self._datasource(resource) return datasource[3] def build_elastic_query(doc): """ Builds a query which follows ElasticSearch syntax from doc. 1. Converts {"q":"cricket"} to the below elastic query { "query": { "filtered": { "query": { "query_string": { "query": "cricket", "lenient": false, "default_operator": "AND" } } } } } 2. Converts a faceted query {"q":"cricket", "type":['text'], "source": "AAP"} to the below elastic query { "query": { "filtered": { "filter": { "and": [ {"terms": {"type": ["text"]}}, {"term": {"source": "AAP"}} ] }, "query": { "query_string": { "query": "cricket", "lenient": false, "default_operator": "AND" } } } } } :param doc: A document object which is inline with the syntax specified in the examples. It's the developer's responsibility to pass the right object. :returns ElasticSearch query """ elastic_query, filters = {"query": {"filtered": {}}}, [] for key in doc.keys(): if key == 'q': elastic_query['query']['filtered']['query'] = _build_query_string(doc['q']) else: _value = doc[key] filters.append({"terms": {key: _value}} if isinstance(_value, list) else {"term": {key: _value}}) set_filters(elastic_query, filters) return elastic_query def _build_query_string(q, default_field=None): """ Builds "query_string" object from 'q'. :param: q of type String :param: default_field :return: dictionary object. """ query_string = {'query_string': {'query': q, 'default_operator': 'AND'}} query_string['query_string'].update({'default_field': default_field} if default_field else {'lenient': False}) return query_string<|fim▁end|>
return get_indices(self.es).get_mapping(index=index, doc_type=doc_type)
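Given the build_elastic_query docstring above, a small round-trip check of the faceted case is useful. The sketch below mirrors the module's logic rather than importing it, and assumes the default_field/lenient branch behaves as the docstring implies (default_field wins when given, lenient otherwise). Note the filtered query is Elasticsearch 1.x syntax; it was removed in 5.x.

import json

# Mirrors build_elastic_query() above for the faceted case (standalone copy).
def build(doc):
    query, filters = {'query': {'filtered': {}}}, []
    for key, value in doc.items():
        if key == 'q':
            query['query']['filtered']['query'] = {'query_string': {
                'query': value, 'default_operator': 'AND', 'lenient': False}}
        else:
            filters.append({'terms': {key: value}} if isinstance(value, list)
                           else {'term': {key: value}})
    if filters:
        query['query']['filtered']['filter'] = {'and': filters}
    return query

print(json.dumps(build({'q': 'cricket', 'type': ['text'], 'source': 'AAP'}), indent=2))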
<|file_name|>GridWidget.ts<|end_file_name|><|fim▁begin|>import Geometry = THREE.Geometry; import Mesh = THREE.Mesh; import LineBasicMaterial = THREE.LineBasicMaterial; import Material = THREE.Material; import Vector3 = THREE.Vector3; import Line = THREE.Line; import { ChartWidget } from "../Widget"; import LineSegments = THREE.LineSegments; import { Utils } from "../Utils"; import { IViewportParams } from "../Viewport"; import { IAxisOptions } from "../interfaces"; import { Color } from "../Color"; export interface IGridParamsForAxis { start: number, end: number, step: number, stepInPx: number, length: number, segmentsCount: number } /** * widget for drawing state grid */ export class GridWidget extends ChartWidget{ static widgetName = 'Grid'; private lineSegments: LineSegments; private gridSizeH: number; private gridSizeV: number; private isDestroyed = false; onReadyHandler() { var {width, height, xAxis, yAxis} = this.chart.state; this.gridSizeH = Math.floor(width / xAxis.grid.minSizePx) * 3; this.gridSizeV = Math.floor(height / yAxis.grid.minSizePx) * 3; this.initGrid(); this.updateGrid(); this.bindEvents(); } bindEvents() { // grid is bigger then interpolatedViewport, so it's no need to update it on each scroll event let updateGridThrottled = Utils.throttle(() => this.updateGrid(), 1000); this.bindEvent(this.chart.onScroll(() => updateGridThrottled()), this.chart.interpolatedViewport.onZoomInterpolation((options) => { updateGridThrottled(); this.onZoomFrame(options); }), this.chart.onDestroy(() => { this.isDestroyed = true; this.unbindEvents(); }), this.chart.onResize(() => { this.updateGrid(); }) ); } private initGrid() { let color = new Color(this.chart.state.xAxis.grid.color); let geometry = new THREE.Geometry(); let material = new THREE.LineBasicMaterial({linewidth: 1, color: color.value, opacity: color.a, transparent: true}); let xLinesCount = this.gridSizeH; let yLinesCount = this.gridSizeV; while (xLinesCount--) geometry.vertices.push(new Vector3(), new Vector3()); while (yLinesCount--) geometry.vertices.push(new Vector3(), new Vector3()); this.lineSegments = new LineSegments(geometry, material); this.lineSegments.position.setZ(-1); this.lineSegments.frustumCulled = false; } private updateGrid() { if (this.isDestroyed) return; var {yAxis, xAxis, width, height} = this.chart.state; var axisXGrid = GridWidget.getGridParamsForAxis(xAxis, width, xAxis.range.zoom); var axisYGrid = GridWidget.getGridParamsForAxis(yAxis, height, yAxis.range.zoom); var scrollXInSegments = Math.ceil(xAxis.range.scroll / axisXGrid.step); var scrollYInSegments = Math.ceil(yAxis.range.scroll / axisYGrid.step); var gridScrollXVal = scrollXInSegments * axisXGrid.step; var gridScrollYVal = scrollYInSegments * axisYGrid.step; var startXVal = axisXGrid.start + gridScrollXVal; var startYVal = axisYGrid.start + gridScrollYVal; var geometry = this.lineSegments.geometry as Geometry; var vertices = geometry.vertices; var lineInd = 0; for (let i = -this.gridSizeH / 3; i < this.gridSizeH * 2/3; i++) { let value = startXVal + i * axisXGrid.step; let lineSegment = this.getVerticalLineSegment(value, gridScrollXVal, gridScrollYVal); vertices[lineInd * 2].set(lineSegment[0].x, lineSegment[0].y, 0); vertices[lineInd * 2 + 1].set(lineSegment[1].x, lineSegment[1].y, 0); lineInd++; } for (let i = -this.gridSizeV / 3; i < this.gridSizeV * 2/3; i++) { let value = startYVal + i * axisYGrid.step; let lineSegment = this.getHorizontalLineSegment(value, gridScrollXVal, gridScrollYVal); vertices[lineInd * 2].set(lineSegment[0].x, 
lineSegment[0].y, 0); vertices[lineInd * 2 + 1].set(lineSegment[1].x, lineSegment[1].y, 0); lineInd++; } geometry.verticesNeedUpdate = true; this.lineSegments.scale.set( xAxis.range.scaleFactor * xAxis.range.zoom, yAxis.range.scaleFactor * yAxis.range.zoom, 1 ) } private getHorizontalLineSegment(yVal: number, scrollXVal: number, scrollYVal: number): Vector3[] { var chartState = this.chart; var localYVal = yVal - chartState.state.yAxis.range.zeroVal - scrollYVal; var widthVal = chartState.viewport.pxToValByXAxis(chartState.state.width); return [ new THREE.Vector3(widthVal * 2 + scrollXVal, localYVal, 0 ), new THREE.Vector3( -widthVal + scrollXVal, localYVal, 0 ) ]; } private getVerticalLineSegment(xVal: number, scrollXVal: number, scrollYVal: number): Vector3[] { let chart = this.chart; let localXVal = xVal - chart.state.xAxis.range.zeroVal - scrollXVal; let heightVal = chart.viewport.pxToValByYAxis(chart.state.height); return [ new THREE.Vector3(localXVal, heightVal * 2 + scrollYVal, 0 ), new THREE.Vector3(localXVal, -heightVal + scrollYVal, 0 ) ]; } private onZoomFrame(options: IViewportParams) { var {xAxis, yAxis} = this.chart.state; if (options.zoomX) this.lineSegments.scale.setX(xAxis.range.scaleFactor * options.zoomX); if (options.zoomY) this.lineSegments.scale.setY(yAxis.range.scaleFactor * options.zoomY); } // TODO: move this code to core static getGridParamsForAxis(axisOptions: IAxisOptions, axisWidth: number, zoom: number): IGridParamsForAxis {<|fim▁hole|> let gridStep = 0; let gridStepInPixels = 0; let minGridStepInPixels = axisOptions.grid.minSizePx; let axisLengthStr = String(axisLength); let axisLengthPointPosition = axisLengthStr.indexOf('.'); let intPartLength = axisLengthPointPosition !== -1 ? axisLengthPointPosition : axisLengthStr.length; let gridStepFound = false; let digitPos = 0; while (!gridStepFound) { let power = intPartLength - digitPos - 1; let multiplier = (Math.pow(10, power) || 1); let dividers = [1, 2, 5]; for (let dividerInd = 0; dividerInd < dividers.length; dividerInd++) { let nextGridStep = multiplier / dividers[dividerInd]; let nextGridStepInPixels = nextGridStep / axisLength * axisWidth; if (nextGridStepInPixels >= minGridStepInPixels) { gridStep = nextGridStep; gridStepInPixels = nextGridStepInPixels; } else { gridStepFound = true; if (gridStep === 0) { gridStep = nextGridStep; gridStepInPixels = nextGridStepInPixels; } break; } } if (!gridStepFound) digitPos++ } var gridStart = Math.floor(from / gridStep) * gridStep; var gridEnd = Math.floor(to / gridStep) * gridStep; return { start: gridStart, end: gridEnd, step: gridStep, stepInPx: gridStepInPixels, length: gridEnd - gridStart, segmentsCount: Math.round((gridEnd - gridStart) / gridStep) } } getObject3D() { return this.lineSegments; } }<|fim▁end|>
let axisRange = axisOptions.range; let from = axisRange.from; let to = axisRange.to; let axisLength = to - from;
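getGridParamsForAxis above walks digit positions with the dividers [1, 2, 5], i.e. it picks the smallest "nice" step from the 1-2-5 series whose projected size still meets the minimum pixel spacing. A compact Python sketch of the same search (assumes a positive axis range):

import math

def nice_grid_step(axis_length, axis_width_px, min_step_px):
    # Smallest step from the 1-2-5 series whose on-screen size is still
    # at least min_step_px; falls back to the first candidate otherwise.
    best = None
    power = math.floor(math.log10(axis_length))
    while True:
        for divider in (1, 2, 5):
            step = 10 ** power / divider
            if step / axis_length * axis_width_px >= min_step_px:
                best = step
            else:
                return best if best is not None else step
        power -= 1

print(nice_grid_step(axis_length=137.0, axis_width_px=800, min_step_px=50))  # 10.0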
<|file_name|>recipe-189745.py<|end_file_name|><|fim▁begin|>class Obfuscator: """ A simple obfuscator class using repeated xor """ def __init__(self, data): self._string = data def obfuscate(self): """Obfuscate a string by using repeated xor""" out = "" data = self._string a0=ord(data[0]) a1=ord(data[1]) e0=chr(a0^a1) out += e0 x=1 eprev=e0 while x<len(data): ax=ord(data[x]) ex=chr(ax^ord(eprev)) out += ex #throw some chaff chaff = chr(ord(ex)^ax) out += chaff eprev = ex x+=1 return out def unobfuscate(self): """ Reverse of obfuscation """ out = "" data = self._string x=len(data) - 2 while x>1: apos=data[x] aprevpos=data[x-2] epos=chr(ord(apos)^ord(aprevpos)) out += epos x -= 2 #reverse string out2="" x=len(out)-1 while x>=0: out2 += out[x] x -= 1 out=out2 #second character e2=data[2] a2=data[1] a1=chr(ord(a2)^ord(e2)) a1 += out out = a1 #first character e1=out[0] a1=data[0] a0=chr(ord(a1)^ord(e1)) a0 += out out = a0 return out def main(): testString="Python obfuscator" obfuscator = Obfuscator(testString) testStringObf = obfuscator.obfuscate() print testStringObf<|fim▁hole|> obfuscator = Obfuscator(testStringObf) testString = obfuscator.unobfuscate() print testString if __name__=="__main__": main()<|fim▁end|>
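The recipe above leans entirely on XOR being its own inverse: each output byte is the previous output XORed with the next input byte, with chaff bytes interleaved. A short demonstration of that invariant (single fixed key for brevity, where the recipe chains the previous ciphertext byte instead):

# XOR is self-inverse: (a ^ k) ^ k == a. That identity is all the recipe's
# obfuscate/unobfuscate round trip depends on.
data, key = b'Python obfuscator', 0x5A
obfuscated = bytes(b ^ key for b in data)
assert bytes(b ^ key for b in obfuscated) == data

As the recipe itself shows, this is obfuscation, not encryption.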
<|file_name|>node_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package auth import ( "fmt" "net/http" "net/http/httptest" "testing" "time" storagev1beta1 "k8s.io/api/storage/v1beta1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apiserver/pkg/authentication/request/bearertoken" "k8s.io/apiserver/pkg/authentication/token/tokenfile" "k8s.io/apiserver/pkg/authentication/user" utilfeature "k8s.io/apiserver/pkg/util/feature" utilfeaturetesting "k8s.io/apiserver/pkg/util/feature/testing" versionedinformers "k8s.io/client-go/informers" externalclientset "k8s.io/client-go/kubernetes" restclient "k8s.io/client-go/rest" "k8s.io/kubernetes/pkg/api/legacyscheme" api "k8s.io/kubernetes/pkg/apis/core" "k8s.io/kubernetes/pkg/apis/policy" "k8s.io/kubernetes/pkg/auth/nodeidentifier" clientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset" informers "k8s.io/kubernetes/pkg/client/informers/informers_generated/internalversion" "k8s.io/kubernetes/pkg/features" "k8s.io/kubernetes/pkg/kubeapiserver/authorizer" "k8s.io/kubernetes/plugin/pkg/admission/noderestriction" "k8s.io/kubernetes/test/integration/framework" ) func TestNodeAuthorizer(t *testing.T) { // Start the server so we know the address h := &framework.MasterHolder{Initialized: make(chan struct{})} apiServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { <-h.Initialized h.M.GenericAPIServer.Handler.ServeHTTP(w, req) })) const ( // Define credentials tokenMaster = "master-token" tokenNodeUnknown = "unknown-token" tokenNode1 = "node1-token" tokenNode2 = "node2-token" ) authenticator := bearertoken.New(tokenfile.New(map[string]*user.DefaultInfo{ tokenMaster: {Name: "admin", Groups: []string{"system:masters"}}, tokenNodeUnknown: {Name: "unknown", Groups: []string{"system:nodes"}}, tokenNode1: {Name: "system:node:node1", Groups: []string{"system:nodes"}}, tokenNode2: {Name: "system:node:node2", Groups: []string{"system:nodes"}}, })) // Build client config, clientset, and informers clientConfig := &restclient.Config{Host: apiServer.URL, ContentConfig: restclient.ContentConfig{NegotiatedSerializer: legacyscheme.Codecs}} superuserClient, superuserClientExternal := clientsetForToken(tokenMaster, clientConfig) informerFactory := informers.NewSharedInformerFactory(superuserClient, time.Minute) versionedInformerFactory := versionedinformers.NewSharedInformerFactory(superuserClientExternal, time.Minute) // Enabled CSIPersistentVolume feature at startup so volumeattachments get watched defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIPersistentVolume, true)() // Set up Node+RBAC authorizer authorizerConfig := &authorizer.AuthorizationConfig{ AuthorizationModes: []string{"Node", "RBAC"}, InformerFactory: informerFactory, VersionedInformerFactory: 
versionedInformerFactory, } nodeRBACAuthorizer, _, err := authorizerConfig.New() if err != nil { t.Fatal(err) } // Set up NodeRestriction admission nodeRestrictionAdmission := noderestriction.NewPlugin(nodeidentifier.NewDefaultNodeIdentifier()) nodeRestrictionAdmission.SetInternalKubeClientSet(superuserClient) if err := nodeRestrictionAdmission.ValidateInitialization(); err != nil { t.Fatal(err) } // Start the server masterConfig := framework.NewIntegrationTestMasterConfig() masterConfig.GenericConfig.Authentication.Authenticator = authenticator masterConfig.GenericConfig.Authorization.Authorizer = nodeRBACAuthorizer masterConfig.GenericConfig.AdmissionControl = nodeRestrictionAdmission _, _, closeFn := framework.RunAMasterUsingServer(masterConfig, apiServer, h) defer closeFn() // Start the informers stopCh := make(chan struct{}) defer close(stopCh) informerFactory.Start(stopCh) versionedInformerFactory.Start(stopCh) // Wait for a healthy server for { result := superuserClient.Core().RESTClient().Get().AbsPath("/healthz").Do() _, err := result.Raw() if err == nil { break } t.Log(err) time.Sleep(time.Second) }<|fim▁hole|> if _, err := superuserClient.Core().Secrets("ns").Create(&api.Secret{ObjectMeta: metav1.ObjectMeta{Name: "mysecret"}}); err != nil { t.Fatal(err) } if _, err := superuserClient.Core().Secrets("ns").Create(&api.Secret{ObjectMeta: metav1.ObjectMeta{Name: "mypvsecret"}}); err != nil { t.Fatal(err) } if _, err := superuserClient.Core().ConfigMaps("ns").Create(&api.ConfigMap{ObjectMeta: metav1.ObjectMeta{Name: "myconfigmap"}}); err != nil { t.Fatal(err) } pvName := "mypv" if _, err := superuserClientExternal.StorageV1beta1().VolumeAttachments().Create(&storagev1beta1.VolumeAttachment{ ObjectMeta: metav1.ObjectMeta{Name: "myattachment"}, Spec: storagev1beta1.VolumeAttachmentSpec{ Attacher: "foo", Source: storagev1beta1.VolumeAttachmentSource{PersistentVolumeName: &pvName}, NodeName: "node2", }, }); err != nil { t.Fatal(err) } if _, err := superuserClient.Core().PersistentVolumeClaims("ns").Create(&api.PersistentVolumeClaim{ ObjectMeta: metav1.ObjectMeta{Name: "mypvc"}, Spec: api.PersistentVolumeClaimSpec{ AccessModes: []api.PersistentVolumeAccessMode{api.ReadOnlyMany}, Resources: api.ResourceRequirements{Requests: api.ResourceList{api.ResourceStorage: resource.MustParse("1")}}, }, }); err != nil { t.Fatal(err) } if _, err := superuserClient.Core().PersistentVolumes().Create(&api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "mypv"}, Spec: api.PersistentVolumeSpec{ AccessModes: []api.PersistentVolumeAccessMode{api.ReadOnlyMany}, Capacity: api.ResourceList{api.ResourceStorage: resource.MustParse("1")}, ClaimRef: &api.ObjectReference{Namespace: "ns", Name: "mypvc"}, PersistentVolumeSource: api.PersistentVolumeSource{AzureFile: &api.AzureFilePersistentVolumeSource{ShareName: "default", SecretName: "mypvsecret"}}, }, }); err != nil { t.Fatal(err) } getSecret := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Secrets("ns").Get("mysecret", metav1.GetOptions{}) return err } } getPVSecret := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Secrets("ns").Get("mypvsecret", metav1.GetOptions{}) return err } } getConfigMap := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().ConfigMaps("ns").Get("myconfigmap", metav1.GetOptions{}) return err } } getPVC := func(client clientset.Interface) func() error { return func() error { _, err := 
client.Core().PersistentVolumeClaims("ns").Get("mypvc", metav1.GetOptions{}) return err } } getPV := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().PersistentVolumes().Get("mypv", metav1.GetOptions{}) return err } } getVolumeAttachment := func(client externalclientset.Interface) func() error { return func() error { _, err := client.StorageV1beta1().VolumeAttachments().Get("myattachment", metav1.GetOptions{}) return err } } createNode2NormalPod := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Pods("ns").Create(&api.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "node2normalpod"}, Spec: api.PodSpec{ NodeName: "node2", Containers: []api.Container{{Name: "image", Image: "busybox"}}, Volumes: []api.Volume{ {Name: "secret", VolumeSource: api.VolumeSource{Secret: &api.SecretVolumeSource{SecretName: "mysecret"}}}, {Name: "cm", VolumeSource: api.VolumeSource{ConfigMap: &api.ConfigMapVolumeSource{LocalObjectReference: api.LocalObjectReference{Name: "myconfigmap"}}}}, {Name: "pvc", VolumeSource: api.VolumeSource{PersistentVolumeClaim: &api.PersistentVolumeClaimVolumeSource{ClaimName: "mypvc"}}}, }, }, }) return err } } updateNode2NormalPodStatus := func(client clientset.Interface) func() error { return func() error { startTime := metav1.NewTime(time.Now()) _, err := client.Core().Pods("ns").UpdateStatus(&api.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "node2normalpod"}, Status: api.PodStatus{StartTime: &startTime}, }) return err } } deleteNode2NormalPod := func(client clientset.Interface) func() error { return func() error { zero := int64(0) return client.Core().Pods("ns").Delete("node2normalpod", &metav1.DeleteOptions{GracePeriodSeconds: &zero}) } } createNode2MirrorPod := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Pods("ns").Create(&api.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "node2mirrorpod", Annotations: map[string]string{api.MirrorPodAnnotationKey: "true"}, }, Spec: api.PodSpec{ NodeName: "node2", Containers: []api.Container{{Name: "image", Image: "busybox"}}, }, }) return err } } deleteNode2MirrorPod := func(client clientset.Interface) func() error { return func() error { zero := int64(0) return client.Core().Pods("ns").Delete("node2mirrorpod", &metav1.DeleteOptions{GracePeriodSeconds: &zero}) } } createNode2 := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Nodes().Create(&api.Node{ObjectMeta: metav1.ObjectMeta{Name: "node2"}}) return err } } updateNode2Status := func(client clientset.Interface) func() error { return func() error { _, err := client.Core().Nodes().UpdateStatus(&api.Node{ ObjectMeta: metav1.ObjectMeta{Name: "node2"}, Status: api.NodeStatus{}, }) return err } } deleteNode2 := func(client clientset.Interface) func() error { return func() error { return client.Core().Nodes().Delete("node2", nil) } } createNode2NormalPodEviction := func(client clientset.Interface) func() error { return func() error { return client.Policy().Evictions("ns").Evict(&policy.Eviction{ TypeMeta: metav1.TypeMeta{ APIVersion: "policy/v1beta1", Kind: "Eviction", }, ObjectMeta: metav1.ObjectMeta{ Name: "node2normalpod", Namespace: "ns", }, }) } } createNode2MirrorPodEviction := func(client clientset.Interface) func() error { return func() error { return client.Policy().Evictions("ns").Evict(&policy.Eviction{ TypeMeta: metav1.TypeMeta{ APIVersion: "policy/v1beta1", Kind: "Eviction", }, ObjectMeta: metav1.ObjectMeta{ Name: 
"node2mirrorpod", Namespace: "ns", }, }) } } capacity := 50 updatePVCCapacity := func(client clientset.Interface) func() error { return func() error { capacity++ statusString := fmt.Sprintf("{\"status\": {\"capacity\": {\"storage\": \"%dG\"}}}", capacity) patchBytes := []byte(statusString) _, err := client.Core().PersistentVolumeClaims("ns").Patch("mypvc", types.StrategicMergePatchType, patchBytes, "status") return err } } updatePVCPhase := func(client clientset.Interface) func() error { return func() error { patchBytes := []byte(`{"status":{"phase": "Bound"}}`) _, err := client.Core().PersistentVolumeClaims("ns").Patch("mypvc", types.StrategicMergePatchType, patchBytes, "status") return err } } nodeanonClient, _ := clientsetForToken(tokenNodeUnknown, clientConfig) node1Client, node1ClientExternal := clientsetForToken(tokenNode1, clientConfig) node2Client, node2ClientExternal := clientsetForToken(tokenNode2, clientConfig) // all node requests from node1 and unknown node fail expectForbidden(t, getSecret(nodeanonClient)) expectForbidden(t, getPVSecret(nodeanonClient)) expectForbidden(t, getConfigMap(nodeanonClient)) expectForbidden(t, getPVC(nodeanonClient)) expectForbidden(t, getPV(nodeanonClient)) expectForbidden(t, createNode2NormalPod(nodeanonClient)) expectForbidden(t, createNode2MirrorPod(nodeanonClient)) expectForbidden(t, deleteNode2NormalPod(nodeanonClient)) expectForbidden(t, deleteNode2MirrorPod(nodeanonClient)) expectForbidden(t, createNode2MirrorPodEviction(nodeanonClient)) expectForbidden(t, createNode2(nodeanonClient)) expectForbidden(t, updateNode2Status(nodeanonClient)) expectForbidden(t, deleteNode2(nodeanonClient)) expectForbidden(t, getSecret(node1Client)) expectForbidden(t, getPVSecret(node1Client)) expectForbidden(t, getConfigMap(node1Client)) expectForbidden(t, getPVC(node1Client)) expectForbidden(t, getPV(node1Client)) expectForbidden(t, createNode2NormalPod(nodeanonClient)) expectForbidden(t, createNode2MirrorPod(node1Client)) expectNotFound(t, deleteNode2MirrorPod(node1Client)) expectNotFound(t, createNode2MirrorPodEviction(node1Client)) expectForbidden(t, createNode2(node1Client)) expectForbidden(t, updateNode2Status(node1Client)) expectForbidden(t, deleteNode2(node1Client)) // related object requests from node2 fail expectForbidden(t, getSecret(node2Client)) expectForbidden(t, getPVSecret(node2Client)) expectForbidden(t, getConfigMap(node2Client)) expectForbidden(t, getPVC(node2Client)) expectForbidden(t, getPV(node2Client)) expectForbidden(t, createNode2NormalPod(nodeanonClient)) // mirror pod and self node lifecycle is allowed expectAllowed(t, createNode2MirrorPod(node2Client)) expectAllowed(t, deleteNode2MirrorPod(node2Client)) expectAllowed(t, createNode2MirrorPod(node2Client)) expectAllowed(t, createNode2MirrorPodEviction(node2Client)) expectAllowed(t, createNode2(node2Client)) expectAllowed(t, updateNode2Status(node2Client)) expectAllowed(t, deleteNode2(node2Client)) // create a pod as an admin to add object references expectAllowed(t, createNode2NormalPod(superuserClient)) // unidentifiable node and node1 are still forbidden expectForbidden(t, getSecret(nodeanonClient)) expectForbidden(t, getPVSecret(nodeanonClient)) expectForbidden(t, getConfigMap(nodeanonClient)) expectForbidden(t, getPVC(nodeanonClient)) expectForbidden(t, getPV(nodeanonClient)) expectForbidden(t, createNode2NormalPod(nodeanonClient)) expectForbidden(t, updateNode2NormalPodStatus(nodeanonClient)) expectForbidden(t, deleteNode2NormalPod(nodeanonClient)) expectForbidden(t, 
createNode2NormalPodEviction(nodeanonClient)) expectForbidden(t, createNode2MirrorPod(nodeanonClient)) expectForbidden(t, deleteNode2MirrorPod(nodeanonClient)) expectForbidden(t, createNode2MirrorPodEviction(nodeanonClient)) expectForbidden(t, getSecret(node1Client)) expectForbidden(t, getPVSecret(node1Client)) expectForbidden(t, getConfigMap(node1Client)) expectForbidden(t, getPVC(node1Client)) expectForbidden(t, getPV(node1Client)) expectForbidden(t, createNode2NormalPod(node1Client)) expectForbidden(t, updateNode2NormalPodStatus(node1Client)) expectForbidden(t, deleteNode2NormalPod(node1Client)) expectForbidden(t, createNode2NormalPodEviction(node1Client)) expectForbidden(t, createNode2MirrorPod(node1Client)) expectNotFound(t, deleteNode2MirrorPod(node1Client)) expectNotFound(t, createNode2MirrorPodEviction(node1Client)) // node2 can get referenced objects now expectAllowed(t, getSecret(node2Client)) expectAllowed(t, getPVSecret(node2Client)) expectAllowed(t, getConfigMap(node2Client)) expectAllowed(t, getPVC(node2Client)) expectAllowed(t, getPV(node2Client)) expectForbidden(t, createNode2NormalPod(node2Client)) expectAllowed(t, updateNode2NormalPodStatus(node2Client)) expectAllowed(t, deleteNode2NormalPod(node2Client)) expectAllowed(t, createNode2MirrorPod(node2Client)) expectAllowed(t, deleteNode2MirrorPod(node2Client)) // recreate as an admin to test eviction expectAllowed(t, createNode2NormalPod(superuserClient)) expectAllowed(t, createNode2MirrorPod(superuserClient)) expectAllowed(t, createNode2NormalPodEviction(node2Client)) expectAllowed(t, createNode2MirrorPodEviction(node2Client)) // re-create a pod as an admin to add object references expectAllowed(t, createNode2NormalPod(superuserClient)) // ExpandPersistentVolumes feature disabled defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.ExpandPersistentVolumes, false)() expectForbidden(t, updatePVCCapacity(node1Client)) expectForbidden(t, updatePVCCapacity(node2Client)) // ExpandPersistentVolumes feature enabled defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.ExpandPersistentVolumes, true)() expectForbidden(t, updatePVCCapacity(node1Client)) expectAllowed(t, updatePVCCapacity(node2Client)) expectForbidden(t, updatePVCPhase(node2Client)) // Disabled CSIPersistentVolume feature defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIPersistentVolume, false)() expectForbidden(t, getVolumeAttachment(node1ClientExternal)) expectForbidden(t, getVolumeAttachment(node2ClientExternal)) // Enabled CSIPersistentVolume feature defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIPersistentVolume, true)() expectForbidden(t, getVolumeAttachment(node1ClientExternal)) expectAllowed(t, getVolumeAttachment(node2ClientExternal)) //TODO(mikedanese): integration test node restriction of TokenRequest } // expect executes a function a set number of times until it either returns the // expected error or executes too many times. It returns if the retries timed // out and the last error returned by the method. 
func expect(t *testing.T, f func() error, wantErr func(error) bool) (timeout bool, lastErr error) { t.Helper() err := wait.PollImmediate(time.Second, 30*time.Second, func() (bool, error) { t.Helper() lastErr = f() if wantErr(lastErr) { return true, nil } t.Logf("unexpected response, will retry: %v", lastErr) return false, nil }) return err == nil, lastErr } func expectForbidden(t *testing.T, f func() error) { t.Helper() if ok, err := expect(t, f, errors.IsForbidden); !ok { t.Errorf("Expected forbidden error, got %v", err) } } func expectNotFound(t *testing.T, f func() error) { t.Helper() if ok, err := expect(t, f, errors.IsNotFound); !ok { t.Errorf("Expected notfound error, got %v", err) } } func expectAllowed(t *testing.T, f func() error) { t.Helper() if ok, err := expect(t, f, func(e error) bool { return e == nil }); !ok { t.Errorf("Expected no error, got %v", err) } }<|fim▁end|>
// Create objects
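The expect helpers above poll until the call yields the error class the test wants, which keeps assertions stable while informer caches catch up. The same pattern sketched outside Go, using exceptions in place of returned errors (hypothetical client calls):

import time

def expect(fn, want_err, timeout=30.0, interval=1.0):
    # Poll until fn() produces the expected error (or lack of one), so the
    # test tolerates authorizer caches that are still warming up.
    deadline = time.monotonic() + timeout
    last = None
    while time.monotonic() < deadline:
        try:
            fn()
            last = None
        except Exception as exc:
            last = exc
        if want_err(last):
            return True, last
        time.sleep(interval)
    return False, last

# e.g. expect(lambda: get_secret(node1_client), lambda e: isinstance(e, PermissionError))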
<|file_name|>example-Gnomonic.cpp<|end_file_name|><|fim▁begin|>using namespace System; using namespace NETGeographicLib; int main(array<System::String ^> ^/*args*/) { try { Geodesic^ geod = gcnew Geodesic(); // WGS84 const double lat0 = 48 + 50/60.0, lon0 = 2 + 20/60.0; // Paris Gnomonic^ proj = gcnew Gnomonic(geod); { // Sample forward calculation double lat = 50.9, lon = 1.8; // Calais double x, y; proj->Forward(lat0, lon0, lat, lon, x, y); Console::WriteLine(String::Format("X: {0} Y: {1}", x, y)); } { // Sample reverse calculation double x = -38e3, y = 230e3; double lat, lon; proj->Reverse(lat0, lon0, x, y, lat, lon); Console::WriteLine(String::Format("Latitude: {0} Longitude: {1}", lat, lon)); } } catch (GeographicErr^ e) {<|fim▁hole|> Console::WriteLine(String::Format("Caught exception: {0}", e->Message)); return -1; } return 0; }<|fim▁end|>
<|file_name|>os_str.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// The underlying OsString/OsStr implementation on Windows is a /// wrapper around the "WTF-8" encoding; see the `wtf8` module for more. use borrow::Cow; use fmt::{self, Debug}; use sys_common::wtf8::{Wtf8, Wtf8Buf}; use string::String; use result::Result; use option::Option; use mem; #[derive(Clone, Hash)]<|fim▁hole|>pub struct Buf { pub inner: Wtf8Buf } impl Debug for Buf { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { self.as_slice().fmt(formatter) } } pub struct Slice { pub inner: Wtf8 } impl Debug for Slice { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { self.inner.fmt(formatter) } } impl Buf { pub fn from_string(s: String) -> Buf { Buf { inner: Wtf8Buf::from_string(s) } } pub fn as_slice(&self) -> &Slice { unsafe { mem::transmute(self.inner.as_slice()) } } pub fn into_string(self) -> Result<String, Buf> { self.inner.into_string().map_err(|buf| Buf { inner: buf }) } pub fn push_slice(&mut self, s: &Slice) { self.inner.push_wtf8(&s.inner) } } impl Slice { pub fn from_str(s: &str) -> &Slice { unsafe { mem::transmute(Wtf8::from_str(s)) } } pub fn to_str(&self) -> Option<&str> { self.inner.as_str() } pub fn to_string_lossy(&self) -> Cow<str> { self.inner.to_string_lossy() } pub fn to_owned(&self) -> Buf { let mut buf = Wtf8Buf::with_capacity(self.inner.len()); buf.push_wtf8(&self.inner); Buf { inner: buf } } }<|fim▁end|>
<|file_name|>test_clipping.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #coding:utf-8 # Author: mozman --<[email protected]> # Purpose: test mixin Clipping # Created: 31.10.2010 # Copyright (C) 2010, Manfred Moitzi # License: GPLv3 <|fim▁hole|>from svgwrite.mixins import Clipping from svgwrite.base import BaseElement class SVGMock(BaseElement, Clipping): elementname = 'svg' class TestClipping(unittest.TestCase): def test_clip_rect_numbers(self): obj = SVGMock(debug=True) obj.clip_rect(1, 2, 3, 4) self.assertEqual(obj['clip'], 'rect(1,2,3,4)') def test_clip_rect_auto(self): obj = SVGMock(debug=True) obj.clip_rect('auto', 'auto', 'auto', 'auto') self.assertEqual(obj['clip'], 'rect(auto,auto,auto,auto)') if __name__=='__main__': unittest.main()<|fim▁end|>
import unittest
<|file_name|>SVGFEDropShadowElement.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) Research In Motion Limited 2011. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this library; see the file COPYING.LIB. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. */ #include "config.h" #include "core/svg/SVGFEDropShadowElement.h" #include "SVGNames.h" #include "core/rendering/style/RenderStyle.h" #include "core/rendering/style/SVGRenderStyle.h" #include "core/svg/SVGElementInstance.h" #include "core/svg/SVGParserUtilities.h" #include "core/svg/graphics/filters/SVGFilterBuilder.h" namespace WebCore { // Animated property definitions DEFINE_ANIMATED_STRING(SVGFEDropShadowElement, SVGNames::inAttr, In1, in1) DEFINE_ANIMATED_NUMBER(SVGFEDropShadowElement, SVGNames::dxAttr, Dx, dx) DEFINE_ANIMATED_NUMBER(SVGFEDropShadowElement, SVGNames::dyAttr, Dy, dy) DEFINE_ANIMATED_NUMBER_MULTIPLE_WRAPPERS(SVGFEDropShadowElement, SVGNames::stdDeviationAttr, stdDeviationXIdentifier(), StdDeviationX, stdDeviationX) DEFINE_ANIMATED_NUMBER_MULTIPLE_WRAPPERS(SVGFEDropShadowElement, SVGNames::stdDeviationAttr, stdDeviationYIdentifier(), StdDeviationY, stdDeviationY) BEGIN_REGISTER_ANIMATED_PROPERTIES(SVGFEDropShadowElement) REGISTER_LOCAL_ANIMATED_PROPERTY(in1) REGISTER_LOCAL_ANIMATED_PROPERTY(dx) REGISTER_LOCAL_ANIMATED_PROPERTY(dy) REGISTER_LOCAL_ANIMATED_PROPERTY(stdDeviationX) REGISTER_LOCAL_ANIMATED_PROPERTY(stdDeviationY) REGISTER_PARENT_ANIMATED_PROPERTIES(SVGFilterPrimitiveStandardAttributes) END_REGISTER_ANIMATED_PROPERTIES inline SVGFEDropShadowElement::SVGFEDropShadowElement(const QualifiedName& tagName, Document* document) : SVGFilterPrimitiveStandardAttributes(tagName, document) , m_dx(2) , m_dy(2) , m_stdDeviationX(2) , m_stdDeviationY(2) { ASSERT(hasTagName(SVGNames::feDropShadowTag)); ScriptWrappable::init(this); registerAnimatedPropertiesForSVGFEDropShadowElement(); } PassRefPtr<SVGFEDropShadowElement> SVGFEDropShadowElement::create(const QualifiedName& tagName, Document* document) { return adoptRef(new SVGFEDropShadowElement(tagName, document)); } const AtomicString& SVGFEDropShadowElement::stdDeviationXIdentifier() { DEFINE_STATIC_LOCAL(AtomicString, s_identifier, ("SVGStdDeviationX", AtomicString::ConstructFromLiteral)); return s_identifier; } const AtomicString& SVGFEDropShadowElement::stdDeviationYIdentifier() { DEFINE_STATIC_LOCAL(AtomicString, s_identifier, ("SVGStdDeviationY", AtomicString::ConstructFromLiteral)); return s_identifier; } void SVGFEDropShadowElement::setStdDeviation(float x, float y) { setStdDeviationXBaseValue(x); setStdDeviationYBaseValue(y); invalidate(); } bool SVGFEDropShadowElement::isSupportedAttribute(const QualifiedName& attrName) { DEFINE_STATIC_LOCAL(HashSet<QualifiedName>, supportedAttributes, ()); if (supportedAttributes.isEmpty()) { supportedAttributes.add(SVGNames::inAttr); supportedAttributes.add(SVGNames::dxAttr); 
supportedAttributes.add(SVGNames::dyAttr); supportedAttributes.add(SVGNames::stdDeviationAttr); } return supportedAttributes.contains<SVGAttributeHashTranslator>(attrName); } void SVGFEDropShadowElement::parseAttribute(const QualifiedName& name, const AtomicString& value) { if (!isSupportedAttribute(name)) { SVGFilterPrimitiveStandardAttributes::parseAttribute(name, value); return; } if (name == SVGNames::stdDeviationAttr) { float x, y; if (parseNumberOptionalNumber(value, x, y)) { setStdDeviationXBaseValue(x); setStdDeviationYBaseValue(y); } return; } if (name == SVGNames::inAttr) { setIn1BaseValue(value); return;<|fim▁hole|> if (name == SVGNames::dxAttr) { setDxBaseValue(value.toFloat()); return; } if (name == SVGNames::dyAttr) { setDyBaseValue(value.toFloat()); return; } ASSERT_NOT_REACHED(); } void SVGFEDropShadowElement::svgAttributeChanged(const QualifiedName& attrName) { if (!isSupportedAttribute(attrName)) { SVGFilterPrimitiveStandardAttributes::svgAttributeChanged(attrName); return; } SVGElementInstance::InvalidationGuard invalidationGuard(this); if (attrName == SVGNames::inAttr || attrName == SVGNames::stdDeviationAttr || attrName == SVGNames::dxAttr || attrName == SVGNames::dyAttr) { invalidate(); return; } ASSERT_NOT_REACHED(); } PassRefPtr<FilterEffect> SVGFEDropShadowElement::build(SVGFilterBuilder* filterBuilder, Filter* filter) { RenderObject* renderer = this->renderer(); if (!renderer) return 0; if (stdDeviationX() < 0 || stdDeviationY() < 0) return 0; ASSERT(renderer->style()); const SVGRenderStyle* svgStyle = renderer->style()->svgStyle(); Color color = svgStyle->floodColor(); float opacity = svgStyle->floodOpacity(); FilterEffect* input1 = filterBuilder->getEffectById(in1()); if (!input1) return 0; RefPtr<FilterEffect> effect = FEDropShadow::create(filter, stdDeviationX(), stdDeviationY(), dx(), dy(), color, opacity); effect->inputEffects().append(input1); return effect.release(); } }<|fim▁end|>
}
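
For reference, this is the kind of markup the element above consumes: the attribute names (in, dx, dy, stdDeviation) come from parseAttribute(), stdDeviation accepts one or two numbers via parseNumberOptionalNumber(), and build() reads the shadow colour and opacity from the CSS flood-color/flood-opacity properties. A hedged Python sketch that just writes such a document out (the geometry, ids, and styling values are made up):

```python
# Illustration only: emits an SVG exercising the attributes the C++ element parses.
svg = """<svg xmlns="http://www.w3.org/2000/svg" width="120" height="120">
  <filter id="shadow">
    <feDropShadow in="SourceGraphic" dx="4" dy="4" stdDeviation="2 2"
                  style="flood-color: black; flood-opacity: 0.5"/>
  </filter>
  <rect x="20" y="20" width="60" height="60" fill="tomato" filter="url(#shadow)"/>
</svg>"""
with open("drop_shadow_demo.svg", "w") as fh:
    fh.write(svg)
```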
<|file_name|>person.go<|end_file_name|><|fim▁begin|>package face

// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/validation"
	"net/http"
)

// PersonClient is an API for face detection, verification, and identification.
type PersonClient struct {
	ManagementClient
}

// NewPersonClient creates an instance of the PersonClient client.
func NewPersonClient(subscriptionKey string, azureRegion AzureRegions) PersonClient {
	return PersonClient{New(subscriptionKey, azureRegion)}
}

// AddFace add a representative face to a person for identification. The input face is specified as an image with a
// targetFace rectangle.
//
// personGroupID is specifying the person group containing the target person. personID is target person that the face
// is added to. userData is user-specified data about the target face to add for any purpose. The maximum length is
// 1KB. targetFace is a face rectangle to specify the target face to be added to a person in the format of
// "targetFace=left,top,width,height". E.g. "targetFace=10,10,100,100". If there is more than one face in the image,
// targetFace is required to specify which face to add. No targetFace means there is only one face detected in the
// entire image.
func (client PersonClient) AddFace(personGroupID string, personID string, userData string, targetFace string) (result autorest.Response, err error) {
	req, err := client.AddFacePreparer(personGroupID, personID, userData, targetFace)
	if err != nil {
		err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFace", nil, "Failure preparing request")
		return
	}

	resp, err := client.AddFaceSender(req)
	if err != nil {
		result.Response = resp
		err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFace", resp, "Failure sending request")
		return
	}

	result, err = client.AddFaceResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFace", resp, "Failure responding to request")
	}

	return
}

// AddFacePreparer prepares the AddFace request.
func (client PersonClient) AddFacePreparer(personGroupID string, personID string, userData string, targetFace string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } queryParameters := map[string]interface{}{} if len(userData) > 0 { queryParameters["userData"] = autorest.Encode("query", userData) } if len(targetFace) > 0 { queryParameters["targetFace"] = autorest.Encode("query", targetFace) } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}/persistedFaces", pathParameters), autorest.WithQueryParameters(queryParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // AddFaceSender sends the AddFace request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) AddFaceSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // AddFaceResponder handles the response to the AddFace request. The method always // closes the http.Response Body. func (client PersonClient) AddFaceResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // AddFaceFromStream add a representative face to a person for identification. The input face is specified as an image // with a targetFace rectangle. // // personGroupID is specifying the person group containing the target person. personID is target person that the face // is added to. userData is user-specified data about the target face to add for any purpose. The maximum length is // 1KB. targetFace is a face rectangle to specify the target face to be added to a person, in the format of // "targetFace=left,top,width,height". E.g. "targetFace=10,10,100,100". If there is more than one face in the image, // targetFace is required to specify which face to add. No targetFace means there is only one face detected in the // entire image. func (client PersonClient) AddFaceFromStream(personGroupID string, personID string, userData string, targetFace string) (result autorest.Response, err error) { req, err := client.AddFaceFromStreamPreparer(personGroupID, personID, userData, targetFace) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFaceFromStream", nil, "Failure preparing request") return } resp, err := client.AddFaceFromStreamSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFaceFromStream", resp, "Failure sending request") return } result, err = client.AddFaceFromStreamResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "AddFaceFromStream", resp, "Failure responding to request") } return } // AddFaceFromStreamPreparer prepares the AddFaceFromStream request. 
func (client PersonClient) AddFaceFromStreamPreparer(personGroupID string, personID string, userData string, targetFace string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } queryParameters := map[string]interface{}{} if len(userData) > 0 { queryParameters["userData"] = autorest.Encode("query", userData) } if len(targetFace) > 0 { queryParameters["targetFace"] = autorest.Encode("query", targetFace) } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}/persistedFaces", pathParameters), autorest.WithQueryParameters(queryParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // AddFaceFromStreamSender sends the AddFaceFromStream request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) AddFaceFromStreamSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // AddFaceFromStreamResponder handles the response to the AddFaceFromStream request. The method always // closes the http.Response Body. func (client PersonClient) AddFaceFromStreamResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // Create create a new person in a specified person group. // // personGroupID is specifying the target person group to create the person. func (client PersonClient) Create(personGroupID string, body CreatePersonRequest) (result CreatePersonResult, err error) { if err := validation.Validate([]validation.Validation{ {TargetValue: body, Constraints: []validation.Constraint{{Target: "body.Name", Name: validation.Null, Rule: false, Chain: []validation.Constraint{{Target: "body.Name", Name: validation.MaxLength, Rule: 128, Chain: nil}}}, {Target: "body.UserData", Name: validation.Null, Rule: false, Chain: []validation.Constraint{{Target: "body.UserData", Name: validation.MaxLength, Rule: 16384, Chain: nil}}}}}}); err != nil { return result, validation.NewErrorWithValidationError(err, "face.PersonClient", "Create") } req, err := client.CreatePreparer(personGroupID, body) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Create", nil, "Failure preparing request") return } resp, err := client.CreateSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "face.PersonClient", "Create", resp, "Failure sending request") return } result, err = client.CreateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Create", resp, "Failure responding to request") } return } // CreatePreparer prepares the Create request. 
func (client PersonClient) CreatePreparer(personGroupID string, body CreatePersonRequest) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPost(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons", pathParameters), autorest.WithJSON(body), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // CreateSender sends the Create request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) CreateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // CreateResponder handles the response to the Create request. The method always // closes the http.Response Body. func (client PersonClient) CreateResponder(resp *http.Response) (result CreatePersonResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Delete delete an existing person from a person group. Persisted face images of the person will also be deleted. // // personGroupID is specifying the person group containing the person. personID is the target personId to delete. func (client PersonClient) Delete(personGroupID string, personID string) (result autorest.Response, err error) { req, err := client.DeletePreparer(personGroupID, personID) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Delete", nil, "Failure preparing request") return } resp, err := client.DeleteSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "face.PersonClient", "Delete", resp, "Failure sending request") return } result, err = client.DeleteResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Delete", resp, "Failure responding to request") } return } // DeletePreparer prepares the Delete request. func (client PersonClient) DeletePreparer(personGroupID string, personID string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}", pathParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) DeleteSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. 
func (client PersonClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // DeleteFace delete a face from a person. Relative image for the persisted face will also be deleted. // // personGroupID is specifying the person group containing the target person. personID is specifying the person that // the target persisted face belong to. persistedFaceID is the persisted face to remove. func (client PersonClient) DeleteFace(personGroupID string, personID string, persistedFaceID string) (result autorest.Response, err error) { req, err := client.DeleteFacePreparer(personGroupID, personID, persistedFaceID) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "DeleteFace", nil, "Failure preparing request") return } resp, err := client.DeleteFaceSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "face.PersonClient", "DeleteFace", resp, "Failure sending request") return } result, err = client.DeleteFaceResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "DeleteFace", resp, "Failure responding to request") } return } // DeleteFacePreparer prepares the DeleteFace request. func (client PersonClient) DeleteFacePreparer(personGroupID string, personID string, persistedFaceID string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "persistedFaceId": autorest.Encode("path", persistedFaceID), "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}/persistedFaces/{persistedFaceId}", pathParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // DeleteFaceSender sends the DeleteFace request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) DeleteFaceSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // DeleteFaceResponder handles the response to the DeleteFace request. The method always // closes the http.Response Body. func (client PersonClient) DeleteFaceResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // Get retrieve a person's information, including registered persisted faces, name and userData. // // personGroupID is specifying the person group containing the target person. personID is specifying the target person. 
func (client PersonClient) Get(personGroupID string, personID string) (result PersonResult, err error) { req, err := client.GetPreparer(personGroupID, personID) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Get", nil, "Failure preparing request") return } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "face.PersonClient", "Get", resp, "Failure sending request") return } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Get", resp, "Failure responding to request") } return } // GetPreparer prepares the Get request. func (client PersonClient) GetPreparer(personGroupID string, personID string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}", pathParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) GetSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // GetResponder handles the response to the Get request. The method always // closes the http.Response Body. func (client PersonClient) GetResponder(resp *http.Response) (result PersonResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // GetFace retrieve information about a persisted face (specified by persistedFaceId, personId and its belonging // personGroupId). // // personGroupID is specifying the person group containing the target person. personID is specifying the target person // that the face belongs to. persistedFaceID is the persistedFaceId of the target persisted face of the person. func (client PersonClient) GetFace(personGroupID string, personID string, persistedFaceID string) (result PersonFaceResult, err error) { req, err := client.GetFacePreparer(personGroupID, personID, persistedFaceID) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "GetFace", nil, "Failure preparing request") return } resp, err := client.GetFaceSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "face.PersonClient", "GetFace", resp, "Failure sending request") return } result, err = client.GetFaceResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "GetFace", resp, "Failure responding to request") } return } // GetFacePreparer prepares the GetFace request. 
func (client PersonClient) GetFacePreparer(personGroupID string, personID string, persistedFaceID string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "persistedFaceId": autorest.Encode("path", persistedFaceID), "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}/persistedFaces/{persistedFaceId}", pathParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // GetFaceSender sends the GetFace request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) GetFaceSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // GetFaceResponder handles the response to the GetFace request. The method always // closes the http.Response Body. func (client PersonClient) GetFaceResponder(resp *http.Response) (result PersonFaceResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // List list all persons in a person group, and retrieve person information (including personId, name, userData and // persistedFaceIds of registered faces of the person). // // personGroupID is personGroupId of the target person group. func (client PersonClient) List(personGroupID string) (result ListPersonResult, err error) { req, err := client.ListPreparer(personGroupID) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "List", nil, "Failure preparing request") return } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "face.PersonClient", "List", resp, "Failure sending request") return } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "List", resp, "Failure responding to request") } return } // ListPreparer prepares the List request. func (client PersonClient) ListPreparer(personGroupID string) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons", pathParameters), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) }<|fim▁hole|> return autorest.SendWithSender(client, req) } // ListResponder handles the response to the List request. The method always // closes the http.Response Body. 
func (client PersonClient) ListResponder(resp *http.Response) (result ListPersonResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result.Value), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Update update name or userData of a person. // // personGroupID is specifying the person group containing the target person. personID is personId of the target // person. func (client PersonClient) Update(personGroupID string, personID string, body CreatePersonRequest) (result autorest.Response, err error) { req, err := client.UpdatePreparer(personGroupID, personID, body) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Update", nil, "Failure preparing request") return } resp, err := client.UpdateSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "face.PersonClient", "Update", resp, "Failure sending request") return } result, err = client.UpdateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "Update", resp, "Failure responding to request") } return } // UpdatePreparer prepares the Update request. func (client PersonClient) UpdatePreparer(personGroupID string, personID string, body CreatePersonRequest) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPatch(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}", pathParameters), autorest.WithJSON(body), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // UpdateSender sends the Update request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) UpdateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // UpdateResponder handles the response to the Update request. The method always // closes the http.Response Body. func (client PersonClient) UpdateResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // UpdateFace update a person persisted face's userData field. // // personGroupID is specifying the person group containing the target person. personID is personId of the target // person. persistedFaceID is persistedFaceId of target face, which is persisted and will not expire. 
func (client PersonClient) UpdateFace(personGroupID string, personID string, persistedFaceID string, body UpdatePersonFaceDataRequest) (result autorest.Response, err error) { req, err := client.UpdateFacePreparer(personGroupID, personID, persistedFaceID, body) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "UpdateFace", nil, "Failure preparing request") return } resp, err := client.UpdateFaceSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "face.PersonClient", "UpdateFace", resp, "Failure sending request") return } result, err = client.UpdateFaceResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "face.PersonClient", "UpdateFace", resp, "Failure responding to request") } return } // UpdateFacePreparer prepares the UpdateFace request. func (client PersonClient) UpdateFacePreparer(personGroupID string, personID string, persistedFaceID string, body UpdatePersonFaceDataRequest) (*http.Request, error) { urlParameters := map[string]interface{}{ "AzureRegion": client.AzureRegion, } pathParameters := map[string]interface{}{ "persistedFaceId": autorest.Encode("path", persistedFaceID), "personGroupId": autorest.Encode("path", personGroupID), "personId": autorest.Encode("path", personID), } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPatch(), autorest.WithCustomBaseURL("https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0", urlParameters), autorest.WithPathParameters("/persongroups/{personGroupId}/persons/{personId}/persistedFaces/{persistedFaceId}", pathParameters), autorest.WithJSON(body), autorest.WithHeader("Ocp-Apim-Subscription-Key", client.SubscriptionKey)) return preparer.Prepare(&http.Request{}) } // UpdateFaceSender sends the UpdateFace request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) UpdateFaceSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // UpdateFaceResponder handles the response to the UpdateFace request. The method always // closes the http.Response Body. func (client PersonClient) UpdateFaceResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return }<|fim▁end|>
// ListSender sends the List request. The method will close the // http.Response Body if it receives an error. func (client PersonClient) ListSender(req *http.Request) (*http.Response, error) {
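
The preparers above fully describe the wire format, so the same call can be sketched outside Go. A hedged Python requests equivalent of CreatePreparer (the base URL template, resource path, and Ocp-Apim-Subscription-Key header are taken verbatim from the code; the region, key, group id, and body values are placeholders, and the length limits mirror Create's validation rules):

```python
# Sketch of the request CreatePreparer assembles; not the official SDK.
import requests

region = "westus"                  # placeholder AzureRegion value
key = "YOUR_SUBSCRIPTION_KEY"      # placeholder subscription key
group = "demo-group"               # placeholder personGroupId

resp = requests.post(
    f"https://{region}.api.cognitive.microsoft.com/face/v1.0/persongroups/{group}/persons",
    headers={"Ocp-Apim-Subscription-Key": key},
    # Create validates Name <= 128 chars and UserData <= 16384 chars
    json={"name": "Ada", "userData": "example person"},
)
resp.raise_for_status()
print(resp.json())  # expected to contain the new person's id
```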
<|file_name|>strategy_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package clusterservicebroker import ( "testing" sc "github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) func clusterServiceBrokerWithOldSpec() *sc.ClusterServiceBroker { return &sc.ClusterServiceBroker{ ObjectMeta: metav1.ObjectMeta{ Generation: 1, }, Spec: sc.ClusterServiceBrokerSpec{ CommonServiceBrokerSpec: sc.CommonServiceBrokerSpec{ URL: "https://kubernetes.default.svc:443/brokers/template.k8s.io", }, }, Status: sc.ClusterServiceBrokerStatus{ CommonServiceBrokerStatus: sc.CommonServiceBrokerStatus{ Conditions: []sc.ServiceBrokerCondition{ { Type: sc.ServiceBrokerConditionReady, Status: sc.ConditionFalse, }, }, }, }, } } func clusterServiceBrokerWithNewSpec() *sc.ClusterServiceBroker { b := clusterServiceBrokerWithOldSpec() b.Spec.URL = "new" return b } // TestClusterServiceBrokerStrategyTrivial is the testing of the trivial hardcoded // boolean flags. func TestClusterServiceBrokerStrategyTrivial(t *testing.T) { if clusterServiceBrokerRESTStrategies.NamespaceScoped() { t.Errorf("clusterservicebroker must not be namespace scoped") } if clusterServiceBrokerRESTStrategies.AllowCreateOnUpdate() { t.Errorf("clusterservicebroker should not allow create on update") } if clusterServiceBrokerRESTStrategies.AllowUnconditionalUpdate() { t.Errorf("clusterservicebroker should not allow unconditional update") } } // TestClusterServiceBrokerCreate func TestClusterServiceBroker(t *testing.T) { // Create a clusterservicebroker or clusterservicebrokers broker := &sc.ClusterServiceBroker{ Spec: sc.ClusterServiceBrokerSpec{ CommonServiceBrokerSpec: sc.CommonServiceBrokerSpec{ URL: "abcd", }, }, Status: sc.ClusterServiceBrokerStatus{ CommonServiceBrokerStatus: sc.CommonServiceBrokerStatus{ Conditions: nil, }, }, } // Canonicalize the broker clusterServiceBrokerRESTStrategies.PrepareForCreate(nil, broker) if broker.Status.Conditions == nil { t.Fatalf("Fresh clusterservicebroker should have empty status") } if len(broker.Status.Conditions) != 0 { t.Fatalf("Fresh clusterservicebroker should have empty status") } } // TestClusterServiceBrokerUpdate tests that generation is incremented // correctly when the spec of a ClusterServiceBroker is updated. 
func TestClusterServiceBrokerUpdate(t *testing.T) { cases := []struct { name string older *sc.ClusterServiceBroker newer *sc.ClusterServiceBroker shouldGenerationIncrement bool }{ { name: "no spec change", older: clusterServiceBrokerWithOldSpec(), newer: clusterServiceBrokerWithOldSpec(), shouldGenerationIncrement: false, }, { name: "spec change", older: clusterServiceBrokerWithOldSpec(), newer: clusterServiceBrokerWithNewSpec(), shouldGenerationIncrement: true, }, } for i := range cases { clusterServiceBrokerRESTStrategies.PrepareForUpdate(nil, cases[i].newer, cases[i].older) <|fim▁hole|> } } else { if e, a := cases[i].older.Generation, cases[i].newer.Generation; e != a { t.Fatalf("%v: expected %v, got %v for generation", cases[i].name, e, a) } } } } // TestClusterServiceBrokerUpdateForRelistRequests tests that the RelistRequests field is // ignored during updates when it is the default value. func TestClusterServiceBrokerUpdateForRelistRequests(t *testing.T) { cases := []struct { name string oldValue int64 newValue int64 expectedValue int64 }{ { name: "both default", oldValue: 0, newValue: 0, expectedValue: 0, }, { name: "old default", oldValue: 0, newValue: 1, expectedValue: 1, }, { name: "new default", oldValue: 1, newValue: 0, expectedValue: 1, }, { name: "neither default", oldValue: 1, newValue: 2, expectedValue: 2, }, } for _, tc := range cases { oldBroker := clusterServiceBrokerWithOldSpec() oldBroker.Spec.RelistRequests = tc.oldValue newClusterServiceBroker := clusterServiceBrokerWithOldSpec() newClusterServiceBroker.Spec.RelistRequests = tc.newValue clusterServiceBrokerRESTStrategies.PrepareForUpdate(nil, newClusterServiceBroker, oldBroker) if e, a := tc.expectedValue, newClusterServiceBroker.Spec.RelistRequests; e != a { t.Errorf("%s: got unexpected RelistRequests: expected %v, got %v", tc.name, e, a) } } }<|fim▁end|>
if cases[i].shouldGenerationIncrement { if e, a := cases[i].older.Generation+1, cases[i].newer.Generation; e != a { t.Fatalf("%v: expected %v, got %v for generation", cases[i].name, e, a)
<|file_name|>relay_test.go<|end_file_name|><|fim▁begin|>package relay_test

import (
	"fmt"
	"os"
	"strings"
	"testing"<|fim▁hole|>

	"github.com/nanopack/pulse/relay"
	"github.com/nanopack/pulse/server"
)

var serverAddr = "127.0.0.1:9899"
var testRelay *relay.Relay

func stdoutPublisher(messages plexer.MessageSet) error {
	// imitation batch
	for _, message := range messages.Messages {
		tags := map[string]string{}
		for _, tag := range append(messages.Tags, message.Tags...) {
			elems := strings.SplitN(tag, ":", 2)
			// only keep key:value tags
			if len(elems) < 2 {
				continue
			}
			// save as tags[key] = value
			tags[elems[0]] = elems[1]
		}
		fmt.Printf("BATCH : %s, %s, %s\n", message.ID, tags, message.Data)
	}

	// imitation single
	for _, message := range messages.Messages {
		message.Tags = append(message.Tags, messages.Tags...)
		fmt.Printf("SINGLE: %+q, %s\n", append(message.Tags, message.ID), message.Data)
	}

	return nil
}

func TestMain(m *testing.M) {
	go server.StartPolling(nil, nil, 1*time.Second, nil)

	err := server.Listen(serverAddr, stdoutPublisher)
	if err != nil {
		fmt.Printf("Failed to start server - %s\n", err)
		return
	}
	time.Sleep(time.Second)

	os.Exit(m.Run())
}

func TestCollectors(t *testing.T) {
	testRelay, err := relay.NewRelay(serverAddr, "test_client")
	if err != nil {
		fmt.Printf("Failed to create relay - %s\n", err)
		return
	}

	ramCollector := relay.NewPointCollector(func() float64 { return 25.0 })
	fmt.Println("Adding ram collector")
	if err := testRelay.AddCollector("ram", []string{"guy"}, ramCollector); err != nil {
		t.Errorf("Failed to add ram collector - %s\n", err)
		t.FailNow()
	}

	diskCollector := relay.NewSetCollector(func() map[string]float64 { return map[string]float64{"disk": 50.0} })
	fmt.Println("Adding disk collector")
	if err := testRelay.AddCollector("disk", []string{"guy"}, diskCollector); err != nil {
		t.Errorf("Failed to add disk collector - %s\n", err)
		t.FailNow()
	}

	if err := testRelay.AddCollector("disk", []string{"guy"}, diskCollector); err == nil {
		t.Errorf("Failed to fail adding disk collector - %s\n", err)
		t.FailNow()
	}
	if err := testRelay.AddCollector("_connected", []string{"guy"}, diskCollector); err == nil {
		t.Errorf("Failed to fail adding reserved collector - %s\n", err)
		t.FailNow()
	}

	fmt.Println("INFO - ", testRelay.Info())

	time.Sleep(3 * time.Second)
	fmt.Println("Removing collector")
	testRelay.RemoveCollector("ram-used")
	testRelay.RemoveCollector("ram")
	time.Sleep(1 * time.Second)
	testRelay.Close()
}<|fim▁end|>
"time" "github.com/nanopack/pulse/plexer"
<|file_name|>InterestingPeopleWithAPI.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import PropTypes from 'prop-types'; import { FormattedMessage } from 'react-intl'; import { Link } from 'react-router-dom'; import _ from 'lodash'; import User from './User'; import Loading from '../../components/Icon/Loading';<|fim▁hole|> static propTypes = { authenticatedUser: PropTypes.shape({ name: PropTypes.string, }), followingList: PropTypes.arrayOf(PropTypes.string), }; static defaultProps = { authenticatedUser: { name: '', }, followingList: [], }; state = { users: [], loading: true, noUsers: false, }; componentWillMount() { const authenticatedUsername = this.props.authenticatedUser.name; const usernameValidator = window.location.pathname.match(/@(.*)/); const username = usernameValidator ? usernameValidator[1] : authenticatedUsername; this.getBlogAuthors(username); } getBlogAuthors = (username = '') => steemAPI .getBlogAuthorsAsync(username) .then((result) => { const followers = this.props.followingList; const users = _.sortBy(result, user => user[1]) .reverse() .filter(user => !followers.includes(user[0])) .slice(0, 5) .map(user => ({ name: user[0] })); if (users.length > 0) { this.setState({ users, loading: false, noUsers: false, }); } else { this.setState({ noUsers: true, }); } }) .catch(() => { this.setState({ noUsers: true, }); }); render() { const { users, loading, noUsers } = this.state; if (noUsers) { return <div />; } if (loading) { return <Loading />; } return ( <div className="InterestingPeople"> <div className="InterestingPeople__container"> <h4 className="InterestingPeople__title"> <i className="iconfont icon-group InterestingPeople__icon" /> {' '} <FormattedMessage id="interesting_people" defaultMessage="Interesting People" /> </h4> <div className="InterestingPeople__divider" /> {users && users.map(user => <User key={user.name} user={user} />)} <h4 className="InterestingPeople__more"> <Link to={'/latest-comments'}> <FormattedMessage id="discover_more_people" defaultMessage="Discover More People" /> </Link> </h4> </div> </div> ); } } export default InterestingPeopleWithAPI;<|fim▁end|>
import './InterestingPeople.less'; import steemAPI from '../../steemAPI'; class InterestingPeopleWithAPI extends Component {
<|file_name|>thingspeak_cpu_loop.py<|end_file_name|><|fim▁begin|>from time import localtime, strftime

# download from http://code.google.com/p/psutil/
import psutil
import time
import thingspeak

channel_id = 0  # PUT CHANNEL ID HERE
write_key = 0  # PUT YOUR WRITE KEY HERE


def doit(channel):
    cpu_pc = psutil.cpu_percent()
    mem_pc = psutil.virtual_memory().percent  # percentage of memory in use, not MB
    try:
        # field 1 = CPU usage %, field 2 = memory usage %
        response = channel.update({1: cpu_pc, 2: mem_pc})
        print(cpu_pc)
        print(mem_pc)
        print(strftime("%a, %d %b %Y %H:%M:%S", localtime()))
        print(response)
    except:
        print("connection failed")


# sleep for 16 seconds (api limit of 15 secs)
if __name__ == "__main__":<|fim▁hole|>        doit(channel)
        time.sleep(16)<|fim▁end|>
channel = thingspeak.Channel(id=channel_id, write_key=write_key) while True:
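
Under the hood, the thingspeak.Channel.update() call above is a plain HTTP request against ThingSpeak's public update endpoint, which is useful to know when debugging the "connection failed" branch. A sketch with requests (the endpoint and the field1/field2 parameter names are ThingSpeak's documented API; the key and readings are placeholders):

```python
# Equivalent raw call to ThingSpeak's update endpoint; not the wrapper library.
import time
import requests

WRITE_KEY = "YOUR_WRITE_KEY"  # placeholder

def push(cpu_percent, mem_percent):
    return requests.get(
        "https://api.thingspeak.com/update",
        params={"api_key": WRITE_KEY, "field1": cpu_percent, "field2": mem_percent},
        timeout=10,
    )

if __name__ == "__main__":
    while True:
        r = push(12.5, 48.3)  # dummy readings
        print(r.text)         # entry id of the new sample, or "0" on failure
        time.sleep(16)        # respect the ~15 s per-channel write limit
```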
<|file_name|>site.js<|end_file_name|><|fim▁begin|>/** * Module dependencies. */ var passport = require('passport'); module.exports.loginForm = function(req, res) { res.render('login'); }; module.exports.login = passport.authenticate('local', { successReturnToOrRedirect: '/login/status/success', failureRedirect: '/login/status/failure' }); module.exports.logout = function(req, res) { req.logout(); res.status(200).json('logout success'); } module.exports.loginStatus = function(req, res) { const { status } = req.params; if (status === 'current') { if (req.session.passport) { res.status(230).json('logining'); } else { res.status(403).json('no login');<|fim▁hole|> } else if (status === 'success'){ res.status(200).json('login success'); } else { res.status(403).json('login failure'); } }<|fim▁end|>
}
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf8 -*-
import setuptools

from setuptools.command.develop import develop
from setuptools.command.install import install


class DevelopScript(develop):
    def run(self):
        develop.run(self)
        nltk_install_packages()


class InstallScript(install):
    def run(self):
        install.run(self)
        nltk_install_packages()


def nltk_install_packages():
    import nltk
    print("Downloading nltk packages...")
    nltk.download('punkt')
    nltk.download('averaged_perceptron_tagger')

<|fim▁hole|>
    name="zeeguu_core",
    version="0.1",
    packages=setuptools.find_packages(),
    include_package_data=True,
    zip_safe=False,
    author="Zeeguu Team",
    author_email="[email protected]",
    description="Core for Zeeguu",
    keywords="second language acquisition api",
    cmdclass={
        'develop': DevelopScript,
        'install': InstallScript,
    },
)<|fim▁end|>
setuptools.setup(
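
A quick way to confirm the post-install hook actually fetched its corpora: look the resources up through nltk.data.find(), which raises LookupError when a package is missing (tokenizers/punkt and taggers/averaged_perceptron_tagger are nltk's standard storage paths for the two packages downloaded above):

```python
# Verifies the two resources the setup hook downloads are present.
import nltk

for resource in ("tokenizers/punkt", "taggers/averaged_perceptron_tagger"):
    try:
        nltk.data.find(resource)
        print(resource, "-> OK")
    except LookupError:
        print(resource, "-> missing; re-run the install hook or nltk.download()")
```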
<|file_name|>create_table_snr.py<|end_file_name|><|fim▁begin|>import astropy.io.fits as fits import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as p import numpy as n import os import sys from scipy.stats import scoreatpercentile as sc from scipy.interpolate import interp1d survey = sys.argv[1] z_min, z_max = 0., 1.6 imfs = ["Chabrier_ELODIE_", "Chabrier_MILES_", "Chabrier_STELIB_", "Kroupa_ELODIE_", "Kroupa_MILES_", "Kroupa_STELIB_", "Salpeter_ELODIE_", "Salpeter_MILES_", "Salpeter_STELIB_" ] z_bins = n.array([0, 0.025, 0.375, 0.7, 0.85, 1.6]) key_SNR = 'SNR_ALL' SNR_keys = n.array([ 'SNR_32_35', 'SNR_35_39', 'SNR_39_41', 'SNR_41_55', 'SNR_55_68', 'SNR_68_74', 'SNR_74_93' ]) SNR_w_min = n.array([ 32, 35, 39, 41, 55, 68, 74 ]) SNR_w_max = n.array([ 35, 39, 41, 55, 68, 74, 93 ]) wl_40 = ((z_bins[1:]+z_bins[:-1]) * 0.5 + 1)*40. snr_ids = n.searchsorted(SNR_w_max, wl_40) print(SNR_keys[snr_ids]) out_dir = os.path.join(os.environ['OBS_REPO'], 'spm', 'results') #path_2_MAG_cat = os.path.join( os.environ['HOME'], 'SDSS', "dr14_specphot_gri.fits" ) #hd = fits.open(path_2_MAG_cat) #path_2_sdss_cat = os.path.join( os.environ['HOME'], 'SDSS', '26', 'catalogs', "FireFly.fits" ) #path_2_eboss_cat = os.path.join( os.environ['HOME'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" ) path_2_sdss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', '26', 'catalogs', "FireFly.fits" ) path_2_eboss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" ) # OPENS THE CATALOGS print("Loads catalog") if survey =='deep2': deep2_dir = os.path.join(os.environ['OBS_REPO'], 'DEEP2') path_2_deep2_cat = os.path.join( deep2_dir, "zcat.deep2.dr4.v4.LFcatalogTC.Planck13.spm.fits" ) catalog = fits.open(path_2_deep2_cat)[1].data <|fim▁hole|> z_name, z_err_name, class_name, zwarning = 'Z', 'Z_ERR', 'CLASS', 'ZWARNING' if survey =='boss': catalog = fits.open(path_2_eboss_cat)[1].data z_name, z_err_name, class_name, zwarning = 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO' IMF = imfs[0] prf = IMF.split('_')[0]+' & '+IMF.split('_')[1] print(IMF, prf) name, zflg_val, prefix = prf, 0., IMF catalog_0 = (catalog[z_err_name] > 0.) & (catalog[z_name] > catalog[z_err_name]) & (catalog[class_name]=='GALAXY') & (catalog[zwarning]==zflg_val) & (catalog[z_name] > z_min) & (catalog[z_name] < z_max) catalog_zOk = catalog_0 & (catalog['SNR_ALL']>0) converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. 
) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 ) dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 ) #target_bits program_names = n.array(list(set( catalog['PROGRAMNAME'] ))) program_names.sort() sourcetypes = n.array(list(set( catalog['SOURCETYPE'] ))) sourcetypes.sort() length = lambda selection : len(selection.nonzero()[0]) pcs_ref = list(n.arange(0., 101, 5)) g = lambda key, s1, pcs = pcs_ref : n.hstack(( length(s1), sc(catalog[key][s1], pcs) )) sel_pg = lambda pgr : (catalog_zOk) & (catalog['PROGRAMNAME']==pgr) sel_st = lambda pgr : (catalog_zOk) & (catalog['SOURCETYPE']==pgr) sel0_pg = lambda pgr : (catalog_0) & (catalog['PROGRAMNAME']==pgr) sel0_st = lambda pgr : (catalog_0) & (catalog['SOURCETYPE']==pgr) all_galaxies = [] tpps = [] for pg in sourcetypes: sel_all = sel_st(pg) n_all = length( sel_all ) if n_all > 100 : #print(pg, n_all) all_galaxies.append(n_all) all_out = [] for z_Min, z_Max, snr_key in zip(z_bins[:-1], z_bins[1:], SNR_keys[snr_ids]): s_z = sel_all &(catalog[z_name] >= z_Min) & (catalog[z_name] < z_Max) n_z = length(s_z) #print(z_Min, z_Max, n_z) if n_z > 0 : #print(n.min(catalog[snr_key][s_z]), n.max(catalog[snr_key][s_z])) itp = interp1d(sc(catalog[snr_key][s_z], pcs_ref), pcs_ref, kind='linear', fill_value= 100., bounds_error=False) #print(itp.x, itp.y) all_out.append( [n_z, itp(5), itp(20)] ) else : all_out.append([0., -1, -1]) all_out = n.hstack((all_out)) tpp = pg + " & " + str(int(n_all)) + " & " + " & ".join(n.array([ str(int(el)) for el in all_out]) ) + ' \\\\ \n' print( tpp) tpps.append(tpp) all_galaxies = n.array(all_galaxies) tpps = n.array(tpps) ids = n.argsort(all_galaxies)[::-1] out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_comp_"+survey+"_snr_all_sourcetype_SNR_moments.tex") f=open(out_file, 'w') #f.write('source type & N & \multicolumn{c}{2}{N galaxies} && \multicolumn{c}{2}{SNR ALL$>0$} & \\multicolumn{c}{2}{frefly converged} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.4$} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.2$} \\\\ \n') #f.write(' & & N & % & & N & % & N & % & N & % \\\\ \n') for jj in ids : f.write( tpps[jj] ) f.close() sys.exit() #converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) #dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. 
) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 ) #dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 ) #m_catalog = n.log10(catalog[prefix+'stellar_mass']) #w_catalog = n.ones_like(catalog[prefix+'stellar_mass']) #print(ld(catalog_zOk)) #return name + " & $"+ sld(converged)+"$ ("+str(n.round(ld(converged)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex04)+"$ ("+str(n.round(ld(dex04)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex02)+ "$ ("+str(n.round(ld(dex02)/ld(catalog_zOk)*100.,1))+r") \\\\" ##return catalog_sel, m_catalog, w_catalog sys.exit() for IMF in imfs : prf = IMF.split('_')[0]+' & '+IMF.split('_')[1] l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=False) f.write(l2w + " \n") f.write('\\hline \n') #l2w = get_basic_stat_DR12(boss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & BOSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(boss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & BOSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(boss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & BOSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(boss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & BOSS & 12 ', 0.) #f.write(l2w + " \n") for IMF in imfs : prf = IMF.split('_')[0]+' & '+IMF.split('_')[1] l2w = get_basic_stat_firefly_DR14(boss, 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO', prf, 0., IMF) f.write(l2w + " \n") f.write('\\hline \n') #l2w = get_basic_stat_DR12(sdss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & SDSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(sdss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & SDSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(sdss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & SDSS & 12 ', 0.) #f.write(l2w + " \n") #l2w = get_basic_stat_DR12(sdss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & SDSS & 12 ', 0.) #f.write(l2w + " \n") for IMF in imfs : prf = IMF.split('_')[0]+' & '+IMF.split('_')[1] l2w = get_basic_stat_firefly_DR14(sdss, 'Z', 'Z_ERR', 'CLASS', 'ZWARNING', prf, 0., IMF) f.write(l2w + " \n") f.write('\\hline \n') f.close() #""" out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_2_r.tex") f=open(out_file, 'w') for IMF in imfs : prf = IMF.split('_')[0]+' & '+IMF.split('_')[1] l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=True) f.write(l2w + " \n") f.close()<|fim▁end|>
if survey =='sdss': catalog = fits.open(path_2_sdss_cat)[1].data
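
The least obvious step in the script above is the inverted interpolation: scoreatpercentile maps percentile -> SNR, and interp1d is then built with x and y swapped, so itp(5) and itp(20) answer "what fraction of the sample lies below SNR = 5 (or 20)?". An isolated sketch with synthetic data (the lognormal sample is invented; the fill_value=100 clamp matches the script's choice for thresholds outside the sampled range):

```python
import numpy as np
from scipy.stats import scoreatpercentile
from scipy.interpolate import interp1d

snr = np.random.lognormal(mean=2.0, sigma=0.8, size=5000)  # synthetic SNR values
pcs_ref = np.arange(0., 101, 5)
scores = scoreatpercentile(snr, pcs_ref)  # percentile -> SNR score

# Swap axes to invert the relation: SNR score -> percentile.
itp = interp1d(scores, pcs_ref, kind='linear', fill_value=100., bounds_error=False)
print("per cent of sample below SNR=5 :", float(itp(5)))
print("per cent of sample below SNR=20:", float(itp(20)))
```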
<|file_name|>intersection.rs<|end_file_name|><|fim▁begin|>use geometry::Shape; use math::{Point3, Vector3}; use ray::Ray; use texture::TextureCoord; #[derive(Copy, Clone)] pub struct Intersection<'a> { pub t: f32, pub shape: &'a dyn Shape, pub point: Point3, pub ray: Ray, pub normal: Vector3, pub inside: bool, pub texture_coord: Option<TextureCoord>, } impl<'a> Intersection<'a> {<|fim▁hole|> ray: Ray, normal: Vector3, inside: bool, texture_coord: Option<TextureCoord>, ) -> Intersection { Intersection { t, shape, point, ray, normal, inside, texture_coord, } } }<|fim▁end|>
pub fn new( t: f32, shape: &'a dyn Shape, point: Point3,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .db import Database<|fim▁hole|>__version__ = "0.1.1" __maintainer__ = "Gunther Cox" __email__ = "[email protected]"<|fim▁end|>
<|file_name|>main.js<|end_file_name|><|fim▁begin|>'use strict'; <|fim▁hole|> // load the controller's module beforeEach(module('tallytextApp')); var MainCtrl, scope; // Initialize the controller and a mock scope beforeEach(inject(function ($controller, $rootScope) { scope = $rootScope.$new(); MainCtrl = $controller('MainCtrl', { $scope: scope }); })); it('should attach a list of awesomeThings to the scope', function () { expect(scope.awesomeThings.length).toBe(3); }); });<|fim▁end|>
describe('Controller: MainCtrl', function () {
<|file_name|>overlay.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0) // DO NOT EDIT use Bin; use Container; use Widget; use ffi; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::translate::*; glib_wrapper! { pub struct Overlay(Object<ffi::GtkOverlay>): Bin, Container, Widget; match fn { get_type => || ffi::gtk_overlay_get_type(), }<|fim▁hole|> impl Overlay { pub fn new() -> Overlay { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_overlay_new()).downcast_unchecked() } } } pub trait OverlayExt { fn add_overlay<P: IsA<Widget>>(&self, widget: &P); #[cfg(feature = "v3_18")] fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool; #[cfg(feature = "v3_18")] fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32); #[cfg(feature = "v3_18")] fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool); fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32; fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32); //fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64; } impl<O: IsA<Overlay> + IsA<Container>> OverlayExt for O { fn add_overlay<P: IsA<Widget>>(&self, widget: &P) { unsafe { ffi::gtk_overlay_add_overlay(self.to_glib_none().0, widget.to_glib_none().0); } } #[cfg(feature = "v3_18")] fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool { unsafe { from_glib(ffi::gtk_overlay_get_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0)) } } #[cfg(feature = "v3_18")] fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32) { unsafe { ffi::gtk_overlay_reorder_overlay(self.to_glib_none().0, child.to_glib_none().0, position); } } #[cfg(feature = "v3_18")] fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool) { unsafe { ffi::gtk_overlay_set_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0, pass_through.to_glib()); } } fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32 { let mut value = Value::from(&0); unsafe { ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, value.to_glib_none_mut().0); } value.get().unwrap() } fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32) { unsafe { ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, Value::from(&index).to_glib_none().0); } } //fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64 { // Out allocation: Gdk.Rectangle //} }<|fim▁end|>
}
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from nativedroid.protobuf.java_signatures_pb2 import *

__author__ = "Fengguo Wei"
__copyright__ = "Copyright 2018, The Argus-SAF Project"
__license__ = "Apache v2.0"


def java_package_str(java_package_pb, delimiter):
    """
    Return full string of a java package proto.

    :param JavaPackage java_package_pb: java_signatures_pb2.JavaPackage
    :param str delimiter:
    :return: str
    """
    pkg_str = java_package_pb.name
    tmp = java_package_pb
    while tmp.HasField('parent'):
        tmp = tmp.parent
        pkg_str = tmp.name + delimiter + pkg_str
    return pkg_str


def primitive_type_str(primitive_type_pb, is_signature):
    """
    Return full string of a primitive type proto.

    :param PrimitiveType primitive_type_pb: java_signatures_pb2.PrimitiveType
    :param bool is_signature: normal form int, signature form I
    :return: str
    """
    if primitive_type_pb.type == PrimitiveType.BYTE:
        if is_signature:
            return 'B'
        else:
            return 'byte'
    elif primitive_type_pb.type == PrimitiveType.SHORT:
        if is_signature:
            return 'S'
        else:
            return 'short'
    elif primitive_type_pb.type == PrimitiveType.INT:
        if is_signature:
            return 'I'
        else:
            return 'int'
    elif primitive_type_pb.type == PrimitiveType.FLOAT:
        if is_signature:
            return 'F'
        else:
            return 'float'
    elif primitive_type_pb.type == PrimitiveType.BOOLEAN:
        if is_signature:
            return 'Z'
        else:
            return 'boolean'
    elif primitive_type_pb.type == PrimitiveType.CHAR:
        if is_signature:
            return 'C'
        else:
            return 'char'
    elif primitive_type_pb.type == PrimitiveType.LONG:
        if is_signature:
            return 'J'  # JVM descriptor for long is 'J'; 'L' is reserved for class types
        else:
            return 'long'
    elif primitive_type_pb.type == PrimitiveType.DOUBLE:
        if is_signature:
            return 'D'
        else:
            return 'double'


def class_type_str(class_type_pb, is_signature):<|fim▁hole|>
    :param bool is_signature: normal form java.lang.Object, signature form Ljava/lang/Object;
    :return: str
    """
    type_str = class_type_pb.name
    if is_signature:
        delimiter = '/'
    else:
        delimiter = '.'
    if class_type_pb.HasField('package'):
        type_str = java_package_str(class_type_pb.package, delimiter) + delimiter + type_str
    if class_type_pb.unknown:
        type_str += '?'
    if is_signature:
        type_str = 'L' + type_str + ';'
    return type_str


def java_type_str(java_type_pb, is_signature):
    """
    Return full string of a java type proto.

    :param JavaType java_type_pb: java_signatures_pb2.JavaType
    :param bool is_signature: normal form java.lang.Object[], signature form [Ljava/lang/Object;
    :return: str
    """
    if java_type_pb.HasField('primitive_type'):
        type_str = primitive_type_str(java_type_pb.primitive_type, is_signature)
    else:
        type_str = class_type_str(java_type_pb.class_type, is_signature)
    dimension = java_type_pb.dimension
    while dimension > 0:
        if is_signature:
            type_str = '[' + type_str
        else:
            type_str += '[]'
        dimension -= 1
    return type_str


def method_proto_str(method_proto_pb):
    """
    Return full string of a method proto.

    :param MethodProto method_proto_pb: java_signatures_pb2.MethodProto
    :return: str
    """
    proto = '('
    for param in method_proto_pb.param_types:
        proto += java_type_str(param, True)
    proto += ')'
    if method_proto_pb.HasField('return_java_type'):
        proto += java_type_str(method_proto_pb.return_java_type, True)
    else:
        proto += 'V'
    return proto


def method_signature_str(method_signature_pb):
    """
    Return full string of a method signature proto.

    :param MethodSignature method_signature_pb: java_signatures_pb2.MethodSignature
    :return: str
    """
    owner_str = java_type_str(method_signature_pb.owner, is_signature=True)
    proto_str = method_proto_str(method_signature_pb.proto)
    return owner_str + '.' \
+ method_signature_pb.name + ':' + proto_str def get_params_from_method_signature(method_signature_pb, is_static): """ Get parameter types from method signature. :param MethodSignature method_signature_pb: java_signatures_pb2.MethodSignature :param bool is_static: is static method :return: list of JavaType """ param_types = [] if not is_static: param_types.append(method_signature_pb.owner) param_types.extend(method_signature_pb.proto.param_types) return param_types<|fim▁end|>
""" Return full string of a class type proto. :param ClassType class_type_pb: java_signatures_pb2.ClassType
<|file_name|>jquery.twbsFlickrCarousel.js<|end_file_name|><|fim▁begin|>/**
 * jQuery Twitter Bootstrap Flickr Carousel v1.0.0
 * http://jguadagno.github.io/twbs-FlickrCarousel/
 *
 * Copyright 2014, Joseph Guadagno
 * Released under Apache 2.0 license
 * http://apache.org/licenses/LICENSE-2.0.html
 */
;<|fim▁hole|>
    var old = $.fn.twbsFlickrCarousel;

    // Constructor
    var TwbsFlickrCarousel = function (element, options) {
        this.$element = $(element);
        this.settings = $.extend({}, $.fn.twbsFlickrCarousel.defaults, options);
        if (this.settings.onPageClick instanceof(Function)) {
            this.$element.first().bind('page', this.settings.onPageClick);
        }
        if (this.settings.onLoadError instanceof(Function)) {
            this.$element.first().bind('page', this.settings.onLoadError);
        }
        this.getPhotos(this.settings.pageNumber);
        return this;
    }

    // Prototype
    TwbsFlickrCarousel.prototype = {
        constructImageElement: function(photo) {
            if (photo === undefined) {
                return "";
            }
            return "http://farm" + photo.farm + ".staticflickr.com/" + photo.server + "/" +
                photo.id + "_" + photo.secret + "_" + this.settings.flickrSizeSuffix + "." + this.settings.flickrImageType;
        },
        createCarouselItemCaption: function(title, description) {
            if (title === undefined) {title = ""; }
            if (description === undefined) {description = ""; }
            var caption = $('<div>').addClass('carousel-caption');
            $('<h3>').text(title).appendTo(caption);
            $('<p>').text(description).appendTo(caption);
            return caption;
        },
        createCarouselItem: function(photo) {
            var item = $('<div>').addClass('item');
            $('<img>').attr('src', this.constructImageElement(photo, false)).attr('alt', photo.title._content).appendTo(item);
            this.createCarouselItemCaption(photo.title._content, photo.description._content).appendTo(item);
            return item;
        },
        createCarouselIndicators: function() {
            // Draw out the indicator items
            var indicatorHolder = this.$element.find('.carousel-indicators');
            if (indicatorHolder === undefined || indicatorHolder.length === 0) {
                indicatorHolder = $('<ol>').addClass('carousel-indicators').prependTo(this.$element);
            }
            indicatorHolder.children().remove();
            for (var item = 0; item < this.settings.imagesPerPage; item++) {
                var indicator = $('<li>').attr('data-target', "#" + this.$element.attr('id')).attr('data-slide-to', item);
                if (item === 0) {
                    indicator.addClass('active');
                }
                indicator.appendTo(indicatorHolder);
            }
        },
        createCarouselInner: function() {
            var carouselInner = this.$element.find('div.carousel-inner');
            if (carouselInner === undefined || carouselInner.length === 0) {
                carouselInner = $('<div>').addClass('carousel-inner');
                carouselInner.prependTo(this.$element);
                // Add the placeholder image
                var placeHolderUrl = "<img src='http://www.placehold.it/" + this.settings.width + "x" + this.settings.height + "&text=Fetching+images...'
alt='Fetching images'>"; var item = $('<div>').addClass('item active'); $(placeHolderUrl).appendTo(item); item.appendTo(carouselInner); } return carouselInner; }, createCarouselNavigation: function() { var left = this.$element.find('a.left'); if (left === undefined || left.length === 0) { left = $('<a>').addClass('left carousel-control').attr('href', "#" + this.$element.attr('id')).attr('role', 'button').attr('data-slide', 'prev'); $('<span>').addClass('glyphicon glyphicon-chevron-left').appendTo(left); left.appendTo(this.$element); } var right = this.$element.find('a.right'); if (right === undefined || right.length === 0) { right = $('<a>').addClass('right carousel-control').attr('href', "#" + this.$element.attr('id')).attr('role', 'button').attr('data-slide', 'next'); $('<span>').addClass('glyphicon glyphicon-chevron-right').appendTo(right); right.appendTo(this.$element); } }, // TODO: Implement our own pagination createPaginationLinks: function(paginationElement, photos) { if (paginationElement === undefined || paginationElement.length === 0) { console.log("createPaginationLinks: paginationElement was undefined or not found"); return; } if (photos === undefined) { console.log("creationPagination: photos element was undefined"); return; } for (var page = 1; page < photos.pages; page++) { var listItem = $('<li>'); if (page === photos.page) { listItem.addClass('active'); } $('<span>').text(page).appendTo(listItem) listItem.appendTo(paginationElement); } }, getPhotoInfo: function(photo, addClass) { if (photo === undefined) { console.log("getPhotoInfo: The photo was undefined"); return; } var carouselInner = this.$element.find('div.carousel-inner') var twbsObject = this; $.getJSON(this.settings.flickrApiUrl, { method: "flickr.photos.getInfo", api_key: this.settings.flickrApiKey, photo_id: photo.id, secret: photo.secret, format: "json", nojsoncallback: "1" }).done(function (data) { var title = "", description = ""; if (data.stat !== "ok") { console.error("getPhotoInfo: Failed to get the details of the photo"); } else { var carouselItem = twbsObject.createCarouselItem(data.photo); if (addClass === 'active') { carouselItem.addClass('active'); } $(carouselItem).appendTo(carouselInner); } }); }, getPhotos: function(pageNumber) { var carouselElement = this.$element; var base = this; // Make sure the required elements for the carousel are there carouselElement.addClass("carousel slide").attr("data-ride", "carousel"); carouselElement.width(this.settings.width); this.createCarouselIndicators(); var carouselInner = this.createCarouselInner(); this.createCarouselNavigation(); $.getJSON(this.settings.flickrApiUrl, { method: "flickr.photos.search", api_key: this.settings.flickrApiKey, tags: this.settings.tagsToSearchFor, per_page: this.settings.imagesPerPage, page: pageNumber, format: "json", nojsoncallback: "1" }).done(function (data) { if (data.stat !== "ok") { console.error("getPhotos: Failed to get the list of photos"); return; } carouselInner.children().remove(); $.each(data.photos.photo, function (i, item) { if (i === 0) { base.getPhotoInfo(item, 'active'); } else { base.getPhotoInfo(item); } }); if (base.settings.paginationSelector !== undefined) { var paginationElement = $(base.settings.paginationSelector); if (paginationElement != undefined && paginationElement.length > 0 && ($.fn.twbsPagination)) { $(base.settings.paginationSelector).twbsPagination({ totalPages: data.photos.pages, visiblePages: base.settings.imagesPerPage, startPage: pageNumber, onPageClick: function (event, page) { 
base.getPhotos(page); } }); } } }); } }; // Plugin $.fn.twbsFlickrCarousel = function (option) { var args = Array.prototype.slice.call(arguments, 1); var methodReturn; var $this = $(this); var data = $this.data('twbs-flickrCarousel'); var options = typeof option === 'object' && option; if (!data) $this.data('twbs-flickrCarousel', (data = new TwbsFlickrCarousel(this, options) )); if (typeof option === 'string') methodReturn = data[ option ].apply(data, args); return ( methodReturn === undefined ) ? $this : methodReturn; }; $.fn.twbsFlickrCarousel.defaults = { flickrApiKey: '', flickrApiUrl: 'https://api.flickr.com/services/rest/', tagsToSearchFor: '', width: '600', height: '600', imagesPerPage: 10, pageNumber: 1, flickrSizeSuffix: 'z', flickrImageType: 'jpg', paginationSelector: '#flickr-pagination', paginationClass: 'pagination', onPageClick: null, onLoadError: null }; $.fn.twbsFlickrCarousel.Constructor = TwbsFlickrCarousel; $.fn.twbsFlickrCarousel.noConflict = function () { $.fn.twbsFlickrCarousel = old; return this; }; })(jQuery, window, document);<|fim▁end|>
(function ($, window, document, undefined) { 'use strict';
<|file_name|>pt_query.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3

import argparse
import datetime
import getpass
import json
import logging
import logging.config
import os
import re
import sys
import tabulate
import uuid

from critsapi.critsapi import CRITsAPI
from critsapi.critsdbapi import CRITsDBAPI
from lib.pt.common.config import Config
from lib.pt.common.constants import PT_HOME
from lib.pt.core.database import Database
from lib.pt.ptapi import PTAPI
from lib.crits.vocabulary.indicators import IndicatorTypes as it
from operator import itemgetter
from configparser import ConfigParser

log = logging.getLogger()

VERSION = "0.1337"

# Check configuration directory
local_config_dir = os.path.join(PT_HOME, 'etc', 'local')
if not os.path.exists(local_config_dir):
    os.makedirs(local_config_dir)
    sys.exit('No etc/local/ directory. See README to create.')

config = Config()

# Check local data directory
if config.core.cache_enabled:
    if not os.path.exists(config.core.cache_dir):
        log.info('Creating Cache directory in '
                 '{}'.format(config.core.cache_dir))
        os.makedirs(config.core.cache_dir)

# Initialize logging
log_path = os.path.join(PT_HOME, 'etc', 'local', 'logging.ini')
try:
    logging.config.fileConfig(log_path)
except Exception as e:
    sys.exit('unable to load logging configuration file {}: '
             '{}'.format(log_path, str(e)))

pt = PTAPI(username=config.core.pt_username, apikey=config.core.pt_apikey)
pt.set_proxy(http=config.proxy.http, https=config.proxy.https)

argparser = argparse.ArgumentParser()
argparser.add_argument('QUERY', action='store', help='A value to send as a'
                       ' query to PT. Email, phone, name, etc.')
argparser.add_argument('--dev', dest='dev', action='store_true', default=False)
argparser.add_argument('--crits', dest='crits', action='store_true',
                       default=False, help='Write the results to CRITs with'
                       ' appropriate relationships.')
argparser.add_argument('--test', dest='test', action='store_true',
                       default=False, help='Run with test data. (Save PT '
                       'queries)')
argparser.add_argument('-f', dest='force', action='store_true', default=False,
                       help='Force a new API query (do not use cached '
                       'results.)')
argparser.add_argument('-t', action='append', dest='tags', default=[],
                       help='Bucket list tags for crits. 
Multiple -t options '<|fim▁hole|> help='The query is a name and pt_query will not try to ' 'determine the type automatically.') meg.add_argument('-a', dest='address', action='store_true', default=False, help='The query is an address and pt_query will not ' 'try to determine the type automatically.') args = argparser.parse_args() # Patterns for determining which type of lookup to do # Some items cannot be differentiated via regex (name vs address), so we use # a flag to specify these # Load patterns for regexes pattern_config = ConfigParser() patterns = {} with open(os.path.join(PT_HOME, 'etc', 'patterns.ini')) as fp: pattern_config.readfp(fp) email_address_pattern = re.compile(pattern_config.get('email', 'pattern')) phone_pattern = re.compile(pattern_config.get('phone', 'pattern')) domain_pattern = re.compile(pattern_config.get('domain', 'pattern')) database = None if config.core.cache_enabled: database = Database() if args.crits: HOME = os.path.expanduser("~") if not os.path.exists(os.path.join(HOME, '.crits_api')): print('''Please create a file with the following contents: [crits] user = lolnate [keys] prod_api_key = keyhere dev_api_key = keyhere ''') raise SystemExit('~/.crits_api was not found or was not accessible.') crits_config = ConfigParser() crits_config.read(os.path.join(HOME, '.crits_api')) if crits_config.has_option("keys", "prod"): crits_api_prod = crits_config.get("keys", "prod") if crits_config.has_option("keys", "dev"): crits_api_dev = crits_config.get("keys", "dev") if crits_config.has_option("crits", "user"): crits_username = crits_config.get("crits", "user") if args.dev: crits_url = config.crits.crits_dev_api_url crits_api_key = crits_api_dev if len(crits_api_key) != 40: print("Dev API key in ~/.crits_api is the wrong length! Must be 40\ characters.") else: crits_url = config.crits.crits_prod_api_url crits_api_key = crits_api_prod if len(crits_api_key) != 40: print("Prod API key in ~/.crits_api is the wrong length! Must be 40\ characters.") crits_proxy = { 'http': config.crits.crits_proxy_url, 'https': config.crits.crits_proxy_url, } # Build our mongo connection if args.dev: crits_mongo = CRITsDBAPI(mongo_uri=config.crits.mongo_uri_dev, db_name=config.crits.database) else: crits_mongo = CRITsDBAPI(mongo_uri=config.crits.mongo_uri, db_name=config.crits.database) crits_mongo.connect() # Connect to the CRITs API crits = CRITsAPI( api_url=crits_url, api_key=crits_api_key, username=crits_username, proxies=crits_proxy, verify=config.crits.crits_verify ) query = args.QUERY.rstrip() # Get the user launching all this user = getpass.getuser() # Used to store the type of indicator in CRITs for the query object. 
crits_indicator_type = '' # Used to store the cache file location cache_file = None if database and not args.force and config.core.cache_enabled: cache_file = database.get_cache_file(query) if cache_file: log.info('Using cache file for query {}'.format(query)) with open(cache_file) as fp: results = json.loads(fp.read()) bucket_list = ['whois', 'pt:query'] for t in args.tags: bucket_list.append(t) if args.name or args.address: if args.name: field_str = 'name' if args.address: field_str = 'address' if args.test: results = pt.get_test_results(field=field_str) else: results = pt.whois_search(query=query, field=field_str) if database and not cache_file and config.core.cache_enabled: filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4())) log.debug('Filepath is {}'.format(filepath)) database.add_results_to_cache(query, user, results, filepath) base_reference = 'https://www.passivetotal.org/search/whois/'\ '{}'.format(field_str) # Use our config defined indicator type of whois email objects if args.name: crits_indicator_type = it.WHOIS_NAME if args.address: crits_indicator_type = it.WHOIS_ADDR1 bucket_list.append('registrant') elif re.match(email_address_pattern, query): if args.test: results = pt.get_test_results(field='email') else: results = pt.whois_search(query=query, field='email') # Now add the results to the db if we have it if database and not cache_file and config.core.cache_enabled: filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4())) log.debug('Filepath is {}'.format(filepath)) database.add_results_to_cache(query, user, results, filepath) base_reference = 'https://www.passivetotal.org/search/whois/email' # Use our config defined indicator type of whois email objects crits_indicator_type = it.WHOIS_REGISTRANT_EMAIL_ADDRESS bucket_list.append('registrant') elif re.match(phone_pattern, query): if args.test: results = pt.get_test_results(field='phone') else: results = pt.whois_search(query=query, field='phone') # Now add the results to the db if we have it if database and not cache_file and config.core.cache_enabled: filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4())) log.debug('Filepath is {}'.format(filepath)) database.add_results_to_cache(query, user, results, filepath) base_reference = 'https://www.passivetotal.org/search/whois/phone' crits_indicator_type = it.WHOIS_TELEPHONE bucket_list.append('registrant') elif re.match(domain_pattern, query): if args.test: results = pt.get_test_results(field='domain') else: results = pt.whois_search(query=query, field='domain') # Now add the results to the db if we have it if database and not cache_file and config.core.cache_enabled: filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4())) log.debug('Filepath is {}'.format(filepath)) database.add_results_to_cache(query, user, results, filepath) base_reference = 'https://www.passivetotal.org/search/whois/domain' crits_indicator_type = it.DOMAIN else: raise SystemExit("Your query didn't match a known pattern.") # Add the query to CRITs regardless of the number of results # TODO: Add campaigns if args.crits: found = False # Search for it with raw mongo because API is slow crits_result = crits_mongo.find('indicators', {'value': query, 'type': crits_indicator_type}) if crits_result.count() > 0: for r in crits_result: if r['value'] == query: indicator = r found = True if not found: indicator = crits.add_indicator( value=query, itype=crits_indicator_type, source=config.crits.default_source, reference='Added via pt_query.py', method='pt_query.py', 
bucket_list=bucket_list, indicator_confidence='low', indicator_impact='low', description='Queried with pt_query.py', ) # This is pretty hacky - Since we use both the raw DB and the API, we might # receive either an '_id' or an 'id' back. We are going to standardize on # 'id', rather than '_id' if 'id' not in indicator: if '_id' not in indicator: print(repr(indicator)) raise SystemExit('id and _id not found for query: ' '{} in new indicator'.format(query)) else: indicator['id'] = indicator['_id'] # Iterate through all results and print/add to CRITs (if args provided) formatted_results = [] for result in results['results']: if 'domain' in result: crits_indicators_to_add = [] # Row contains: # Domain, Registrant Email, Registrant Name, Registrant Date, # Expiration Date, Tags row = ['', '', '', '', '', ''] row[0] = result['domain'] # Email address used to register if 'registrant' in result: # Append the registrant email if 'email' in result['registrant']: row[1] = result['registrant']['email'] email_obj = { 'value': result['registrant']['email'], 'type': it.WHOIS_REGISTRANT_EMAIL_ADDRESS, 'related_to': result['domain'] } crits_indicators_to_add.append(email_obj) if 'name' in result['registrant']: row[2] = result['registrant']['name'] name_obj = { 'value': result['registrant']['name'], 'type': it.WHOIS_NAME, 'related_to': result['domain'] } crits_indicators_to_add.append(name_obj) if 'telephone' in result['registrant']: row[3] = result['registrant']['telephone'] phone_obj = { 'value': result['registrant']['telephone'], 'type': it.WHOIS_TELEPHONE, 'related_to': result['domain'] } crits_indicators_to_add.append(phone_obj) if 'street' in result['registrant']: addr1_obj = { 'value': result['registrant']['street'], 'type': it.WHOIS_ADDR1, 'related_to': result['domain'] } crits_indicators_to_add.append(addr1_obj) # Date the domain was registered if 'registered' in result: row[4] = result['registered'] if 'expiresAt' in result: row[5] = result['expiresAt'] formatted_results.append(row) # TODO: Tags. They appear to be an extra API query which is annoying reference = '{0}/{1}'.format(base_reference, query) if args.crits: # Let's try getting the confidence and impact from the parent whois # indicator confidence = 'low' impact = 'low' if 'confidence' in indicator: if 'rating' in indicator['confidence']: confidence = indicator['confidence']['rating'] if 'impact' in indicator: if 'rating' in indicator['impact']: impact = indicator['impact']['rating'] # If not in CRITs, add all the associated indicators bucket_list = ['whois pivoting', 'pt:found'] for t in args.tags: bucket_list.append(t) new_ind = crits.add_indicator( value=result['domain'], itype=it.DOMAIN, source=config.crits.default_source, reference=reference, method='pt_query.py', bucket_list=bucket_list, indicator_confidence=confidence, indicator_impact=impact, description='Discovered through PT whois pivots' ) # The CRITs API allows us to add a campaign to the indicator, but # not multiple campaigns at one time, # so we will do it directly with the DB. # We want to replicate the campaigns of the WHOIS indicator (if # a campaign exists) to the new indicator. if 'campaign' in indicator: for campaign in indicator['campaign']: crits_mongo.add_embedded_campaign( new_ind['id'], 'indicators', campaign['name'], campaign['confidence'], campaign['analyst'], datetime.datetime.now(), campaign['description'] ) # If the new indicator and the indicator are not related, # relate them. 
if not crits.has_relationship(indicator['id'], 'Indicator', new_ind['id'], 'Indicator', rel_type='Registered'): crits.forge_relationship(indicator['id'], 'Indicator', new_ind['id'], 'Indicator', rel_type='Registered') # Now we can add the rest of the WHOIS indicators (if necessary) for ind in crits_indicators_to_add: # If the indicator exists, just get the id and use it to build # relationships. We will look for one with the same source. # If not in CRITs, add it and relate it. whois_indicator = crits_mongo.find_one( 'indicators', { 'value': ind['value'], 'type': ind['type'], 'source.name': config.crits.default_source, }) if not whois_indicator: bucket_list = ['whois pivoting', 'pt:found'] for t in args.tags: bucket_list.append(t) whois_indicator = crits.add_indicator( value=ind['value'], itype=ind['type'], source=config.crits.default_source, reference=reference, method='pt_query.py', bucket_list=bucket_list, indicator_confidence=confidence, indicator_impact=impact, description='Discovered through PT whois pivots' ) # This is pretty hacky - Since we use both the raw DB and the # API, we might receive either an '_id' or an 'id' back. We # are going to standardize on 'id', rather than '_id' if 'id' not in whois_indicator: if '_id' not in whois_indicator: print(repr(whois_indicator)) raise SystemExit('id and _id not found for query: ' '{} in whois indicator'.format(query)) whois_indicator['id'] = whois_indicator['_id'] # Not a huge deal, but make sure we don't waste time adding # a relationship to itself if whois_indicator['id'] == new_ind['id']: continue # The CRITs API allows us to add a campaign to the indicator, # but not multiple campaigns at one time, # so we will do it directly with the DB. # We want to replicate the campaigns of the WHOIS indicator (if # a campaign exists) to the new indicator. # Continue with the same campaign if 'campaign' in indicator: for campaign in indicator['campaign']: crits_mongo.add_embedded_campaign( whois_indicator['id'], 'indicators', campaign['name'], campaign['confidence'], campaign['analyst'], datetime.datetime.now(), campaign['description'] ) # If the new indicator and the indicator are not related, # relate them. if not crits.has_relationship(whois_indicator['id'], 'Indicator', new_ind['id'], 'Indicator', rel_type='Registered'): crits.forge_relationship(whois_indicator['id'], 'Indicator', new_ind['id'], 'Indicator', rel_type='Registered') # Add a bucket_list item to track that we searched for this whois indicator if args.crits: crits_mongo.add_bucket_list_item(indicator['id'], 'indicators', 'pt:whois_search_completed') # SORT BY DATE formatted_results = sorted(formatted_results, key=itemgetter(3), reverse=True) # Row contains: # Domain, Registrant Email, Registrant Name, Registrant Telephone, # Registrant Date, Expiration Date, Tags headers = ['Domain', 'Registrant Email', 'Registrant Name', 'Registrant Telephone', 'Registrant Date', 'Expiration Date', 'Tags'] print(tabulate.tabulate(formatted_results, headers))<|fim▁end|>
'are allowed.') # Add our mutually exclusive items meg = argparser.add_mutually_exclusive_group() meg.add_argument('-n', dest='name', action='store_true', default=False,
<|file_name|>UserPreferencesDao.java<|end_file_name|><|fim▁begin|>package stroom.config.global.impl; import stroom.ui.config.shared.UserPreferences;<|fim▁hole|> import java.util.Optional; public interface UserPreferencesDao { Optional<UserPreferences> fetch(String userId); int update(String userId, UserPreferences userPreferences); int delete(String userId); }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .chucky_neighborhood_tool import NeighborhoodTool<|fim▁end|>
<|file_name|>test_base_handler_with_auto_webp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # thumbor imaging service # https://github.com/thumbor/thumbor/wiki # Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license # Copyright (c) 2011 globo.com [email protected] from shutil import which<|fim▁hole|>from preggy import expect from tornado.testing import gen_test from tests.handlers.test_base_handler import BaseImagingTestCase from thumbor.config import Config from thumbor.context import Context, RequestParameters, ServerParameters from thumbor.importer import Importer # pylint: disable=broad-except,abstract-method,attribute-defined-outside-init,line-too-long,too-many-public-methods # pylint: disable=too-many-lines class ImageOperationsWithAutoWebPTestCase(BaseImagingTestCase): def get_context(self): cfg = Config(SECURITY_KEY="ACME-SEC") cfg.LOADER = "thumbor.loaders.file_loader" cfg.FILE_LOADER_ROOT_PATH = self.loader_path cfg.STORAGE = "thumbor.storages.no_storage" cfg.AUTO_WEBP = True importer = Importer(cfg) importer.import_modules() server = ServerParameters( 8889, "localhost", "thumbor.conf", None, "info", None ) server.security_key = "ACME-SEC" ctx = Context(server, cfg, importer) ctx.server.gifsicle_path = which("gifsicle") return ctx async def get_as_webp(self, url): return await self.async_fetch( url, headers={"Accept": "image/webp,*/*;q=0.8"} ) @gen_test async def test_can_auto_convert_jpeg(self): response = await self.get_as_webp("/unsafe/image.jpg") expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @gen_test async def test_should_not_convert_animated_gifs_to_webp(self): response = await self.get_as_webp("/unsafe/animated.gif") expect(response.code).to_equal(200) expect(response.headers).not_to_include("Vary") expect(response.body).to_be_gif() @gen_test async def test_should_convert_image_with_small_width_and_no_height(self): response = await self.get_as_webp("/unsafe/0x0:1681x596/1x/image.jpg") expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @gen_test async def test_should_convert_monochromatic_jpeg(self): response = await self.get_as_webp("/unsafe/grayscale.jpg") expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @gen_test async def test_should_convert_cmyk_jpeg(self): response = await self.get_as_webp("/unsafe/cmyk.jpg") expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @gen_test async def test_shouldnt_convert_cmyk_jpeg_if_format_specified(self): response = await self.get_as_webp( "/unsafe/filters:format(png)/cmyk.jpg" ) expect(response.code).to_equal(200) expect(response.headers).not_to_include("Vary") expect(response.body).to_be_png() @gen_test async def test_shouldnt_convert_cmyk_jpeg_if_gif(self): response = await self.get_as_webp( "/unsafe/filters:format(gif)/cmyk.jpg" ) expect(response.code).to_equal(200) expect(response.headers).not_to_include("Vary") expect(response.body).to_be_gif() @gen_test async def test_shouldnt_convert_if_format_specified(self): response = await self.get_as_webp( "/unsafe/filters:format(gif)/image.jpg" ) 
expect(response.code).to_equal(200) expect(response.headers).not_to_include("Vary") expect(response.body).to_be_gif() @gen_test async def test_shouldnt_add_vary_if_format_specified(self): response = await self.get_as_webp( "/unsafe/filters:format(webp)/image.jpg" ) expect(response.code).to_equal(200) expect(response.headers).not_to_include("Vary") expect(response.body).to_be_webp() @gen_test async def test_should_add_vary_if_format_invalid(self): response = await self.get_as_webp( "/unsafe/filters:format(asdf)/image.jpg" ) expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @gen_test async def test_converting_return_etags(self): response = await self.get_as_webp("/unsafe/image.jpg") expect(response.headers).to_include("Etag") class ImageOperationsWithAutoWebPWithResultStorageTestCase( BaseImagingTestCase ): def get_request(self, *args, **kwargs): return RequestParameters(*args, **kwargs) def get_context(self): cfg = Config(SECURITY_KEY="ACME-SEC") cfg.LOADER = "thumbor.loaders.file_loader" cfg.FILE_LOADER_ROOT_PATH = self.loader_path cfg.RESULT_STORAGE = "thumbor.result_storages.file_storage" cfg.RESULT_STORAGE_EXPIRATION_SECONDS = 60 cfg.RESULT_STORAGE_FILE_STORAGE_ROOT_PATH = self.root_path cfg.AUTO_WEBP = True importer = Importer(cfg) importer.import_modules() server = ServerParameters( 8889, "localhost", "thumbor.conf", None, "info", None ) server.security_key = "ACME-SEC" ctx = Context(server, cfg, importer) ctx.request = self.get_request() ctx.server.gifsicle_path = which("gifsicle") return ctx @property def result_storage(self): return self.context.modules.result_storage async def get_as_webp(self, url): return await self.async_fetch( url, headers={"Accept": "image/webp,*/*;q=0.8"} ) @patch("thumbor.handlers.Context") @gen_test async def test_can_auto_convert_jpeg_from_result_storage( self, context_mock ): # NOQA context_mock.return_value = self.context crypto = CryptoURL("ACME-SEC") url = crypto.generate( image_url=quote("http://test.com/smart/image.jpg") ) self.context.request = self.get_request(url=url, accepts_webp=True) with open("./tests/fixtures/images/image.webp", "rb") as fixture: await self.context.modules.result_storage.put(fixture.read()) response = await self.get_as_webp(url) expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp() @patch("thumbor.handlers.Context") @gen_test async def test_can_auto_convert_unsafe_jpeg_from_result_storage( self, context_mock ): context_mock.return_value = self.context self.context.request = self.get_request(accepts_webp=True) response = await self.get_as_webp("/unsafe/image.jpg") expect(response.code).to_equal(200) expect(response.headers).to_include("Vary") expect(response.headers["Vary"]).to_include("Accept") expect(response.body).to_be_webp()<|fim▁end|>
from unittest.mock import patch from urllib.parse import quote from libthumbor import CryptoURL
<|file_name|>egress_test.go<|end_file_name|><|fim▁begin|>package stream_multiplexer import ( "bytes" "encoding/binary" "fmt" "go.dedis.ch/onet/log" "net" "sync" "testing" "time" ) func handleConnection(id int, conn net.Conn, expect []byte, t *testing.T, wg *sync.WaitGroup) { defer wg.Done() buffer := make([]byte, 0) errorCount := 0 maxError := 3 for len(buffer) < len(expect) { buffer2 := make([]byte, 20) conn.SetReadDeadline(time.Now().Add(time.Second)) n, err := conn.Read(buffer2) if err != nil { errorCount++ if errorCount >= maxError { t.Error("Could not read", err) } continue } for i := 0; i < n; i++ { buffer = append(buffer, buffer2[i]) } } if !bytes.Equal(buffer, expect) { t.Error("StartServerAndExpect failed, handler", id, "expected", expect, "got", buffer) } else { fmt.Println("StartServerAndExpect handler", id, " indeed received", buffer) } conn.SetWriteDeadline(time.Now().Add(time.Second)) n, err := conn.Write(buffer) if err != nil || n != len(buffer) { t.Error("Could not echo back the", len(buffer), "bytes, only", n, ":", err) } } func StartServerAndExpect(data map[int][]byte, remote string, t *testing.T, done chan bool) { var socketListener *net.TCPListener s, err := net.Listen("tcp", remote) socketListener = s.(*net.TCPListener) if err != nil { panic(err) } var wg sync.WaitGroup id := 0 wait := 0 maxWait := 2 for { socketListener.SetDeadline(time.Now().Add(time.Second)) conn, err := socketListener.Accept() log.Lvl1("ExpectedServer accepted a conn") if err != nil { if err, ok := err.(*net.OpError); ok && err.Timeout() { // it was a timeout if wait >= maxWait { break } wait++ continue } t.Error("StartServerAndExpect accept error:", err) } else { wg.Add(1) go handleConnection(id, conn, data[id], t, &wg) id++ } } wg.Wait() socketListener.Close() time.Sleep(time.Second) done <- true } // Tests that the multiplexer forwards short messages func TestEgress1(t *testing.T) { remote := "127.0.0.1:3000" payloadLength := 20 upstreamChan := make(chan []byte) downstreamChan := make(chan []byte) stopChan := make(chan bool) go StartEgressHandler(remote, payloadLength, upstreamChan, downstreamChan, stopChan, true) // prepare a dummy message payload := []byte("hello") multiplexedMsg := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload)) ID_str := generateRandomID() ID := []byte(ID_str[0:4]) copy(multiplexedMsg[0:4], ID) multiplexedMsg[7] = byte(len(payload)) copy(multiplexedMsg[8:], payload) doneChan := make(chan bool, 1) expected := make(map[int][]byte) expected[0] = payload go StartServerAndExpect(expected, remote, t, doneChan) time.Sleep(time.Second) upstreamChan <- multiplexedMsg <-doneChan echo := <-downstreamChan echoID := echo[0:4] size := int(binary.BigEndian.Uint32(echo[4:8])) data := echo[8:] if !bytes.Equal(echoID, ID) { t.Error("Echoed message ID is wrong", ID, echoID) } if !bytes.Equal(payload, data[:size]) { t.Error("Echoed message data is wrong", payload, data[:size]) } } // Tests that the multiplexer forwards double messages into one stream func TestEgress2(t *testing.T) { remote := "127.0.0.1:3000" payloadLength := 20 upstreamChan := make(chan []byte) downstreamChan := make(chan []byte) stopChan := make(chan bool) go StartEgressHandler(remote, payloadLength, upstreamChan, downstreamChan, stopChan, true) // prepare a dummy message payload := []byte("hello") doubleHello := make([]byte, 2*len(payload)) copy(doubleHello[0:5], payload) copy(doubleHello[5:10], payload) multiplexedMsg := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload)) ID_str := generateRandomID() ID := 
[]byte(ID_str[0:4]) copy(multiplexedMsg[0:4], ID) multiplexedMsg[7] = byte(len(payload)) copy(multiplexedMsg[8:], payload) doneChan := make(chan bool, 1) expected := make(map[int][]byte) expected[0] = doubleHello go StartServerAndExpect(expected, remote, t, doneChan) time.Sleep(time.Second) upstreamChan <- multiplexedMsg upstreamChan <- multiplexedMsg <-doneChan echo := <-downstreamChan echoID := echo[0:4] size := int(binary.BigEndian.Uint32(echo[4:8])) data := echo[8:] if !bytes.Equal(echoID, ID) { t.Error("Echoed message ID is wrong", ID, echoID) } if !bytes.Equal(doubleHello, data[:size]) { t.Error("Echoed message data is wrong", doubleHello, data[:size]) } } // Tests that the multiplexer multiplexes short messages func TestEgressMultiplex(t *testing.T) { remote := "127.0.0.1:3000" payloadLength := 20 upstreamChan := make(chan []byte) downstreamChan := make(chan []byte) stopChan := make(chan bool) go StartEgressHandler(remote, payloadLength, upstreamChan, downstreamChan, stopChan, true) // prepare a dummy message payload := []byte("hello") multiplexedMsg := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload)) ID_str := generateRandomID() ID := []byte(ID_str[0:4]) copy(multiplexedMsg[0:4], ID) multiplexedMsg[7] = byte(len(payload)) copy(multiplexedMsg[8:], payload) // prepare a dummy message 2 payload2 := []byte("hello2") multiplexedMsg2 := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload2)) ID2_str := generateRandomID() ID2 := []byte(ID2_str[0:4]) copy(multiplexedMsg2[0:4], ID2) multiplexedMsg2[7] = byte(len(payload2)) copy(multiplexedMsg2[8:], payload2) doneChan := make(chan bool, 1) expected := make(map[int][]byte) expected[0] = payload expected[1] = payload2 go StartServerAndExpect(expected, remote, t, doneChan) time.Sleep(time.Second) upstreamChan <- multiplexedMsg upstreamChan <- multiplexedMsg2 <-doneChan echo1 := <-downstreamChan echo2 := <-downstreamChan //swap messages if needed if bytes.Equal(ID, echo2[0:4]) && bytes.Equal(ID2, echo1[0:4]) { tmp := echo1 echo1 = echo2 echo2 = tmp } echoID1 := echo1[0:4] size1 := int(binary.BigEndian.Uint32(echo1[4:8])) data1 := echo1[8:] if !bytes.Equal(echoID1, ID) { t.Error("Echoed message ID is wrong", ID, echoID1) } if !bytes.Equal(payload, data1[:size1]) { t.Error("Echoed message data is wrong", payload, data1[:size1]) } echoID2 := echo2[0:4] size2 := int(binary.BigEndian.Uint32(echo2[4:8])) data2 := echo2[8:] if !bytes.Equal(echoID2, ID2) { t.Error("Echoed message ID is wrong", ID2, echoID2) } if !bytes.Equal(payload2, data2[:size2]) { t.Error("Echoed message data is wrong", payload2, data2[:size2]) } } // Tests that the multiplexer multiplexes long messages func TestEgressMultiplexLong(t *testing.T) { remote := "127.0.0.1:3000" payloadLength := 20 upstreamChan := make(chan []byte) downstreamChan := make(chan []byte) stopChan := make(chan bool) go StartEgressHandler(remote, payloadLength, upstreamChan, downstreamChan, stopChan, true) // prepare a dummy message payload := []byte("hello") multiplexedMsg := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload)) ID_str := generateRandomID() ID := []byte(ID_str[0:4]) copy(multiplexedMsg[0:4], ID) multiplexedMsg[7] = byte(len(payload)) copy(multiplexedMsg[8:], payload) // prepare a dummy message 2 payload2 := []byte("hello2") multiplexedMsg2 := make([]byte, MULTIPLEXER_HEADER_SIZE+len(payload2)) ID2_str := generateRandomID() ID2 := []byte(ID2_str[0:4]) copy(multiplexedMsg2[0:4], ID2) multiplexedMsg2[7] = byte(len(payload2)) copy(multiplexedMsg2[8:], payload2) doneChan := make(chan bool, 1) 
doubleHello := make([]byte, 2*len(payload)) copy(doubleHello[0:5], payload) copy(doubleHello[5:10], payload) doubleHello2 := make([]byte, 2*len(payload2)) copy(doubleHello2[0:6], payload2) copy(doubleHello2[6:12], payload2)<|fim▁hole|> go StartServerAndExpect(expected, remote, t, doneChan) time.Sleep(time.Second) upstreamChan <- multiplexedMsg upstreamChan <- multiplexedMsg2 upstreamChan <- multiplexedMsg2 upstreamChan <- multiplexedMsg <-doneChan echo1 := <-downstreamChan echo2 := <-downstreamChan //swap messages if needed if bytes.Equal(ID, echo2[0:4]) && bytes.Equal(ID2, echo1[0:4]) { tmp := echo1 echo1 = echo2 echo2 = tmp } echoID1 := echo1[0:4] size1 := int(binary.BigEndian.Uint32(echo1[4:8])) data1 := echo1[8:] if !bytes.Equal(echoID1, ID) { t.Error("Echoed message ID is wrong", ID, echoID1) } if !bytes.Equal(doubleHello, data1[:size1]) { t.Error("Echoed message data is wrong", doubleHello, data1[:size1]) } echoID2 := echo2[0:4] size2 := int(binary.BigEndian.Uint32(echo2[4:8])) data2 := echo2[8:] if !bytes.Equal(echoID2, ID2) { t.Error("Echoed message ID is wrong", ID2, echoID2) } if !bytes.Equal(doubleHello2, data2[:size2]) { t.Error("Echoed message data is wrong", doubleHello2, data2[:size2]) } }<|fim▁end|>
expected := make(map[int][]byte) expected[0] = doubleHello expected[1] = doubleHello2
<|file_name|>network_tiers.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package network import ( "fmt" "net/http" "time" computealpha "google.golang.org/api/compute/v0.alpha" "github.com/GoogleCloudPlatform/k8s-cloud-provider/pkg/cloud" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/util/wait" clientset "k8s.io/client-go/kubernetes" cloudprovider "k8s.io/cloud-provider" "k8s.io/kubernetes/test/e2e/framework" "k8s.io/kubernetes/test/e2e/framework/providers/gce" e2eservice "k8s.io/kubernetes/test/e2e/framework/service" gcecloud "k8s.io/legacy-cloud-providers/gce" "github.com/onsi/ginkgo" ) var _ = SIGDescribe("Services [Feature:GCEAlphaFeature][Slow]", func() { f := framework.NewDefaultFramework("services") var cs clientset.Interface serviceLBNames := []string{} ginkgo.BeforeEach(func() { // This test suite requires the GCE environment. framework.SkipUnlessProviderIs("gce") cs = f.ClientSet }) ginkgo.AfterEach(func() { if ginkgo.CurrentGinkgoTestDescription().Failed { e2eservice.DescribeSvc(f.Namespace.Name) } for _, lb := range serviceLBNames { framework.Logf("cleaning gce resource for %s", lb) framework.TestContext.CloudConfig.Provider.CleanupServiceResources(cs, lb, framework.TestContext.CloudConfig.Region, framework.TestContext.CloudConfig.Zone) } //reset serviceLBNames serviceLBNames = []string{} }) ginkgo.It("should be able to create and tear down a standard-tier load balancer [Slow]", func() { lagTimeout := e2eservice.LoadBalancerLagTimeoutDefault createTimeout := e2eservice.GetServiceLoadBalancerCreationTimeout(cs) svcName := "net-tiers-svc" ns := f.Namespace.Name jig := e2eservice.NewTestJig(cs, ns, svcName)<|fim▁hole|> ginkgo.By("creating a pod to be part of the service " + svcName) _, err := jig.Run(nil) framework.ExpectNoError(err) // Test 1: create a standard tiered LB for the Service. ginkgo.By("creating a Service of type LoadBalancer using the standard network tier") svc, err := jig.CreateTCPService(func(svc *v1.Service) { svc.Spec.Type = v1.ServiceTypeLoadBalancer setNetworkTier(svc, string(gcecloud.NetworkTierAnnotationStandard)) }) framework.ExpectNoError(err) // Verify that service has been updated properly. svcTier, err := gcecloud.GetServiceNetworkTier(svc) framework.ExpectNoError(err) framework.ExpectEqual(svcTier, cloud.NetworkTierStandard) // Record the LB name for test cleanup. serviceLBNames = append(serviceLBNames, cloudprovider.DefaultLoadBalancerName(svc)) // Wait and verify the LB. ingressIP := waitAndVerifyLBWithTier(jig, "", createTimeout, lagTimeout) // Test 2: re-create a LB of a different tier for the updated Service. ginkgo.By("updating the Service to use the premium (default) tier") svc, err = jig.UpdateService(func(svc *v1.Service) { clearNetworkTier(svc) }) framework.ExpectNoError(err) // Verify that service has been updated properly. svcTier, err = gcecloud.GetServiceNetworkTier(svc) framework.ExpectNoError(err) framework.ExpectEqual(svcTier, cloud.NetworkTierDefault) // Wait until the ingress IP changes. 
Each tier has its own pool of // IPs, so changing tiers implies changing IPs. ingressIP = waitAndVerifyLBWithTier(jig, ingressIP, createTimeout, lagTimeout) // Test 3: create a standard-tierd LB with a user-requested IP. ginkgo.By("reserving a static IP for the load balancer") requestedAddrName := fmt.Sprintf("e2e-ext-lb-net-tier-%s", framework.RunID) gceCloud, err := gce.GetGCECloud() framework.ExpectNoError(err) requestedIP, err := reserveAlphaRegionalAddress(gceCloud, requestedAddrName, cloud.NetworkTierStandard) framework.ExpectNoError(err, "failed to reserve a STANDARD tiered address") defer func() { if requestedAddrName != "" { // Release GCE static address - this is not kube-managed and will not be automatically released. if err := gceCloud.DeleteRegionAddress(requestedAddrName, gceCloud.Region()); err != nil { framework.Logf("failed to release static IP address %q: %v", requestedAddrName, err) } } }() framework.ExpectNoError(err) framework.Logf("Allocated static IP to be used by the load balancer: %q", requestedIP) ginkgo.By("updating the Service to use the standard tier with a requested IP") svc, err = jig.UpdateService(func(svc *v1.Service) { svc.Spec.LoadBalancerIP = requestedIP setNetworkTier(svc, string(gcecloud.NetworkTierAnnotationStandard)) }) framework.ExpectNoError(err) // Verify that service has been updated properly. framework.ExpectEqual(svc.Spec.LoadBalancerIP, requestedIP) svcTier, err = gcecloud.GetServiceNetworkTier(svc) framework.ExpectNoError(err) framework.ExpectEqual(svcTier, cloud.NetworkTierStandard) // Wait until the ingress IP changes and verifies the LB. ingressIP = waitAndVerifyLBWithTier(jig, ingressIP, createTimeout, lagTimeout) }) }) func waitAndVerifyLBWithTier(jig *e2eservice.TestJig, existingIP string, waitTimeout, checkTimeout time.Duration) string { // If existingIP is "" this will wait for any ingress IP to show up. Otherwise // it will wait for the ingress IP to change to something different. svc, err := jig.WaitForNewIngressIP(existingIP, waitTimeout) framework.ExpectNoError(err) svcPort := int(svc.Spec.Ports[0].Port) lbIngress := &svc.Status.LoadBalancer.Ingress[0] ingressIP := e2eservice.GetIngressPoint(lbIngress) ginkgo.By("running sanity and reachability checks") if svc.Spec.LoadBalancerIP != "" { // Verify that the new ingress IP is the requested IP if it's set. framework.ExpectEqual(ingressIP, svc.Spec.LoadBalancerIP) } // If the IP has been used by previous test, sometimes we get the lingering // 404 errors even after the LB is long gone. Tolerate and retry until the // the new LB is fully established since this feature is still Alpha in GCP. e2eservice.TestReachableHTTPWithRetriableErrorCodes(ingressIP, svcPort, []int{http.StatusNotFound}, checkTimeout) // Verify the network tier matches the desired. svcNetTier, err := gcecloud.GetServiceNetworkTier(svc) framework.ExpectNoError(err) netTier, err := getLBNetworkTierByIP(ingressIP) framework.ExpectNoError(err, "failed to get the network tier of the load balancer") framework.ExpectEqual(netTier, svcNetTier) return ingressIP } func getLBNetworkTierByIP(ip string) (cloud.NetworkTier, error) { var rule *computealpha.ForwardingRule // Retry a few times to tolerate flakes. 
err := wait.PollImmediate(5*time.Second, 15*time.Second, func() (bool, error) { obj, err := getGCEForwardingRuleByIP(ip) if err != nil { return false, err } rule = obj return true, nil }) if err != nil { return "", err } return cloud.NetworkTierGCEValueToType(rule.NetworkTier), nil } func getGCEForwardingRuleByIP(ip string) (*computealpha.ForwardingRule, error) { cloud, err := gce.GetGCECloud() if err != nil { return nil, err } ruleList, err := cloud.ListAlphaRegionForwardingRules(cloud.Region()) if err != nil { return nil, err } for _, rule := range ruleList { if rule.IPAddress == ip { return rule, nil } } return nil, fmt.Errorf("forwarding rule with ip %q not found", ip) } func setNetworkTier(svc *v1.Service, tier string) { key := gcecloud.NetworkTierAnnotationKey if svc.ObjectMeta.Annotations == nil { svc.ObjectMeta.Annotations = map[string]string{} } svc.ObjectMeta.Annotations[key] = tier } func clearNetworkTier(svc *v1.Service) { key := gcecloud.NetworkTierAnnotationKey if svc.ObjectMeta.Annotations == nil { return } delete(svc.ObjectMeta.Annotations, key) } // TODO: add retries if this turns out to be flaky. // TODO(#51665): remove this helper function once Network Tiers becomes beta. func reserveAlphaRegionalAddress(cloud *gcecloud.Cloud, name string, netTier cloud.NetworkTier) (string, error) { alphaAddr := &computealpha.Address{ Name: name, NetworkTier: netTier.ToGCEValue(), } if err := cloud.ReserveAlphaRegionAddress(alphaAddr, cloud.Region()); err != nil { return "", err } addr, err := cloud.GetRegionAddress(name, cloud.Region()) if err != nil { return "", err } return addr.Address, nil }<|fim▁end|>
<|file_name|>AddTagCommand.java<|end_file_name|><|fim▁begin|>package org.ovirt.engine.core.bll; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.TagsOperationParameters; import org.ovirt.engine.core.common.businessentities.Tags; import org.ovirt.engine.core.common.errors.EngineMessage; import org.ovirt.engine.core.dal.dbbroker.DbFacade; public class AddTagCommand<T extends TagsOperationParameters> extends TagsCommandOperationBase<T> { public AddTagCommand(T parameters) { super(parameters); } @Override protected void executeCommand() { DbFacade.getInstance().getTagDao().save(getTag()); TagsDirector.getInstance().addTag(getTag()); <|fim▁hole|> @Override protected boolean validate() { Tags tag = DbFacade.getInstance().getTagDao() .getByName(getParameters().getTag().getTagName()); if (tag != null) { addValidationMessage(EngineMessage.TAGS_SPECIFY_TAG_IS_IN_USE); return false; } return true; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.USER_ADD_TAG : AuditLogType.USER_ADD_TAG_FAILED; } }<|fim▁end|>
setSucceeded(true); }
<|file_name|>fakepuppy.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import time
import os

DATA_DIR='/local/devel/guppy/testing/'

opts, args = getopt.getopt(sys.argv[1:], 'c:t')

transfer = False
listdir = False
size = False  # initialise so the 'elif size:' branch cannot raise NameError
for opt, optarg in opts:
    if opt == '-c':
        if optarg == 'get' or optarg == 'put':
            transfer = True
        if optarg == 'dir':
            listdir = True
        if optarg == 'cancel':
            os.system("pkill -f 'fakepuppy.py -c get.*'")
        if optarg == 'size':
            size = True

if transfer:
    inc = 10
    percent = 0.0
    for i in xrange(100/inc):
        percent = percent + inc
        print >> sys.stderr, "\r%6.2f%%, %5.2f Mbits/s, %02d:%02d:%02d elapsed, %d:%02d:%02d remaining" % (percent, 2.2, 1, 1, 1, 2, 2, 2),
        time.sleep(0.5)
    print
elif listdir:
    listing = open(DATA_DIR + 'puppy-listdir.txt')
    for line in listing:
        print line,
    listing.close()
elif size:
    print 'Total %10u kiB %7u MiB %4u GiB' % (0, 0, 120)
    print 'Free %10u kiB %7u MiB %4u GiB' % (0, 500, 0)
else:
    print opts, '|', args,<|fim▁end|>
#!/usr/bin/python import sys import getopt
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from unittest import mock from urllib.error import HTTPError, URLError import requests from azure.mgmt.cdn import CdnManagementClient from azure.mgmt.frontdoor import FrontDoorManagementClient from django.core.exceptions import ImproperlyConfigured from django.test import TestCase from django.test.utils import override_settings from wagtail.contrib.frontend_cache.backends import ( AzureCdnBackend, AzureFrontDoorBackend, BaseBackend, CloudflareBackend, CloudfrontBackend, HTTPBackend) from wagtail.contrib.frontend_cache.utils import get_backends from wagtail.core.models import Page from wagtail.tests.testapp.models import EventIndex from .utils import ( PurgeBatch, purge_page_from_cache, purge_pages_from_cache, purge_url_from_cache, purge_urls_from_cache) class TestBackendConfiguration(TestCase): def test_default(self): backends = get_backends() self.assertEqual(len(backends), 0) def test_varnish(self): backends = get_backends(backend_settings={ 'varnish': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.HTTPBackend', 'LOCATION': 'http://localhost:8000', }, }) self.assertEqual(set(backends.keys()), set(['varnish'])) self.assertIsInstance(backends['varnish'], HTTPBackend) self.assertEqual(backends['varnish'].cache_scheme, 'http') self.assertEqual(backends['varnish'].cache_netloc, 'localhost:8000') def test_cloudflare(self): backends = get_backends(backend_settings={ 'cloudflare': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudflareBackend', 'EMAIL': '[email protected]', 'API_KEY': 'this is the api key', 'ZONEID': 'this is a zone id', 'BEARER_TOKEN': 'this is a bearer token' }, }) self.assertEqual(set(backends.keys()), set(['cloudflare'])) self.assertIsInstance(backends['cloudflare'], CloudflareBackend) self.assertEqual(backends['cloudflare'].cloudflare_email, '[email protected]') self.assertEqual(backends['cloudflare'].cloudflare_api_key, 'this is the api key') self.assertEqual(backends['cloudflare'].cloudflare_token, 'this is a bearer token') def test_cloudfront(self): backends = get_backends(backend_settings={ 'cloudfront': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend', 'DISTRIBUTION_ID': 'frontend', }, }) self.assertEqual(set(backends.keys()), set(['cloudfront'])) self.assertIsInstance(backends['cloudfront'], CloudfrontBackend) self.assertEqual(backends['cloudfront'].cloudfront_distribution_id, 'frontend') def test_azure_cdn(self): backends = get_backends(backend_settings={ 'azure_cdn': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'CDN_PROFILE_NAME': 'wagtail-io-profile', 'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint', }, }) self.assertEqual(set(backends.keys()), set(['azure_cdn'])) self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend) self.assertEqual(backends['azure_cdn']._resource_group_name, 'test-resource-group') self.assertEqual(backends['azure_cdn']._cdn_profile_name, 'wagtail-io-profile') self.assertEqual(backends['azure_cdn']._cdn_endpoint_name, 'wagtail-io-endpoint') def test_azure_front_door(self): backends = get_backends(backend_settings={ 'azure_front_door': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'FRONT_DOOR_NAME': 'wagtail-io-front-door', }, }) self.assertEqual(set(backends.keys()), set(['azure_front_door'])) self.assertIsInstance(backends['azure_front_door'], AzureFrontDoorBackend) 
self.assertEqual(backends['azure_front_door']._resource_group_name, 'test-resource-group') self.assertEqual(backends['azure_front_door']._front_door_name, 'wagtail-io-front-door') def test_azure_cdn_get_client(self): mock_credentials = mock.MagicMock() backends = get_backends(backend_settings={ 'azure_cdn': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'CDN_PROFILE_NAME': 'wagtail-io-profile', 'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint', 'SUBSCRIPTION_ID': 'fake-subscription-id', 'CREDENTIALS': mock_credentials, }, }) self.assertEqual(set(backends.keys()), set(['azure_cdn'])) client = backends['azure_cdn']._get_client() self.assertIsInstance(client, CdnManagementClient) self.assertEqual(client.config.subscription_id, 'fake-subscription-id') self.assertIs(client.config.credentials, mock_credentials) def test_azure_front_door_get_client(self): mock_credentials = mock.MagicMock() backends = get_backends(backend_settings={<|fim▁hole|> 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'FRONT_DOOR_NAME': 'wagtail-io-fake-front-door-name', 'SUBSCRIPTION_ID': 'fake-subscription-id', 'CREDENTIALS': mock_credentials, }, }) client = backends['azure_front_door']._get_client() self.assertEqual(set(backends.keys()), set(['azure_front_door'])) self.assertIsInstance(client, FrontDoorManagementClient) self.assertEqual(client.config.subscription_id, 'fake-subscription-id') self.assertIs(client.config.credentials, mock_credentials) @mock.patch('wagtail.contrib.frontend_cache.backends.AzureCdnBackend._make_purge_call') def test_azure_cdn_purge(self, make_purge_call_mock): backends = get_backends(backend_settings={ 'azure_cdn': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'CDN_PROFILE_NAME': 'wagtail-io-profile', 'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint', 'CREDENTIALS': 'Fake credentials', }, }) self.assertEqual(set(backends.keys()), set(['azure_cdn'])) self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend) # purge() backends['azure_cdn'].purge('http://www.wagtail.org/home/events/christmas/?test=1') make_purge_call_mock.assert_called_once() call_args = tuple(make_purge_call_mock.call_args)[0] self.assertEqual(len(call_args), 2) self.assertIsInstance(call_args[0], CdnManagementClient) self.assertEqual(call_args[1], ["/home/events/christmas/?test=1"]) make_purge_call_mock.reset_mock() # purge_batch() backends['azure_cdn'].purge_batch([ 'http://www.wagtail.org/home/events/christmas/?test=1', 'http://torchbox.com/blog/' ]) make_purge_call_mock.assert_called_once() call_args = tuple(make_purge_call_mock.call_args)[0] self.assertIsInstance(call_args[0], CdnManagementClient) self.assertEqual(call_args[1], ["/home/events/christmas/?test=1", "/blog/"]) @mock.patch('wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend._make_purge_call') def test_azure_front_door_purge(self, make_purge_call_mock): backends = get_backends(backend_settings={ 'azure_front_door': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend', 'RESOURCE_GROUP_NAME': 'test-resource-group', 'FRONT_DOOR_NAME': 'wagtail-io-front-door', 'CREDENTIALS': 'Fake credentials', }, }) self.assertEqual(set(backends.keys()), set(['azure_front_door'])) self.assertIsInstance(backends['azure_front_door'], AzureFrontDoorBackend) # purge() 
backends['azure_front_door'].purge('http://www.wagtail.org/home/events/christmas/?test=1') make_purge_call_mock.assert_called_once() call_args = tuple(make_purge_call_mock.call_args)[0] self.assertIsInstance(call_args[0], FrontDoorManagementClient) self.assertEqual(call_args[1], ["/home/events/christmas/?test=1"]) make_purge_call_mock.reset_mock() # purge_batch() backends['azure_front_door'].purge_batch([ 'http://www.wagtail.org/home/events/christmas/?test=1', 'http://torchbox.com/blog/' ]) make_purge_call_mock.assert_called_once() call_args = tuple(make_purge_call_mock.call_args)[0] self.assertIsInstance(call_args[0], FrontDoorManagementClient) self.assertEqual(call_args[1], ["/home/events/christmas/?test=1", "/blog/"]) def test_http(self): """Test that `HTTPBackend.purge` works when urlopen succeeds""" self._test_http_with_side_effect(urlopen_side_effect=None) def test_http_httperror(self): """Test that `HTTPBackend.purge` can handle `HTTPError`""" http_error = HTTPError( url='http://localhost:8000/home/events/christmas/', code=500, msg='Internal Server Error', hdrs={}, fp=None ) with self.assertLogs(level='ERROR') as log_output: self._test_http_with_side_effect(urlopen_side_effect=http_error) self.assertIn( "Couldn't purge 'http://www.wagtail.org/home/events/christmas/' from HTTP cache. HTTPError: 500 Internal Server Error", log_output.output[0] ) def test_http_urlerror(self): """Test that `HTTPBackend.purge` can handle `URLError`""" url_error = URLError(reason='just for tests') with self.assertLogs(level='ERROR') as log_output: self._test_http_with_side_effect(urlopen_side_effect=url_error) self.assertIn( "Couldn't purge 'http://www.wagtail.org/home/events/christmas/' from HTTP cache. URLError: just for tests", log_output.output[0] ) @mock.patch('wagtail.contrib.frontend_cache.backends.urlopen') def _test_http_with_side_effect(self, urlopen_mock, urlopen_side_effect): # given a backends configuration with one HTTP backend backends = get_backends(backend_settings={ 'varnish': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.HTTPBackend', 'LOCATION': 'http://localhost:8000', }, }) self.assertEqual(set(backends.keys()), set(['varnish'])) self.assertIsInstance(backends['varnish'], HTTPBackend) # and mocked urlopen that may or may not raise network-related exception urlopen_mock.side_effect = urlopen_side_effect # when making a purge request backends.get('varnish').purge('http://www.wagtail.org/home/events/christmas/') # then no exception is raised # and mocked urlopen is called with a proper purge request self.assertEqual(urlopen_mock.call_count, 1) (purge_request,), _call_kwargs = urlopen_mock.call_args self.assertEqual(purge_request.full_url, 'http://localhost:8000/home/events/christmas/') def test_cloudfront_validate_distribution_id(self): with self.assertRaises(ImproperlyConfigured): get_backends(backend_settings={ 'cloudfront': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend', }, }) @mock.patch('wagtail.contrib.frontend_cache.backends.CloudfrontBackend._create_invalidation') def test_cloudfront_distribution_id_mapping(self, _create_invalidation): backends = get_backends(backend_settings={ 'cloudfront': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend', 'DISTRIBUTION_ID': { 'www.wagtail.org': 'frontend', } }, }) backends.get('cloudfront').purge('http://www.wagtail.org/home/events/christmas/') backends.get('cloudfront').purge('http://torchbox.com/blog/') _create_invalidation.assert_called_once_with('frontend', 
['/home/events/christmas/']) def test_multiple(self): backends = get_backends(backend_settings={ 'varnish': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.HTTPBackend', 'LOCATION': 'http://localhost:8000/', }, 'cloudflare': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudflareBackend', 'EMAIL': '[email protected]', 'API_KEY': 'this is the api key', 'ZONEID': 'this is a zone id', } }) self.assertEqual(set(backends.keys()), set(['varnish', 'cloudflare'])) def test_filter(self): backends = get_backends(backend_settings={ 'varnish': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.HTTPBackend', 'LOCATION': 'http://localhost:8000/', }, 'cloudflare': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudflareBackend', 'EMAIL': '[email protected]', 'API_KEY': 'this is the api key', 'ZONEID': 'this is a zone id', } }, backends=['cloudflare']) self.assertEqual(set(backends.keys()), set(['cloudflare'])) @override_settings(WAGTAILFRONTENDCACHE_LOCATION='http://localhost:8000') def test_backwards_compatibility(self): backends = get_backends() self.assertEqual(set(backends.keys()), set(['default'])) self.assertIsInstance(backends['default'], HTTPBackend) self.assertEqual(backends['default'].cache_scheme, 'http') self.assertEqual(backends['default'].cache_netloc, 'localhost:8000') PURGED_URLS = [] class MockBackend(BaseBackend): def __init__(self, config): pass def purge(self, url): PURGED_URLS.append(url) class MockCloudflareBackend(CloudflareBackend): def __init__(self, config): pass def _purge_urls(self, urls): if len(urls) > self.CHUNK_SIZE: raise Exception("Cloudflare backend is not chunking requests as expected") PURGED_URLS.extend(urls) @override_settings(WAGTAILFRONTENDCACHE={ 'varnish': { 'BACKEND': 'wagtail.contrib.frontend_cache.tests.MockBackend', }, }) class TestCachePurgingFunctions(TestCase): fixtures = ['test.json'] def setUp(self): # Reset PURGED_URLS to an empty list PURGED_URLS[:] = [] def test_purge_url_from_cache(self): purge_url_from_cache('http://localhost/foo') self.assertEqual(PURGED_URLS, ['http://localhost/foo']) def test_purge_urls_from_cache(self): purge_urls_from_cache(['http://localhost/foo', 'http://localhost/bar']) self.assertEqual(PURGED_URLS, ['http://localhost/foo', 'http://localhost/bar']) def test_purge_page_from_cache(self): page = EventIndex.objects.get(url_path='/home/events/') purge_page_from_cache(page) self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/']) def test_purge_pages_from_cache(self): purge_pages_from_cache(EventIndex.objects.all()) self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/']) def test_purge_batch(self): batch = PurgeBatch() page = EventIndex.objects.get(url_path='/home/events/') batch.add_page(page) batch.add_url('http://localhost/foo') batch.purge() self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/', 'http://localhost/foo']) @override_settings(WAGTAILFRONTENDCACHE={ 'cloudflare': { 'BACKEND': 'wagtail.contrib.frontend_cache.tests.MockCloudflareBackend', }, }) class TestCloudflareCachePurgingFunctions(TestCase): def setUp(self): # Reset PURGED_URLS to an empty list PURGED_URLS[:] = [] def test_cloudflare_purge_batch_chunked(self): batch = PurgeBatch() urls = ['https://localhost/foo{}'.format(i) for i in range(1, 65)] batch.add_urls(urls) batch.purge() self.assertCountEqual(PURGED_URLS, urls) @override_settings(WAGTAILFRONTENDCACHE={ 'varnish': { 'BACKEND': 
'wagtail.contrib.frontend_cache.tests.MockBackend', }, }) class TestCachePurgingSignals(TestCase): fixtures = ['test.json'] def setUp(self): # Reset PURGED_URLS to an empty list PURGED_URLS[:] = [] def test_purge_on_publish(self): page = EventIndex.objects.get(url_path='/home/events/') page.save_revision().publish() self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/']) def test_purge_on_unpublish(self): page = EventIndex.objects.get(url_path='/home/events/') page.unpublish() self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/']) def test_purge_with_unroutable_page(self): root = Page.objects.get(url_path='/') page = EventIndex(title='new top-level page') root.add_child(instance=page) page.save_revision().publish() self.assertEqual(PURGED_URLS, []) @override_settings(ROOT_URLCONF='wagtail.tests.urls_multilang', LANGUAGE_CODE='en', WAGTAILFRONTENDCACHE_LANGUAGES=['en', 'fr', 'pt-br']) def test_purge_on_publish_in_multilang_env(self): PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list page = EventIndex.objects.get(url_path='/home/events/') page.save_revision().publish() self.assertEqual(PURGED_URLS, [ 'http://localhost/en/events/', 'http://localhost/en/events/past/', 'http://localhost/fr/events/', 'http://localhost/fr/events/past/', 'http://localhost/pt-br/events/', 'http://localhost/pt-br/events/past/', ]) @override_settings(ROOT_URLCONF='wagtail.tests.urls_multilang', LANGUAGE_CODE='en', WAGTAIL_I18N_ENABLED=True, WAGTAIL_CONTENT_LANGUAGES=[('en', 'English'), ('fr', 'French')]) def test_purge_on_publish_with_i18n_enabled(self): PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list page = EventIndex.objects.get(url_path='/home/events/') page.save_revision().publish() self.assertEqual(PURGED_URLS, [ 'http://localhost/en/events/', 'http://localhost/en/events/past/', 'http://localhost/fr/events/', 'http://localhost/fr/events/past/', ]) @override_settings(ROOT_URLCONF='wagtail.tests.urls_multilang', LANGUAGE_CODE='en', WAGTAIL_CONTENT_LANGUAGES=[('en', 'English'), ('fr', 'French')]) def test_purge_on_publish_without_i18n_enabled(self): # It should ignore WAGTAIL_CONTENT_LANGUAGES as WAGTAIL_I18N_ENABLED isn't set PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list page = EventIndex.objects.get(url_path='/home/events/') page.save_revision().publish() self.assertEqual(PURGED_URLS, ['http://localhost/en/events/', 'http://localhost/en/events/past/']) class TestPurgeBatchClass(TestCase): # Tests the .add_*() methods on PurgeBatch. 
The .purge() method is tested # by TestCachePurgingFunctions.test_purge_batch above fixtures = ['test.json'] def test_add_url(self): batch = PurgeBatch() batch.add_url('http://localhost/foo') self.assertEqual(batch.urls, ['http://localhost/foo']) def test_add_urls(self): batch = PurgeBatch() batch.add_urls(['http://localhost/foo', 'http://localhost/bar']) self.assertEqual(batch.urls, ['http://localhost/foo', 'http://localhost/bar']) def test_add_page(self): page = EventIndex.objects.get(url_path='/home/events/') batch = PurgeBatch() batch.add_page(page) self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/']) def test_add_pages(self): batch = PurgeBatch() batch.add_pages(EventIndex.objects.all()) self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/']) def test_multiple_calls(self): page = EventIndex.objects.get(url_path='/home/events/') batch = PurgeBatch() batch.add_page(page) batch.add_url('http://localhost/foo') batch.purge() self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/', 'http://localhost/foo']) @mock.patch('wagtail.contrib.frontend_cache.backends.requests.delete') def test_http_error_on_cloudflare_purge_batch(self, requests_delete_mock): backend_settings = { 'cloudflare': { 'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudflareBackend', 'EMAIL': '[email protected]', 'API_KEY': 'this is the api key', 'ZONEID': 'this is a zone id', }, } class MockResponse: def __init__(self, status_code=200): self.status_code = status_code http_error = requests.exceptions.HTTPError(response=MockResponse(status_code=500)) requests_delete_mock.side_effect = http_error page = EventIndex.objects.get(url_path='/home/events/') batch = PurgeBatch() batch.add_page(page) with self.assertLogs(level='ERROR') as log_output: batch.purge(backend_settings=backend_settings) self.assertIn( "Couldn't purge 'http://localhost/events/' from Cloudflare. HTTPError: 500", log_output.output[0] )<|fim▁end|>
'azure_front_door': {
<|file_name|>WhileToForFix.java<|end_file_name|><|fim▁begin|>/* * Copyright 2014 Guidewire Software, Inc. */ package gw.plugin.ij.intentions; import com.intellij.codeInsight.CodeInsightUtilBase; import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiWhiteSpace; import com.intellij.psi.util.PsiMatcherImpl; import gw.internal.gosu.parser.Expression; import gw.internal.gosu.parser.expressions.NumericLiteral; import gw.lang.parser.IStatement; import gw.lang.parser.statements.IAssignmentStatement; import gw.lang.parser.statements.IStatementList; import gw.lang.parser.statements.IWhileStatement; import gw.plugin.ij.lang.psi.api.statements.IGosuVariable; import gw.plugin.ij.lang.psi.impl.statements.GosuForEachStatementImpl; import gw.plugin.ij.lang.psi.impl.statements.GosuWhileStatementImpl; import gw.plugin.ij.lang.psi.util.GosuPsiParseUtil; import gw.plugin.ij.util.GosuBundle; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import static com.intellij.psi.util.PsiMatchers.hasClass; public class WhileToForFix extends LocalQuickFixAndIntentionActionOnPsiElement { String ident; Expression rhs; private IGosuVariable declarationEqualToZero; private IAssignmentStatement increment; public WhileToForFix(PsiElement whileStmt, String ident, Expression rhs, IGosuVariable declarationEqualToZero, IAssignmentStatement increment) { super(whileStmt); this.ident = ident; this.rhs = rhs; this.declarationEqualToZero = declarationEqualToZero; this.increment = increment; } @Override public void invoke(@NotNull Project project, @NotNull PsiFile file, @Nullable("is null when called from inspection") Editor editor, @NotNull PsiElement startElement, @NotNull PsiElement endElement) { if (!CodeInsightUtilBase.prepareFileForWrite(startElement.getContainingFile())) { return; } IWhileStatement parsedElement = ((GosuWhileStatementImpl) startElement).getParsedElement(); if (parsedElement == null) { return; } IStatement statement = parsedElement.getStatement(); IStatement[] statements = ((IStatementList) statement).getStatements(); StringBuilder forStmt = new StringBuilder(); forStmt.append("for ("); forStmt.append(ident); forStmt.append(" in 0.."); if (rhs instanceof NumericLiteral) { Object res = rhs.evaluate(); if (res instanceof Integer) { forStmt.append(((Integer) res) - 1); } } else { forStmt.append("|" + rhs); } forStmt.append(") {\n"); String indent = getIndent(parsedElement, statements); for (IStatement statement1 : statements) { if (statement1 != increment) { forStmt.append(indent); forStmt.append(statement1.getLocation().getTextFromTokens()); forStmt.append("\n");<|fim▁hole|> forStmt.append("}"); PsiElement stub = GosuPsiParseUtil.parseProgramm(forStmt.toString(), startElement, file.getManager(), null); PsiElement newForStmt = new PsiMatcherImpl(stub) .descendant(hasClass(GosuForEachStatementImpl.class)) .getElement(); if (newForStmt != null) { declarationEqualToZero.delete(); startElement.replace(newForStmt); } } private String getIndent(IWhileStatement parsedElement, IStatement[] statements) { int whileColumn = parsedElement.getLocation().getColumn(); int column = statements[1].getLocation().getColumn() - whileColumn; if (column < 0) { return " "; } StringBuilder out = new StringBuilder(); for (int i = 0; i <= column; i++) { out.append(" "); } return out.toString(); } private void
removeVarDecl(PsiElement whileStmt, String ident) { PsiElement prev = whileStmt.getPrevSibling(); while (prev instanceof PsiWhiteSpace) { prev = prev.getPrevSibling(); } if (prev instanceof IGosuVariable && ((IGosuVariable) prev).getName().equals(ident)) { prev.delete(); } } @Override public boolean isAvailable(@NotNull Project project, @NotNull PsiFile file, @NotNull PsiElement startElement, @NotNull PsiElement endElement) { return startElement instanceof GosuWhileStatementImpl; } @NotNull @Override public String getText() { return GosuBundle.message("inspection.while.to.for"); } @NotNull @Override public String getFamilyName() { return GosuBundle.message("inspection.group.name.statement.issues"); } }<|fim▁end|>
} }
<|file_name|>Cycle_Second_25_S_120.py<|end_file_name|><|fim▁begin|>import tests.periodicities.period_test as per<|fim▁hole|> per.buildModel((120, 'S', 25))<|fim▁end|>
<|file_name|>SQLContextLoggerBeanPostProcessor.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2012. Piraso Alvin R. de Leon. All Rights Reserved. * * See the NOTICE file distributed with<|fim▁hole|> * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.piraso.server.sql; import org.piraso.server.AbstractContextLoggerBeanProcessor; import javax.sql.DataSource; /** * Creates a bean post processor which ensures that any bean instance of type {@link DataSource} will * be wrapped by a context logger aware instance. * */ public class SQLContextLoggerBeanPostProcessor extends AbstractContextLoggerBeanProcessor<DataSource> { public SQLContextLoggerBeanPostProcessor() { super(DataSource.class); } @Override public DataSource createProxy(DataSource o, String id) { return SQLContextLogger.create(o, id); } }<|fim▁end|>
* this work for additional information regarding copyright ownership. * The Piraso licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with
<|file_name|>vmware_local_role_manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # Author(s): Abhijeet Kasurde <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: vmware_local_role_manager short_description: Manage local roles on an ESXi host description: - Manage local roles on an ESXi host version_added: "2.5" author: Abhijeet Kasurde (@akasurde) <[email protected]> notes: - Tested on ESXi 6.5 - Be sure that the ESXi user used for login, has the appropriate rights to create / delete / edit roles requirements: - "python >= 2.6" - PyVmomi options: local_role_name: description: - The local role name to be managed. required: True local_privilege_ids: description: - The list of privileges that role needs to have. - Please see U(https://docs.vmware.com/en/VMware-vSphere/6.0/com.vmware.vsphere.security.doc/GUID-ED56F3C4-77D0-49E3-88B6-B99B8B437B62.html) default: [] state: description: - Indicate desired state of the role. - If the role already exists when C(state=present), the role info is updated. choices: ['present', 'absent'] default: present force_remove: description: - If set to C(False) then prevents the role from being removed if any permissions are using it. default: False type: bool extends_documentation_fragment: vmware.documentation ''' EXAMPLES = ''' # Example vmware_local_role_manager command from Ansible Playbooks - name: Add local role to ESXi vmware_local_role_manager: hostname: esxi_hostname username: root password: vmware local_role_name: vmware_qa state: present - name: Add local role with privileges to ESXi vmware_local_role_manager: hostname: esxi_hostname username: root password: vmware local_role_name: vmware_qa local_privilege_ids: [ 'Folder.Create', 'Folder.Delete'] state: present - name: Remove local role from ESXi vmware_local_role_manager: hostname: esxi_hostname username: root password: vmware local_role_name: vmware_qa state: absent ''' RETURN = r''' local_role_name: description: Name of local role returned: always type: string role_id: description: ESXi generated local role id returned: always type: int old_privileges: description: List of privileges of role before update returned: on update type: list new_privileges: description: List of privileges of role after update returned: on update type: list ''' try: from pyVmomi import vim, vmodl except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec class VMwareLocalRoleManager(PyVmomi): def __init__(self, module): super(VMwareLocalRoleManager, self).__init__(module) self.module = module self.params = module.params self.role_name = self.params['local_role_name'] self.state = self.params['state'] self.priv_ids = self.params['local_privilege_ids'] self.force = not self.params['force_remove'] self.current_role = None if self.content.authorizationManager is None: self.module.fail_json(msg="Failed to get local authorization manager settings.", details="It seems that %s is a vCenter server " "instead of an ESXi server" % self.params['hostname']) def process_state(self): local_role_manager_states = { 'absent': { 'present': self.state_remove_role, 'absent': self.state_exit_unchanged, }, 'present': { 'present': 
self.state_update_role, 'absent': self.state_create_role, } } try: local_role_manager_states[self.state][self.check_local_role_manager_state()]() except vmodl.RuntimeFault as runtime_fault: self.module.fail_json(msg=runtime_fault.msg) except vmodl.MethodFault as method_fault: self.module.fail_json(msg=method_fault.msg) except Exception as e: self.module.fail_json(msg=str(e)) def check_local_role_manager_state(self): auth_role = self.find_authorization_role() if auth_role: self.current_role = auth_role return 'present' else: return 'absent' def find_authorization_role(self): desired_role = None for role in self.content.authorizationManager.roleList: if role.name == self.role_name: desired_role = role return desired_role def state_create_role(self): try: role_id = self.content.authorizationManager.AddAuthorizationRole(name=self.role_name, privIds=self.priv_ids) except vim.fault.AlreadyExists as e: self.module.fail_json(msg="Failed to create a role %s as the user specified role name " "already exists." % self.role_name, details=e.msg) except vim.fault.InvalidName as e: self.module.fail_json(msg="Failed to create a role %s as the user specified role name " "is empty" % self.role_name, details=e.msg) except vmodl.fault.InvalidArgument as e: self.module.fail_json(msg="Failed to create a role %s as the user specified privileges " "are unknown" % self.role_name, details=e.msg)<|fim▁hole|> result = { 'changed': True, 'role_id': role_id, 'privileges': self.priv_ids, 'local_role_name': self.role_name, } self.module.exit_json(**result) def state_remove_role(self): try: self.content.authorizationManager.RemoveAuthorizationRole(roleId=self.current_role.roleId, failIfUsed=self.force) except vim.fault.NotFound as e: self.module.fail_json(msg="Failed to remove a role %s as the user specified role name " "does not exist." % self.role_name, details=e.msg) except vim.fault.RemoveFailed as e: msg = "Failed to remove a role %s as the user specified role name." % self.role_name if self.force: msg += " Use force_remove as True." self.module.fail_json(msg=msg, details=e.msg) except vmodl.fault.InvalidArgument as e: self.module.fail_json(msg="Failed to remove a role %s as the user specified " "role is a system role" % self.role_name, details=e.msg) result = { 'changed': True, 'role_id': self.current_role.roleId, 'local_role_name': self.role_name, } self.module.exit_json(**result) def state_exit_unchanged(self): self.module.exit_json(changed=False) def state_update_role(self): current_privileges = set(self.current_role.privilege) # Add system-defined privileges, "System.Anonymous", "System.View", and "System.Read". self.params['local_privilege_ids'].extend(['System.Anonymous', 'System.Read', 'System.View']) desired_privileges = set(self.params['local_privilege_ids']) changed_privileges = current_privileges ^ desired_privileges changed_privileges = list(changed_privileges) if not changed_privileges: self.state_exit_unchanged() # Delete unwanted privileges that are not required; iterate over a copy so that removing elements does not skip entries for priv in list(changed_privileges): if priv not in desired_privileges: changed_privileges.remove(priv) try: self.content.authorizationManager.UpdateAuthorizationRole(roleId=self.current_role.roleId, newName=self.current_role.name, privIds=changed_privileges) except vim.fault.NotFound as e: self.module.fail_json(msg="Failed to update Role %s.
Please check privileges " "provided for update" % self.role_name, details=e.msg) except vim.fault.InvalidName as e: self.module.fail_json(msg="Failed to update Role %s as role name is empty" % self.role_name, details=e.msg) except vim.fault.AlreadyExists as e: self.module.fail_json(msg="Failed to update Role %s." % self.role_name, details=e.msg) except vmodl.fault.InvalidArgument as e: self.module.fail_json(msg="Failed to update Role %s as user specified " "role is system role which can not be changed" % self.role_name, details=e.msg) except vim.fault.NoPermission as e: self.module.fail_json(msg="Failed to update Role %s as current session does not" " have any privilege to update specified role" % self.role_name, details=e.msg) role = self.find_authorization_role() result = { 'changed': True, 'role_id': role.roleId, 'local_role_name': role.name, 'new_privileges': role.privilege, 'old_privileges': current_privileges, } self.module.exit_json(**result) def main(): argument_spec = vmware_argument_spec() argument_spec.update(dict(local_role_name=dict(required=True, type='str'), local_privilege_ids=dict(default=[], type='list'), force_remove=dict(default=False, type='bool'), state=dict(default='present', choices=['present', 'absent'], type='str'))) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) vmware_local_role_manager = VMwareLocalRoleManager(module) vmware_local_role_manager.process_state() if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>Navigation.js<|end_file_name|><|fim▁begin|>import React from "react" import { injectIntl } from "react-intl" import { NavLink } from "react-router-dom" import PropTypes from "prop-types" import Styles from "./Navigation.css" <|fim▁hole|> return ( <ul className={Styles.list}> <li><NavLink exact to="/" activeClassName={Styles.activeLink}>Home</NavLink></li> <li><NavLink to="/redux" activeClassName={Styles.activeLink}>Redux</NavLink></li> <li><NavLink to="/localization" activeClassName={Styles.activeLink}>Localization</NavLink></li> <li><NavLink to="/markdown" activeClassName={Styles.activeLink}>Markdown</NavLink></li> <li><NavLink to="/missing" activeClassName={Styles.activeLink}>Missing</NavLink></li> </ul> ) } Navigation.propTypes = { intl: PropTypes.object } export default injectIntl(Navigation)<|fim▁end|>
function Navigation({ intl }) {
<|file_name|>gather_essays.py<|end_file_name|><|fim▁begin|><|fim▁hole|> if f.endswith(".markdown"): fp = os.path.join(base, f) _, np = os.path.split(base) np = re.sub(r"_def$", "", np) np = os.path.join("essays", np+".markdown") # print fp, "=>", np # shutil.copy(fp, np) cmd = 'git mv "{0}" "{1}"'.format(fp, np) print cmd os.system(cmd)<|fim▁end|>
import os, re, shutil for (base, _, files) in os.walk("essays"): for f in files:
<|file_name|>7-pwm.py<|end_file_name|><|fim▁begin|># CamJam EduKit 3 - Robotics # Worksheet 7 - Controlling the motors with PWM import RPi.GPIO as GPIO # Import the GPIO Library import time # Import the Time library # Set the GPIO modes GPIO.setmode(GPIO.BCM) GPIO.setwarnings(False) # Set variables for the GPIO motor pins pinMotorAForwards = 10 pinMotorABackwards = 9 pinMotorBForwards = 8 pinMotorBBackwards = 7 # How many times to turn the pin on and off each second<|fim▁hole|>Frequency = 20 # How long the pin stays on each cycle, as a percent (here, it's 30%) DutyCycle = 30 # Setting the duty cycle to 0 means the motors will not turn Stop = 0 # Set the GPIO Pin mode to be Output GPIO.setup(pinMotorAForwards, GPIO.OUT) GPIO.setup(pinMotorABackwards, GPIO.OUT) GPIO.setup(pinMotorBForwards, GPIO.OUT) GPIO.setup(pinMotorBBackwards, GPIO.OUT) # Set the GPIO to software PWM at 'Frequency' Hertz pwmMotorAForwards = GPIO.PWM(pinMotorAForwards, Frequency) pwmMotorABackwards = GPIO.PWM(pinMotorABackwards, Frequency) pwmMotorBForwards = GPIO.PWM(pinMotorBForwards, Frequency) pwmMotorBBackwards = GPIO.PWM(pinMotorBBackwards, Frequency) # Start the software PWM with a duty cycle of 0 (i.e. not moving) pwmMotorAForwards.start(Stop) pwmMotorABackwards.start(Stop) pwmMotorBForwards.start(Stop) pwmMotorBBackwards.start(Stop) # Turn all motors off def stopmotors(): pwmMotorAForwards.ChangeDutyCycle(Stop) pwmMotorABackwards.ChangeDutyCycle(Stop) pwmMotorBForwards.ChangeDutyCycle(Stop) pwmMotorBBackwards.ChangeDutyCycle(Stop) # Turn both motors forwards def forwards(): pwmMotorAForwards.ChangeDutyCycle(DutyCycle) pwmMotorABackwards.ChangeDutyCycle(Stop) pwmMotorBForwards.ChangeDutyCycle(DutyCycle) pwmMotorBBackwards.ChangeDutyCycle(Stop) # Turn both motors backwards def backwards(): pwmMotorAForwards.ChangeDutyCycle(Stop) pwmMotorABackwards.ChangeDutyCycle(DutyCycle) pwmMotorBForwards.ChangeDutyCycle(Stop) pwmMotorBBackwards.ChangeDutyCycle(DutyCycle) # Turn left def left(): pwmMotorAForwards.ChangeDutyCycle(Stop) pwmMotorABackwards.ChangeDutyCycle(DutyCycle) pwmMotorBForwards.ChangeDutyCycle(DutyCycle) pwmMotorBBackwards.ChangeDutyCycle(Stop) # Turn Right def right(): pwmMotorAForwards.ChangeDutyCycle(DutyCycle) pwmMotorABackwards.ChangeDutyCycle(Stop) pwmMotorBForwards.ChangeDutyCycle(Stop) pwmMotorBBackwards.ChangeDutyCycle(DutyCycle) # Your code to control the robot goes below this line forwards() time.sleep(1) # Pause for 1 second left() time.sleep(0.5) # Pause for half a second forwards() time.sleep(1) right() time.sleep(0.5) backwards() time.sleep(0.5) stopmotors() GPIO.cleanup()<|fim▁end|>
<|file_name|>CallingHeader.tsx<|end_file_name|><|fim▁begin|>// Copyright 2020-2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only import React from 'react'; import classNames from 'classnames'; import { LocalizerType } from '../types/Util'; import { Tooltip } from './Tooltip'; import { Theme } from '../util/theme'; export type PropsType = { canPip?: boolean; i18n: LocalizerType; isInSpeakerView?: boolean; isGroupCall?: boolean; message?: string; participantCount: number; showParticipantsList: boolean; title?: string; toggleParticipants?: () => void; togglePip?: () => void; toggleSettings: () => void; toggleSpeakerView?: () => void; }; export const CallingHeader = ({ canPip = false, i18n, isInSpeakerView, isGroupCall = false, message, participantCount, showParticipantsList, title, toggleParticipants, togglePip, toggleSettings, toggleSpeakerView, }: PropsType): JSX.Element => ( <div className="module-calling__header"> {title ? ( <div className="module-calling__header--header-name">{title}</div> ) : null} {message ? ( <div className="module-ongoing-call__header-message">{message}</div> ) : null} <div className="module-calling-tools"> {isGroupCall ? ( <div className="module-calling-tools__button"> <Tooltip content={i18n('calling__participants', [String(participantCount)])} theme={Theme.Dark} > <button aria-label={i18n('calling__participants', [ String(participantCount), ])} className={classNames( 'module-calling-button__participants--container', { 'module-calling-button__participants--shown': showParticipantsList, } )} onClick={toggleParticipants} type="button" > <i className="module-calling-button__participants" /> <span className="module-calling-button__participants--count"> {participantCount} </span> </button><|fim▁hole|> ) : null} <div className="module-calling-tools__button"> <Tooltip content={i18n('callingDeviceSelection__settings')} theme={Theme.Dark} > <button aria-label={i18n('callingDeviceSelection__settings')} className="module-calling-button__settings" onClick={toggleSettings} type="button" /> </Tooltip> </div> {isGroupCall && participantCount > 2 && toggleSpeakerView && ( <div className="module-calling-tools__button"> <Tooltip content={i18n( isInSpeakerView ? 'calling__switch-view--to-grid' : 'calling__switch-view--to-speaker' )} theme={Theme.Dark} > <button aria-label={i18n( isInSpeakerView ? 'calling__switch-view--to-grid' : 'calling__switch-view--to-speaker' )} className={ isInSpeakerView ? 'module-calling-button__grid-view' : 'module-calling-button__speaker-view' } onClick={toggleSpeakerView} type="button" /> </Tooltip> </div> )} {canPip && ( <div className="module-calling-tools__button"> <Tooltip content={i18n('calling__pip--on')} theme={Theme.Dark}> <button aria-label={i18n('calling__pip--on')} className="module-calling-button__pip" onClick={togglePip} type="button" /> </Tooltip> </div> )} </div> </div> );<|fim▁end|>
</Tooltip> </div>
<|file_name|>test_plasma_unlimited.py<|end_file_name|><|fim▁begin|>import numpy as np import random import os import shutil import platform import pytest import ray from ray.test_utils import wait_for_condition from ray.internal.internal_api import memory_summary MB = 1024 * 1024 def _init_ray(): return ray.init( num_cpus=2, object_store_memory=700e6, _system_config={"plasma_unlimited": True}) def _check_spilled_mb(address, spilled=None, restored=None, fallback=None): def ok(): s = memory_summary(address=address["redis_address"], stats_only=True) print(s) if restored: if "Restored {} MiB".format(restored) not in s: return False else: if "Restored" in s: return False if spilled: if "Spilled {} MiB".format(spilled) not in s: return False<|fim▁hole|> if fallback: if "Plasma filesystem mmap usage: {} MiB".format( fallback) not in s: return False else: if "Plasma filesystem mmap usage:" in s: return False return True wait_for_condition(ok, timeout=3, retry_interval_ms=1000) @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_fallback_when_spilling_impossible_on_put(): try: address = _init_ray() x1 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) x1p = ray.get(x1) # x2 will be fallback allocated on the filesystem. x2 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) x2p = ray.get(x2) del x1p del x2p _check_spilled_mb(address, spilled=None, fallback=400) finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_spilling_when_possible_on_put(): try: address = _init_ray() results = [] for _ in range(5): results.append(ray.put(np.zeros(400 * MB, dtype=np.uint8))) _check_spilled_mb(address, spilled=1600) finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_fallback_when_spilling_impossible_on_get(): try: address = _init_ray() x1 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) # x1 will be spilled. x2 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) _check_spilled_mb(address, spilled=400) # x1 will be restored, x2 will be spilled. x1p = ray.get(x1) _check_spilled_mb(address, spilled=800, restored=400) # x2 will be restored, triggering a fallback allocation. x2p = ray.get(x2) _check_spilled_mb(address, spilled=800, restored=800, fallback=400) del x1p del x2p finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_spilling_when_possible_on_get(): try: address = _init_ray() x1 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) # x1 will be spilled. x2 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) _check_spilled_mb(address, spilled=400) # x1 will be restored, x2 will be spilled. ray.get(x1) _check_spilled_mb(address, spilled=800, restored=400) # x2 will be restored, spilling x1. ray.get(x2) _check_spilled_mb(address, spilled=800, restored=800) finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_task_unlimited(): try: address = _init_ray() x1 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) refs = [x1] # x1 is spilled. x2 = ray.put(np.zeros(400 * MB, dtype=np.uint8)) x2p = ray.get(x2) sentinel = ray.put(np.zeros(100 * MB, dtype=np.uint8)) _check_spilled_mb(address, spilled=400) @ray.remote def consume(refs): # triggers fallback allocation, spilling of the sentinel ray.get(refs[0]) # triggers fallback allocation. 
return ray.put(np.zeros(400 * MB, dtype=np.uint8)) # round 1 ray.get(consume.remote(refs)) _check_spilled_mb(address, spilled=500, restored=400, fallback=400) del x2p del sentinel finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_task_unlimited_multiget_args(): try: address = _init_ray() # Too many refs to fit into memory. refs = [] for _ in range(10): refs.append(ray.put(np.zeros(200 * MB, dtype=np.uint8))) x2 = ray.put(np.zeros(600 * MB, dtype=np.uint8)) x2p = ray.get(x2) _check_spilled_mb(address, spilled=2000) @ray.remote def consume(refs): # Should work without thrashing. ray.get(refs) return os.getpid() ray.get([consume.remote(refs) for _ in range(1000)]) _check_spilled_mb(address, spilled=2000, restored=2000, fallback=2000) del x2p finally: ray.shutdown() @pytest.mark.skipif( platform.system() == "Windows", reason="Need to fix up for Windows.") def test_fd_reuse_no_memory_corruption(shutdown_only): @ray.remote class Actor: def produce(self, i): s = int(random.random() * 200) z = np.ones(s * 1024 * 1024) z[0] = i return z def consume(self, x, i): print(x) assert x[0] == i, x ray.init(object_store_memory=100e6) a = Actor.remote() b = Actor.remote() for i in range(20): x_id = a.produce.remote(i) ray.get(b.consume.remote(x_id, i)) @pytest.mark.skipif( platform.system() != "Linux", reason="Only Linux handles fallback allocation disk full error.") def test_fallback_allocation_failure(shutdown_only): ray.init( object_store_memory=100e6, _temp_dir="/dev/shm", _system_config={"plasma_unlimited": True}) shm_size = shutil.disk_usage("/dev/shm").total object_size = max(100e6, shm_size // 5) num_exceptions = 0 refs = [] for i in range(8): try: refs.append(ray.put(np.zeros(object_size, dtype=np.uint8))) except ray.exceptions.ObjectStoreFullError: num_exceptions = num_exceptions + 1 assert num_exceptions > 0 # TODO(ekl) enable this test once we implement this behavior. # @pytest.mark.skipif( # platform.system() == "Windows", reason="Need to fix up for Windows.") # def test_task_unlimited_huge_args(): # try: # address = _init_ray() # # # PullManager should raise an error, since the set of task args is # # too huge to fit into memory. # @ray.remote # def consume(*refs): # return "ok" # # # Too many refs to fit into memory. # refs = [] # for _ in range(10): # refs.append(ray.put(np.zeros(200 * MB, dtype=np.uint8))) # # with pytest.raises(Exception): # ray.get(consume.remote(*refs)) # finally: # ray.shutdown() if __name__ == "__main__": import sys sys.exit(pytest.main(["-v", __file__]))<|fim▁end|>
else: if "Spilled" in s: return False
<|file_name|>pool_camera.py<|end_file_name|><|fim▁begin|>import cv2, numpy as np from dolphintracker.singlecam_tracker.camera_filter.FindDolphin import SearchBlobs from dolphintracker.singlecam_tracker.camera_filter.BackGroundDetector import BackGroundDetector import datetime class PoolCamera(object): def __init__(self, videofile, name, scene, maskObjectsNames, filters, frames_range=None): self.name = name self.videoCap = cv2.VideoCapture(videofile) self.scene = scene self.filters = filters self.mask = self.create_mask(maskObjectsNames) self.frames_range = frames_range self._searchblobs = SearchBlobs()<|fim▁hole|> if self.frames_range is not None: self.videoCap.set(cv2.CAP_PROP_POS_FRAMES, self.frames_range[0]) print('set first frame', self.frames_range) self._total_frames = self.videoCap.get(7) self._colors = [(255,0,0),(0,255,0),(0,0,255)] def create_mask(self, objectsNames): mask = np.zeros((self.img_height,self.img_width), np.uint8) for objname in objectsNames: obj = self.scene.getObject(objname) ptsProjection = self.points_projection( [p for p in obj.points if p[2]<0.2] ) hull = cv2.convexHull(np.int32(ptsProjection)) cv2.fillPoly(mask, np.int32([hull]), 255) return mask def read(self): res, self.frame = self.videoCap.read() if res: self.originalFrame = self.frame.copy() else: self.originalFrame = None return res def process(self): if len(self._backgrounds)==0: for i, colorFilter in enumerate(self.filters): firstFrame = self.frame_index bgDetector = BackGroundDetector(capture=self.videoCap, filterFunction=colorFilter.process) print('Background detection parameters', self._total_frames*0.04, self._total_frames*0.03) last_frame = self.frames_range[1] if self.frames_range is not None else None bg = bgDetector.detect(int(self._total_frames*0.04), int(self._total_frames*0.03), 180, last_frame) bg = cv2.dilate( bg, kernel=cv2.getStructuringElement( cv2.MORPH_RECT, (5,5) ), iterations=2 ) bg = 255-bg bg[bg<255]=0 self._backgrounds.append( cv2.bitwise_and(bg, self.mask) ) self.frame_index = firstFrame result = [] for i, colorFilter in enumerate(self.filters): filterResult = colorFilter.filter(self.frame, self._backgrounds[i]) blobs = self._searchblobs.process(filterResult) res = blobs[0] if len(blobs)>=1 else None result.append(res) return result def create_empty_mask(self): return np.zeros( (self.img_height, self.img_width), np.uint8 ) def points_projection(self, points): cam = self.scene_camera; return [cam.calcPixel(*p) for p in points] @property def scene_camera(self): return self.scene.getCamera(self.name) @property def img_width(self): return int( self.videoCap.get(cv2.CAP_PROP_FRAME_WIDTH) ) @property def img_height(self): return int( self.videoCap.get(cv2.CAP_PROP_FRAME_HEIGHT) ) @property def fps(self): return int( self.videoCap.get(cv2.CAP_PROP_FPS) ) @property def frame_index(self): return int( self.videoCap.get(cv2.CAP_PROP_POS_FRAMES) ) @frame_index.setter def frame_index(self, value): self.videoCap.set(cv2.CAP_PROP_POS_FRAMES, value) @property def currentTime(self): milli = self.videoCap.get(cv2.CAP_PROP_POS_MSEC) return datetime.timedelta(milliseconds=milli) @property def totalFrames(self): return self.videoCap.get(cv2.CAP_PROP_FRAME_COUNT)<|fim▁end|>
self._backgrounds = [] self._last_centroid = None
<|file_name|>init.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // TODO(madhusdancs): // 1. Make printSuccess prepend protocol/scheme to the IPs/hostnames. // 1. Add a dry-run support. // 2. Make all the API object names customizable. // Ex: federation-apiserver, federation-controller-manager, etc. // 3. Make image name and tag customizable. // 4. Separate etcd container from API server pod as a first step towards enabling HA. // 5. Generate credentials of the following types for the API server: // i. "known_tokens.csv" // ii. "basic_auth.csv" // 6. Add the ability to customize DNS domain suffix. It should probably be derived // from cluster config. // 7. Make etcd PVC size configurable. // 8. Make API server and controller manager replicas customizable via the HA work. package init import ( "fmt" "io" "strings" "time" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/intstr" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/client-go/tools/clientcmd" clientcmdapi "k8s.io/client-go/tools/clientcmd/api" certutil "k8s.io/client-go/util/cert" triple "k8s.io/client-go/util/cert/triple" kubeadmkubeconfigphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/kubeconfig" "k8s.io/kubernetes/federation/pkg/kubefed/util" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/apis/extensions" "k8s.io/kubernetes/pkg/apis/rbac" client "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset" "k8s.io/kubernetes/pkg/kubectl/cmd/templates" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/version" "github.com/spf13/cobra" ) const ( APIServerCN = "federation-apiserver" ControllerManagerCN = "federation-controller-manager" AdminCN = "admin" HostClusterLocalDNSZoneName = "cluster.local." // User name used by federation controller manager to make // calls to federation API server. ControllerManagerUser = "federation-controller-manager" // Name of the ServiceAccount used by the federation controller manager // to access the secrets in the host cluster. ControllerManagerSA = "federation-controller-manager" // Group name of the legacy/core API group legacyAPIGroup = "" lbAddrRetryInterval = 5 * time.Second podWaitInterval = 2 * time.Second ) var ( init_long = templates.LongDesc(` Initialize a federation control plane. Federation control plane is hosted inside a Kubernetes cluster. The host cluster must be specified using the --host-cluster-context flag.`) init_example = templates.Examples(` # Initialize federation control plane for a federation # named foo in the host cluster whose local kubeconfig # context is bar. 
kubectl init foo --host-cluster-context=bar`) componentLabel = map[string]string{ "app": "federated-cluster", } apiserverSvcSelector = map[string]string{ "app": "federated-cluster", "module": "federation-apiserver", } apiserverPodLabels = map[string]string{ "app": "federated-cluster", "module": "federation-apiserver", } controllerManagerPodLabels = map[string]string{ "app": "federated-cluster", "module": "federation-controller-manager", } hyperkubeImageName = "gcr.io/google_containers/hyperkube-amd64" ) // NewCmdInit defines the `init` command that bootstraps a federation // control plane inside a set of host clusters. func NewCmdInit(cmdOut io.Writer, config util.AdminConfig) *cobra.Command { cmd := &cobra.Command{ Use: "init FEDERATION_NAME --host-cluster-context=HOST_CONTEXT", Short: "init initializes a federation control plane", Long: init_long, Example: init_example, Run: func(cmd *cobra.Command, args []string) { err := initFederation(cmdOut, config, cmd, args) cmdutil.CheckErr(err) }, } defaultImage := fmt.Sprintf("%s:%s", hyperkubeImageName, version.Get()) util.AddSubcommandFlags(cmd) cmd.Flags().String("dns-zone-name", "", "DNS suffix for this federation. Federated Service DNS names are published with this suffix.") cmd.Flags().String("image", defaultImage, "Image to use for federation API server and controller manager binaries.") cmd.Flags().String("dns-provider", "google-clouddns", "Dns provider to be used for this deployment.")<|fim▁hole|>} type entityKeyPairs struct { ca *triple.KeyPair server *triple.KeyPair controllerManager *triple.KeyPair admin *triple.KeyPair } // initFederation initializes a federation control plane. // See the design doc in https://github.com/kubernetes/kubernetes/pull/34484 // for details. func initFederation(cmdOut io.Writer, config util.AdminConfig, cmd *cobra.Command, args []string) error { initFlags, err := util.GetSubcommandFlags(cmd, args) if err != nil { return err } dnsZoneName := cmdutil.GetFlagString(cmd, "dns-zone-name") image := cmdutil.GetFlagString(cmd, "image") dnsProvider := cmdutil.GetFlagString(cmd, "dns-provider") etcdPVCapacity := cmdutil.GetFlagString(cmd, "etcd-pv-capacity") dryRun := cmdutil.GetDryRunFlag(cmd) storageBackend := cmdutil.GetFlagString(cmd, "storage-backend") hostFactory := config.HostFactory(initFlags.Host, initFlags.Kubeconfig) hostClientset, err := hostFactory.ClientSet() if err != nil { return err } serverName := fmt.Sprintf("%s-apiserver", initFlags.Name) serverCredName := fmt.Sprintf("%s-credentials", serverName) cmName := fmt.Sprintf("%s-controller-manager", initFlags.Name) cmKubeconfigName := fmt.Sprintf("%s-kubeconfig", cmName) // 1. Create a namespace for federation system components _, err = createNamespace(hostClientset, initFlags.FederationSystemNamespace, dryRun) if err != nil { return err } // 2. Expose a network endpoint for the federation API server svc, err := createService(hostClientset, initFlags.FederationSystemNamespace, serverName, dryRun) if err != nil { return err } ips, hostnames, err := waitForLoadBalancerAddress(hostClientset, svc, dryRun) if err != nil { return err } // 3. Generate TLS certificates and credentials entKeyPairs, err := genCerts(initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, HostClusterLocalDNSZoneName, ips, hostnames) if err != nil { return err } _, err = createAPIServerCredentialsSecret(hostClientset, initFlags.FederationSystemNamespace, serverCredName, entKeyPairs, dryRun) if err != nil { return err } // 4. 
Create a kubeconfig secret _, err = createControllerManagerKubeconfigSecret(hostClientset, initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, cmKubeconfigName, entKeyPairs, dryRun) if err != nil { return err } // 5. Create a persistent volume and a claim to store the federation // API server's state. This is where federation API server's etcd // stores its data. pvc, err := createPVC(hostClientset, initFlags.FederationSystemNamespace, svc.Name, etcdPVCapacity, dryRun) if err != nil { return err } // Since only one IP address can be specified as advertise address, // we arbitrarily pick the first available IP address advertiseAddress := "" if len(ips) > 0 { advertiseAddress = ips[0] } endpoint := advertiseAddress if advertiseAddress == "" && len(hostnames) > 0 { endpoint = hostnames[0] } // 6. Create federation API server _, err = createAPIServer(hostClientset, initFlags.FederationSystemNamespace, serverName, image, serverCredName, pvc.Name, advertiseAddress, storageBackend, dryRun) if err != nil { return err } // 7. Create federation controller manager // 7a. Create a service account in the host cluster for federation // controller manager. sa, err := createControllerManagerSA(hostClientset, initFlags.FederationSystemNamespace, dryRun) if err != nil { return err } // 7b. Create RBAC role and role binding for federation controller // manager service account. _, _, err = createRoleBindings(hostClientset, initFlags.FederationSystemNamespace, sa.Name, dryRun) if err != nil { return err } // 7c. Create federation controller manager deployment. _, err = createControllerManager(hostClientset, initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, cmName, image, cmKubeconfigName, dnsZoneName, dnsProvider, sa.Name, dryRun) if err != nil { return err } // 8. 
Write the federation API server endpoint info, credentials // and context to kubeconfig err = updateKubeconfig(config, initFlags.Name, endpoint, entKeyPairs, dryRun) if err != nil { return err } if !dryRun { fedPods := []string{serverName, cmName} err = waitForPods(hostClientset, fedPods, initFlags.FederationSystemNamespace) if err != nil { return err } err = waitSrvHealthy(config, initFlags.Name, initFlags.Kubeconfig) if err != nil { return err } return printSuccess(cmdOut, ips, hostnames) } _, err = fmt.Fprintf(cmdOut, "Federation control plane runs (dry run)\n") return err } func createNamespace(clientset *client.Clientset, namespace string, dryRun bool) (*api.Namespace, error) { ns := &api.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: namespace, }, } if dryRun { return ns, nil } return clientset.Core().Namespaces().Create(ns) } func createService(clientset *client.Clientset, namespace, svcName string, dryRun bool) (*api.Service, error) { svc := &api.Service{ ObjectMeta: metav1.ObjectMeta{ Name: svcName, Namespace: namespace, Labels: componentLabel, }, Spec: api.ServiceSpec{ Type: api.ServiceTypeLoadBalancer, Selector: apiserverSvcSelector, Ports: []api.ServicePort{ { Name: "https", Protocol: "TCP", Port: 443, TargetPort: intstr.FromInt(443), }, }, }, } if dryRun { return svc, nil } return clientset.Core().Services(namespace).Create(svc) } func waitForLoadBalancerAddress(clientset *client.Clientset, svc *api.Service, dryRun bool) ([]string, []string, error) { ips := []string{} hostnames := []string{} if dryRun { return ips, hostnames, nil } err := wait.PollImmediateInfinite(lbAddrRetryInterval, func() (bool, error) { pollSvc, err := clientset.Core().Services(svc.Namespace).Get(svc.Name, metav1.GetOptions{}) if err != nil { return false, nil } if ings := pollSvc.Status.LoadBalancer.Ingress; len(ings) > 0 { for _, ing := range ings { if len(ing.IP) > 0 { ips = append(ips, ing.IP) } if len(ing.Hostname) > 0 { hostnames = append(hostnames, ing.Hostname) } } if len(ips) > 0 || len(hostnames) > 0 { return true, nil } } return false, nil }) if err != nil { return nil, nil, err } return ips, hostnames, nil } func genCerts(svcNamespace, name, svcName, localDNSZoneName string, ips, hostnames []string) (*entityKeyPairs, error) { ca, err := triple.NewCA(name) if err != nil { return nil, fmt.Errorf("failed to create CA key and certificate: %v", err) } server, err := triple.NewServerKeyPair(ca, APIServerCN, svcName, svcNamespace, localDNSZoneName, ips, hostnames) if err != nil { return nil, fmt.Errorf("failed to create federation API server key and certificate: %v", err) } cm, err := triple.NewClientKeyPair(ca, ControllerManagerCN, nil) if err != nil { return nil, fmt.Errorf("failed to create federation controller manager client key and certificate: %v", err) } admin, err := triple.NewClientKeyPair(ca, AdminCN, nil) if err != nil { return nil, fmt.Errorf("failed to create client key and certificate for an admin: %v", err) } return &entityKeyPairs{ ca: ca, server: server, controllerManager: cm, admin: admin, }, nil } func createAPIServerCredentialsSecret(clientset *client.Clientset, namespace, credentialsName string, entKeyPairs *entityKeyPairs, dryRun bool) (*api.Secret, error) { // Build the secret object with API server credentials. 
secret := &api.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: credentialsName, Namespace: namespace, }, Data: map[string][]byte{ "ca.crt": certutil.EncodeCertPEM(entKeyPairs.ca.Cert), "server.crt": certutil.EncodeCertPEM(entKeyPairs.server.Cert), "server.key": certutil.EncodePrivateKeyPEM(entKeyPairs.server.Key), }, } if dryRun { return secret, nil } // Boilerplate to create the secret in the host cluster. return clientset.Core().Secrets(namespace).Create(secret) } func createControllerManagerKubeconfigSecret(clientset *client.Clientset, namespace, name, svcName, kubeconfigName string, entKeyPairs *entityKeyPairs, dryRun bool) (*api.Secret, error) { config := kubeadmkubeconfigphase.MakeClientConfigWithCerts( fmt.Sprintf("https://%s", svcName), name, ControllerManagerUser, certutil.EncodeCertPEM(entKeyPairs.ca.Cert), certutil.EncodePrivateKeyPEM(entKeyPairs.controllerManager.Key), certutil.EncodeCertPEM(entKeyPairs.controllerManager.Cert), ) return util.CreateKubeconfigSecret(clientset, config, namespace, kubeconfigName, dryRun) } func createPVC(clientset *client.Clientset, namespace, svcName, etcdPVCapacity string, dryRun bool) (*api.PersistentVolumeClaim, error) { capacity, err := resource.ParseQuantity(etcdPVCapacity) if err != nil { return nil, err } pvc := &api.PersistentVolumeClaim{ ObjectMeta: metav1.ObjectMeta{ Name: fmt.Sprintf("%s-etcd-claim", svcName), Namespace: namespace, Labels: componentLabel, Annotations: map[string]string{ "volume.alpha.kubernetes.io/storage-class": "yes", }, }, Spec: api.PersistentVolumeClaimSpec{ AccessModes: []api.PersistentVolumeAccessMode{ api.ReadWriteOnce, }, Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceStorage: capacity, }, }, }, } if dryRun { return pvc, nil } return clientset.Core().PersistentVolumeClaims(namespace).Create(pvc) } func createAPIServer(clientset *client.Clientset, namespace, name, image, credentialsName, pvcName, advertiseAddress, storageBackend string, dryRun bool) (*extensions.Deployment, error) { command := []string{ "/hyperkube", "federation-apiserver", "--bind-address=0.0.0.0", "--etcd-servers=http://localhost:2379", "--secure-port=443", "--client-ca-file=/etc/federation/apiserver/ca.crt", "--tls-cert-file=/etc/federation/apiserver/server.crt", "--tls-private-key-file=/etc/federation/apiserver/server.key", fmt.Sprintf("--storage-backend=%s", storageBackend), } if advertiseAddress != "" { command = append(command, fmt.Sprintf("--advertise-address=%s", advertiseAddress)) } dataVolumeName := "etcddata" dep := &extensions.Deployment{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, Labels: componentLabel, }, Spec: extensions.DeploymentSpec{ Replicas: 1, Template: api.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: apiserverPodLabels, }, Spec: api.PodSpec{ Containers: []api.Container{ { Name: "apiserver", Image: image, Command: command, Ports: []api.ContainerPort{ { Name: "https", ContainerPort: 443, }, { Name: "local", ContainerPort: 8080, }, }, VolumeMounts: []api.VolumeMount{ { Name: credentialsName, MountPath: "/etc/federation/apiserver", ReadOnly: true, }, }, }, { Name: "etcd", Image: "gcr.io/google_containers/etcd:3.0.14-alpha.1", Command: []string{ "/usr/local/bin/etcd", "--data-dir", "/var/etcd/data", }, VolumeMounts: []api.VolumeMount{ { Name: dataVolumeName, MountPath: "/var/etcd", }, }, }, }, Volumes: []api.Volume{ { Name: credentialsName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: credentialsName, }, }, }, { Name: 
dataVolumeName, VolumeSource: api.VolumeSource{ PersistentVolumeClaim: &api.PersistentVolumeClaimVolumeSource{ ClaimName: pvcName, }, }, }, }, }, }, }, } if dryRun { return dep, nil } return clientset.Extensions().Deployments(namespace).Create(dep) } func createControllerManagerSA(clientset *client.Clientset, namespace string, dryRun bool) (*api.ServiceAccount, error) { sa := &api.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: ControllerManagerSA, Namespace: namespace, Labels: componentLabel, }, } if dryRun { return sa, nil } return clientset.Core().ServiceAccounts(namespace).Create(sa) } func createRoleBindings(clientset *client.Clientset, namespace, saName string, dryRun bool) (*rbac.Role, *rbac.RoleBinding, error) { roleName := "federation-system:federation-controller-manager" role := &rbac.Role{ // a role to use for bootstrapping the federation-controller-manager so it can access // secrets in the host cluster to access other clusters. ObjectMeta: metav1.ObjectMeta{ Name: roleName, Namespace: namespace, Labels: componentLabel, }, Rules: []rbac.PolicyRule{ rbac.NewRule("get", "list", "watch").Groups(legacyAPIGroup).Resources("secrets").RuleOrDie(), }, } rolebinding, err := rbac.NewRoleBinding(roleName, namespace).SAs(namespace, saName).Binding() if err != nil { return nil, nil, err } rolebinding.Labels = componentLabel if dryRun { return role, &rolebinding, nil } newRole, err := clientset.Rbac().Roles(namespace).Create(role) if err != nil { return nil, nil, err } newRolebinding, err := clientset.Rbac().RoleBindings(namespace).Create(&rolebinding) return newRole, newRolebinding, err } func createControllerManager(clientset *client.Clientset, namespace, name, svcName, cmName, image, kubeconfigName, dnsZoneName, dnsProvider, saName string, dryRun bool) (*extensions.Deployment, error) { dep := &extensions.Deployment{ ObjectMeta: metav1.ObjectMeta{ Name: cmName, Namespace: namespace, Labels: componentLabel, }, Spec: extensions.DeploymentSpec{ Replicas: 1, Template: api.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Name: cmName, Labels: controllerManagerPodLabels, }, Spec: api.PodSpec{ Containers: []api.Container{ { Name: "controller-manager", Image: image, Command: []string{ "/hyperkube", "federation-controller-manager", fmt.Sprintf("--master=https://%s", svcName), "--kubeconfig=/etc/federation/controller-manager/kubeconfig", fmt.Sprintf("--dns-provider=%s", dnsProvider), "--dns-provider-config=", fmt.Sprintf("--federation-name=%s", name), fmt.Sprintf("--zone-name=%s", dnsZoneName), }, VolumeMounts: []api.VolumeMount{ { Name: kubeconfigName, MountPath: "/etc/federation/controller-manager", ReadOnly: true, }, }, Env: []api.EnvVar{ { Name: "POD_NAMESPACE", ValueFrom: &api.EnvVarSource{ FieldRef: &api.ObjectFieldSelector{ FieldPath: "metadata.namespace", }, }, }, }, }, }, Volumes: []api.Volume{ { Name: kubeconfigName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: kubeconfigName, }, }, }, }, ServiceAccountName: saName, }, }, }, } if dryRun { return dep, nil } return clientset.Extensions().Deployments(namespace).Create(dep) } func waitForPods(clientset *client.Clientset, fedPods []string, namespace string) error { err := wait.PollInfinite(podWaitInterval, func() (bool, error) { podCheck := len(fedPods) podList, err := clientset.Core().Pods(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } for _, pod := range podList.Items { for _, fedPod := range fedPods { if strings.HasPrefix(pod.Name, fedPod) && pod.Status.Phase == "Running" { 
podCheck -= 1 } } // Ensure that all pods are in running state or keep waiting if podCheck == 0 { return true, nil } } return false, nil }) return err } func waitSrvHealthy(config util.AdminConfig, context, kubeconfig string) error { fedClientSet, err := config.FederationClientset(context, kubeconfig) if err != nil { return err } fedDiscoveryClient := fedClientSet.Discovery() err = wait.PollInfinite(podWaitInterval, func() (bool, error) { body, err := fedDiscoveryClient.RESTClient().Get().AbsPath("/healthz").Do().Raw() if err != nil { return false, nil } if strings.EqualFold(string(body), "ok") { return true, nil } return false, nil }) return err } func printSuccess(cmdOut io.Writer, ips, hostnames []string) error { svcEndpoints := append(ips, hostnames...) _, err := fmt.Fprintf(cmdOut, "Federation API server is running at: %s\n", strings.Join(svcEndpoints, ", ")) return err } func updateKubeconfig(config util.AdminConfig, name, endpoint string, entKeyPairs *entityKeyPairs, dryRun bool) error { po := config.PathOptions() kubeconfig, err := po.GetStartingConfig() if err != nil { return err } // Populate API server endpoint info. cluster := clientcmdapi.NewCluster() // Prefix "https" as the URL scheme to endpoint. if !strings.HasPrefix(endpoint, "https://") { endpoint = fmt.Sprintf("https://%s", endpoint) } cluster.Server = endpoint cluster.CertificateAuthorityData = certutil.EncodeCertPEM(entKeyPairs.ca.Cert) // Populate credentials. authInfo := clientcmdapi.NewAuthInfo() authInfo.ClientCertificateData = certutil.EncodeCertPEM(entKeyPairs.admin.Cert) authInfo.ClientKeyData = certutil.EncodePrivateKeyPEM(entKeyPairs.admin.Key) authInfo.Username = AdminCN // Populate context. context := clientcmdapi.NewContext() context.Cluster = name context.AuthInfo = name // Update the config struct with API server endpoint info, // credentials and context. kubeconfig.Clusters[name] = cluster kubeconfig.AuthInfos[name] = authInfo kubeconfig.Contexts[name] = context if !dryRun { // Write the updated kubeconfig. if err := clientcmd.ModifyConfig(po, *kubeconfig, true); err != nil { return err } } return nil }
cmd.Flags().String("etcd-pv-capacity", "10Gi", "Size of persistent volume claim to be used for etcd.") cmd.Flags().Bool("dry-run", false, "dry run without sending commands to server.") cmd.Flags().String("storage-backend", "etcd2", "The storage backend for persistence. Options: 'etcd2' (default), 'etcd3'.") return cmd
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # from django.shortcuts import render, render_to_response, redirect, get_object_or_404, get_list_or_404, Http404 from django.core.cache import cache from django.shortcuts import * from django.views.generic import TemplateView, FormView from django.http import HttpResponseRedirect, HttpResponse from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse_lazy from django.core.urlresolvers import reverse from django.contrib import messages from django.template import RequestContext from django import template from models import proyecto from .forms import * #from Logica.ConexionBD import adminBD import funciones import sys #~ from administradorConsultas import AdministradorConsultas # Esta la comente JAPeTo #~ from manejadorArchivos import obtener_autores # Esta la comente JAPeTo #~ from red import Red # Esta la comente JAPeTo from Logica import ConsumirServicios, procesamientoScopusXml, procesamientoArxiv # import igraph import traceback import json import django.utils from Logica.ConexionBD.adminBD import AdminBD from principal.parameters import * from principal.permisos import * # sys.setdefaultencoding is cancelled by site.py reload(sys) # to re-enable sys.setdefaultencoding() sys.setdefaultencoding('utf-8') # Create your views here. # @login_required #ruta = "/home/administrador/ManejoVigtech/ArchivosProyectos/" sesion_proyecto=None proyectos_list =None model_proyecto =None id_proyecto = None ##nombre_proyecto = None class home(TemplateView): template_name = "home.html" def get_context_data(self, **kwargs): global proyectos_list global model_proyecto try: existe_proyecto = False proyectos_list = get_list_or_404(proyecto, idUsuario=self.request.user) for project in proyectos_list: if project == model_proyecto: existe_proyecto = True if not (existe_proyecto): model_proyecto = None except: # print traceback.format_exc() proyectos_list = None model_proyecto = None return {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos} class RegistrarUsuario(FormView): template_name = "registrarUsuario.html" form_class = FormularioRegistrarUsuario success_url = reverse_lazy('RegistrarUsuarios') def form_valid(self, form): user = form.save() messages.success(self.request, "Se ha creado exitosamente el usuario") return redirect('login') def cambia_mensaje(crfsession,proyecto,usuario,borrar, mensaje,valor): # print ">>>> AQUI ESTOY"+str(borrar)+" & "+str(mensaje) try: cache_key = "%s_%s_%s" % (crfsession,proyecto.replace(" ",""),usuario) data = cache.get(cache_key) if data: data['estado'] = valor data['mensaje'] += mensaje if borrar : data['mensaje'] = mensaje cache.set(cache_key, data) else: cache.set(cache_key, { 'estado': 0, 'mensaje' : mensaje }) except: pass @login_required def nuevo_proyecto(request): global id_proyecto global model_proyecto global proyectos_list if request.method == 'POST': form = FormularioCrearProyecto(request.POST) fraseB = request.POST.get('fraseB') fraseA = request.POST.get('fraseA') autor = request.POST.get('autor') words = request.POST.get('words') before = request.POST.get('before') after = request.POST.get('after') limArxiv = request.POST.get('limArxiv') limSco = request.POST.get('limSco') cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"",0) busqueda = fraseB + "," + words + "," + fraseA + "," + autor + "," + before + "," + after # print "busca 
"+busqueda+", by japeto" if form.is_valid(): nombreDirectorio = form.cleaned_data['nombre'] articulos = {} modelo_proyecto = form.save(commit=False) modelo_proyecto.idUsuario = request.user # print "formulario valido, by japeto" # print "2" # proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) # proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) #modelo_proyecto.calificacion=5 modelo_proyecto.fraseBusqueda = busqueda modelo_proyecto.save() proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) model_proyecto = get_object_or_404(proyecto, id_proyecto=modelo_proyecto.id_proyecto) id_proyecto = model_proyecto.id_proyecto #Creacion del directorio donde se guardaran los documentos respectivos del proyecto creado. mensajes_pantalla="<p class='text-primary'><span class='fa fa-send fa-fw'></span>Se ha creado el Directorio para el proyecto</p>" funciones.CrearDirectorioProyecto(modelo_proyecto.id_proyecto, request.user) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,mensajes_pantalla,6) # print "se crea directorio, by japeto" if fraseB != "": try: """ Descarga de documentos de Google Arxiv """ # print "descarga de documentos, by japeto" cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Descarga de documentos de Arxiv</p>",12) articulos_arxiv= ConsumirServicios.consumir_arxiv(fraseB, request.user.username, str(modelo_proyecto.id_proyecto), limArxiv) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Descarga de documentos terminada</p>",18) except: cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA: </b>Descarga de documentos de Arxiv</p>",12) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"STOP",0) print traceback.format_exc() try: """ Descarga de documentos de Google Scopus """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Descarga de documentos de Scopus</p>",24) articulos_scopus = ConsumirServicios.consumir_scopus(fraseB, request.user.username, str(modelo_proyecto.id_proyecto), limSco) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Descarga de documentos terminada</p>",30) except: cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA: </b>Descarga de documentos de Scopus</p>",24) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"STOP",0) print traceback.format_exc() try: """ Inserción de metadatos Arxiv """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inica la inserción de metadatos Arxiv</p>",36) xml = open(REPOSITORY_DIR+ str(request.user.username)+ "." 
+ str(modelo_proyecto.id_proyecto) + "/salida.xml") procesamientoArxiv.insertar_metadatos_bd(xml, str(modelo_proyecto.id_proyecto)) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>La inserción de metadatos Arxiv ha terminado</p>",42) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",36) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b>La inserción de metadatos Arxiv no se puede completar</p>",36) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",36) # print traceback.format_exc() try: """ Conexión con base datos para insertar metadatos de paper de Scopus """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inica la inserción de metadatos Scopus</p>",48) busqueda = open(REPOSITORY_DIR+ str(request.user.username)+ "." + str(modelo_proyecto.id_proyecto) + "/busqueda0.xml") procesamientoScopusXml.xml_to_bd(busqueda, modelo_proyecto.id_proyecto, articulos_scopus['titulos']) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>La inserción de metadatos Scopus ha terminado</p>",54) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",48) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b>La inserción de metadatos Scopus no se puede completar</p>",48) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",48) # print traceback.format_exc() # try: # """ # NAIVE BAYES # """ # #ConsumirServicios.consumir_recuperacion_unidades_academicas(str(request.user.username),str(modelo_proyecto.id_proyecto)) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia el procesado Scopus XML</ṕ>",60) # procesamientoScopusXml.xml_to_bd(busqueda, modelo_proyecto.id_proyecto, articulos_scopus['titulos']) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>El procesmiento Scopus XML ha terminado</p>",62) # except: # # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",60) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b> El procesando Scopus XML no se puede completar</p>",60) # # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",60) # # print traceback.format_exc() try: """ generar el XML OUTPUT """ admin =AdminBD() 
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia convertidor archivo de XML</p>",60) #papers = admin.getPapers(modelo_proyecto.id_proyecto) adminBD = AdminBD() papers = adminBD.getPapers(modelo_proyecto.id_proyecto) target = open(REPOSITORY_DIR+ str(request.user.username)+ "." + str(modelo_proyecto.id_proyecto) + "/busqueda1.xml", 'w') target.write(funciones.papersToXML(papers)) target.close() # print str(funciones.papersToXML(papers)) # funciones.papersToXML(papers).write(REPOSITORY_DIR+ str(request.user.username)+ "." + str(modelo_proyecto.id_proyecto) + "/busqueda1.xml") cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Termina el convertidor archivo de XML</p>",60) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",64) print traceback.format_exc() cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b>Error al convertir archivo de XML</p>",64) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",64) # print traceback.format_exc() try: """ indexación """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia la indexación</p>",64) ir = ConsumirServicios.IR() ir.indexar(str(request.user.username),str(modelo_proyecto.id_proyecto)) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Indexación terminada</p>",68) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",64) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b>La indexación no se puede completar</p>",64) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",64) # print traceback.format_exc() try: """ Analisis """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia el Analisis</p>",66) data = ConsumirServicios.consumir_analisis(str(request.user.username),str(modelo_proyecto.id_proyecto)) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Analisis terminado</p>",68) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",66) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b> El Analisis no se puede completar</p>",66) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",66) # print traceback.format_exc()
try: """ Analisis de Redes Sociales """ cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia el Analisis de Redes Sociales</p>",70) network = ConsumirServicios.consumir_red(str(request.user.username),str(modelo_proyecto.id_proyecto)) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Analisis de Redes Sociales terminado</p>",72) except: # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",70) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b>El Analisis de Redes Sociales no se puede completar</p>",70) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",70) #print traceback.format_exc() try: """ Recuperacion de unidades """ # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Inicia la recuperacion de unidades academicas</p>",10) # ConsumirServicios.consumir_recuperacion_unidades_academicas(str(request.user.username),str(modelo_proyecto.id_proyecto)) # cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Finaliza la recuperacion de unidades academicas</p>",10) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",80) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Se ha creado el proyecto</p>",90) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Su navegador se reiniciara</p>",97) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"EOF",100) except: cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA:</b> la recuperacion de unidades academicas no se puede completar: {}</p>".format(traceback.format_exc()),80) cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"EOF",100) # print traceback.format_exc() # messages.success(request, "Se ha creado exitosamente el proyecto") #articulos = funciones.buscadorSimple(fraseB) #ac = AdministradorConsultas() #ac.descargar_papers(fraseB) #lista_scopus = ac.titulos_descargas #if fraseA != "" or autor != "" or words != "": # articulos = funciones.buscadorAvanzado(fraseA, words, autor, after, before) #print articulos #funciones.moveFiles(modelo_proyecto.id_proyecto, request.user, articulos, lista_scopus) #funciones.escribir_archivo_documentos(modelo_proyecto.id_proyecto, request.user, articulos, lista_scopus) # messages.success(request, "Se ha creado exitosamente el proyecto") #~ return redirect('crear_proyecto') else: messages.error(request, "Imposible crear el 
proyecto") else: form = FormularioCrearProyecto() return render(request, 'GestionProyecto/NuevoProyecto.html', {'form': form, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request)) #Visualización de proyectos propios de un usuario. @login_required def ver_mis_proyectos(request): global model_proyecto global proyectos_list try: proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) except: proyectos_list =None messages.success(request, "Usted no tiene proyectos") return render(request, 'GestionProyecto/verMisProyectos.html', {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request)) #Visualización de proyectos con disponibilidad pública que no pertenecen al usuario actual. @login_required def ver_otros_proyectos(request): global model_proyecto global proyecto_list if (model_proyecto != None and model_proyecto.idUsuario != request.user): model_proyecto = None try: proyectos_list_all = get_list_or_404(proyecto) idUser = request.user otros_proyectos = [] for project in proyectos_list_all: if project.idUsuario != idUser: otros_proyectos.append(project) except: proyectos_list_all =None otros_proyectos = None return render(request, 'GestionProyecto/OtrosProyectos.html', { 'proyectos': otros_proyectos, 'proyectos_user':proyectos_list, 'mproyecto': model_proyecto}, context_instance=RequestContext(request)) @login_required def busqueda_navegacion(request): global proyectos_list global model_proyecto return render(request, 'GestionBusqueda/Busqueda_Navegacion.html', {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def editar_proyecto(request, id_proyecto): global proyectos_list global model_proyecto model_proyecto = get_object_or_404(proyecto, id_proyecto=id_proyecto) request.session['proyecto']= str(model_proyecto.id_proyecto) request.proyecto = model_proyecto # print "This is my project:",request.session['proyecto'] lista = funciones.crearListaDocumentos(id_proyecto, request.user) if request.method == 'POST': proyecto_form = FormularioCrearProyecto(request.POST, instance=model_proyecto) #proyecto_form.fields['disponibilidad'].widget.attrs['disabled']=True if proyecto_form.is_valid: #print proyecto_form.cleaned_data #nuevoNombre=proyecto_form.cleaned_data['nombre'] model_project = proyecto_form.save() # funciones.cambiarNombreDirectorio(nombreDirectorioAnterior,nuevoNombre,request.user) messages.success(request, "Se ha modificado exitosamente el proyecto") else: messages.error(request, "Imposible editar el proyecto") else: proyecto_form = FormularioCrearProyecto(instance=model_proyecto) return render(request, 'GestionProyecto/editar_proyecto.html', {'form': proyecto_form, 'lista': lista, 'user': request.user, 'mproyecto':model_proyecto, 'proyectos_user': proyectos_list, 'proyecto': id_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request)) @login_required def ver_proyecto(request, id_proyecto): global model_proyecto global proyectos_list proyecto_actual = None proyecto_actual = get_object_or_404(proyecto, id_proyecto=id_proyecto) proyecto_form = FormularioCrearProyecto(instance=proyecto_actual) if (model_proyecto != None and model_proyecto.idUsuario != request.user): model_proyecto = None #model_proyecto = get_object_or_404(proyecto, id_proyecto=id_proyecto) #proyecto_form = FormularioCrearProyecto(instance=model_proyecto) 
#proyecto_form.fields['disponibilidad'].widget.attrs['disabled']=True #proyecto_form.fields['nombre'].label="Titulo del proyecto" proyecto_form.fields['nombre'].widget.attrs['disabled'] = True proyecto_form.fields['resumen'].widget.attrs['disabled'] = True return render(request, 'GestionProyecto/ver_proyecto.html', {'form': proyecto_form, 'mproyecto':model_proyecto, 'proyectos_user':proyectos_list, 'lista_permisos': permisos}, context_instance=RequestContext(request)) @login_required def buscador(request): global proyectos_list global model_proyecto if request.method == 'GET': ir = ConsumirServicios.IR() fraseBusqueda = request.GET.get("busquedaIR") data = ir.consultar(fraseBusqueda,str(request.user.username), str(model_proyecto.id_proyecto)) # print model_proyecto # IR.consultar(fraseBusqueda,"","") # data = ir.consultar(fraseBusqueda,str(request.user.username),request.session['proyecto']) #data = funciones.busqueda(fraseBusqueda) #for d in data: # d['path'] = d['path'].replace("/home/vigtech/shared/repository/", "/media/").encode("utf8") # print data # print fraseBusqueda return render(request, "GestionBusqueda/Busqueda_Navegacion.html", {'resultados': data, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) else: return render(request, "GestionBusqueda/Busqueda_Navegacion.html", {'resultados': [], 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto,'lista_permisos': permisos}) @login_required def analisisView(request): global proyectos_list global model_proyecto #data = ConsumirServicios.consumir_red(request.user.username, request.session['proyecto']) try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) with open(REPOSITORY_DIR + proyecto + "/coautoria.json") as json_file: data = json.load(json_file) #nodos, aristas = r.generar_json() nodos1 = json.dumps(data['nodes']) aristas1 = json.dumps(data['links']) # return render(request, "GestionAnalisis/coautoria.html", {"nodos": nodos1, "aristas": aristas1}) return render(request, "GestionAnalisis/coautoria.html", {"nodos": nodos1, "aristas": aristas1, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto,'lista_permisos': permisos}) #return render(request, "GestionAnalisis/coautoria2.html", {"proyecto":proyecto}) @login_required def coautoria_old(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "."
+ str(request.session['proyecto']) with open(REPOSITORY_DIR + proyecto + "/coautoria.json") as json_file: data = json.load(json_file) #nodos, aristas = r.generar_json() nodos1 = json.dumps(data['nodes']) aristas1 = json.dumps(data['links']) # return render(request, "GestionAnalisis/coautoria.html", {"nodos": nodos1, "aristas": aristas1}) return render(request, "GestionAnalisis/Analisis.html", {"nodos": nodos1, "aristas": aristas1, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def eliminar_proyecto(request, id_proyecto): global model_proyecto global proyectos_list try: # print "#1" proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) # print "#2" model_proyecto = get_object_or_404(proyecto, id_proyecto=str(request.session['proyecto'])) # print "#3" except: proyectos_list = None model_proyecto = None user = request.user project = get_object_or_404(proyecto, id_proyecto=id_proyecto) funciones.eliminar_proyecto(id_proyecto, user) project.delete() messages.success(request, "El proyecto \""+project.nombre+"\" se eliminó.") return HttpResponseRedirect(reverse('ver_mis_proyectos')) @login_required def analisis_paises(request): global proyectos_list global model_proyecto # print model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) with open(REPOSITORY_DIR+ proyecto + "/data.json") as json_file: data = json.load(json_file) # print data labels=json.dumps(data['paises']['labels']) values=json.dumps(data['paises']['valores']) # print proyecto #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/paisesbar.html",{"proyecto":proyecto, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_autores(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values})<|fim▁hole|> return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_afiliaciones(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "."
+ str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/afiliacionesbar.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_revistas(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/revistasbar.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_docsfechas(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/fechasbar.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_tipodocs(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/tiposbar.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_paisespie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/paisespie.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_autorespie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." 
+ str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/autorespie.html",{"proyecto":proyecto, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_afiliacionespie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/afiliacionespie.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_revistaspie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/revistaspie.html",{"proyecto":proyecto, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_docsfechaspie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/fechaspie.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_tipodocspie(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) return render(request, "GestionAnalisis/tipospie.html",{"proyecto":proyecto, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) @login_required def analisis_clustering(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." 
+ str(request.session['proyecto']) return render(request, "GestionAnalisis/grupos.html",{"proyecto":proyecto, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) @login_required def analisis_indicadores(request): global proyectos_list global model_proyecto try: #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) with open(REPOSITORY_DIR + proyecto + "/data.json") as json_file: data = json.load(json_file) return render(request, "GestionAnalisis/indicadores.html",{"data":data, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: print traceback.format_exc() return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) # print data #labels=json.dumps(data['paises']['labels']) #values=json.dumps(data['paises']['valores']) #print proyecto #return render(request, "GestionAnalisis/paisesbar.html",{"labels": labels, "values": values}) #return render(request, "GestionAnalisis/indicadores.html",{"data":data, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto}) @login_required def clasificacion_eisc(request): global proyectos_list global model_proyecto try: proyecto = str(request.user.username) + "." + str(model_proyecto.id_proyecto) #proyecto = str(request.user.username) + "." + str(request.session['proyecto']) with open(REPOSITORY_DIR + proyecto + "/eisc.json") as json_file: data = json.load(json_file) eids = data['clasificacion'] if eids : adminBD = AdminBD() papers =adminBD.get_papers_eid(eids) return render (request, "GestionEISC/clasificacion_eisc.html", {"papers": papers, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) else: return render (request, "GestionEISC/clasificacion_eisc.html", {"papers": [], 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except: return render(request, "GestionAnalisis/Blank_default.html", { 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) def logmensajes(request): """ Permite consultar el estado del proceso de creacion de un nuevo proyecto """ try: cache_key = "%s_%s_%s" % (request.GET.get('csrfmiddlewaretoken'),request.GET.get('fraseB').replace(" ",""),request.user.username) data = json.dumps(cache.get(cache_key)) print cache.get(cache_key)['estado'] cache.set(cache_key, {'estado': cache.get(cache_key)['estado'],'mensaje' : ""}) except: print "hay problema" cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"",0) return HttpResponse(data,content_type="application/json") # Configuración de los permisos --links a mostrar alozada @login_required def configurar_permisos(request): global model_proyecto global proyectos_list # print permisos["estadisticas"] try: proyectos_list = get_list_or_404(proyecto, idUsuario=request.user) except: proyectos_list =None messages.success(request, "Usted no tiene proyectos") if request.method == 'POST': if 'cbIndicadores' in request.POST: permisos["indicadores"] = 1 else: permisos["indicadores"] = 0 if 
'graficos_barra' in request.POST: permisos["graficos_barra"] = 1 else: permisos["graficos_barra"] = 0 if 'graficos_pie' in request.POST: permisos["graficos_pie"] = 1 else: permisos["graficos_pie"] = 0 if not ('cbIndicadores' in request.POST and 'graficos_barra' in request.POST and 'graficos_pie' in request.POST): print "entra if" permisos["estadisticas"] = 0 else: print "entra else" permisos["estadisticas"] = 1 if 'coautoria' in request.POST: permisos["coautoria"] = 1 else: permisos["coautoria"] = 0 if 'coautoria_medidas' in request.POST: permisos["coautoria_medidas"] = 1 else: permisos["coautoria_medidas"] = 0 if 'clustering' in request.POST: permisos["clustering"] = 1 else: permisos["clustering"] = 0 if 'clasificacion_eisc' in request.POST: permisos["clasificacion_eisc"] = 1 else: permisos["clasificacion_eisc"] = 0 return render(request, 'configurar_permisos.html', {'proyectos_user': proyectos_list, 'lista_permisos': permisos, 'mproyecto': model_proyecto}, context_instance=RequestContext(request)) # def registrarusuario(request): # if request.method == 'GET': # return render(request, "registrarUsuario.html") # elif request.method == 'POST': # data = request.POST.get('nombre') # print data # # messages.success(request, "Se ha creado exitosamente el usuario") # # return redirect('login') # return render (request, "registrarUsuario.html", {"response": data}) # else: # return render(request, "registrarUsuario.html")
return render(request, "GestionAnalisis/autoresbar.html",{"proyecto":proyecto,'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}) except:
<|file_name|>SpringcloudConfigClientApplicationTests.java<|end_file_name|><|fim▁begin|>package com.xiaofeng; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; @RunWith(SpringRunner.class)<|fim▁hole|> public void contextLoads() { } }<|fim▁end|>
@SpringBootTest public class SpringcloudConfigClientApplicationTests { @Test
<|file_name|>IfcTelecomAddress.cpp<|end_file_name|><|fim▁begin|>/* -*-c++-*- IfcPlusPlus - www.ifcplusplus.com - Copyright (C) 2011 Fabian Gerold * * This library is open source and may be redistributed and/or modified under * the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or * (at your option) any later version. The full license is in LICENSE file * included with this distribution, and on the openscenegraph.org website. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * OpenSceneGraph Public License for more details. */ #include <sstream> #include <limits> #include "ifcpp/model/IfcPPException.h" #include "ifcpp/model/IfcPPAttributeObject.h" #include "ifcpp/model/IfcPPGuid.h" #include "ifcpp/reader/ReaderUtil.h" #include "ifcpp/writer/WriterUtil.h" #include "ifcpp/IfcPPEntityEnums.h" #include "include/IfcAddressTypeEnum.h" #include "include/IfcLabel.h" #include "include/IfcOrganization.h" #include "include/IfcPerson.h" #include "include/IfcTelecomAddress.h" #include "include/IfcText.h" #include "include/IfcURIReference.h" // ENTITY IfcTelecomAddress IfcTelecomAddress::IfcTelecomAddress() { m_entity_enum = IFCTELECOMADDRESS; } IfcTelecomAddress::IfcTelecomAddress( int id ) { m_id = id; m_entity_enum = IFCTELECOMADDRESS; } IfcTelecomAddress::~IfcTelecomAddress() {} shared_ptr<IfcPPObject> IfcTelecomAddress::getDeepCopy( IfcPPCopyOptions& options ) { shared_ptr<IfcTelecomAddress> copy_self( new IfcTelecomAddress() ); if( m_Purpose ) { copy_self->m_Purpose = dynamic_pointer_cast<IfcAddressTypeEnum>( m_Purpose->getDeepCopy(options) ); } if( m_Description ) { copy_self->m_Description = dynamic_pointer_cast<IfcText>( m_Description->getDeepCopy(options) ); } if( m_UserDefinedPurpose ) { copy_self->m_UserDefinedPurpose = dynamic_pointer_cast<IfcLabel>( m_UserDefinedPurpose->getDeepCopy(options) ); } for( size_t ii=0; ii<m_TelephoneNumbers.size(); ++ii ) { auto item_ii = m_TelephoneNumbers[ii]; if( item_ii ) { copy_self->m_TelephoneNumbers.push_back( dynamic_pointer_cast<IfcLabel>(item_ii->getDeepCopy(options) ) ); } } for( size_t ii=0; ii<m_FacsimileNumbers.size(); ++ii ) { auto item_ii = m_FacsimileNumbers[ii]; if( item_ii ) { copy_self->m_FacsimileNumbers.push_back( dynamic_pointer_cast<IfcLabel>(item_ii->getDeepCopy(options) ) ); } } if( m_PagerNumber ) { copy_self->m_PagerNumber = dynamic_pointer_cast<IfcLabel>( m_PagerNumber->getDeepCopy(options) ); } for( size_t ii=0; ii<m_ElectronicMailAddresses.size(); ++ii ) { auto item_ii = m_ElectronicMailAddresses[ii]; if( item_ii ) { copy_self->m_ElectronicMailAddresses.push_back( dynamic_pointer_cast<IfcLabel>(item_ii->getDeepCopy(options) ) ); } } if( m_WWWHomePageURL ) { copy_self->m_WWWHomePageURL = dynamic_pointer_cast<IfcURIReference>( m_WWWHomePageURL->getDeepCopy(options) ); } for( size_t ii=0; ii<m_MessagingIDs.size(); ++ii ) { auto item_ii = m_MessagingIDs[ii]; if( item_ii ) { copy_self->m_MessagingIDs.push_back( dynamic_pointer_cast<IfcURIReference>(item_ii->getDeepCopy(options) ) ); } } return copy_self; } void IfcTelecomAddress::getStepLine( std::stringstream& stream ) const { stream << "#" << m_id << "= IFCTELECOMADDRESS" << "("; if( m_Purpose ) { m_Purpose->getStepParameter( stream ); } else { stream << "*"; } stream << ","; if( m_Description ) { m_Description->getStepParameter( stream ); } else { stream << "*"; } stream << ","; if( m_UserDefinedPurpose ) { 
m_UserDefinedPurpose->getStepParameter( stream ); } else { stream << "*"; } stream << ","; writeTypeList( stream, m_TelephoneNumbers ); stream << ","; writeTypeList( stream, m_FacsimileNumbers ); stream << ","; if( m_PagerNumber ) { m_PagerNumber->getStepParameter( stream ); } else { stream << "$"; } stream << ","; writeTypeList( stream, m_ElectronicMailAddresses ); stream << ","; if( m_WWWHomePageURL ) { m_WWWHomePageURL->getStepParameter( stream ); } else { stream << "$"; } stream << ","; writeTypeList( stream, m_MessagingIDs ); stream << ");"; } void IfcTelecomAddress::getStepParameter( std::stringstream& stream, bool ) const { stream << "#" << m_id; } void IfcTelecomAddress::readStepArguments( const std::vector<std::wstring>& args, const boost::unordered_map<int,shared_ptr<IfcPPEntity> >& map ) { const int num_args = (int)args.size(); if( num_args != 9 ){ std::stringstream err; err << "Wrong parameter count for entity IfcTelecomAddress, expecting 9, having " << num_args << ". Entity ID: " << m_id << std::endl; throw IfcPPException( err.str().c_str() ); } m_Purpose = IfcAddressTypeEnum::createObjectFromSTEP( args[0] ); m_Description = IfcText::createObjectFromSTEP( args[1] ); m_UserDefinedPurpose = IfcLabel::createObjectFromSTEP( args[2] ); readSelectList( args[3], m_TelephoneNumbers, map );<|fim▁hole|> readSelectList( args[6], m_ElectronicMailAddresses, map ); m_WWWHomePageURL = IfcURIReference::createObjectFromSTEP( args[7] ); readSelectList( args[8], m_MessagingIDs, map ); } void IfcTelecomAddress::getAttributes( std::vector<std::pair<std::string, shared_ptr<IfcPPObject> > >& vec_attributes ) { IfcAddress::getAttributes( vec_attributes ); if( m_TelephoneNumbers.size() > 0 ) { shared_ptr<IfcPPAttributeObjectVector> TelephoneNumbers_vec_object( new IfcPPAttributeObjectVector() ); std::copy( m_TelephoneNumbers.begin(), m_TelephoneNumbers.end(), std::back_inserter( TelephoneNumbers_vec_object->m_vec ) ); vec_attributes.push_back( std::make_pair( "TelephoneNumbers", TelephoneNumbers_vec_object ) ); } if( m_FacsimileNumbers.size() > 0 ) { shared_ptr<IfcPPAttributeObjectVector> FacsimileNumbers_vec_object( new IfcPPAttributeObjectVector() ); std::copy( m_FacsimileNumbers.begin(), m_FacsimileNumbers.end(), std::back_inserter( FacsimileNumbers_vec_object->m_vec ) ); vec_attributes.push_back( std::make_pair( "FacsimileNumbers", FacsimileNumbers_vec_object ) ); } vec_attributes.push_back( std::make_pair( "PagerNumber", m_PagerNumber ) ); if( m_ElectronicMailAddresses.size() > 0 ) { shared_ptr<IfcPPAttributeObjectVector> ElectronicMailAddresses_vec_object( new IfcPPAttributeObjectVector() ); std::copy( m_ElectronicMailAddresses.begin(), m_ElectronicMailAddresses.end(), std::back_inserter( ElectronicMailAddresses_vec_object->m_vec ) ); vec_attributes.push_back( std::make_pair( "ElectronicMailAddresses", ElectronicMailAddresses_vec_object ) ); } vec_attributes.push_back( std::make_pair( "WWWHomePageURL", m_WWWHomePageURL ) ); if( m_MessagingIDs.size() > 0 ) { shared_ptr<IfcPPAttributeObjectVector> MessagingIDs_vec_object( new IfcPPAttributeObjectVector() ); std::copy( m_MessagingIDs.begin(), m_MessagingIDs.end(), std::back_inserter( MessagingIDs_vec_object->m_vec ) ); vec_attributes.push_back( std::make_pair( "MessagingIDs", MessagingIDs_vec_object ) ); } } void IfcTelecomAddress::getAttributesInverse( std::vector<std::pair<std::string, shared_ptr<IfcPPObject> > >& vec_attributes_inverse ) { IfcAddress::getAttributesInverse( vec_attributes_inverse ); } void 
IfcTelecomAddress::setInverseCounterparts( shared_ptr<IfcPPEntity> ptr_self_entity ) { IfcAddress::setInverseCounterparts( ptr_self_entity ); } void IfcTelecomAddress::unlinkFromInverseCounterparts() { IfcAddress::unlinkFromInverseCounterparts(); }<|fim▁end|>
readSelectList( args[4], m_FacsimileNumbers, map ); m_PagerNumber = IfcLabel::createObjectFromSTEP( args[5] );
<|file_name|>test_k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ KubeVirt API This is KubeVirt API an add-on for Kubernetes. <|fim▁hole|> Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import os import sys import unittest import kubevirt from kubevirt.rest import ApiException from kubevirt.models.k8s_io_apimachinery_pkg_apis_meta_v1_root_paths import K8sIoApimachineryPkgApisMetaV1RootPaths class TestK8sIoApimachineryPkgApisMetaV1RootPaths(unittest.TestCase): """ K8sIoApimachineryPkgApisMetaV1RootPaths unit test stubs """ def setUp(self): pass def tearDown(self): pass def testK8sIoApimachineryPkgApisMetaV1RootPaths(self): """ Test K8sIoApimachineryPkgApisMetaV1RootPaths """ # FIXME: construct object with mandatory attributes with example values #model = kubevirt.models.k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.K8sIoApimachineryPkgApisMetaV1RootPaths() pass if __name__ == '__main__': unittest.main()<|fim▁end|>
OpenAPI spec version: 1.0.0
<|file_name|>rtp_viewer.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python import cv2 import sys from os import path, getenv PPRZ_SRC = getenv("PAPARAZZI_SRC", path.normpath(path.join(path.dirname(path.abspath(__file__)), '../../../'))) sys.path.append(PPRZ_SRC + "/sw/ext/pprzlink/lib/v1.0/python") from pprzlink.ivy import IvyMessagesInterface from pprzlink.message import PprzMessage class RtpViewer: frame = None mouse = dict() def __init__(self, src): # Create the video capture device self.cap = cv2.VideoCapture(src) # Start the ivy interface self.ivy = IvyMessagesInterface("RTPviewer", start_ivy=False) self.ivy.start() # Create a named window and add a mouse callback cv2.namedWindow('rtp') cv2.setMouseCallback('rtp', self.on_mouse) def run(self): # Start an 'infinite' loop while True: # Read a frame from the video capture ret, self.frame = self.cap.read() # Quit if frame could not be retrieved or 'q' is pressed if not ret or cv2.waitKey(1) & 0xFF == ord('q'): break # Run the computer vision function self.cv() def cv(self): # If a selection is happening if self.mouse.get('start'): # Draw a rectangle indicating the region of interest cv2.rectangle(self.frame, self.mouse['start'], self.mouse['now'], (0, 255, 0), 2) # Show the image in a window cv2.imshow('rtp', self.frame) def on_mouse(self, event, x, y, flags, param): if event == cv2.EVENT_LBUTTONDOWN: self.mouse['start'] = (x, y) if event == cv2.EVENT_RBUTTONDOWN: self.mouse['start'] = None if event == cv2.EVENT_MOUSEMOVE: self.mouse['now'] = (x, y) if event == cv2.EVENT_LBUTTONUP:<|fim▁hole|> # Obtain mouse start coordinates sx, sy = self.mouse['start'] # Create a new message msg = PprzMessage("datalink", "VIDEO_ROI") msg['ac_id'] = None msg['startx'] = sx msg['starty'] = sy msg['width'] = abs(x - sx) msg['height'] = abs(y - sy) msg['downsized_width'] = self.frame.shape[1] # Send message via the ivy interface self.ivy.send_raw_datalink(msg) # Reset mouse start self.mouse['start'] = None def cleanup(self): # Shutdown ivy interface self.ivy.shutdown() if __name__ == '__main__': viewer = RtpViewer("rtp_viewer.sdp") if not viewer.cap.isOpened(): viewer.cleanup() sys.exit("Can't open video stream") viewer.run() viewer.cleanup()<|fim▁end|>
# If mouse start is defined, a region has been selected if not self.mouse.get('start'): return
<|file_name|>date.js<|end_file_name|><|fim▁begin|>var base64url = require('urlsafe-base64') , After = require('json-list-response').After , inherits = require('util').inherits module.exports = DateAfter function DateAfter(value, options) { After.call(this, value, options) this.skip = 0 this.value = 0 if (value) { value = base64url.decode(value) if (value.length === 9) { this.value = value.readDoubleBE(0) this.skip = value.readUInt8(8) } } } inherits(DateAfter, After) DateAfter.prototype.add = function (row) { var value = row[this.key] if (!value) return if (+this.value === +value) { this.skip++ } else { this.skip = 0 this.value = value } } DateAfter.prototype.toString = function () { if (!this.value) return '' var buf = new Buffer(9) buf.writeDoubleBE(+this.value || 0, 0) buf.writeUInt8(this.skip, 8) return base64url.encode(buf) } DateAfter.prototype.mongoSorting = function (list, sorting) { var obj = {} obj[sorting.key] = {} obj[sorting.key][sorting.descending ? '$lte' : '$gte'] = new Date(this.value)<|fim▁hole|><|fim▁end|>
list.selector.$and.push(obj) list.cursor.skip(this.skip + 1) }
<|file_name|>speedtest.py<|end_file_name|><|fim▁begin|>import sys import time from entrypoint2 import entrypoint import pyscreenshot from pyscreenshot.plugins.gnome_dbus import GnomeDBusWrapper from pyscreenshot.plugins.gnome_screenshot import GnomeScreenshotWrapper from pyscreenshot.plugins.kwin_dbus import KwinDBusWrapper from pyscreenshot.util import run_mod_as_subproc def run(force_backend, n, childprocess, bbox=None): sys.stdout.write("%-20s\t" % force_backend) sys.stdout.flush() # before any crash if force_backend == "default": force_backend = None try: start = time.time() for _ in range(n): pyscreenshot.grab( backend=force_backend, childprocess=childprocess, bbox=bbox ) end = time.time() dt = end - start s = "%-4.2g sec\t" % dt s += "(%5d ms per call)" % (1000.0 * dt / n) sys.stdout.write(s) finally: print("") novirt = [GnomeDBusWrapper.name, KwinDBusWrapper.name, GnomeScreenshotWrapper.name] def run_all(n, childprocess_param, virtual_only=True, bbox=None): debug = True print("") print("n=%s" % n) print("------------------------------------------------------") if bbox: x1, y1, x2, y2 = map(str, bbox) bbox = ":".join(map(str, (x1, y1, x2, y2))) bboxpar = ["--bbox", bbox] else: bboxpar = [] if debug:<|fim▁hole|> else: debugpar = [] for x in ["default"] + pyscreenshot.backends(): backendpar = ["--backend", x] # skip non X backends if virtual_only and x in novirt: continue p = run_mod_as_subproc( "pyscreenshot.check.speedtest", ["--childprocess", childprocess_param] + bboxpar + debugpar + backendpar, ) print(p.stdout) @entrypoint def speedtest(virtual_display=False, backend="", childprocess="", bbox="", number=10): """Performance test of all back-ends. :param virtual_display: run with Xvfb :param bbox: bounding box coordinates x1:y1:x2:y2 :param backend: back-end can be forced if set (example:default, scrot, wx,..), otherwise all back-ends are tested :param childprocess: pyscreenshot parameter childprocess (0/1) :param number: number of screenshots for each backend (default:10) """ childprocess_param = childprocess if childprocess == "": childprocess = True # default elif childprocess == "0": childprocess = False elif childprocess == "1": childprocess = True else: raise ValueError("invalid childprocess value") if bbox: x1, y1, x2, y2 = map(int, bbox.split(":")) bbox = x1, y1, x2, y2 else: bbox = None def f(virtual_only): if backend: try: run(backend, number, childprocess, bbox=bbox) except pyscreenshot.FailedBackendError: pass else: run_all(number, childprocess_param, virtual_only=virtual_only, bbox=bbox) if virtual_display: from pyvirtualdisplay import Display with Display(visible=0): f(virtual_only=True) else: f(virtual_only=False)<|fim▁end|>
debugpar = ["--debug"]
<|file_name|>util.py<|end_file_name|><|fim▁begin|># Copyright 2016-2018 Michael Peters # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import cProfile from scipy.stats import norm # annotate a function with @profile to see where it's spending the most time def profile(func): def profiled_func(*args, **kwargs): p = cProfile.Profile() try: p.enable() result = func(*args, **kwargs) p.disable() return result finally: p.print_stats() return profiled_func # annotate a function with @print_models def print_models(func): def printed_func(*args, **kwargs): model = func(*args, **kwargs) cv_keys = ('mean_test_score', 'std_test_score', 'params') for r, _ in enumerate(model.cv_results_['mean_test_score']): print("%0.3f +/- %0.2f %r" % (model.cv_results_[cv_keys[0]][r], model.cv_results_[cv_keys[1]][r] / 2.0, model.cv_results_[cv_keys[2]][r]))<|fim▁hole|> print('Best parameters: %s' % model.best_params_) print('Best accuracy: %.2f' % model.best_score_) return model return printed_func # https://www.pro-football-reference.com/about/win_prob.htm def mov_to_win_percent(u, m=11, offset=0): u = u + offset return 1 - norm.cdf(0.5, loc=u, scale=m) + .5 * (norm.cdf(0.5, loc=u, scale=m) - norm.cdf(-0.5, loc=u, scale=m))<|fim▁end|>
<|file_name|>use-stdout.tsx<|end_file_name|><|fim▁begin|>import React, {FC, useEffect} from 'react'; import {render, useStdout, Text} from '../..'; const WriteToStdout: FC = () => { const {write} = useStdout(); useEffect(() => { write('Hello from Ink to stdout\n'); }, []); return <Text>Hello World</Text>; }; const app = render(<WriteToStdout />); <|fim▁hole|> console.log('exited'); })();<|fim▁end|>
(async () => { await app.waitUntilExit();