Columns:
  prompt      large_string  (lengths 70 to 991k)
  completion  large_string  (lengths 0 to 1.02k)
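Each row below concatenates the two columns: the prompt wraps one source file in fill-in-the-middle markers, and the completion holds the span that was cut out at the hole. As a rough sketch (assuming the DeepSeek-style FIM convention these markers come from), one row is laid out as:

  <|file_name|>NAME<|end_file_name|><|fim▁begin|>PREFIX<|fim▁hole|>SUFFIX<|fim▁end|>
  COMPLETION

where PREFIX and SUFFIX are the file text before and after the removed span, and COMPLETION may be empty (the completion column's minimum length is 0). NAME, PREFIX, SUFFIX and COMPLETION are placeholders for illustration, not literal values from the data.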
<|file_name|>phonopy.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# @Author: YangZhou
# @Date: 2017-06-16 20:09:09
# @Last Modified by: YangZhou
# @Last Modified time: 2017-06-27 16:02:34
from aces.tools import mkdir, mv, cd, cp, mkcd, shell_exec,\
    exists, write, passthru, toString, pwd, debug, ls, parseyaml
import aces.config as config
from aces.binary import pr
from aces.runners import Runner
from aces.graph import plot, series, pl, fig
from aces.script.vasprun import exe as lammpsvasprun
import aces.script.vasprun as vasprun
import time
import numpy as np
from aces.io.phonopy.bandplot import plotband, plotbanddos
from aces.io.phonopy.meshyaml import meshyaml
from aces.io.phonopy.fc import readfc2
from aces.pbs.jobManager import jobManager, th, pbs
from aces.io.vasp import writePOTCAR, writevasp, parseVasprun
from ase import io
from lxml import etree
from scanf import sscanf


class runner(Runner):

    def minimizePOSCAR(self):
        m = self.m
        if m.engine == "lammps":
            m.dump2POSCAR(m.home + '/minimize/range', rotate=True)
        elif m.engine == "vasp":
            cp(m.home + '/minimize/CONTCAR', 'POSCAR')

    def optimize(self):
        mkcd('optimize')
        cp('../minimize/POSCAR', '.')
        atoms = io.read('POSCAR')
        for i in range(100):
            dir = "%i" % i
            mkcd(dir)
            writevasp(atoms)
            forces, stress, energy = self.energyForce()
            pos = atoms.get_scaled_positions()
            pos += forces * 0.01

    def energyForce(self):
        self.getVaspRun_vasp()
        forces = parseVasprun('forces')
        stress = parseVasprun('stress')
        c = shell_exec("grep TOTEN OUTCAR|tail -1")
        energy = sscanf(c, "free energy TOTEN = %f eV")[0]
        return forces, stress, energy

    def cs(self):
        from aces.cs import runner
        runner(NAH=2).run()
        self.check('csfc2')

    def check1(self, filename='FORCE_CONSTANTS'):
        ref = io.read('SPOSCAR')
        fc2 = readfc2(filename)
        np.set_printoptions(precision=2, suppress=True)
        files = ['dir_POSCAR-001']
        vasprunxml = "dir_SPOSCAR/vasprun.xml"
        if exists(vasprunxml):
            vasprun = etree.iterparse(vasprunxml, tag='varray')
            forces0 = parseVasprun(vasprun, 'forces')
            print(forces0.max())
        else:
            forces0 = 0.0
        for file in files:
            print(file)
            POSCAR = 'dirs/%s/POSCAR' % file
            vasprunxml = "dirs/%s/vasprun.xml" % file
            atoms = io.read(POSCAR)
            u = atoms.positions - ref.positions
            f = -np.einsum('ijkl,jl', fc2, u)
            vasprun = etree.iterparse(vasprunxml, tag='varray')
            forces = parseVasprun(vasprun, 'forces') - forces0
            print(np.abs(f).max(), "\n")
            print(np.abs(forces - f).max())
            print(np.allclose(f, forces, atol=1e-2))

    def check(self, filename='FORCE_CONSTANTS'):
        ref = io.read('SPOSCAR')
        files = shell_exec("ls dirs").split('\n')
        fc2 = readfc2(filename)
        np.set_printoptions(precision=2, suppress=True)
        vasprunxml = "dir_SPOSCAR/vasprun.xml"
        if exists(vasprunxml):
            vasprun = etree.iterparse(vasprunxml, tag='varray')
            forces0 = parseVasprun(vasprun, 'forces')
            print(forces0.max())
        else:
            forces0 = 0.0
        for file in files:
            print(file)
            POSCAR = 'dirs/%s/POSCAR' % file
            vasprunxml = "dirs/%s/vasprun.xml" % file
            atoms = io.read(POSCAR)
            u = atoms.positions - ref.positions
            f = -np.einsum('ijkl,jl', fc2, u)
            vasprun = etree.iterparse(vasprunxml, tag='varray')
            forces = parseVasprun(vasprun, 'forces') - forces0
            print(np.abs(f).max(), "\n")
            print(np.abs(forces - f).max())
            print(np.allclose(f, forces, atol=1e-2))

    def stub(self):
        files = shell_exec("ls dirs").split('\n')
        files = map(lambda x: x.replace('dir_', ''), files)
        fc2 = readfc2('fc2')
        for file in files:
            ref = io.read('SPOSCAR')
            a = 'dirs/dir_' + str(file)
            atoms = io.read(a + "/POSCAR")
            u = atoms.positions - ref.positions
            f = -np.einsum('ijkl,jl', fc2, u)
            forces = ""
            for force in f:
                forces += "<v> %f %f %f </v>\n" % tuple(force)
            vasprun = '<root><calculation><varray name="forces" >\n'
            vasprun += forces
            vasprun += '</varray></calculation></root>\n'
            write(vasprun, a + "/vasprun.xml")

    def force_constant(self, files):
        cmd = config.phonopy + "-f "
        if exists("dir_SPOSCAR/vasprun.xml"):
            cmd = config.phonopy + "--fz dir_SPOSCAR/vasprun.xml "
        for file in files:
            dir = "dirs/dir_" + file
            cmd += dir + '/vasprun.xml '
        # generate FORCE_SETS
        passthru(cmd)
        m = self.m
        # Create FORCE_CONSTANTS
        passthru(config.phonopy + "--tolerance=1e-4 --writefc --dim='%s'" % (m.dim))

    def fc2(self):
        files = shell_exec("ls dirs").split('\n')<|fim▁hole|>
        # POSCAR-1500 ,POSCAR-159
        files.sort(lambda x, y: int(x.split('-')[1]) - int(y.split('-')[1]))
        self.force_constant(files)

    def generate_meshconf(self):
        # generate mesh.conf
        m = self.m
        mesh = """DIM = %s
ATOM_NAME = %s
MP = %s
EIGENVECTORS=.TRUE.
FORCE_CONSTANTS = READ
MESH_SYMMETRY = .FALSE.
PRIMITIVE_AXIS = %s
""" % (m.dim, ' '.join(m.elements), ' '.join(map(str, m.kpoints)),
            toString(m.premitive.flatten()))
        mesh = mesh.replace(r'^\s+', '')
        write(mesh, 'mesh.conf')

    def generate_vconf(self):
        # generate v.conf
        m = self.m
        mesh = """DIM = %s
ATOM_NAME = %s
MP = %s
FORCE_CONSTANTS = READ
MESH_SYMMETRY = .FALSE.
GROUP_VELOCITY=.TRUE.
PRIMITIVE_AXIS = %s
""" % (m.dim, ' '.join(m.elements), ' '.join(map(str, m.kpoints)),
            toString(m.premitive.flatten()))
        mesh = mesh.replace(r'^\s+', '')
        write(mesh, 'v.conf')

    def generate_qconf(self, q):
        # generate q.conf
        m = self.m
        mesh = """DIM = %s
ATOM_NAME = %s
FORCE_CONSTANTS = READ
EIGENVECTORS=.TRUE.
QPOINTS=.TRUE.
PRIMITIVE_AXIS = %s
""" % (m.dim, ' '.join(m.elements), toString(m.premitive.flatten()))
        mesh = mesh.replace(r'^\s+', '')
        write(mesh, 'q.conf')
        s = "%s\n" % len(q)
        for qq in q:
            s += "%s\n" % toString(qq)
        write(s, 'QPOINTS')

    def generate_vqconf(self, q):
        # generate q.conf
        m = self.m
        mesh = """DIM = %s
ATOM_NAME = %s
FORCE_CONSTANTS = READ
GROUP_VELOCITY=.TRUE.
QPOINTS=.TRUE.
PRIMITIVE_AXIS = %s
""" % (m.dim, ' '.join(m.elements), toString(m.premitive.flatten()))
        mesh = mesh.replace(r'^\s+', '')
        write(mesh, 'q.conf')
        s = "%s\n" % len(q)
        for qq in q:
            s += "%s\n" % toString(qq)
        write(s, 'QPOINTS')

    def generate_supercells(self):
        m = self.m
        # generate supercells
        passthru(config.phonopy + "--tolerance=1e-4 -d --dim='%s'" % (m.dim))

    def writeINCAR(self):
        m = self.m
        npar = 1
        for i in range(1, int(np.sqrt(m.cores)) + 1):
            if m.cores % i == 0:
                npar = i
        if m.ispin:
            ispin = "ISPIN=2"
        else:
            ispin = ""
        if m.soc:
            soc = "LSORBIT=T"
        else:
            soc = ""
        if m.isym:
            sym = "ISYM = 1"
        else:
            sym = "ISYM = 0"
        s = """SYSTEM=calculate energy
PREC = High
IBRION = -1
ENCUT = %f
EDIFF = 1.0e-8
ISMEAR = %d; SIGMA = 0.01
IALGO = 38
LREAL = .FALSE.
ADDGRID = .TRUE.
LWAVE = .FALSE.
LCHARG = .FALSE.
NPAR = %d
%s
%s
%s
""" % (self.m.ecut, m.ismear, npar, sym, ispin, soc)
        if m.vdw:
            s += """\nIVDW = 1
VDW_RADIUS = 50
VDW_S6 = 0.75
VDW_SR = 1.00
VDW_SCALING = 0.75
VDW_D = 20.0
VDW_C6 = 63.540 31.50
VDW_R0 = 1.898 1.892
"""
        s = s.replace(r'^\s+', '')
        write(s, 'INCAR')

    def getVaspRun_vasp(self):
        self.writeINCAR()
        m = self.m
        writePOTCAR(m, m.elements)
        if (m.kpointspath):
            cp(m.kpointspath, "KPOINTS")
        else:
            from aces.io.vasp import writeKPOINTS
            writeKPOINTS(m.ekpoints)
        if 'jm' in self.__dict__:
            if not m.th:
                path = pwd()
                if m.queue == "q3.4":
                    pb = pbs(
                        queue=m.queue,
                        nodes=12,
                        procs=1,
                        disp=m.pbsname,
                        path=path,
                        content=config.mpirun + " 12 " + config.vasp + ' >log.out')
                else:
                    pb = pbs(
                        queue=m.queue,
                        nodes=1,
                        procs=12,
                        disp=m.pbsname,
                        path=path,
                        content=config.mpirun + " 12 " + config.vasp + ' >log.out')
            else:
                path = pwd()
                pb = th(disp=m.pbsname, path=path)
            self.jm.reg(pb)
        else:
            shell_exec(config.mpirun + " %s " % m.cores + config.vasp + ' >log.out')

    def getVaspRun_lammps(self):
        m = self.m
        if 'jm' in self.__dict__:
            path = pwd()
            pb = pbs(
                queue=m.queue,
                nodes=1,
                procs=4,
                disp=m.pbsname,
                path=path,
                content=config.python + vasprun.__file__ + ' >log.out')
            self.jm.reg(pb)
        else:
            shell_exec(config.python + vasprun.__file__ + ' >log.out')

    def thcode(self, files, put):
        s = ""
        for file in files:
            dir = "dirs/dir_" + file
            s += "cd %s\n" % (dir)
            s += "yhbatch -N 1 aces.pbs\n"
            s += "cd ../../\n"
        write(s, put + "/runall.sh")

    def getvasprun(self, files):
        m = self.m
        maindir = pwd()
        if m.engine == "vasp":
            calculator = self.getVaspRun_vasp
        elif m.engine == "lammps":
            calculator = self.getVaspRun_lammps
        self.jm = jobManager()
        for file in files:
            print(file)
            dir = "dirs/dir_" + file
            mkdir(dir)
            mv(file, dir + '/POSCAR')
            cd(dir)
            calculator()
            cd(maindir)
        self.jm.run()
        if m.th:
            mkdir(m.pbsname)
            self.thcode(files, m.pbsname)
            cp("dirs", m.pbsname)
            passthru("tar zcf %s.tar.gz %s" % (m.pbsname, m.pbsname))
        print('start check')
        self.jm.check()
        if m.engine == "lammps1":
            from multiprocessing.dummy import Pool
            pool = Pool()
            pool.map_async(lammpsvasprun, files)
            pool.close()
            pool.join()

    def runSPOSCAR(self):
        m = self.m
        maindir = pwd()
        file = "SPOSCAR"
        dir = "dir_" + file
        mkdir(dir)
        cp(file, dir + '/POSCAR')
        cd(dir)
        if m.engine == "vasp":
            self.getVaspRun_vasp()
        if m.engine == "lammps":
            self.getVaspRun_lammps()
        cd(maindir)

    def checkMinimize(self):
        import yaml
        data = yaml.load(open('disp.yaml').read())
        disps = [map(float, a['direction']) for a in data['displacements']]
        maindir = pwd()
        dirs = ls('dirs/dir_*')
        ii = 0
        L = np.linalg.norm
        # d,p,d1,p1=self.m.rot
        out = open('ccos.txt', 'w')
        for dir in dirs:
            cd(dir)
            f = open('dump.force')
            for i in range(9):
                f.next()
            for b in range(ii):
                f.next()
            line = f.next()
            line = line.split()
            force = np.array(map(float, line[1:4]))
            # force=RotateVector(force,d1,-p1)
            # force=RotateVector(force,d,-p)
            d = disps[i]
            ccos = force.dot(d) / L(force) / L(d)
            ii += 1
            print >> out, "%d\t%f" % (ii, ccos)
            cd(maindir)

    def run(self):
        m = self.m
        a = time.time()
        self.generate_supercells()
        debug('generate_supercells:%f s' % (time.time() - a))
        files = shell_exec("ls *-*").split('\n')
        assert len(files) > 0 and not files[0] == ""
        # self.runSPOSCAR()
        a = time.time()
        self.getvasprun(files)
        debug('getvasprun:%f s' % (time.time() - a))
        a = time.time()
        self.fc2()
        debug('force_constant:%f s' % (time.time() - a))
        if m.phofc:
            return self
        self.postp()

    def generate(self):
        self.minimizePOSCAR()
        self.run()

    def get_force_sets(self):
        files = shell_exec("ls dirs").split('\n')
        files = map(lambda x: x.replace('dir_', ''), files)
        self.force_constant(files)

    def postp(self):
        m = self.m
        if m.gamma_only:
            self.getDos()
            return
        self.getband()
        self.getDos()
        self.getbanddos()
        self.drawpr()
        self.getV()

    def getqpoints(self, q):
        self.generate_qconf(q)
        passthru(config.phonopy + "--tolerance=1e-4 q.conf")

    def getvqpoints(self, q):
        self.generate_vqconf(q)
        passthru(config.phonopy + "--tolerance=1e-4 q.conf")
        data = parseyaml('qpoints.yaml')
        file = open("v.txt", 'w')
        for phonon in data['phonon']:
            qp = phonon['q-position']
            for band in phonon['band']:
                frequency = band['frequency']
                v = np.array(band['group_velocity'])
                v = np.linalg.norm(v)
                print >> file, "%s\t%f\t%f" % ('\t'.join(map(str, qp)), frequency, v)
        file.close()
        v = np.loadtxt('v.txt')
        plot(
            (v[:, 3], 'Frequency (THz)'),
            (v[:, 4], 'Group Velocity (Angstrom/ps)'),
            'v_freq.png',
            grid=True,
            scatter=True)

    def getDos(self):
        self.generate_meshconf()
        passthru(config.phonopy + "--tolerance=1e-4 --dos mesh.conf")
        self.drawDos()

    def getV(self):
        if not exists('groupv'):
            mkdir('groupv')
        cd('groupv')
        cp('../FORCE_CONSTANTS', '.')
        cp('../POSCAR', '.')
        cp('../disp.yaml', '.')
        self.generate_vconf()
        passthru(config.phonopy + "--tolerance=1e-4 v.conf")
        self.drawV()
        cd('..')

    def drawV(self):
        data = parseyaml('mesh.yaml')
        file = open("v.txt", 'w')
        for phonon in data['phonon']:
            qp = phonon['q-position']
            for band in phonon['band']:
                frequency = band['frequency']
                v = np.array(band['group_velocity'])
                v = np.linalg.norm(v)
                print >> file, "%s\t%f\t%f" % ('\t'.join(map(str, qp)), frequency, v)
        file.close()
        v = np.loadtxt('v.txt')
        plot(
            (v[:, 3], 'Frequency (THz)'),
            (v[:, 4], 'Group Velocity (Angstrom/ps)'),
            'v_freq.png',
            grid=True,
            scatter=True)

    def getband(self):
        self.generate_bandconf()
        passthru(config.phonopy + "--tolerance=1e-4 -s band.conf")
        plotband(labels=' '.join(self.m.bandpath))

    def getbanddos(self):
        freq, pdos = self.getpdos()
        plotbanddos(
            freq=freq,
            dos=np.sum(pdos, axis=1),
            labels=' '.join(self.m.bandpath))

    def modulation(self):
        m = self.m
        conf = """
DIM = %s
MODULATION = 1 1 1, 0 0 0 0 1 0
ATOM_NAME = %s
FORCE_CONSTANTS = READ
""" % (m.dim, ' '.join(m.elements))
        write(conf, 'modulation.conf')
        passthru(config.phonopy + "--tolerance=1e-4 modulation.conf")

    def animate(self):
        m = self.m
        conf = """
DIM = %s
ANIME = 0 5 20
ANIME_TYPE = xyz
ATOM_NAME = %s
FORCE_CONSTANTS = READ
""" % (m.dim, ' '.join(m.elements))
        write(conf, 'animate.conf')
        passthru(config.phonopy + "--tolerance=1e-4 animate.conf")

    def generate_bandconf(self):
        # generate mesh.conf
        m = self.m
        bp = m.bandpoints
        bpath = ' '.join([toString(bp[x]) for x in m.bandpath])
        band = """DIM = %s
ATOM_NAME = %s
BAND = %s
BAND_POINTS = 101
FORCE_CONSTANTS = READ
PRIMITIVE_AXIS = %s
""" % (m.dim, ' '.join(m.elements), bpath, toString(m.premitive.flatten()))
        band = band.replace(r'^\s+', '')
        write(band, 'band.conf')

    def getpdos(self):
        xx = np.loadtxt('partial_dos.dat', skiprows=1)
        freq = xx[:, 0]
        pdos = xx[:, 1:]
        return freq, pdos

    def drawDos(self):
        freq, pdos = self.getpdos()
        datas = [(freq, p, '') for p in pdos.T]
        series(
            'Frequency (THz)',
            'Partial Density of States',
            datas=datas,
            filename='partial_dos.png',
            legend=False,
            grid=True)
        plot(
            (freq, 'Frequency (THz)'),
            (np.sum(pdos, axis=1), 'Density of States'),
            filename='total_dos.png')

    # calculate paticipation ratio
    def mesh(self):
        """
        save mesh.yaml to mesh.npz

        [description]
        """
        data = meshyaml('mesh.yaml')
        np.savez('mesh', **data)

    def drawpr(self):
        pr()
        # plot
        xs = []
        ys = []
        for line in open('pr.txt'):
            x, y = map(float, line.split())
            xs.append(x)
            ys.append(y)
        write("%s" % (sum(ys) / len(ys)),
              "ave_pr.txt")
        with fig('Paticipation_ratio.png'):
            pl.plot(xs, ys, '.', color='red')
            pl.ylim([0.0, 1.0])
            pl.xlabel('Frequency (THz)')
            pl.ylabel('Paticipation Ratio')<|fim▁end|>
files = map(lambda x: x.replace('dir_', ''), files)
# when the number of files >1000, the order is wrong ,POSCAR-001,
<|file_name|>config.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
function config($stateProvider, $urlRouterProvider) {
    $urlRouterProvider.otherwise('/');

    $stateProvider
        .state('root', {
            url: "/",
            views: {
                header: {
                    templateUrl: 'view/header.html',
                    controller: headerControlledName
                },
                items: {
                    templateUrl: 'view/items.html',
                    controller: require('./controller/ItemsController').name
                },
                footer: {
                    templateUrl: 'view/footer.html',
                    controller: require('./controller/FooterController').name
                }
            }
        });
}

export default config;<|fim▁end|>
import {name as headerControlledName} from './controller/HeaderController';
<|file_name|>Worker.cpp<|end_file_name|><|fim▁begin|>/*
 * Copyright (c) Martin Kinkelin
 *
 * See the "License.txt" file in the root directory for infos
 * about permitted and prohibited uses of this code.
 */

#include <algorithm>
#include <iostream>
#include <locale>

#include "Worker.h"
#include "StringUtils.h"
#include "WinRing0.h"

using std::cerr;
using std::endl;
using std::min;
using std::max;
using std::string;
using std::tolower;
using std::vector;

static void SplitPair(string& left, string& right, const string& str, char delimiter)
{
    const size_t i = str.find(delimiter);

    left = str.substr(0, i);

    if (i == string::npos)
        right.clear();
    else
        right = str.substr(i + 1);
}

bool Worker::ParseParams(int argc, const char* argv[])
{
    const Info& info = *_info;

    PStateInfo psi;
    psi.Multi = psi.VID = psi.NBVID = -1;
    psi.NBPState = -1;

    NBPStateInfo nbpsi;
    nbpsi.Multi = 1.0;
    nbpsi.VID = -1;

    for (int i = 0; i < info.NumPStates; i++)
    {
        _pStates.push_back(psi);
        _pStates.back().Index = i;
    }
    for (int i = 0; i < info.NumNBPStates; i++)
    {
        _nbPStates.push_back(nbpsi);
        _nbPStates.back().Index = i;
    }

    for (int i = 1; i < argc; i++)
    {
        const string param(argv[i]);

        string key, value;
        SplitPair(key, value, param, '=');

        if (value.empty())
        {
            if (param.length() >= 2 && tolower(param[0]) == 'p')
            {
                const int index = atoi(param.c_str() + 1);
                if (index >= 0 && index < info.NumPStates)
                {
                    _pState = index;
                    continue;
                }
            }
        }
        else
        {
            if (key.length() >= 2 && tolower(key[0]) == 'p')
            {
                const int index = atoi(key.c_str() + 1);
                if (index >= 0 && index < info.NumPStates)
                {
                    string multi, vid;
                    SplitPair(multi, vid, value, '@');

                    if (!multi.empty())
                        _pStates[index].Multi = info.multiScaleFactor * atof(multi.c_str());
                    if (!vid.empty())
                        _pStates[index].VID = info.EncodeVID(atof(vid.c_str()));

                    continue;
                }
            }

            if (key.length() >= 5 && _strnicmp(key.c_str(), "NB_P", 4) == 0)
            {
                const int index = atoi(key.c_str() + 4);
                if (index >= 0 && index < info.NumNBPStates)
                {
                    string multi, vid;
                    SplitPair(multi, vid, value, '@');

                    if (!multi.empty())
                        _nbPStates[index].Multi = atof(multi.c_str());
                    if (!vid.empty())
                        _nbPStates[index].VID = info.EncodeVID(atof(vid.c_str()));

                    continue;
                }
            }

            if (_stricmp(key.c_str(), "NB_low") == 0)
            {
                const int index = atoi(value.c_str());
                int j = 0;
                for (; j < min(index, info.NumPStates); j++)
                    _pStates[j].NBPState = 0;
                for (; j < info.NumPStates; j++)
                    _pStates[j].NBPState = 1;
                continue;
            }

            if (_stricmp(key.c_str(), "Turbo") == 0)
            {
                const int flag = atoi(value.c_str());
                if (flag == 0 || flag == 1)
                {
                    _turbo = flag;
                    continue;
                }
            }

            if( _stricmp( key.c_str(), "BoostEnAllCores" ) == 0 )
<|fim▁hole|>
            {
                const int flag = atoi( value.c_str() );
                if( flag == 0 || flag == 1 )
                {
                    _boostEnAllCores = flag;
                    continue;
                }
            }

            if( _stricmp( key.c_str(), "IgnoreBoostThresh" ) == 0 )
            {
                const int flag = atoi( value.c_str() );
                if( flag == 0 || flag == 1 )
                {
                    _ignoreBoostThresh = flag;
                    continue;
                }
            }

            if (_stricmp(key.c_str(), "APM") == 0)
            {
                const int flag = atoi(value.c_str());
                if (flag == 0 || flag == 1)
                {
                    _apm = flag;
                    continue;
                }
            }

            if (_stricmp(key.c_str(), "NbPsi0Vid") == 0)
            {
                if (!value.empty())
                    _NbPsi0Vid_VID = info.EncodeVID(atof(value.c_str()));
                continue;
            }
        }

        cerr << "ERROR: invalid parameter " << param.c_str() << endl;
        return false;
    }

    return true;
}

static bool ContainsChanges(const PStateInfo& info)
{
    return (info.Multi >= 0 || info.VID >= 0 || info.NBVID >= 0 || info.NBPState >= 0);
}

static bool ContainsChanges(const NBPStateInfo& info)
{
    return (info.Multi >= 0 || info.VID >= 0);
}

static void SwitchTo(int logicalCPUIndex)
{
    const HANDLE hThread = GetCurrentThread();
    SetThreadAffinityMask(hThread, (DWORD_PTR)1 << logicalCPUIndex);
}

void Worker::ApplyChanges()
{
    const Info& info = *_info;

    if (info.Family == 0x15)
    {
        for (int i = 0; i < _nbPStates.size(); i++)
        {
            const NBPStateInfo& nbpsi = _nbPStates[i];
            if (ContainsChanges(nbpsi))
                info.WriteNBPState(nbpsi);
        }
    }
    else if (info.Family == 0x10 && (_nbPStates[0].VID >= 0 || _nbPStates[1].VID >= 0))
    {
        for (int i = 0; i < _pStates.size(); i++)
        {
            PStateInfo& psi = _pStates[i];
            const int nbPState = (psi.NBPState >= 0 ? psi.NBPState : info.ReadPState(i).NBPState);
            const NBPStateInfo& nbpsi = _nbPStates[nbPState];
            if (nbpsi.VID >= 0)
                psi.NBVID = nbpsi.VID;
        }
    }

    if (_turbo >= 0 && info.IsBoostSupported)
        info.SetBoostSource(_turbo == 1);
    if( _boostEnAllCores >= 0 && info.BoostEnAllCores != -1 )
        info.SetBoostEnAllCores( _boostEnAllCores );
    if( _ignoreBoostThresh >= 0 && info.IgnoreBoostThresh != -1 )
        info.SetIgnoreBoostThresh( _ignoreBoostThresh );
    if (_apm >= 0 && info.Family == 0x15)
        info.SetAPM(_apm == 1);
    if (_NbPsi0Vid_VID >= 0 && info.Family == 0x15)
        info.WriteNbPsi0Vid(_NbPsi0Vid_VID);

    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    const int numLogicalCPUs = sysInfo.dwNumberOfProcessors;

    // switch to the highest thread priority (we do not want to get interrupted often)
    const HANDLE hProcess = GetCurrentProcess();
    const HANDLE hThread = GetCurrentThread();
    SetPriorityClass(hProcess, REALTIME_PRIORITY_CLASS);
    SetThreadPriority(hThread, THREAD_PRIORITY_HIGHEST);

    // perform one iteration in each logical core
    for (int j = 0; j < numLogicalCPUs; j++)
    {
        SwitchTo(j);

        for (int i = 0; i < _pStates.size(); i++)
        {
            const PStateInfo& psi = _pStates[i];
            if (ContainsChanges(psi))
                info.WritePState(psi);
        }

        if (_turbo >= 0 && info.IsBoostSupported)
            info.SetCPBDis(_turbo == 1);
    }

    for (int j = 0; j < numLogicalCPUs; j++)
    {
        SwitchTo(j);

        const int currentPState = info.GetCurrentPState();
        const int newPState = (_pState >= 0 ? _pState : currentPState);

        if (newPState != currentPState)
            info.SetCurrentPState(newPState);
        else
        {
            if (ContainsChanges(_pStates[currentPState]))
            {
                const int tempPState = (currentPState == info.NumPStates - 1 ? 0 : info.NumPStates - 1);
                info.SetCurrentPState(tempPState);
                Sleep(1);
                info.SetCurrentPState(currentPState);
            }
        }
    }

    SetThreadPriority(hThread, THREAD_PRIORITY_NORMAL);
    SetPriorityClass(hProcess, NORMAL_PRIORITY_CLASS);
}<|fim▁end|>
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>/*
 * grunt-simple-templates
 * https://github.com/jclem/grunt-simple-templates<|fim▁hole|>
 * Licensed under the MIT license.
 */

'use strict';

module.exports = function(grunt) {

  // Project configuration.
  grunt.initConfig({
    jshint: {
      all: [
        'Gruntfile.js',
        'tasks/*.js',
        '<%= nodeunit.tests %>',
      ],
      options: {
        jshintrc: '.jshintrc',
      },
    },

    // Before generating any new files, remove any previously-created files.
    clean: {
      tests: ['tmp'],
    },

    // Configuration to be run (and then tested).
    templates: {
      default_options: {
        src: "test/fixtures/",
        dest: "tmp/default_options"
      },
      custom_options: {
        src: "test/fixtures/",
        dest: "tmp/custom_options",
        options: {
          namespace: "CUSTOM_TEMPLATES",
          extension: "hbs"
        }
      }
    },

    // Unit tests.
    nodeunit: {
      tests: ['test/*_test.js'],
    },
  });

  // Actually load this plugin's task(s).
  grunt.loadTasks('tasks');

  // These plugins provide necessary tasks.
  grunt.loadNpmTasks('grunt-contrib-jshint');
  grunt.loadNpmTasks('grunt-contrib-clean');
  grunt.loadNpmTasks('grunt-contrib-nodeunit');

  // Whenever the "test" task is run, first clean the "tmp" dir, then run this
  // plugin's task(s), then test the result.
  grunt.registerTask('test', ['clean', 'templates', 'nodeunit']);

  // By default, lint and run all tests.
  grunt.registerTask('default', ['jshint', 'test']);

};<|fim▁end|>
 *
 * Copyright (c) 2013 Jonathan Clem
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var through = require('through2');
var escapeStr = require('js-string-escape');

var STYLE_HEADER = '!function(){var a="';
var STYLE_FOOTER = '",b=document.createElement("style");b.type="text/css",b.styleSheet?b.styleSheet.cssText=a:b.appendChild(document.createTextNode(a)),(document.head||document.getElementsByTagName("head")[0]).appendChild(b)}();';

function cssTojs() {
  function transform(file, enc, callback) {
    var css = file.contents.toString();
    var content = STYLE_HEADER + escapeStr(css) + STYLE_FOOTER;
    file.contents = new Buffer(content);
    callback(null, file);
  }<|fim▁hole|>
  return through.obj(transform);
}

module.exports = cssTojs;<|fim▁end|>
<|file_name|>rsqrt.rs<|end_file_name|><|fim▁begin|>#![feature(cfg_target_feature, link_llvm_intrinsics, platform_intrinsics, simd_ffi, test)]

extern crate hagane_simd;
extern crate test;

use hagane_simd::*;
use test::*;

#[cfg(target_feature = "sse")]
extern "platform-intrinsic" {
    fn x86_mm_sqrt_ps(x: float4) -> float4;
    fn x86_mm_rsqrt_ps(x: float4) -> float4;
}

#[cfg(target_feature = "sse")]
#[bench]
fn bench_sqrt_plus_div(b: &mut Bencher) {
    let n = black_box(float4(1.0, 3.0, 5.0, 7.0));

    b.iter(|| {
        let mut sum = n;
        for _ in 0 .. 10000 {
            let sqrt = unsafe { x86_mm_sqrt_ps(sum) };
            sum = sum + 1.0 / sqrt;
        }
        sum
    });
}

#[cfg(target_feature = "sse")]
#[bench]
fn bench_intel_rsqrt(b: &mut Bencher) {
    let n = black_box(float4(1.0, 3.0, 5.0, 7.0));

    b.iter(|| {
        let mut sum = n;
        for _ in 0 .. 10000 {
            let r = unsafe { x86_mm_rsqrt_ps(sum) };
            sum = sum - 0.5 * (r * (3.0 - r * r * sum));
        }
        sum
    });
}

<|fim▁hole|>fn bench_apple_rsqrt(b: &mut Bencher) {
    let n = black_box(float4(1.0, 3.0, 5.0, 7.0));

    b.iter(|| {
        let mut sum = n;
        for _ in 0 .. 10000 {
            let r = unsafe { x86_mm_rsqrt_ps(sum) };
            let r = r * (1.5 - 0.5 * sum.eq(float4::broadcast(0.0)).bitselect(sum, float4::broadcast(-std::f32::INFINITY)) * r * r);
            sum = sum + r;
        }
        sum
    });
}

#[cfg(target_feature = "sse")]
#[bench]
fn bench_apple_rsqrt_simplified(b: &mut Bencher) {
    let n = black_box(float4(1.0, 3.0, 5.0, 7.0));

    b.iter(|| {
        let mut sum = n;
        for _ in 0 .. 10000 {
            let r = unsafe { x86_mm_rsqrt_ps(sum) };
            let r = r * (1.5 - 0.5 * sum * r * r);
            sum = sum + r;
        }
        sum
    });
}<|fim▁end|>
#[cfg(target_feature = "sse")]
#[bench]
<|file_name|>ShipDamageDiff.ts<|end_file_name|><|fim▁begin|>/// <reference path="BaseBattleDiff.ts"/>

module TK.SpaceTac {
    /**
     * A ship takes damage (to hull or shield)
     *
     * This is only informative, and does not apply the damage on ship values (there are ShipValueDiff for this).
     */
    export class ShipDamageDiff extends BaseBattleShipDiff {
        // Damage to hull
        hull: number

        // Damage to shield
        shield: number

        // Evaded damage
        evaded: number

        // Theoretical damage value
        theoretical: number

        constructor(ship: Ship, hull: number, shield: number, evaded = 0, theoretical = hull + shield + evaded) {
            super(ship);

            this.hull = hull;<|fim▁hole|>
            this.evaded = evaded;
            this.theoretical = theoretical;
        }
    }
}<|fim▁end|>
this.shield = shield;
<|file_name|>test_executor.py<|end_file_name|><|fim▁begin|>import daisychain.steps.input
from daisychain.executor import Executor, Execution, ExecutorAborted, ConsoleInput, CheckStatusException
from . import test_step
from mock import patch
import py3compat

if py3compat.PY2:
    input_function = 'daisychain.steps.input.input'
else:
    import builtins
    input_function = 'builtins.input'


def test_init():
    e = Executor()
    assert e.scan_interval == 0.0
    assert e.execution is None
    assert e.user_input_class is ConsoleInput
    assert e.on_failure is Executor.RAISE

    e = Executor(on_failure=Executor.PROMPT, scan_interval=1.0)
    assert e.scan_interval == 1.0
    assert e.execution is None
    assert e.user_input_class is ConsoleInput
    assert e.on_failure is Executor.PROMPT

    try:
        e = Executor(on_failure='NOT_A_KNOWN_FAILURE_TYPE')
    except ValueError:
        pass
    else:
        assert False, "Should have thrown a Value Error for an unknown failure mode"


def test_attach_self_as_executor():
    e = Executor(name='test_executor')
    assert e.scan_interval == 0.0
    assert e.execution is None
    assert e.user_input_class is ConsoleInput
    assert e.on_failure is Executor.RAISE

    dep = test_step.MockStep(name='mock_step')
    e.dependencies.add(dep)
    e.execution = Execution(executor=e)
    assert dep.executor is e
    assert dep.root_log_id == e.root_log_id


def test_prompt_user_for_step():
    with patch(input_function) as mock_raw_input:
        dep = test_step.MockStep(name='mock_step', run_exception=RuntimeError('Exception while running step'))
        e = Executor(name='test_executor')
        global times_called
        times_called = 0

        def raw_input_output(*args, **kwargs):
            global times_called
            times_called += 1
            responses = ['y','','r','']
            prompt = kwargs.get('prompt', args[0])
            assert 'mock_step' in prompt
            assert 'Does this test work (y)/(n)?' in prompt<|fim▁hole|>
        mock_raw_input.side_effect = raw_input_output
        assert e.prompt_user_for_step(step=dep, prompt='Does this test work (y)/(n)?') == 'y'
        assert e.prompt_user_for_step(step=dep, prompt='Does this test work (y)/(n)?', valid_choices=['d','r']) == 'r'
        assert e.prompt_user_for_step(step=dep, prompt='Does this test work (y)/(n)?', default='n') == 'n'
        e.execution = Execution()
        e.execution.aborted = True
        try:
            e.prompt_user_for_step(step=dep, prompt='Does this test work (y)/(n)?')
        except ExecutorAborted:
            pass
        else:
            assert False, 'Should have raised an ExecutorAborted exception if it was previously aborted'


def test_execute():
    dep = test_step.MockStep(name='mock_step')
    e = Executor(name='test_executor', dependencies=[dep])
    e.execute()
    assert dep.finished

    dep_named = test_step.MockStep(name='mock_step_named')
    dep = test_step.MockStep(name='mock_step', named_reference=dep_named)
    assert dep.named_reference is dep_named
    e = Executor(name='test_executor', dependencies=[dep])
    e.execute()
    assert dep_named.finished
    assert dep.finished

    dep_dep = test_step.MockStep(name='mock_step_dep')
    dep = test_step.MockStep(name='mock_step', dependencies=[dep_dep])
    assert dep.dependencies == {dep_dep}
    e = Executor(name='test_executor', dependencies=[dep])
    assert e.dependencies == {dep}
    e.execute()
    assert dep_named.finished
    assert dep.finished

    dep = test_step.MockStep(name='mock_step', run_exception=RuntimeError('Exception while running step'))
    e = Executor(name='test_executor', dependencies=[dep])
    try:
        e.execute()
    except RuntimeError:
        assert dep.failed
    else:
        assert False, "Should have thrown the error the step raised"


def test_execute_check_status_failure_in_step():
    dep = test_step.MockStep(name='mock_step', check_status_exception=TypeError("Exception while checking status"))
    e = Executor(name='test_executor', dependencies=[dep])
    try:
        e.execute()
    except CheckStatusException:
        assert dep.failed
    else:
        assert False, "Should have thrown a CheckStatusException on failure"

    dep = test_step.MockStep(name='mock_step', check_status_exception=TypeError("Exception while checking status"))
    dep2 = test_step.MockStep(name='mock_failing_step_parent', dependencies=[dep])
    e = Executor(name='test_executor', dependencies=[dep2])
    try:
        e.execute()
    except CheckStatusException:
        assert dep2.validated
        assert dep.failed
    else:
        assert False, "Should have thrown a CheckStatusException on failure"

    dep = test_step.MockStep(name='mock_step')

    def raise_error():
        raise RuntimeError("Exception while forwarding callback")

    dep.status.check = raise_error
    dep2 = test_step.MockStep(name='mock_failing_step_parent', dependencies=[dep])
    e = Executor(name='test_executor', dependencies=[dep2])
    try:
        e.execute()
    except CheckStatusException:
        assert dep2.validated
        assert dep.failed
    else:
        assert False, "Should have thrown a CheckStatusException on failure"


def test_execute_skip_failures():
    dep = test_step.MockStep(name='mock_sibling_step', run_exception=RuntimeError("test_run_exception"))
    dep2 = test_step.MockStep(name='mock_sibling_step2', run_exception=RuntimeError("test_run_exception"))
    successful_dep = test_step.MockStep(name='successful_dep')
    parent = test_step.MockStep(name='mock_parent_step', dependencies=[dep, dep2, successful_dep])
    assert parent.dependencies == {dep, dep2, successful_dep}
    successful_parent = test_step.MockStep(name='mock_successful_parent', dependencies=[successful_dep])
    e = Executor(name='test_executor', on_failure=Executor.SKIP, dependencies=[parent, successful_parent])
    e.execute()
    assert dep.failed
    assert dep2.failed
    assert successful_dep.finished
    assert parent.validated
    assert successful_parent.finished
    assert not e.execution.aborted


def test_execute_graceful_shutdown():
    dep = test_step.MockStep(name='mock_sibling_step', run_exception=RuntimeError("test_run_exception"))
    dep2 = test_step.MockStep(name='mock_sibling_step2')
    dep2.run = lambda: dep2.status.set_validated()
    successful_dep = test_step.MockStep(name='successful_dep')
    parent = test_step.MockStep(name='mock_parent_step', dependencies=[dep, dep2, successful_dep])
    assert parent.dependencies == {dep, dep2, successful_dep}
    successful_parent = test_step.MockStep(name='mock_successful_parent', dependencies=[successful_dep])
    e = Executor(name='test_executor', on_failure=Executor.GRACEFUL_SHUTDOWN, dependencies=[parent, successful_parent])
    e.execute()
    assert dep.status.failed
    assert dep2.status.finished or dep2.status.validated
    assert successful_dep.status.finished or successful_dep.status.validated
    assert parent.validated
    assert successful_parent.validated
    assert e.execution.aborted


def test_execute_graceful_shutdown_with_already_aborted_execution():
    dep = test_step.MockStep(name='mock_sibling_step')
    dep2 = test_step.MockStep(name='mock_sibling_step2', run_exception=RuntimeError("test_run_exception"))
    successful_dep = test_step.MockStep(name='successful_dep')
    parent = test_step.MockStep(name='mock_parent_step', dependencies=[dep, dep2, successful_dep])
    assert parent.dependencies == {dep, dep2, successful_dep}
    successful_parent = test_step.MockStep(name='mock_successful_parent', dependencies=[successful_dep])
    e = Executor(name='test_executor', on_failure=Executor.GRACEFUL_SHUTDOWN, dependencies=[parent, successful_parent])
    e.execution = Execution(executor=e)
    e.execution.aborted = True
    e.execute()
    assert dep.status.pending
    assert dep2.status.pending
    assert successful_dep.status.pending
    assert parent.status.pending
    assert successful_parent.status.pending


def test_prompting_during_execution():
    with patch(input_function) as mock_raw_input:
        dep = test_step.MockStep(name='mock_step', run_exception=RuntimeError('Exception while running step'))
        e = Executor(name='test_executor', on_failure=Executor.PROMPT, dependencies=[dep])
        global times_called
        times_called = 0

        def raw_input_output(*args, **kwargs):
            global times_called
            times_called += 1
            if times_called == 1:
                return 'r'
            elif times_called == 2:
                return 'f'
            assert times_called <= 2, "Called raw_input too many times"

        mock_raw_input.side_effect = raw_input_output
        e.execute()
        assert dep.status.finished
        assert times_called == 2


def test_execute_dry_run():
    dep = test_step.MockStep(name='dep')
    assert not dep.checked_ready
    assert dep.status.pending
    e = Executor(dry_run=True,dependencies=[dep])
    e.execute()
    assert dep.status.validated
    assert not dep.ran_once<|fim▁end|>
return responses[times_called - 1]
<|file_name|>v1_1_0.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:2e4cfe75feb71c39771595f8dea4f59e216650e0454f3f56a2a5b38a062b94cf<|fim▁hole|><|fim▁end|>
size 1360
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>## All rights reserved
##
#############################################

from pyxer.base import *

@controller
def index():
    return "/index"<|fim▁end|>
# -*- coding: UTF-8 -*-

#############################################
## (C)opyright by Dirk Holtwick, 2008
##
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate gcc;

<|fim▁hole|>fn main () {
    println!("cargo:rustc-flags=-lstdc++");
}<|fim▁end|>
<|file_name|>Main.js<|end_file_name|><|fim▁begin|>// @Bind #menu1.#menuItem2.onClick
!function() {
    dorado.MessageBox.alert("Dorado7.0 Quick Start");
};

// @Bind #tree1.onDataRowClick
!function(self) {
    // Define the tab variable
    var tab = {};
    // self is the control the event belongs to; here it is the Tree object
    // self.get("currentNode") returns the node that was clicked.
    with (self.get("currentNode")) {
        // Make the current tab an IFrame tab
        tab.$type = "IFrame";
        // Set the new tab's caption
        tab.caption = get("label");
        // Set the tab's path
        // get("userData") reads the current node's UserData property,
        // i.e. the sample.chapter01.HelloWorld.d value set earlier
        tab.path = get("userData");
        tab.name = get("label");
        tab.closeable = true;
    }
    // If the current node has a path, open a new tab
    if (tab.path) {
        with (view.get("#tabControl")) {
            // Look the tab up by name to see whether it is already open.
            // If it has not been opened yet, a new tab has to be added
            var currentTab = getTab(tab.name);
            if (currentTab) {
                tab = currentTab;
            } else {
                // Get the object whose ID is tabControl and add a new tab
                // Make the newly created tab the current tab of tabControl
<|fim▁hole|>
                tab = addTab(tab);
            }
            // Make the specified tab the current tab
            set("currentTab", tab);
        }
    }
};<|fim▁end|>
<|file_name|>test_users.py<|end_file_name|><|fim▁begin|>"""
Tests for the LTI user management functionality
"""
import string

from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from django.test.client import RequestFactory
from mock import MagicMock, patch

import lti_provider.users as users
from lti_provider.models import LtiConsumer, LtiUser
from student.tests.factories import UserFactory


class UserManagementHelperTest(TestCase):
    """
    Tests for the helper functions in users.py
    """
    def setUp(self):
        super(UserManagementHelperTest, self).setUp()
        self.request = RequestFactory().post('/')
        self.old_user = UserFactory.create()
        self.new_user = UserFactory.create()
        self.new_user.save()
        self.request.user = self.old_user
        self.lti_consumer = LtiConsumer(
            consumer_name='TestConsumer',
            consumer_key='TestKey',
            consumer_secret='TestSecret'
        )
        self.lti_consumer.save()
        self.lti_user = LtiUser(
            lti_user_id='lti_user_id',
            edx_user=self.new_user
        )

    @patch('django.contrib.auth.authenticate', return_value=None)
    def test_permission_denied_for_unknown_user(self, _authenticate_mock):
        with self.assertRaises(PermissionDenied):
            users.switch_user(self.request, self.lti_user, self.lti_consumer)

    @patch('lti_provider.users.login')
    def test_authenticate_called(self, _login_mock):
        with patch('lti_provider.users.authenticate', return_value=self.new_user) as authenticate:
            users.switch_user(self.request, self.lti_user, self.lti_consumer)
            authenticate.assert_called_with(
                username=self.new_user.username,
                lti_user_id=self.lti_user.lti_user_id,
                lti_consumer=self.lti_consumer
            )

    @patch('lti_provider.users.login')
    def test_login_called(self, login_mock):
        with patch('lti_provider.users.authenticate', return_value=self.new_user):
            users.switch_user(self.request, self.lti_user, self.lti_consumer)
            login_mock.assert_called_with(self.request, self.new_user)

    def test_random_username_generator(self):
        for _idx in range(1000):
            username = users.generate_random_edx_username()
            self.assertLessEqual(len(username), 30, 'Username too long')
            # Check that the username contains only allowable characters
            for char in range(len(username)):
                self.assertIn(
                    username[char],
                    string.ascii_letters + string.digits,
                    "Username has forbidden character '{}'".format(username[char])
                )


@patch('lti_provider.users.switch_user', autospec=True)
@patch('lti_provider.users.create_lti_user', autospec=True)
class AuthenticateLtiUserTest(TestCase):
    """
    Tests for the authenticate_lti_user function in users.py
    """
    def setUp(self):
        super(AuthenticateLtiUserTest, self).setUp()
        self.lti_consumer = LtiConsumer(
            consumer_name='TestConsumer',
            consumer_key='TestKey',
            consumer_secret='TestSecret'
        )
        self.lti_consumer.save()
        self.lti_user_id = 'lti_user_id'
        self.edx_user_id = 'edx_user_id'
        self.old_user = UserFactory.create()
        self.request = RequestFactory().post('/')
        self.request.user = self.old_user

    def create_lti_user_model(self):
        """
        Generate and save a User and an LTI user model
        """
        edx_user = User(username=self.edx_user_id)
        edx_user.save()
        lti_user = LtiUser(
            lti_consumer=self.lti_consumer,
            lti_user_id=self.lti_user_id,
            edx_user=edx_user
        )
        lti_user.save()
        return lti_user

    def test_authentication_with_new_user(self, _create_user, switch_user):
        lti_user = MagicMock()
        lti_user.edx_user_id = self.edx_user_id
        with patch('lti_provider.users.create_lti_user', return_value=lti_user) as create_user:
            users.authenticate_lti_user(self.request, self.lti_user_id, self.lti_consumer)
            create_user.assert_called_with(self.lti_user_id, self.lti_consumer)
            switch_user.assert_called_with(self.request, lti_user, self.lti_consumer)

    def test_authentication_with_authenticated_user(self, create_user, switch_user):
        lti_user = self.create_lti_user_model()
        self.request.user = lti_user.edx_user
        self.request.user.is_authenticated = MagicMock(return_value=True)
        users.authenticate_lti_user(self.request, self.lti_user_id, self.lti_consumer)
        self.assertFalse(create_user.called)
        self.assertFalse(switch_user.called)

    def test_authentication_with_unauthenticated_user(self, create_user, switch_user):
        lti_user = self.create_lti_user_model()
        self.request.user = lti_user.edx_user
        self.request.user.is_authenticated = MagicMock(return_value=False)
        users.authenticate_lti_user(self.request, self.lti_user_id, self.lti_consumer)
        self.assertFalse(create_user.called)
        switch_user.assert_called_with(self.request, lti_user, self.lti_consumer)

    def test_authentication_with_wrong_user(self, create_user, switch_user):
        lti_user = self.create_lti_user_model()
        self.request.user = self.old_user
        self.request.user.is_authenticated = MagicMock(return_value=True)
        users.authenticate_lti_user(self.request, self.lti_user_id, self.lti_consumer)
        self.assertFalse(create_user.called)
        switch_user.assert_called_with(self.request, lti_user, self.lti_consumer)


class CreateLtiUserTest(TestCase):
    """
    Tests for the create_lti_user function in users.py
    """
    def setUp(self):
        super(CreateLtiUserTest, self).setUp()
        self.lti_consumer = LtiConsumer(
            consumer_name='TestConsumer',
            consumer_key='TestKey',
            consumer_secret='TestSecret'
        )
        self.lti_consumer.save()

    def test_create_lti_user_creates_auth_user_model(self):
        users.create_lti_user('lti_user_id', self.lti_consumer)
        self.assertEqual(User.objects.count(), 1)

    @patch('uuid.uuid4', return_value='random_uuid')
    @patch('lti_provider.users.generate_random_edx_username', return_value='edx_id')
    def test_create_lti_user_creates_correct_user(self, uuid_mock, _username_mock):
        users.create_lti_user('lti_user_id', self.lti_consumer)
        self.assertEqual(User.objects.count(), 1)
        user = User.objects.get(username='edx_id')
        self.assertEqual(user.email, '[email protected]')
        uuid_mock.assert_called_with()

    @patch('lti_provider.users.generate_random_edx_username', side_effect=['edx_id', 'new_edx_id'])
    def test_unique_username_created(self, username_mock):
        User(username='edx_id').save()
        users.create_lti_user('lti_user_id', self.lti_consumer)
        self.assertEqual(username_mock.call_count, 2)
        self.assertEqual(User.objects.count(), 2)
        user = User.objects.get(username='new_edx_id')
        self.assertEqual(user.email, '[email protected]')


class LtiBackendTest(TestCase):
    """
    Tests for the authentication backend that authenticates LTI users.
    """
    def setUp(self):
        super(LtiBackendTest, self).setUp()
        self.edx_user = UserFactory.create()
        self.edx_user.save()
        self.lti_consumer = LtiConsumer(
            consumer_key="Consumer Key",
            consumer_secret="Consumer Secret"
        )
        self.lti_consumer.save()
        self.lti_user_id = 'LTI User ID'
        LtiUser(
            lti_consumer=self.lti_consumer,
            lti_user_id=self.lti_user_id,
            edx_user=self.edx_user
        ).save()

    def test_valid_user_authenticates(self):
        user = users.LtiBackend().authenticate(
            username=self.edx_user.username,
            lti_user_id=self.lti_user_id,
            lti_consumer=self.lti_consumer
        )
        self.assertEqual(user, self.edx_user)

    def test_missing_user_returns_none(self):
        user = users.LtiBackend().authenticate(
            username=self.edx_user.username,
            lti_user_id='Invalid Username',
            lti_consumer=self.lti_consumer
        )
        self.assertIsNone(user)

    def test_non_lti_user_returns_none(self):
        non_edx_user = UserFactory.create()
        non_edx_user.save()
        user = users.LtiBackend().authenticate(
            username=non_edx_user.username,
        )
        self.assertIsNone(user)

    def test_missing_lti_id_returns_null(self):
        user = users.LtiBackend().authenticate(
            username=self.edx_user.username,
            lti_consumer=self.lti_consumer
        )
        self.assertIsNone(user)

    def test_missing_lti_consumer_returns_null(self):
        user = users.LtiBackend().authenticate(
            username=self.edx_user.username,
            lti_user_id=self.lti_user_id,
        )
        self.assertIsNone(user)

    def test_existing_user_returned_by_get_user(self):
        user = users.LtiBackend().get_user(self.edx_user.id)
        self.assertEqual(user, self.edx_user)

    def test_get_user_returns_none_for_invalid_user(self):<|fim▁hole|><|fim▁end|>
user = users.LtiBackend().get_user(-1)
self.assertIsNone(user)
<|file_name|>mem.go<|end_file_name|><|fim▁begin|>package capn

import (
	"bytes"
	"encoding/binary"
	"errors"
	"io"
	"math"
)

var (
	errBufferCall     = errors.New("capn: can't call on a memory buffer")
	ErrInvalidSegment = errors.New("capn: invalid segment id")
	ErrTooMuchData    = errors.New("capn: too much data in stream")
)

type buffer Segment

// NewBuffer creates an expanding single segment buffer. Creating new objects
// will expand the buffer. Data can be nil (or length 0 with some capacity) if
// creating a new session. If parsing an existing segment then data should be
// the segment contents and will not be copied.
func NewBuffer(data []byte) *Segment {
	if uint64(len(data)) > uint64(math.MaxUint32) {
		return nil
	}

	b := &buffer{}
	b.Message = b
	b.Data = data
	return (*Segment)(b)
}

func (b *buffer) NewSegment(minsz int) (*Segment, error) {
	if minsz < 4096 {
		minsz = 4096
	}
	if uint64(len(b.Data)) > uint64(math.MaxUint32)-uint64(minsz) {
		return nil, ErrOverlarge
	}

	b.Data = append(b.Data, make([]byte, minsz)...)
	b.Data = b.Data[:len(b.Data)-minsz]
	return (*Segment)(b), nil
}

func (b *buffer) Lookup(segid uint32) (*Segment, error) {
	if segid == 0 {
		return (*Segment)(b), nil
	} else {
		return nil, ErrInvalidSegment
	}
}

type MultiBuffer struct {
	Segments []*Segment
}

// NewMultiBuffer creates a new multi segment message. Creating new objects
// will try and reuse the buffers available, but will create new ones if there
// is insufficient capacity. When parsing an existing message data should be
// the list of segments. The data buffers will not be copied.
func NewMultiBuffer(data [][]byte) *Segment {
	m := &MultiBuffer{make([]*Segment, len(data))}
	for i, d := range data {
		m.Segments[i] = &Segment{m, d, uint32(i), false}
	}
	if len(data) > 0 {
		return m.Segments[0]
	}
	return &Segment{Message: m, Data: nil, Id: 0xFFFFFFFF, RootDone: false}
}

var (
	MaxSegmentNumber = 1024
	MaxTotalSize     = 1024 * 1024 * 1024
)

func (m *MultiBuffer) NewSegment(minsz int) (*Segment, error) {
	for _, s := range m.Segments {
		if len(s.Data)+minsz <= cap(s.Data) {
			return s, nil
		}<|fim▁hole|>

	if minsz < 4096 {
		minsz = 4096
	}
	s := &Segment{m, make([]byte, 0, minsz), uint32(len(m.Segments)), false}
	m.Segments = append(m.Segments, s)
	return s, nil
}

func (m *MultiBuffer) Lookup(segid uint32) (*Segment, error) {
	if uint(segid) < uint(len(m.Segments)) {
		return m.Segments[segid], nil
	} else {
		return nil, ErrInvalidSegment
	}
}

// ReadFromStream reads a non-packed serialized stream from r. buf is used to
// buffer the read contents, can be nil, and is provided so that the buffer
// can be reused between messages. The returned segment is the first segment
// read, which contains the root pointer.
//
// Warning about buf reuse: It is safer to just pass nil for buf.
// When making multiple calls to ReadFromStream() with the same buf argument, you
// may overwrite the data in a previously returned Segment.
// The re-use of buf is an optimization for when you are actually
// done with any previously returned Segment which may have data still alive
// in buf.
//
func ReadFromStream(r io.Reader, buf *bytes.Buffer) (*Segment, error) {
	if buf == nil {
		buf = new(bytes.Buffer)
	} else {
		buf.Reset()
	}

	if _, err := io.CopyN(buf, r, 4); err != nil {
		return nil, err
	}

	if binary.LittleEndian.Uint32(buf.Bytes()[:]) >= uint32(MaxSegmentNumber) {
		return nil, ErrTooMuchData
	}

	segnum := int(binary.LittleEndian.Uint32(buf.Bytes()[:]) + 1)
	hdrsz := 8*(segnum/2) + 4

	if _, err := io.CopyN(buf, r, int64(hdrsz)); err != nil {
		return nil, err
	}

	total := 0
	for i := 0; i < segnum; i++ {
		sz := binary.LittleEndian.Uint32(buf.Bytes()[4*i+4:])
		if uint64(total)+uint64(sz)*8 > uint64(MaxTotalSize) {
			return nil, ErrTooMuchData
		}
		total += int(sz) * 8
	}

	if _, err := io.CopyN(buf, r, int64(total)); err != nil {
		return nil, err
	}

	hdrv := buf.Bytes()[4 : hdrsz+4]
	datav := buf.Bytes()[hdrsz+4:]

	if segnum == 1 {
		sz := int(binary.LittleEndian.Uint32(hdrv)) * 8
		return NewBuffer(datav[:sz]), nil
	}

	m := &MultiBuffer{make([]*Segment, segnum)}
	for i := 0; i < segnum; i++ {
		sz := int(binary.LittleEndian.Uint32(hdrv[4*i:])) * 8
		m.Segments[i] = &Segment{m, datav[:sz], uint32(i), false}
		datav = datav[sz:]
	}

	return m.Segments[0], nil
}

// ReadFromMemoryZeroCopy: like ReadFromStream, but reads a non-packed
// serialized stream that already resides in memory in the argument data.
// The returned segment is the first segment read, which contains
// the root pointer. The returned bytesRead says how many bytes were
// consumed from data in making seg. The caller should advance the
// data slice by doing data = data[bytesRead:] between successive calls
// to ReadFromMemoryZeroCopy().
func ReadFromMemoryZeroCopy(data []byte) (seg *Segment, bytesRead int64, err error) {

	if len(data) < 4 {
		return nil, 0, io.EOF
	}

	if binary.LittleEndian.Uint32(data[0:4]) >= uint32(MaxSegmentNumber) {
		return nil, 0, ErrTooMuchData
	}

	segnum := int(binary.LittleEndian.Uint32(data[0:4]) + 1)
	hdrsz := 8*(segnum/2) + 4

	b := data[0:(hdrsz + 4)]

	total := 0
	for i := 0; i < segnum; i++ {
		sz := binary.LittleEndian.Uint32(b[4*i+4:])
		if uint64(total)+uint64(sz)*8 > uint64(MaxTotalSize) {
			return nil, 0, ErrTooMuchData
		}
		total += int(sz) * 8
	}
	if total == 0 {
		return nil, 0, io.EOF
	}

	hdrv := data[4:(hdrsz + 4)]
	datav := data[hdrsz+4:]
	m := &MultiBuffer{make([]*Segment, segnum)}
	for i := 0; i < segnum; i++ {
		sz := int(binary.LittleEndian.Uint32(hdrv[4*i:])) * 8
		m.Segments[i] = &Segment{m, datav[:sz], uint32(i), false}
		datav = datav[sz:]
	}
	return m.Segments[0], int64(4 + hdrsz + total), nil
}

func NewSingleSegmentMultiBuffer() *MultiBuffer {
	m := &MultiBuffer{make([]*Segment, 1)}
	m.Segments[0] = &Segment{}
	return m
}

// ReadFromMemoryZeroCopyNoAlloc: like ReadFromMemoryZeroCopy,
// but avoid all allocations so we get zero GC pressure.
//
// This requires some strict but easy to meet pre-requisites:
//
// PRE: the capnp bytes in data must come from only one segment. Else we panic.
// PRE: multi must point to an existing MultiBuffer that has exactly one Segment
// that will be re-used and over-written. If in doubt,
// you can allocate a correct new one the first time
// by calling NewSingleSegmentMultiBuffer().
//
func ReadFromMemoryZeroCopyNoAlloc(data []byte, multi *MultiBuffer) (bytesRead int64, err error) {

	if len(data) < 4 {
		return 0, io.EOF
	}

	if binary.LittleEndian.Uint32(data[0:4]) >= uint32(MaxSegmentNumber) {
		return 0, ErrTooMuchData
	}

	segnum := int(binary.LittleEndian.Uint32(data[0:4]) + 1)
	if segnum != 1 {
		panic("only one segment allowed in data read in with ReadFromMemoryZeroCopyNoAlloc()")
	}
	if multi == nil {
		panic("multi must point to an existing MultiBuffer with a single Segment")
	}
	if len(multi.Segments) != 1 {
		panic("only one segment allowed in the multi *MultiBuffer used in ReadFromMemoryZeroCopyNoAlloc()")
	}
	if multi.Segments[0] == nil {
		panic("multi.Segment[0] must point to an allocated Segment{} to be resused in ReadFromMemoryZeroCopyNoAlloc()")
	}

	hdrsz := 8*(segnum/2) + 4
	b := data[0:(hdrsz + 4)]

	total := 0
	for i := 0; i < segnum; i++ {
		sz := binary.LittleEndian.Uint32(b[4*i+4:])
		if uint64(total)+uint64(sz)*8 > uint64(MaxTotalSize) {
			return 0, ErrTooMuchData
		}
		total += int(sz) * 8
	}
	if total == 0 {
		return 0, io.EOF
	}

	hdrv := data[4:(hdrsz + 4)]
	datav := data[hdrsz+4:]
	sz := int(binary.LittleEndian.Uint32(hdrv)) * 8
	seg := multi.Segments[0]
	seg.Message = multi
	seg.Data = datav[:sz]
	seg.Id = 0
	seg.RootDone = false
	datav = datav[sz:]

	return int64(4 + hdrsz + total), nil
}

// WriteTo writes the message that the segment is part of to the
// provided stream in serialized form.
func (s *Segment) WriteTo(w io.Writer) (int64, error) {
	segnum := uint32(1)
	for {
		if seg, _ := s.Message.Lookup(segnum); seg == nil {
			break
		}
		segnum++
	}

	hdrv := make([]uint8, 8*(segnum/2)+8)
	binary.LittleEndian.PutUint32(hdrv, segnum-1)
	for i := uint32(0); i < segnum; i++ {
		seg, _ := s.Message.Lookup(i)
		binary.LittleEndian.PutUint32(hdrv[4*i+4:], uint32(len(seg.Data)/8))
	}

	if n, err := w.Write(hdrv); err != nil {
		return int64(n), err
	}
	written := int64(len(hdrv))

	for i := uint32(0); i < segnum; i++ {
		seg, _ := s.Message.Lookup(i)
		if n, err := w.Write(seg.Data); err != nil {
			return written + int64(n), err
		} else {
			written += int64(n)
		}
	}

	return written, nil
}<|fim▁end|>
}
<|file_name|>fs.ts<|end_file_name|><|fim▁begin|>export interface FS {<|fim▁hole|>
  /** check if a file exists asynchronously */
  exists: (filepath: string) => Promise<boolean>;
  /** check if a file exists synchronously */
  existsSync: (filepath: string) => boolean;
  /** read a file asynchronously */
  readFile: (filepath: string) => Promise<string>;
  /** read a file synchronously */
  readFileSync: (filepath: string) => string;
  /** resolve a file against directory, for given `ext` option */
  resolve: (dir: string, file: string, ext: string) => string;
  /** check if file is contained in `root`, always return `true` by default. Warning: not setting this could expose path traversal vulnerabilities. */
  contains?: (root: string, file: string) => boolean;
  /** defaults to "/" */
  sep?: string;
  /** required for relative path resolving */
  dirname?: (file: string) => string;
  /** fallback file for lookup failure */
  fallback?: (file: string) => string | undefined;
}<|fim▁end|>
<|file_name|>FakeQuantWithMinMaxVars.java<|end_file_name|><|fim▁begin|>package org.nd4j.linalg.api.ops.impl.transforms.custom;

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.base.Preconditions;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.DynamicCustomOp;
import org.tensorflow.framework.AttrValue;
import org.tensorflow.framework.GraphDef;
import org.tensorflow.framework.NodeDef;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Fake quantization operation.
 * Quantized into range [0, 2^numBits - 1] when narrowRange is false, or [1, 2^numBits - 1] when narrowRange is true.
 * Note that numBits must be in range 2 to 16 (inclusive).
 * @author Alex Black
 */
public class FakeQuantWithMinMaxVars extends DynamicCustomOp {

    protected boolean narrowRange;
    protected int numBits;

    public FakeQuantWithMinMaxVars(SameDiff sd, SDVariable input, SDVariable min, SDVariable max, boolean narrowRange, int numBits){
        super(sd, new SDVariable[]{input, min, max});
        Preconditions.checkState(numBits >= 2 && numBits <= 16, "NumBits arg must be in range 2 to 16 inclusive, got %s", numBits);
        this.narrowRange = narrowRange;
        this.numBits = numBits;
        addArgs();
    }

    public FakeQuantWithMinMaxVars(INDArray x, INDArray min, INDArray max, int num_bits, boolean narrow) {
        Preconditions.checkArgument(min.isVector() && max.isVector() && min.length() == max.length(),
                "FakeQuantWithMinMaxVars: min and max should be 1D tensors with the same length");
        addInputArgument(x,min,max);
        addIArgument(num_bits);
        addBArgument(narrow);
    }

    public FakeQuantWithMinMaxVars(){ }

    protected void addArgs(){
        iArguments.clear();
        bArguments.clear();
        addIArgument(numBits);
        addBArgument(narrowRange);
    }

    @Override
    public String opName(){
        return "fake_quant_with_min_max_vars";
    }

    @Override
    public String tensorflowName(){
        return "FakeQuantWithMinMaxVars";
    }

    @Override
    public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
        if(attributesForNode.containsKey("narrow_range")){
            this.narrowRange = attributesForNode.get("narrow_range").getB();
        }
        this.numBits = (int)attributesForNode.get("num_bits").getI();
        addArgs();
    }

    @Override<|fim▁hole|>
    public List<DataType> calculateOutputDataTypes(List<DataType> inputDataTypes){
        Preconditions.checkState(inputDataTypes != null && inputDataTypes.size() == 3, "Expected exactly 3 inputs, got %s", inputDataTypes);
        return Collections.singletonList(inputDataTypes.get(0));
    }

    @Override
    public List<SDVariable> doDiff(List<SDVariable> gradients){
        return Arrays.asList(sameDiff.zerosLike(arg(0)), sameDiff.zerosLike(arg(1)), sameDiff.zerosLike(arg(2)));
    }
}<|fim▁end|>
<|file_name|>coerce-date-property.ts<|end_file_name|><|fim▁begin|>/**
 * @license
 * Copyright Google LLC All Rights Reserved.<|fim▁hole|>
 */

import {DateAdapter} from '@angular/material/core';

/**
 * Function that attempts to coerce a value to a date using a DateAdapter. Date instances, null,
 * and undefined will be passed through. Empty strings will be coerced to null. Valid ISO 8601
 * strings (https://www.ietf.org/rfc/rfc3339.txt) will be coerced to dates. All other values will
 * result in an error being thrown.
 * @param adapter The date adapter to use for coercion
 * @param value The value to coerce.
 * @return A date object coerced from the value.
 * @throws Throws when the value cannot be coerced.
 */
export function coerceDateProperty<D>(adapter: DateAdapter<D>, value: any): D | null {
  if (typeof value === 'string') {
    if (value == '') {
      value = null;
    } else {
      value = adapter.fromIso8601(value) || value;
    }
  }
  if (value == null || adapter.isDateInstance(value)) {
    return value;
  }
  throw Error(`Datepicker: Value must be either a date object recognized by the DateAdapter or ` +
      `an ISO 8601 string. Instead got: ${value}`);
}<|fim▁end|>
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
<|file_name|>TextRank4Keyword.py<|end_file_name|><|fim▁begin|>#-*- encoding:utf-8 -*- ''' Created on Nov 30, 2014 @author: letian ''' import networkx as nx from Segmentation import Segmentation import numpy as np class TextRank4Keyword(object): def __init__(self, stop_words_file = None, delimiters = '?!;?!。;…\n'): ''' `stop_words_file`: defaults to None, in which case the internal stop-word list is empty; it may be set to a file path (string), and stop words will then be loaded from that file. `delimiters`: defaults to `'?!;?!。;…\n'`; used to split the text into sentences. self.words_no_filter: a two-level list obtained by word-segmenting every sentence. self.words_no_stop_words: a two-level list obtained by removing stop words from words_no_filter. self.words_all_filters: a two-level list obtained by keeping only the words in words_no_stop_words with the specified parts of speech. ''' self.text = '' self.keywords = [] self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.words_no_filter = None # 2-D list self.words_no_stop_words = None self.words_all_filters = None self.word_index = {} self.index_word = {} self.graph = None def train(self, text, window = 2, lower = False, speech_tag_filter=True, vertex_source = 'all_filters', edge_source = 'no_stop_words'): ''' `text`: the text content, a string. `window`: window size, int, used to build edges between words. Defaults to 2. `lower`: whether to convert the text to lowercase. Defaults to False. `speech_tag_filter`: if True, an internal part-of-speech list is used to filter words when building words_all_filters. If False, words_all_filters is identical to words_no_stop_words. `vertex_source`: which of words_no_filter, words_no_stop_words, words_all_filters supplies the nodes of the PageRank graph. Defaults to `'all_filters'`; valid values are `'no_filter', 'no_stop_words', 'all_filters'`. Keywords are also drawn from `vertex_source`. `edge_source`: which of words_no_filter, words_no_stop_words, words_all_filters supplies the edges between nodes of the PageRank graph. Defaults to `'no_stop_words'`; valid values are `'no_filter', 'no_stop_words', 'all_filters'`. Edge construction is combined with the `window` parameter. ''' self.text = text self.word_index = {} self.index_word = {} self.keywords = [] self.graph = None (_, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter) if vertex_source == 'no_filter': vertex_source = self.words_no_filter elif vertex_source == 'no_stop_words': vertex_source = self.words_no_stop_words else: vertex_source = self.words_all_filters if edge_source == 'no_filter': edge_source = self.words_no_filter elif edge_source == 'all_filters': edge_source = self.words_all_filters else: edge_source = self.words_no_stop_words index = 0 for words in vertex_source: for word in words: if word not in self.word_index: self.word_index[word] = index self.index_word[index] = word index += 1 words_number = index # total number of words self.graph = np.zeros((words_number, words_number)) <|fim▁hole|> for word_list in edge_source: for w1, w2 in self.combine(word_list, window): if w1 not in self.word_index: continue if w2 not in self.word_index: continue index1 = self.word_index[w1] index2 = self.word_index[w2] self.graph[index1][index2] = 1.0 self.graph[index2][index1] = 1.0 # for x in xrange(words_number): # row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :] = self.graph[x, :] / row_sum nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) for index, _ in sorted_scores: self.keywords.append(self.index_word[index]) def combine(self, word_list, window = 2): ''' Build the word pairs within the given window, used to construct edges between words. Implemented as a generator. word_list: a list of words. window: window size. ''' window = int(window) if window < 2: window = 2 for x in xrange(1, window): if x >= len(word_list): break word_list2 = word_list[x:] res = zip(word_list, word_list2) for r in res: yield r def get_keywords(self, num = 6, word_min_len = 1): '''
Get the num most important keywords whose length is at least word_min_len. Returns a list of keywords. ''' result = [] count = 0 for word in self.keywords: if count >= num: break if len(word) >= word_min_len: result.append(word) count += 1 return result def get_keyphrases(self, keywords_num = 12, min_occur_num = 2): ''' Get key phrases. Uses keywords_num keywords to build the candidate phrases; a phrase must occur at least min_occur_num times in the original text. Returns a list of key phrases. ''' keywords_set = set(self.get_keywords(num=keywords_num, word_min_len = 1)) keyphrases = set() one = [] for sentence_list in self.words_no_filter: for word in sentence_list: # print '/'.join(one) # print word if word in keywords_set: one.append(word) else: if len(one)>1: keyphrases.add(''.join(one)) one = [] continue one = [] return [phrase for phrase in keyphrases if self.text.count(phrase) >= min_occur_num] if __name__ == '__main__': import codecs text = codecs.open('../text/02.txt', 'r', 'utf-8').read() # text = "坏人" tr4w = TextRank4Keyword(stop_words_file='../stopword.data') tr4w.train(text=text, speech_tag_filter=True, lower=True, window=2) for word in tr4w.get_keywords(10, word_min_len=2): print word print '---' for phrase in tr4w.get_keyphrases(keywords_num=20, min_occur_num=2): print phrase<|fim▁end|>
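The train()/combine() logic above builds an undirected word co-occurrence graph and ranks its nodes with PageRank. Below is a minimal self-contained sketch of that idea (illustrative only: the helper name window_pairs and the sample tokens are not part of the original file, and networkx is assumed to be installed).

# Sketch: window-based co-occurrence edges + PageRank ranking, mirroring combine()/train().
import networkx as nx

def window_pairs(words, window=2):
    # Pair each word with the following (window - 1) words, like combine().
    window = max(int(window), 2)
    for offset in range(1, window):
        for pair in zip(words, words[offset:]):
            yield pair

tokens = ["text", "rank", "keyword", "text", "graph", "rank"]
graph = nx.Graph()
graph.add_edges_from(window_pairs(tokens, window=2))
scores = nx.pagerank(graph)  # dict mapping word -> importance score
for word, score in sorted(scores.items(), key=lambda kv: kv[1], reverse=True):
    print(word, round(score, 3))

With window=2 only adjacent tokens are linked, so well-connected words rise to the top of the ranking; larger windows add edges between words up to window - 1 positions apart.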
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from test_support import * <|fim▁hole|>prove_all(prover=["plop"], opt=["--why3-conf=test.conf"])<|fim▁end|>
# This test calls a prover which is correctly configured but whose execution # gives an error (here: the prover executable doesn't exist). The intent is to # test the output of gnatprove in this specific case.
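For context, the failure mode this test provokes is simply that the configured prover's executable cannot be run. A tiny illustrative Python check of that condition follows (not part of the test suite; the exception message is a placeholder, not gnatprove's real output):

# Sketch: detect a prover whose executable is missing from PATH.
import shutil

def locate_prover(name):
    path = shutil.which(name)  # None when no executable called 'name' is on PATH
    if path is None:
        raise FileNotFoundError("prover executable not found: %s" % name)
    return path

try:
    locate_prover("plop")  # matches the deliberately bogus prover above
except FileNotFoundError as exc:
    print(exc)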
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::fs::File; use std::io::{Read, Write}; use system::scheme::{Packet, Scheme}; extern crate system; <|fim▁hole|>} fn main() { //In order to handle example:, we create :example let mut scheme = File::create(":example").unwrap(); loop { let mut packet = Packet::default(); if scheme.read(&mut packet).unwrap() == 0 { panic!("Unexpected EOF"); } println!("Received: {:?}", packet); packet.a = 0; scheme.write(&packet).unwrap(); } }<|fim▁end|>
struct ExampleScheme; impl Scheme for ExampleScheme {
<|file_name|>LightAttribute.java<|end_file_name|><|fim▁begin|>package es.upm.oeg.farolapi.model; import lombok.Data; import lombok.ToString; import java.util.Arrays; import java.util.List; /** * Created on 23/05/16: * * @author cbadenes */ @Data @ToString (callSuper = true) public class LightAttribute extends Attribute { @Override public List<String> getRange() { return Arrays.asList(new String[]{"P", "F", "E", "AA", "AC", "ER", "O"});<|fim▁hole|><|fim▁end|>
} }
<|file_name|>test_filters.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import User from django.test import TestCase from dcim.choices import * from dcim.filters import * from dcim.models import ( Cable, ConsolePort, ConsolePortTemplate, ConsoleServerPort, ConsoleServerPortTemplate, Device, DeviceBay, DeviceBayTemplate, DeviceRole, DeviceType, FrontPort, FrontPortTemplate, Interface, InterfaceTemplate, InventoryItem, Manufacturer, Platform, PowerFeed, PowerPanel, PowerPort, PowerPortTemplate, PowerOutlet, PowerOutletTemplate, Rack, RackGroup, RackReservation, RackRole, RearPort, RearPortTemplate, Region, Site, VirtualChassis, ) from ipam.models import IPAddress from tenancy.models import Tenant, TenantGroup from virtualization.models import Cluster, ClusterType class RegionTestCase(TestCase): queryset = Region.objects.all() filterset = RegionFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1', description='A'), Region(name='Region 2', slug='region-2', description='B'), Region(name='Region 3', slug='region-3', description='C'), ) for region in regions: region.save() child_regions = ( Region(name='Region 1A', slug='region-1a', parent=regions[0]), Region(name='Region 1B', slug='region-1b', parent=regions[0]), Region(name='Region 2A', slug='region-2a', parent=regions[1]), Region(name='Region 2B', slug='region-2b', parent=regions[1]), Region(name='Region 3A', slug='region-3a', parent=regions[2]), Region(name='Region 3B', slug='region-3b', parent=regions[2]), ) for region in child_regions: region.save() def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Region 1', 'Region 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['region-1', 'region-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_parent(self): parent_regions = Region.objects.filter(parent__isnull=True)[:2] params = {'parent_id': [parent_regions[0].pk, parent_regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'parent': [parent_regions[0].slug, parent_regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) class SiteTestCase(TestCase): queryset = Site.objects.all() filterset = SiteFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() tenant_groups = ( TenantGroup(name='Tenant group 1', slug='tenant-group-1'), TenantGroup(name='Tenant group 2', slug='tenant-group-2'), TenantGroup(name='Tenant group 3', slug='tenant-group-3'), ) for tenantgroup in tenant_groups: tenantgroup.save() tenants = ( Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]), Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]), Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]), ) Tenant.objects.bulk_create(tenants) sites = ( Site(name='Site 1', slug='site-1', region=regions[0], tenant=tenants[0], status=SiteStatusChoices.STATUS_ACTIVE, facility='Facility 1', asn=65001, latitude=10, longitude=10, contact_name='Contact 1', contact_phone='123-555-0001', 
contact_email='contact1@example.com'), Site(name='Site 2', slug='site-2', region=regions[1], tenant=tenants[1], status=SiteStatusChoices.STATUS_PLANNED, facility='Facility 2', asn=65002, latitude=20, longitude=20, contact_name='Contact 2', contact_phone='123-555-0002', contact_email='contact2@example.com'), Site(name='Site 3', slug='site-3', region=regions[2], tenant=tenants[2], status=SiteStatusChoices.STATUS_RETIRED, facility='Facility 3', asn=65003, latitude=30, longitude=30, contact_name='Contact 3', contact_phone='123-555-0003', contact_email='contact3@example.com'), ) Site.objects.bulk_create(sites) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Site 1', 'Site 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['site-1', 'site-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_facility(self): params = {'facility': ['Facility 1', 'Facility 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_asn(self): params = {'asn': [65001, 65002]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_latitude(self): params = {'latitude': [10, 20]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_longitude(self): params = {'longitude': [10, 20]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_contact_name(self): params = {'contact_name': ['Contact 1', 'Contact 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_contact_phone(self): params = {'contact_phone': ['123-555-0001', '123-555-0002']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_contact_email(self): params = {'contact_email': ['contact1@example.com', 'contact2@example.com']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_status(self): params = {'status': [SiteStatusChoices.STATUS_ACTIVE, SiteStatusChoices.STATUS_PLANNED]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant(self): tenants = Tenant.objects.all()[:2] params = {'tenant_id': [tenants[0].pk, tenants[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant': [tenants[0].slug, tenants[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant_group(self): tenant_groups = TenantGroup.objects.all()[:2] params = {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class RackGroupTestCase(TestCase): queryset = RackGroup.objects.all() filterset = RackGroupFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1',
region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) parent_rack_groups = ( RackGroup(name='Parent Rack Group 1', slug='parent-rack-group-1', site=sites[0]), RackGroup(name='Parent Rack Group 2', slug='parent-rack-group-2', site=sites[1]), RackGroup(name='Parent Rack Group 3', slug='parent-rack-group-3', site=sites[2]), ) for rackgroup in parent_rack_groups: rackgroup.save() rack_groups = ( RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0], parent=parent_rack_groups[0], description='A'), RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1], parent=parent_rack_groups[1], description='B'), RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2], parent=parent_rack_groups[2], description='C'), ) for rackgroup in rack_groups: rackgroup.save() def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Rack Group 1', 'Rack Group 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['rack-group-1', 'rack-group-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_parent(self): parent_groups = RackGroup.objects.filter(name__startswith='Parent')[:2] params = {'parent_id': [parent_groups[0].pk, parent_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'parent': [parent_groups[0].slug, parent_groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class RackRoleTestCase(TestCase): queryset = RackRole.objects.all() filterset = RackRoleFilterSet @classmethod def setUpTestData(cls): rack_roles = ( RackRole(name='Rack Role 1', slug='rack-role-1', color='ff0000'), RackRole(name='Rack Role 2', slug='rack-role-2', color='00ff00'), RackRole(name='Rack Role 3', slug='rack-role-3', color='0000ff'), ) RackRole.objects.bulk_create(rack_roles) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Rack Role 1', 'Rack Role 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['rack-role-1', 'rack-role-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_color(self): params = {'color': ['ff0000', '00ff00']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class RackTestCase(TestCase): queryset = Rack.objects.all() filterset = RackFilterSet @classmethod def setUpTestData(cls): regions = ( 
Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) rack_groups = ( RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]), RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]), RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]), ) for rackgroup in rack_groups: rackgroup.save() rack_roles = ( RackRole(name='Rack Role 1', slug='rack-role-1'), RackRole(name='Rack Role 2', slug='rack-role-2'), RackRole(name='Rack Role 3', slug='rack-role-3'), ) RackRole.objects.bulk_create(rack_roles) tenant_groups = ( TenantGroup(name='Tenant group 1', slug='tenant-group-1'), TenantGroup(name='Tenant group 2', slug='tenant-group-2'), TenantGroup(name='Tenant group 3', slug='tenant-group-3'), ) for tenantgroup in tenant_groups: tenantgroup.save() tenants = ( Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]), Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]), Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]), ) Tenant.objects.bulk_create(tenants) racks = ( Rack(name='Rack 1', facility_id='rack-1', site=sites[0], group=rack_groups[0], tenant=tenants[0], status=RackStatusChoices.STATUS_ACTIVE, role=rack_roles[0], serial='ABC', asset_tag='1001', type=RackTypeChoices.TYPE_2POST, width=RackWidthChoices.WIDTH_19IN, u_height=42, desc_units=False, outer_width=100, outer_depth=100, outer_unit=RackDimensionUnitChoices.UNIT_MILLIMETER), Rack(name='Rack 2', facility_id='rack-2', site=sites[1], group=rack_groups[1], tenant=tenants[1], status=RackStatusChoices.STATUS_PLANNED, role=rack_roles[1], serial='DEF', asset_tag='1002', type=RackTypeChoices.TYPE_4POST, width=RackWidthChoices.WIDTH_21IN, u_height=43, desc_units=False, outer_width=200, outer_depth=200, outer_unit=RackDimensionUnitChoices.UNIT_MILLIMETER), Rack(name='Rack 3', facility_id='rack-3', site=sites[2], group=rack_groups[2], tenant=tenants[2], status=RackStatusChoices.STATUS_RESERVED, role=rack_roles[2], serial='GHI', asset_tag='1003', type=RackTypeChoices.TYPE_CABINET, width=RackWidthChoices.WIDTH_23IN, u_height=44, desc_units=True, outer_width=300, outer_depth=300, outer_unit=RackDimensionUnitChoices.UNIT_INCH), ) Rack.objects.bulk_create(racks) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Rack 1', 'Rack 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_facility_id(self): params = {'facility_id': ['rack-1', 'rack-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_asset_tag(self): params = {'asset_tag': ['1001', '1002']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): params = {'type': [RackTypeChoices.TYPE_2POST, RackTypeChoices.TYPE_4POST]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_width(self): params = {'width': [RackWidthChoices.WIDTH_19IN, RackWidthChoices.WIDTH_21IN]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_u_height(self): params = {'u_height': [42, 43]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) 
def test_desc_units(self): params = {'desc_units': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'desc_units': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_outer_width(self): params = {'outer_width': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_outer_depth(self): params = {'outer_depth': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_outer_unit(self): self.assertEqual(Rack.objects.filter(outer_unit__isnull=False).count(), 3) params = {'outer_unit': RackDimensionUnitChoices.UNIT_MILLIMETER} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_group(self): groups = RackGroup.objects.all()[:2] params = {'group_id': [groups[0].pk, groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'group': [groups[0].slug, groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_status(self): params = {'status': [RackStatusChoices.STATUS_ACTIVE, RackStatusChoices.STATUS_PLANNED]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_role(self): roles = RackRole.objects.all()[:2] params = {'role_id': [roles[0].pk, roles[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'role': [roles[0].slug, roles[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_serial(self): params = {'serial': 'ABC'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'serial': 'abc'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_tenant(self): tenants = Tenant.objects.all()[:2] params = {'tenant_id': [tenants[0].pk, tenants[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant': [tenants[0].slug, tenants[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant_group(self): tenant_groups = TenantGroup.objects.all()[:2] params = {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class RackReservationTestCase(TestCase): queryset = RackReservation.objects.all() filterset = RackReservationFilterSet @classmethod def setUpTestData(cls): sites = ( Site(name='Site 1', slug='site-1'), Site(name='Site 2', slug='site-2'), Site(name='Site 3', slug='site-3'), ) Site.objects.bulk_create(sites) rack_groups = ( RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]), RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]), RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]), ) for rackgroup 
in rack_groups: rackgroup.save() racks = ( Rack(name='Rack 1', site=sites[0], group=rack_groups[0]), Rack(name='Rack 2', site=sites[1], group=rack_groups[1]), Rack(name='Rack 3', site=sites[2], group=rack_groups[2]), ) Rack.objects.bulk_create(racks) users = ( User(username='User 1'), User(username='User 2'), User(username='User 3'), ) User.objects.bulk_create(users) tenant_groups = ( TenantGroup(name='Tenant group 1', slug='tenant-group-1'), TenantGroup(name='Tenant group 2', slug='tenant-group-2'), TenantGroup(name='Tenant group 3', slug='tenant-group-3'), ) for tenantgroup in tenant_groups: tenantgroup.save() tenants = ( Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]), Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]), Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]), ) Tenant.objects.bulk_create(tenants) reservations = ( RackReservation(rack=racks[0], units=[1, 2, 3], user=users[0], tenant=tenants[0]), RackReservation(rack=racks[1], units=[4, 5, 6], user=users[1], tenant=tenants[1]), RackReservation(rack=racks[2], units=[7, 8, 9], user=users[2], tenant=tenants[2]), ) RackReservation.objects.bulk_create(reservations) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_group(self): groups = RackGroup.objects.all()[:2] params = {'group_id': [groups[0].pk, groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'group': [groups[0].slug, groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_user(self): users = User.objects.all()[:2] params = {'user_id': [users[0].pk, users[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'user': [users[0].username, users[1].username]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant(self): tenants = Tenant.objects.all()[:2] params = {'tenant_id': [tenants[0].pk, tenants[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant': [tenants[0].slug, tenants[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant_group(self): tenant_groups = TenantGroup.objects.all()[:2] params = {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class ManufacturerTestCase(TestCase): queryset = Manufacturer.objects.all() filterset = ManufacturerFilterSet @classmethod def setUpTestData(cls): manufacturers = ( Manufacturer(name='Manufacturer 1', slug='manufacturer-1', description='A'), Manufacturer(name='Manufacturer 2', slug='manufacturer-2', description='B'), Manufacturer(name='Manufacturer 3', slug='manufacturer-3', description='C'), ) Manufacturer.objects.bulk_create(manufacturers) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': 
['Manufacturer 1', 'Manufacturer 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['manufacturer-1', 'manufacturer-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class DeviceTypeTestCase(TestCase): queryset = DeviceType.objects.all() filterset = DeviceTypeFilterSet @classmethod def setUpTestData(cls): manufacturers = ( Manufacturer(name='Manufacturer 1', slug='manufacturer-1'), Manufacturer(name='Manufacturer 2', slug='manufacturer-2'), Manufacturer(name='Manufacturer 3', slug='manufacturer-3'), ) Manufacturer.objects.bulk_create(manufacturers) device_types = ( DeviceType(manufacturer=manufacturers[0], model='Model 1', slug='model-1', part_number='Part Number 1', u_height=1, is_full_depth=True), DeviceType(manufacturer=manufacturers[1], model='Model 2', slug='model-2', part_number='Part Number 2', u_height=2, is_full_depth=True, subdevice_role=SubdeviceRoleChoices.ROLE_PARENT), DeviceType(manufacturer=manufacturers[2], model='Model 3', slug='model-3', part_number='Part Number 3', u_height=3, is_full_depth=False, subdevice_role=SubdeviceRoleChoices.ROLE_CHILD), ) DeviceType.objects.bulk_create(device_types) # Add component templates for filtering ConsolePortTemplate.objects.bulk_create(( ConsolePortTemplate(device_type=device_types[0], name='Console Port 1'), ConsolePortTemplate(device_type=device_types[1], name='Console Port 2'), )) ConsoleServerPortTemplate.objects.bulk_create(( ConsoleServerPortTemplate(device_type=device_types[0], name='Console Server Port 1'), ConsoleServerPortTemplate(device_type=device_types[1], name='Console Server Port 2'), )) PowerPortTemplate.objects.bulk_create(( PowerPortTemplate(device_type=device_types[0], name='Power Port 1'), PowerPortTemplate(device_type=device_types[1], name='Power Port 2'), )) PowerOutletTemplate.objects.bulk_create(( PowerOutletTemplate(device_type=device_types[0], name='Power Outlet 1'), PowerOutletTemplate(device_type=device_types[1], name='Power Outlet 2'), )) InterfaceTemplate.objects.bulk_create(( InterfaceTemplate(device_type=device_types[0], name='Interface 1'), InterfaceTemplate(device_type=device_types[1], name='Interface 2'), )) rear_ports = ( RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C), RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C), ) RearPortTemplate.objects.bulk_create(rear_ports) FrontPortTemplate.objects.bulk_create(( FrontPortTemplate(device_type=device_types[0], name='Front Port 1', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[0]), FrontPortTemplate(device_type=device_types[1], name='Front Port 2', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[1]), )) DeviceBayTemplate.objects.bulk_create(( DeviceBayTemplate(device_type=device_types[0], name='Device Bay 1'), DeviceBayTemplate(device_type=device_types[1], name='Device Bay 2'), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_model(self): params = {'model': ['Model 1', 'Model 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['model-1', 'model-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_part_number(self): params = 
{'part_number': ['Part Number 1', 'Part Number 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_u_height(self): params = {'u_height': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_is_full_depth(self): params = {'is_full_depth': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'is_full_depth': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_subdevice_role(self): params = {'subdevice_role': SubdeviceRoleChoices.ROLE_PARENT} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_manufacturer(self): manufacturers = Manufacturer.objects.all()[:2] params = {'manufacturer_id': [manufacturers[0].pk, manufacturers[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'manufacturer': [manufacturers[0].slug, manufacturers[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_console_ports(self): params = {'console_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'console_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_console_server_ports(self): params = {'console_server_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'console_server_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_power_ports(self): params = {'power_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'power_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_power_outlets(self): params = {'power_outlets': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'power_outlets': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_interfaces(self): params = {'interfaces': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'interfaces': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_pass_through_ports(self): params = {'pass_through_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'pass_through_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_device_bays(self): params = {'device_bays': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device_bays': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class ConsolePortTemplateTestCase(TestCase): queryset = ConsolePortTemplate.objects.all() filterset = ConsolePortTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) ConsolePortTemplate.objects.bulk_create(( ConsolePortTemplate(device_type=device_types[0], name='Console Port 1'), ConsolePortTemplate(device_type=device_types[1], name='Console Port 2'), ConsolePortTemplate(device_type=device_types[2], name='Console Port 3'), )) def test_id(self): params = {'id': 
self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Console Port 1', 'Console Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class ConsoleServerPortTemplateTestCase(TestCase): queryset = ConsoleServerPortTemplate.objects.all() filterset = ConsoleServerPortTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) ConsoleServerPortTemplate.objects.bulk_create(( ConsoleServerPortTemplate(device_type=device_types[0], name='Console Server Port 1'), ConsoleServerPortTemplate(device_type=device_types[1], name='Console Server Port 2'), ConsoleServerPortTemplate(device_type=device_types[2], name='Console Server Port 3'), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Console Server Port 1', 'Console Server Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class PowerPortTemplateTestCase(TestCase): queryset = PowerPortTemplate.objects.all() filterset = PowerPortTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) PowerPortTemplate.objects.bulk_create(( PowerPortTemplate(device_type=device_types[0], name='Power Port 1', maximum_draw=100, allocated_draw=50), PowerPortTemplate(device_type=device_types[1], name='Power Port 2', maximum_draw=200, allocated_draw=100), PowerPortTemplate(device_type=device_types[2], name='Power Port 3', maximum_draw=300, allocated_draw=150), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Power Port 1', 'Power Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_maximum_draw(self): params = {'maximum_draw': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_allocated_draw(self): params = {'allocated_draw': [50, 100]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class 
PowerOutletTemplateTestCase(TestCase): queryset = PowerOutletTemplate.objects.all() filterset = PowerOutletTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) PowerOutletTemplate.objects.bulk_create(( PowerOutletTemplate(device_type=device_types[0], name='Power Outlet 1', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A), PowerOutletTemplate(device_type=device_types[1], name='Power Outlet 2', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_B), PowerOutletTemplate(device_type=device_types[2], name='Power Outlet 3', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_C), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Power Outlet 1', 'Power Outlet 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_feed_leg(self): # TODO: Support filtering for multiple values params = {'feed_leg': PowerOutletFeedLegChoices.FEED_LEG_A} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class InterfaceTemplateTestCase(TestCase): queryset = InterfaceTemplate.objects.all() filterset = InterfaceTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) InterfaceTemplate.objects.bulk_create(( InterfaceTemplate(device_type=device_types[0], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED, mgmt_only=True), InterfaceTemplate(device_type=device_types[1], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_GBIC, mgmt_only=False), InterfaceTemplate(device_type=device_types[2], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_SFP, mgmt_only=False), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Interface 1', 'Interface 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): # TODO: Support filtering for multiple values params = {'type': InterfaceTypeChoices.TYPE_1GE_FIXED} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_mgmt_only(self): params = {'mgmt_only': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'mgmt_only': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class 
FrontPortTemplateTestCase(TestCase): queryset = FrontPortTemplate.objects.all() filterset = FrontPortTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) rear_ports = ( RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C), RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C), RearPortTemplate(device_type=device_types[2], name='Rear Port 3', type=PortTypeChoices.TYPE_8P8C), ) RearPortTemplate.objects.bulk_create(rear_ports) FrontPortTemplate.objects.bulk_create(( FrontPortTemplate(device_type=device_types[0], name='Front Port 1', rear_port=rear_ports[0], type=PortTypeChoices.TYPE_8P8C), FrontPortTemplate(device_type=device_types[1], name='Front Port 2', rear_port=rear_ports[1], type=PortTypeChoices.TYPE_110_PUNCH), FrontPortTemplate(device_type=device_types[2], name='Front Port 3', rear_port=rear_ports[2], type=PortTypeChoices.TYPE_BNC), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Front Port 1', 'Front Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): # TODO: Support filtering for multiple values params = {'type': PortTypeChoices.TYPE_8P8C} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class RearPortTemplateTestCase(TestCase): queryset = RearPortTemplate.objects.all() filterset = RearPortTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) RearPortTemplate.objects.bulk_create(( RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C, positions=1), RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_110_PUNCH, positions=2), RearPortTemplate(device_type=device_types[2], name='Rear Port 3', type=PortTypeChoices.TYPE_BNC, positions=3), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Rear Port 1', 'Rear Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): # TODO: Support filtering for multiple values params = {'type': PortTypeChoices.TYPE_8P8C} 
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_positions(self): params = {'positions': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class DeviceBayTemplateTestCase(TestCase): queryset = DeviceBayTemplate.objects.all() filterset = DeviceBayTemplateFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_types = ( DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'), DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'), DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'), ) DeviceType.objects.bulk_create(device_types) DeviceBayTemplate.objects.bulk_create(( DeviceBayTemplate(device_type=device_types[0], name='Device Bay 1'), DeviceBayTemplate(device_type=device_types[1], name='Device Bay 2'), DeviceBayTemplate(device_type=device_types[2], name='Device Bay 3'), )) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Device Bay 1', 'Device Bay 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype_id(self): device_types = DeviceType.objects.all()[:2] params = {'devicetype_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class DeviceRoleTestCase(TestCase): queryset = DeviceRole.objects.all() filterset = DeviceRoleFilterSet @classmethod def setUpTestData(cls): device_roles = ( DeviceRole(name='Device Role 1', slug='device-role-1', color='ff0000', vm_role=True), DeviceRole(name='Device Role 2', slug='device-role-2', color='00ff00', vm_role=True), DeviceRole(name='Device Role 3', slug='device-role-3', color='0000ff', vm_role=False), ) DeviceRole.objects.bulk_create(device_roles) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Device Role 1', 'Device Role 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['device-role-1', 'device-role-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_color(self): params = {'color': ['ff0000', '00ff00']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_vm_role(self): params = {'vm_role': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'vm_role': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class PlatformTestCase(TestCase): queryset = Platform.objects.all() filterset = PlatformFilterSet @classmethod def setUpTestData(cls): manufacturers = ( Manufacturer(name='Manufacturer 1', slug='manufacturer-1'), Manufacturer(name='Manufacturer 2', slug='manufacturer-2'), Manufacturer(name='Manufacturer 3', slug='manufacturer-3'), ) Manufacturer.objects.bulk_create(manufacturers) platforms = ( Platform(name='Platform 1', slug='platform-1', manufacturer=manufacturers[0], napalm_driver='driver-1', description='A'), Platform(name='Platform 2', slug='platform-2', manufacturer=manufacturers[1], napalm_driver='driver-2', description='B'), Platform(name='Platform 3', slug='platform-3', manufacturer=manufacturers[2], napalm_driver='driver-3', description='C'), ) Platform.objects.bulk_create(platforms) 
def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Platform 1', 'Platform 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_slug(self): params = {'slug': ['platform-1', 'platform-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_napalm_driver(self): params = {'napalm_driver': ['driver-1', 'driver-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_manufacturer(self): manufacturers = Manufacturer.objects.all()[:2] params = {'manufacturer_id': [manufacturers[0].pk, manufacturers[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'manufacturer': [manufacturers[0].slug, manufacturers[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class DeviceTestCase(TestCase): queryset = Device.objects.all() filterset = DeviceFilterSet @classmethod def setUpTestData(cls): manufacturers = ( Manufacturer(name='Manufacturer 1', slug='manufacturer-1'), Manufacturer(name='Manufacturer 2', slug='manufacturer-2'), Manufacturer(name='Manufacturer 3', slug='manufacturer-3'), ) Manufacturer.objects.bulk_create(manufacturers) device_types = ( DeviceType(manufacturer=manufacturers[0], model='Model 1', slug='model-1', is_full_depth=True), DeviceType(manufacturer=manufacturers[1], model='Model 2', slug='model-2', is_full_depth=True), DeviceType(manufacturer=manufacturers[2], model='Model 3', slug='model-3', is_full_depth=False), ) DeviceType.objects.bulk_create(device_types) device_roles = ( DeviceRole(name='Device Role 1', slug='device-role-1'), DeviceRole(name='Device Role 2', slug='device-role-2'), DeviceRole(name='Device Role 3', slug='device-role-3'), ) DeviceRole.objects.bulk_create(device_roles) platforms = ( Platform(name='Platform 1', slug='platform-1'), Platform(name='Platform 2', slug='platform-2'), Platform(name='Platform 3', slug='platform-3'), ) Platform.objects.bulk_create(platforms) regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) rack_groups = ( RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]), RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]), RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]), ) for rackgroup in rack_groups: rackgroup.save() racks = ( Rack(name='Rack 1', site=sites[0], group=rack_groups[0]), Rack(name='Rack 2', site=sites[1], group=rack_groups[1]), Rack(name='Rack 3', site=sites[2], group=rack_groups[2]), ) Rack.objects.bulk_create(racks) cluster_type = ClusterType.objects.create(name='Cluster Type 1', slug='cluster-type-1') clusters = ( Cluster(name='Cluster 1', type=cluster_type), Cluster(name='Cluster 2', type=cluster_type), Cluster(name='Cluster 3', type=cluster_type), ) Cluster.objects.bulk_create(clusters) tenant_groups = ( TenantGroup(name='Tenant group 1', slug='tenant-group-1'), TenantGroup(name='Tenant group 2', slug='tenant-group-2'), 
TenantGroup(name='Tenant group 3', slug='tenant-group-3'), ) for tenantgroup in tenant_groups: tenantgroup.save() tenants = ( Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]), Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]), Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]), ) Tenant.objects.bulk_create(tenants) devices = ( Device(name='Device 1', device_type=device_types[0], device_role=device_roles[0], platform=platforms[0], tenant=tenants[0], serial='ABC', asset_tag='1001', site=sites[0], rack=racks[0], position=1, face=DeviceFaceChoices.FACE_FRONT, status=DeviceStatusChoices.STATUS_ACTIVE, cluster=clusters[0], local_context_data={"foo": 123}), Device(name='Device 2', device_type=device_types[1], device_role=device_roles[1], platform=platforms[1], tenant=tenants[1], serial='DEF', asset_tag='1002', site=sites[1], rack=racks[1], position=2, face=DeviceFaceChoices.FACE_FRONT, status=DeviceStatusChoices.STATUS_STAGED, cluster=clusters[1]), Device(name='Device 3', device_type=device_types[2], device_role=device_roles[2], platform=platforms[2], tenant=tenants[2], serial='GHI', asset_tag='1003', site=sites[2], rack=racks[2], position=3, face=DeviceFaceChoices.FACE_REAR, status=DeviceStatusChoices.STATUS_FAILED, cluster=clusters[2]), ) Device.objects.bulk_create(devices) # Add components for filtering ConsolePort.objects.bulk_create(( ConsolePort(device=devices[0], name='Console Port 1'), ConsolePort(device=devices[1], name='Console Port 2'), )) ConsoleServerPort.objects.bulk_create(( ConsoleServerPort(device=devices[0], name='Console Server Port 1'), ConsoleServerPort(device=devices[1], name='Console Server Port 2'), )) PowerPort.objects.bulk_create(( PowerPort(device=devices[0], name='Power Port 1'), PowerPort(device=devices[1], name='Power Port 2'), )) PowerOutlet.objects.bulk_create(( PowerOutlet(device=devices[0], name='Power Outlet 1'), PowerOutlet(device=devices[1], name='Power Outlet 2'), )) interfaces = ( Interface(device=devices[0], name='Interface 1', mac_address='00-00-00-00-00-01'), Interface(device=devices[1], name='Interface 2', mac_address='00-00-00-00-00-02'), ) Interface.objects.bulk_create(interfaces) rear_ports = ( RearPort(device=devices[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C), RearPort(device=devices[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C), ) RearPort.objects.bulk_create(rear_ports) FrontPort.objects.bulk_create(( FrontPort(device=devices[0], name='Front Port 1', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[0]), FrontPort(device=devices[1], name='Front Port 2', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[1]), )) DeviceBay.objects.bulk_create(( DeviceBay(device=devices[0], name='Device Bay 1'), DeviceBay(device=devices[1], name='Device Bay 2'), )) # Assign primary IPs for filtering ipaddresses = ( IPAddress(address='192.0.2.1/24', assigned_object=interfaces[0]), IPAddress(address='192.0.2.2/24', assigned_object=interfaces[1]), ) IPAddress.objects.bulk_create(ipaddresses) Device.objects.filter(pk=devices[0].pk).update(primary_ip4=ipaddresses[0]) Device.objects.filter(pk=devices[1].pk).update(primary_ip4=ipaddresses[1]) # VirtualChassis assignment for filtering virtual_chassis = VirtualChassis.objects.create(master=devices[0]) Device.objects.filter(pk=devices[0].pk).update(virtual_chassis=virtual_chassis, vc_position=1, vc_priority=1) Device.objects.filter(pk=devices[1].pk).update(virtual_chassis=virtual_chassis, vc_position=2, vc_priority=2) def test_id(self): params = {'id': 
self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Device 1', 'Device 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_asset_tag(self): params = {'asset_tag': ['1001', '1002']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_face(self): params = {'face': DeviceFaceChoices.FACE_FRONT} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_position(self): params = {'position': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_vc_position(self): params = {'vc_position': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_vc_priority(self): params = {'vc_priority': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_manufacturer(self): manufacturers = Manufacturer.objects.all()[:2] params = {'manufacturer_id': [manufacturers[0].pk, manufacturers[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'manufacturer': [manufacturers[0].slug, manufacturers[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicetype(self): device_types = DeviceType.objects.all()[:2] params = {'device_type_id': [device_types[0].pk, device_types[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_devicerole(self): device_roles = DeviceRole.objects.all()[:2] params = {'role_id': [device_roles[0].pk, device_roles[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'role': [device_roles[0].slug, device_roles[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_platform(self): platforms = Platform.objects.all()[:2] params = {'platform_id': [platforms[0].pk, platforms[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'platform': [platforms[0].slug, platforms[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_rackgroup(self): rack_groups = RackGroup.objects.all()[:2] params = {'rack_group_id': [rack_groups[0].pk, rack_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_rack(self): racks = Rack.objects.all()[:2] params = {'rack_id': [racks[0].pk, racks[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cluster(self): clusters = Cluster.objects.all()[:2] params = {'cluster_id': [clusters[0].pk, clusters[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_model(self): params = {'model': ['model-1', 'model-2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_status(self): params = {'status': [DeviceStatusChoices.STATUS_ACTIVE, 
DeviceStatusChoices.STATUS_STAGED]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_is_full_depth(self): params = {'is_full_depth': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'is_full_depth': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_mac_address(self): params = {'mac_address': ['00-00-00-00-00-01', '00-00-00-00-00-02']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_serial(self): params = {'serial': 'ABC'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'serial': 'abc'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_has_primary_ip(self): params = {'has_primary_ip': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'has_primary_ip': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_virtual_chassis_id(self): params = {'virtual_chassis_id': [VirtualChassis.objects.first().pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_virtual_chassis_member(self): params = {'virtual_chassis_member': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'virtual_chassis_member': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_console_ports(self): params = {'console_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'console_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_console_server_ports(self): params = {'console_server_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'console_server_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_power_ports(self): params = {'power_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'power_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_power_outlets(self):<|fim▁hole|> params = {'power_outlets': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'power_outlets': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_interfaces(self): params = {'interfaces': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'interfaces': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_pass_through_ports(self): params = {'pass_through_ports': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'pass_through_ports': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_device_bays(self): params = {'device_bays': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device_bays': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_local_context_data(self): params = {'local_context_data': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'local_context_data': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant(self): tenants = Tenant.objects.all()[:2] params = {'tenant_id': [tenants[0].pk, tenants[1].pk]} self.assertEqual(self.filterset(params, 
self.queryset).qs.count(), 2) params = {'tenant': [tenants[0].slug, tenants[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_tenant_group(self): tenant_groups = TenantGroup.objects.all()[:2] params = {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class ConsolePortTestCase(TestCase): queryset = ConsolePort.objects.all() filterset = ConsolePortFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) console_server_ports = ( ConsoleServerPort(device=devices[3], name='Console Server Port 1'), ConsoleServerPort(device=devices[3], name='Console Server Port 2'), ) ConsoleServerPort.objects.bulk_create(console_server_ports) console_ports = ( ConsolePort(device=devices[0], name='Console Port 1', label='A', description='First'), ConsolePort(device=devices[1], name='Console Port 2', label='B', description='Second'), ConsolePort(device=devices[2], name='Console Port 3', label='C', description='Third'), ) ConsolePort.objects.bulk_create(console_ports) # Cables Cable(termination_a=console_ports[0], termination_b=console_server_ports[0]).save() Cable(termination_a=console_ports[1], termination_b=console_server_ports[1]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Console Port 1', 'Console Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, 
regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class ConsoleServerPortTestCase(TestCase): queryset = ConsoleServerPort.objects.all() filterset = ConsoleServerPortFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) console_ports = ( ConsolePort(device=devices[3], name='Console Server Port 1'), ConsolePort(device=devices[3], name='Console Server Port 2'), ) ConsolePort.objects.bulk_create(console_ports) console_server_ports = ( ConsoleServerPort(device=devices[0], name='Console Server Port 1', label='A', description='First'), ConsoleServerPort(device=devices[1], name='Console Server Port 2', label='B', description='Second'), ConsoleServerPort(device=devices[2], name='Console Server Port 3', label='C', description='Third'), ) ConsoleServerPort.objects.bulk_create(console_server_ports) # Cables Cable(termination_a=console_server_ports[0], termination_b=console_ports[0]).save() Cable(termination_a=console_server_ports[1], termination_b=console_ports[1]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Console Server Port 1', 'Console Server Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) 
def test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class PowerPortTestCase(TestCase): queryset = PowerPort.objects.all() filterset = PowerPortFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) power_outlets = ( PowerOutlet(device=devices[3], name='Power Outlet 1'), PowerOutlet(device=devices[3], name='Power Outlet 2'), ) PowerOutlet.objects.bulk_create(power_outlets) power_ports = ( PowerPort(device=devices[0], name='Power Port 1', label='A', maximum_draw=100, allocated_draw=50, description='First'), PowerPort(device=devices[1], name='Power Port 2', label='B', maximum_draw=200, allocated_draw=100, description='Second'), PowerPort(device=devices[2], name='Power Port 3', label='C', maximum_draw=300, allocated_draw=150, description='Third'), ) PowerPort.objects.bulk_create(power_ports) # Cables Cable(termination_a=power_ports[0], termination_b=power_outlets[0]).save() Cable(termination_a=power_ports[1], termination_b=power_outlets[1]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) 
def test_name(self): params = {'name': ['Power Port 1', 'Power Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_maximum_draw(self): params = {'maximum_draw': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_allocated_draw(self): params = {'allocated_draw': [50, 100]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class PowerOutletTestCase(TestCase): queryset = PowerOutlet.objects.all() filterset = PowerOutletFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) power_ports = ( PowerPort(device=devices[3], name='Power Outlet 1'), PowerPort(device=devices[3], name='Power Outlet 2'), ) PowerPort.objects.bulk_create(power_ports) power_outlets = ( PowerOutlet(device=devices[0], name='Power Outlet 1', label='A', 
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A, description='First'), PowerOutlet(device=devices[1], name='Power Outlet 2', label='B', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_B, description='Second'), PowerOutlet(device=devices[2], name='Power Outlet 3', label='C', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_C, description='Third'), ) PowerOutlet.objects.bulk_create(power_outlets) # Cables Cable(termination_a=power_outlets[0], termination_b=power_ports[0]).save() Cable(termination_a=power_outlets[1], termination_b=power_ports[1]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Power Outlet 1', 'Power Outlet 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_feed_leg(self): # TODO: Support filtering for multiple values params = {'feed_leg': PowerOutletFeedLegChoices.FEED_LEG_A} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class InterfaceTestCase(TestCase): queryset = Interface.objects.all() filterset = InterfaceFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( 
Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) interfaces = ( Interface(device=devices[0], name='Interface 1', label='A', type=InterfaceTypeChoices.TYPE_1GE_SFP, enabled=True, mgmt_only=True, mtu=100, mode=InterfaceModeChoices.MODE_ACCESS, mac_address='00-00-00-00-00-01', description='First'), Interface(device=devices[1], name='Interface 2', label='B', type=InterfaceTypeChoices.TYPE_1GE_GBIC, enabled=True, mgmt_only=True, mtu=200, mode=InterfaceModeChoices.MODE_TAGGED, mac_address='00-00-00-00-00-02', description='Second'), Interface(device=devices[2], name='Interface 3', label='C', type=InterfaceTypeChoices.TYPE_1GE_FIXED, enabled=False, mgmt_only=False, mtu=300, mode=InterfaceModeChoices.MODE_TAGGED_ALL, mac_address='00-00-00-00-00-03', description='Third'), Interface(device=devices[3], name='Interface 4', label='D', type=InterfaceTypeChoices.TYPE_OTHER, enabled=True, mgmt_only=True), Interface(device=devices[3], name='Interface 5', label='E', type=InterfaceTypeChoices.TYPE_OTHER, enabled=True, mgmt_only=True), Interface(device=devices[3], name='Interface 6', label='F', type=InterfaceTypeChoices.TYPE_OTHER, enabled=False, mgmt_only=False), ) Interface.objects.bulk_create(interfaces) # Cables Cable(termination_a=interfaces[0], termination_b=interfaces[3]).save() Cable(termination_a=interfaces[1], termination_b=interfaces[4]).save() # Third pair is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Interface 1', 'Interface 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_enabled(self): params = {'enabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'enabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_mtu(self): params = {'mtu': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_mgmt_only(self): params = {'mgmt_only': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'mgmt_only': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_mode(self): params = {'mode': InterfaceModeChoices.MODE_ACCESS} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, 
self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_kind(self): params = {'kind': 'physical'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 6) params = {'kind': 'virtual'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 0) def test_mac_address(self): params = {'mac_address': ['00-00-00-00-00-01', '00-00-00-00-00-02']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): params = {'type': [InterfaceTypeChoices.TYPE_1GE_FIXED, InterfaceTypeChoices.TYPE_1GE_GBIC]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class FrontPortTestCase(TestCase): queryset = FrontPort.objects.all() filterset = FrontPortFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) rear_ports = ( RearPort(device=devices[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C, positions=6), RearPort(device=devices[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C, positions=6), RearPort(device=devices[2], name='Rear Port 3', type=PortTypeChoices.TYPE_8P8C, positions=6), RearPort(device=devices[3], name='Rear Port 4', type=PortTypeChoices.TYPE_8P8C, positions=6), RearPort(device=devices[3], name='Rear Port 5', type=PortTypeChoices.TYPE_8P8C, positions=6), RearPort(device=devices[3], name='Rear Port 6', type=PortTypeChoices.TYPE_8P8C, positions=6), ) RearPort.objects.bulk_create(rear_ports) front_ports = ( FrontPort(device=devices[0], name='Front Port 1', label='A', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[0], rear_port_position=1, description='First'), FrontPort(device=devices[1], name='Front Port 2', label='B', 
type=PortTypeChoices.TYPE_110_PUNCH, rear_port=rear_ports[1], rear_port_position=2, description='Second'), FrontPort(device=devices[2], name='Front Port 3', label='C', type=PortTypeChoices.TYPE_BNC, rear_port=rear_ports[2], rear_port_position=3, description='Third'), FrontPort(device=devices[3], name='Front Port 4', label='D', type=PortTypeChoices.TYPE_FC, rear_port=rear_ports[3], rear_port_position=1), FrontPort(device=devices[3], name='Front Port 5', label='E', type=PortTypeChoices.TYPE_FC, rear_port=rear_ports[4], rear_port_position=1), FrontPort(device=devices[3], name='Front Port 6', label='F', type=PortTypeChoices.TYPE_FC, rear_port=rear_ports[5], rear_port_position=1), ) FrontPort.objects.bulk_create(front_ports) # Cables Cable(termination_a=front_ports[0], termination_b=front_ports[3]).save() Cable(termination_a=front_ports[1], termination_b=front_ports[4]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Front Port 1', 'Front Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): # TODO: Test for multiple values params = {'type': PortTypeChoices.TYPE_8P8C} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class RearPortTestCase(TestCase): queryset = RearPort.objects.all() filterset = RearPortFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = 
DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), Device(name=None, device_type=device_type, device_role=device_role, site=sites[3]), # For cable connections ) Device.objects.bulk_create(devices) rear_ports = ( RearPort(device=devices[0], name='Rear Port 1', label='A', type=PortTypeChoices.TYPE_8P8C, positions=1, description='First'), RearPort(device=devices[1], name='Rear Port 2', label='B', type=PortTypeChoices.TYPE_110_PUNCH, positions=2, description='Second'), RearPort(device=devices[2], name='Rear Port 3', label='C', type=PortTypeChoices.TYPE_BNC, positions=3, description='Third'), RearPort(device=devices[3], name='Rear Port 4', label='D', type=PortTypeChoices.TYPE_FC, positions=4), RearPort(device=devices[3], name='Rear Port 5', label='E', type=PortTypeChoices.TYPE_FC, positions=5), RearPort(device=devices[3], name='Rear Port 6', label='F', type=PortTypeChoices.TYPE_FC, positions=6), ) RearPort.objects.bulk_create(rear_ports) # Cables Cable(termination_a=rear_ports[0], termination_b=rear_ports[3]).save() Cable(termination_a=rear_ports[1], termination_b=rear_ports[4]).save() # Third port is not connected def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Rear Port 1', 'Rear Port 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_type(self): # TODO: Test for multiple values params = {'type': PortTypeChoices.TYPE_8P8C} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_positions(self): params = {'positions': [1, 2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class DeviceBayTestCase(TestCase): queryset = DeviceBay.objects.all() filterset = DeviceBayFilterSet @classmethod def setUpTestData(cls): 
regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = Site.objects.bulk_create(( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), Site(name='Site X', slug='site-x'), )) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), ) Device.objects.bulk_create(devices) device_bays = ( DeviceBay(device=devices[0], name='Device Bay 1', label='A', description='First'), DeviceBay(device=devices[1], name='Device Bay 2', label='B', description='Second'), DeviceBay(device=devices[2], name='Device Bay 3', label='C', description='Third'), ) DeviceBay.objects.bulk_create(device_bays) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Device Bay 1', 'Device Bay 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_description(self): params = {'description': ['First', 'Second']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class InventoryItemTestCase(TestCase): queryset = InventoryItem.objects.all() filterset = InventoryItemFilterSet @classmethod def setUpTestData(cls): manufacturers = ( Manufacturer(name='Manufacturer 1', slug='manufacturer-1'), Manufacturer(name='Manufacturer 2', slug='manufacturer-2'), Manufacturer(name='Manufacturer 3', slug='manufacturer-3'), ) Manufacturer.objects.bulk_create(manufacturers) device_type = DeviceType.objects.create(manufacturer=manufacturers[0], model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() 
sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[1]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[2]), ) Device.objects.bulk_create(devices) inventory_items = ( InventoryItem(device=devices[0], manufacturer=manufacturers[0], name='Inventory Item 1', label='A', part_id='1001', serial='ABC', asset_tag='1001', discovered=True, description='First'), InventoryItem(device=devices[1], manufacturer=manufacturers[1], name='Inventory Item 2', label='B', part_id='1002', serial='DEF', asset_tag='1002', discovered=True, description='Second'), InventoryItem(device=devices[2], manufacturer=manufacturers[2], name='Inventory Item 3', label='C', part_id='1003', serial='GHI', asset_tag='1003', discovered=False, description='Third'), ) for i in inventory_items: i.save() child_inventory_items = ( InventoryItem(device=devices[0], name='Inventory Item 1A', parent=inventory_items[0]), InventoryItem(device=devices[1], name='Inventory Item 2A', parent=inventory_items[1]), InventoryItem(device=devices[2], name='Inventory Item 3A', parent=inventory_items[2]), ) for i in child_inventory_items: i.save() def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Inventory Item 1', 'Inventory Item 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['A', 'B']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_part_id(self): params = {'part_id': ['1001', '1002']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_asset_tag(self): params = {'asset_tag': ['1001', '1002']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_discovered(self): # TODO: Fix boolean value params = {'discovered': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'discovered': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_device(self): # TODO: Allow multiple values device = Device.objects.first() params = {'device_id': device.pk} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'device': device.name} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_parent_id(self): parent_items = InventoryItem.objects.filter(parent__isnull=True)[:2] params = {'parent_id': [parent_items[0].pk, parent_items[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) 
def test_manufacturer(self): manufacturers = Manufacturer.objects.all()[:2] params = {'manufacturer_id': [manufacturers[0].pk, manufacturers[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'manufacturer': [manufacturers[0].slug, manufacturers[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_serial(self): params = {'serial': 'ABC'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) params = {'serial': 'abc'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) class VirtualChassisTestCase(TestCase): queryset = VirtualChassis.objects.all() filterset = VirtualChassisFilterSet @classmethod def setUpTestData(cls): manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0], vc_position=1), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[0], vc_position=2), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[1], vc_position=1), Device(name='Device 4', device_type=device_type, device_role=device_role, site=sites[1], vc_position=2), Device(name='Device 5', device_type=device_type, device_role=device_role, site=sites[2], vc_position=1), Device(name='Device 6', device_type=device_type, device_role=device_role, site=sites[2], vc_position=2), ) Device.objects.bulk_create(devices) virtual_chassis = ( VirtualChassis(name='VC 1', master=devices[0], domain='Domain 1'), VirtualChassis(name='VC 2', master=devices[2], domain='Domain 2'), VirtualChassis(name='VC 3', master=devices[4], domain='Domain 3'), ) VirtualChassis.objects.bulk_create(virtual_chassis) Device.objects.filter(pk=devices[1].pk).update(virtual_chassis=virtual_chassis[0]) Device.objects.filter(pk=devices[3].pk).update(virtual_chassis=virtual_chassis[1]) Device.objects.filter(pk=devices[5].pk).update(virtual_chassis=virtual_chassis[2]) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_domain(self): params = {'domain': ['Domain 1', 'Domain 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_master(self): masters = Device.objects.all() params = {'master_id': [masters[0].pk, masters[2].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'master': [masters[0].name, masters[2].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['VC 1', 'VC 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, 
regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class CableTestCase(TestCase): queryset = Cable.objects.all() filterset = CableFilterSet @classmethod def setUpTestData(cls): sites = ( Site(name='Site 1', slug='site-1'), Site(name='Site 2', slug='site-2'), Site(name='Site 3', slug='site-3'), ) Site.objects.bulk_create(sites) tenants = ( Tenant(name='Tenant 1', slug='tenant-1'), Tenant(name='Tenant 2', slug='tenant-2'), ) Tenant.objects.bulk_create(tenants) racks = ( Rack(name='Rack 1', site=sites[0]), Rack(name='Rack 2', site=sites[1]), Rack(name='Rack 3', site=sites[2]), ) Rack.objects.bulk_create(racks) manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model 1', slug='model-1') device_role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1') devices = ( Device(name='Device 1', device_type=device_type, device_role=device_role, site=sites[0], rack=racks[0], position=1, tenant=tenants[0]), Device(name='Device 2', device_type=device_type, device_role=device_role, site=sites[0], rack=racks[0], position=2, tenant=tenants[0]), Device(name='Device 3', device_type=device_type, device_role=device_role, site=sites[1], rack=racks[1], position=1, tenant=tenants[1]), Device(name='Device 4', device_type=device_type, device_role=device_role, site=sites[1], rack=racks[1], position=2), Device(name='Device 5', device_type=device_type, device_role=device_role, site=sites[2], rack=racks[2], position=1), Device(name='Device 6', device_type=device_type, device_role=device_role, site=sites[2], rack=racks[2], position=2), ) Device.objects.bulk_create(devices) interfaces = ( Interface(device=devices[0], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[0], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[1], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[1], name='Interface 4', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[2], name='Interface 5', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[2], name='Interface 6', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[3], name='Interface 7', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[3], name='Interface 8', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[4], name='Interface 9', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[4], name='Interface 10', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[5], name='Interface 11', type=InterfaceTypeChoices.TYPE_1GE_FIXED), Interface(device=devices[5], name='Interface 12', type=InterfaceTypeChoices.TYPE_1GE_FIXED), ) Interface.objects.bulk_create(interfaces) # Cables Cable(termination_a=interfaces[1], termination_b=interfaces[2], label='Cable 1', type=CableTypeChoices.TYPE_CAT3, status=CableStatusChoices.STATUS_CONNECTED, color='aa1409', length=10, length_unit=CableLengthUnitChoices.UNIT_FOOT).save() Cable(termination_a=interfaces[3], termination_b=interfaces[4], label='Cable 2', type=CableTypeChoices.TYPE_CAT3, 
status=CableStatusChoices.STATUS_CONNECTED, color='aa1409', length=20, length_unit=CableLengthUnitChoices.UNIT_FOOT).save() Cable(termination_a=interfaces[5], termination_b=interfaces[6], label='Cable 3', type=CableTypeChoices.TYPE_CAT5E, status=CableStatusChoices.STATUS_CONNECTED, color='f44336', length=30, length_unit=CableLengthUnitChoices.UNIT_FOOT).save() Cable(termination_a=interfaces[7], termination_b=interfaces[8], label='Cable 4', type=CableTypeChoices.TYPE_CAT5E, status=CableStatusChoices.STATUS_PLANNED, color='f44336', length=40, length_unit=CableLengthUnitChoices.UNIT_FOOT).save() Cable(termination_a=interfaces[9], termination_b=interfaces[10], label='Cable 5', type=CableTypeChoices.TYPE_CAT6, status=CableStatusChoices.STATUS_PLANNED, color='e91e63', length=10, length_unit=CableLengthUnitChoices.UNIT_METER).save() Cable(termination_a=interfaces[11], termination_b=interfaces[0], label='Cable 6', type=CableTypeChoices.TYPE_CAT6, status=CableStatusChoices.STATUS_PLANNED, color='e91e63', length=20, length_unit=CableLengthUnitChoices.UNIT_METER).save() def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_label(self): params = {'label': ['Cable 1', 'Cable 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_length(self): params = {'length': [10, 20]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_length_unit(self): params = {'length_unit': CableLengthUnitChoices.UNIT_FOOT} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_type(self): params = {'type': [CableTypeChoices.TYPE_CAT3, CableTypeChoices.TYPE_CAT5E]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_status(self): params = {'status': [CableStatusChoices.STATUS_CONNECTED]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3) params = {'status': [CableStatusChoices.STATUS_PLANNED]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3) def test_color(self): params = {'color': ['aa1409', 'f44336']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) def test_device(self): devices = Device.objects.all()[:2] params = {'device_id': [devices[0].pk, devices[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3) params = {'device': [devices[0].name, devices[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3) def test_rack(self): racks = Rack.objects.all()[:2] params = {'rack_id': [racks[0].pk, racks[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 5) params = {'rack': [racks[0].name, racks[1].name]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 5) def test_site(self): site = Site.objects.all()[:2] params = {'site_id': [site[0].pk, site[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 5) params = {'site': [site[0].slug, site[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 5) def test_tenant(self): tenant = Tenant.objects.all()[:2] params = {'tenant_id': [tenant[0].pk, tenant[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) params = {'tenant': [tenant[0].slug, tenant[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) class PowerPanelTestCase(TestCase): queryset = PowerPanel.objects.all() filterset = PowerPanelFilterSet @classmethod def setUpTestData(cls): 
regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) rack_groups = ( RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]), RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]), RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]), ) for rackgroup in rack_groups: rackgroup.save() power_panels = ( PowerPanel(name='Power Panel 1', site=sites[0], rack_group=rack_groups[0]), PowerPanel(name='Power Panel 2', site=sites[1], rack_group=rack_groups[1]), PowerPanel(name='Power Panel 3', site=sites[2], rack_group=rack_groups[2]), ) PowerPanel.objects.bulk_create(power_panels) def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Power Panel 1', 'Power Panel 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_rack_group(self): rack_groups = RackGroup.objects.all()[:2] params = {'rack_group_id': [rack_groups[0].pk, rack_groups[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) class PowerFeedTestCase(TestCase): queryset = PowerFeed.objects.all() filterset = PowerFeedFilterSet @classmethod def setUpTestData(cls): regions = ( Region(name='Region 1', slug='region-1'), Region(name='Region 2', slug='region-2'), Region(name='Region 3', slug='region-3'), ) for region in regions: region.save() sites = ( Site(name='Site 1', slug='site-1', region=regions[0]), Site(name='Site 2', slug='site-2', region=regions[1]), Site(name='Site 3', slug='site-3', region=regions[2]), ) Site.objects.bulk_create(sites) racks = ( Rack(name='Rack 1', site=sites[0]), Rack(name='Rack 2', site=sites[1]), Rack(name='Rack 3', site=sites[2]), ) Rack.objects.bulk_create(racks) power_panels = ( PowerPanel(name='Power Panel 1', site=sites[0]), PowerPanel(name='Power Panel 2', site=sites[1]), PowerPanel(name='Power Panel 3', site=sites[2]), ) PowerPanel.objects.bulk_create(power_panels) power_feeds = ( PowerFeed(power_panel=power_panels[0], rack=racks[0], name='Power Feed 1', status=PowerFeedStatusChoices.STATUS_ACTIVE, type=PowerFeedTypeChoices.TYPE_PRIMARY, supply=PowerFeedSupplyChoices.SUPPLY_AC, phase=PowerFeedPhaseChoices.PHASE_3PHASE, voltage=100, amperage=100, max_utilization=10), PowerFeed(power_panel=power_panels[1], rack=racks[1], name='Power Feed 2', status=PowerFeedStatusChoices.STATUS_FAILED, type=PowerFeedTypeChoices.TYPE_PRIMARY, supply=PowerFeedSupplyChoices.SUPPLY_AC, phase=PowerFeedPhaseChoices.PHASE_3PHASE, voltage=200, amperage=200, max_utilization=20), 
PowerFeed(power_panel=power_panels[2], rack=racks[2], name='Power Feed 3', status=PowerFeedStatusChoices.STATUS_OFFLINE, type=PowerFeedTypeChoices.TYPE_REDUNDANT, supply=PowerFeedSupplyChoices.SUPPLY_DC, phase=PowerFeedPhaseChoices.PHASE_SINGLE, voltage=300, amperage=300, max_utilization=30), ) PowerFeed.objects.bulk_create(power_feeds) manufacturer = Manufacturer.objects.create(name='Manufacturer', slug='manufacturer') device_type = DeviceType.objects.create(manufacturer=manufacturer, model='Model', slug='model') device_role = DeviceRole.objects.create(name='Device Role', slug='device-role') device = Device.objects.create(name='Device', device_type=device_type, device_role=device_role, site=sites[0]) power_ports = [ PowerPort(device=device, name='Power Port 1'), PowerPort(device=device, name='Power Port 2'), ] PowerPort.objects.bulk_create(power_ports) Cable(termination_a=power_feeds[0], termination_b=power_ports[0]).save() Cable(termination_a=power_feeds[1], termination_b=power_ports[1]).save() def test_id(self): params = {'id': self.queryset.values_list('pk', flat=True)[:2]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_name(self): params = {'name': ['Power Feed 1', 'Power Feed 2']} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_status(self): # TODO: Test for multiple values params = {'status': PowerFeedStatusChoices.STATUS_ACTIVE} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_type(self): params = {'type': PowerFeedTypeChoices.TYPE_PRIMARY} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_supply(self): params = {'supply': PowerFeedSupplyChoices.SUPPLY_AC} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_phase(self): params = {'phase': PowerFeedPhaseChoices.PHASE_3PHASE} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_voltage(self): params = {'voltage': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_amperage(self): params = {'amperage': [100, 200]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_max_utilization(self): params = {'max_utilization': [10, 20]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_region(self): regions = Region.objects.all()[:2] params = {'region_id': [regions[0].pk, regions[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'region': [regions[0].slug, regions[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_site(self): sites = Site.objects.all()[:2] params = {'site_id': [sites[0].pk, sites[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'site': [sites[0].slug, sites[1].slug]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_power_panel_id(self): power_panels = PowerPanel.objects.all()[:2] params = {'power_panel_id': [power_panels[0].pk, power_panels[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_rack_id(self): racks = Rack.objects.all()[:2] params = {'rack_id': [racks[0].pk, racks[1].pk]} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) def test_cabled(self): params = {'cabled': 'true'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'cabled': 'false'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def 
test_connected(self): params = {'connected': True} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) params = {'connected': False} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) # TODO: Connection filters<|fim▁end|>
<|file_name|>tags.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from score.templatetags.score_tags import display_score <|fim▁hole|> def test_none(self): assert "?" in display_score(None) def test_danger(self): assert "danger" in display_score("30") def test_warning(self): assert "warning" in display_score("50") def test_primary(self): assert "primary" in display_score("65") def test_info(self): assert "info" in display_score("79") def test_success(self): assert "success" in display_score("81")<|fim▁end|>
class TestDisplayScore(TestCase): def test_string(self): assert "?" in display_score("")
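For reference, the thresholds implied by these assertions can be satisfied by a display_score along the following lines. This is a sketch inferred from the tests, not the project's actual template tag; the real cut-offs and markup may differ:

def display_score(score):
    # Unknown scores render as a question mark, matching test_none/test_string.
    if score in (None, ""):
        return '<span class="badge">?</span>'
    value = int(score)
    if value < 40:
        css = "danger"
    elif value < 60:
        css = "warning"
    elif value < 75:
        css = "primary"
    elif value < 80:
        css = "info"
    else:
        css = "success"
    return '<span class="badge badge-%s">%d</span>' % (css, value)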
<|file_name|>GR32_RepaintOpt.cpp<|end_file_name|><|fim▁begin|>//unit GR32_RepaintOpt; #include "stdafx.h" #include "GR32_RepaintOpt.h" void InflateArea(var Area: TRect; Dx, Dy: Integer); { if Area.Left > Area.Right then Dx := -Dx; if Area.Top > Area.Bottom then Dy := -Dy; Dec(Area.Left, Dx); Dec(Area.Top, Dy); Inc(Area.Right, Dx); Inc(Area.Bottom, Dy); } type TLayerCollectionAccess = class(TLayerCollection); { TCustomRepaintOptimizer } constructor TCustomRepaintOptimizer.Create(Buffer: TBitmap32; InvalidRects: TRectList); { FLayerCollections := TList.Create; FInvalidRects := InvalidRects; FBuffer := Buffer; } destructor TCustomRepaintOptimizer.Destroy; var I: Integer; { for I := 0 to FLayerCollections.Count - 1 do UnregisterLayerCollection(TLayerCollection(FLayerCollections[I])); FLayerCollections.Free; inherited; } function TCustomRepaintOptimizer.GetEnabled: Boolean; { Result := FEnabled; } void TCustomRepaintOptimizer.SetEnabled(const Value: Boolean); { FEnabled := Value; } void TCustomRepaintOptimizer.RegisterLayerCollection(Layers: TLayerCollection); { if FLayerCollections.IndexOf(Layers) = -1 then { FLayerCollections.Add(Layers); TLayerCollectionAccess(Layers).OnListNotify := LayerCollectionNotifyHandler; } } void TCustomRepaintOptimizer.UnregisterLayerCollection(Layers: TLayerCollection); { TLayerCollectionAccess(Layers).OnListNotify := nil; FLayerCollections.Remove(Layers); } <|fim▁hole|> void TCustomRepaintOptimizer.BeginPaint; { // do nothing by default } void TCustomRepaintOptimizer.EndPaint; { // do nothing by default } void TCustomRepaintOptimizer.BeginPaintBuffer; { // do nothing by default } void TCustomRepaintOptimizer.EndPaintBuffer; { // do nothing by default }<|fim▁end|>
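The InflateArea helper at the top of that unit grows a rectangle by (Dx, Dy) and flips the deltas when the rectangle is stored inverted (Left > Right or Top > Bottom). The same logic as a small, runnable Python sketch:

def inflate_area(left, top, right, bottom, dx, dy):
    # Flip the deltas when the rectangle is stored inverted.
    if left > right:
        dx = -dx
    if top > bottom:
        dy = -dy
    return (left - dx, top - dy, right + dx, bottom + dy)

assert inflate_area(0, 0, 10, 10, 2, 3) == (-2, -3, 12, 13)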
<|file_name|>util.js<|end_file_name|><|fim▁begin|>/** * Module util.js * Its static common helpers methods */ (function (window) { if (!Array.isArray) { Array.isArray = function (arg) { return Object.prototype.toString.call(arg) === '[object Array]'; }; } var util = {}; /** * Deeply extends two objects * @param {Object} destination The destination object, This object will change * @param {Object} source The custom options to extend destination by * @return {Object} The desination object */ util.extend = function (destination, source) { var property; for (property in source) { if (source[property] && source[property].constructor && source[property].constructor === Object) { destination[property] = destination[property] || {}; util.extend(destination[property], source[property]); } else { destination[property] = source[property]; } } return destination; }; /** * Clone object * @param {Object} source * @returns {*} */ util.objClone = function (source) { if (source === null || typeof source !== 'object') return source; var temp = source.constructor(); for (var key in source) temp[key] = util.objClone(source[key]); return temp; }; /** * Count object length * @param {Object} source * @returns {number} */ util.objLen = function (source) { var it = 0; for (var k in source) it++; return it; }; /** * Merge an object `src` into the object `objectBase` * @param objectBase main object of merge * @param src the elements of this object will be added/replaced to main object `obj` * @returns {*} object result */ util.objMerge = function (objectBase, src) { if (typeof objectBase !== 'object' || typeof src !== 'object') return false; if (Object.key) { Object.keys(src).forEach(function (key) { objectBase[key] = src[key]; }); return objectBase; } else { for (var key in src) if (src.hasOwnProperty(key)) objectBase[key] = src[key]; return objectBase; } }; /** * Merge objects if `objectBase` key not exists * @param objectBase * @param src * @returns {*} */ util.objMergeNotExists = function (objectBase, src) { for (var key in src) if (objectBase[key] === undefined) objectBase[key] = src[key]; return objectBase; }; /** * Merge objects if `objectBase` key is exists * @param objectBase * @param src * @returns {*} */ util.objMergeOnlyExists = function (objectBase, src) { for (var key in src) if (objectBase[key] !== undefined) objectBase[key] = src[key]; return objectBase; }; /** * Merge an array `src` into the array `arrBase` * @param arrBase * @param src * @returns {*} */ util.arrMerge = function (arrBase, src) { if (!Array.isArray(arrBase) || !Array.isArray(src)) return false; for (var i = 0; i < src.length; i++) arrBase.push(src[i]) return arrBase; }; /** * Computes the difference of arrays * Compares arr1 against one or more other arrays and returns the values in arr1 * that are not present in any of the other arrays. 
* @param arr1 * @param arr2 * @returns {*} */ util.arrDiff = function (arr1, arr2) { if (util.isArr(arr1) && util.isArr(arr2)) { return arr1.slice(0).filter(function (item) { return arr2.indexOf(item) === -1; }) } return false; }; util.objToArr = function (obj) { return [].slice.call(obj); }; util.realObjToArr = function (obj) { var arr = []; for (var key in obj) arr.push(obj[key]) return arr; }; util.cloneFunction = function (func) { var temp = function temporary() { return func.apply(this, arguments); }; for (var key in this) { if (this.hasOwnProperty(key)) { temp[key] = this[key]; } } return temp; }; /** * Check on typeof is string a param * @param param * @returns {boolean} */ util.isStr = function (param) { return typeof param === 'string'; }; /** * Check on typeof is array a param * @param param * @returns {boolean} */ util.isArr = function (param) { return Array.isArray(param); }; /** * Check on typeof is object a param * @param param * @returns {boolean} */ util.isObj = function (param) { return (param !== null && typeof param == 'object'); }; /** * Determine param is a number or a numeric string * @param param * @returns {boolean} */ util.isNum = function (param) { return !isNaN(param); }; // Determine whether a variable is empty util.isEmpty = function (param) { return (param === "" || param === 0 || param === "0" || param === null || param === undefined || param === false || (util.isArr(param) && param.length === 0)); }; util.isHtml = function (param) { if (util.isNode(param)) return true; return util.isNode(util.html2node(param)); }; util.isNode = function (param) { var types = [1, 9, 11]; if (typeof param === 'object' && types.indexOf(param.nodeType) !== -1) return true; else return false; }; /** * * Node.ELEMENT_NODE - 1 - ELEMENT * Node.TEXT_NODE - 3 - TEXT * Node.PROCESSING_INSTRUCTION_NODE - 7 - PROCESSING * Node.COMMENT_NODE - 8 - COMMENT * Node.DOCUMENT_NODE - 9 - DOCUMENT * Node.DOCUMENT_TYPE_NODE - 10 - DOCUMENT_TYPE * Node.DOCUMENT_FRAGMENT_NODE - 11 - FRAGMENT * Uses: Util.isNodeType(elem, 'element') */ util.isNodeType = function (param, type) { type = String((type ? 
type : 1)).toUpperCase(); if (typeof param === 'object') { switch (type) { case '1': case 'ELEMENT': return param.nodeType === Node.ELEMENT_NODE; break; case '3': case 'TEXT': return param.nodeType === Node.TEXT_NODE; break; case '7': case 'PROCESSING': return param.nodeType === Node.PROCESSING_INSTRUCTION_NODE; break; case '8': case 'COMMENT': return param.nodeType === Node.COMMENT_NODE; break; case '9': case 'DOCUMENT': return param.nodeType === Node.DOCUMENT_NODE; break; case '10': case 'DOCUMENT_TYPE': return param.nodeType === Node.DOCUMENT_TYPE_NODE; break; case '11': case 'FRAGMENT': return param.nodeType === Node.DOCUMENT_FRAGMENT_NODE; break; default: return false; } } else return false; }; /** * Determine param to undefined type * @param param * @returns {boolean} */ util.defined = function (param) { return typeof(param) != 'undefined'; }; /** * Javascript object to JSON data * @param data */ util.objToJson = function (data) { return JSON.stringify(data); }; /** * JSON data to Javascript object * @param data */ util.jsonToObj = function (data) { return JSON.parse(data); }; /** * Cleans the array of empty elements * @param src * @returns {Array} */ util.cleanArr = function (src) { var arr = []; for (var i = 0; i < src.length; i++) if (src[i]) arr.push(src[i]); return arr; }; /** * Return type of data as name object "Array", "Object", "String", "Number", "Function" * @param data * @returns {string} */ util.typeOf = function (data) { return Object.prototype.toString.call(data).slice(8, -1); }; /** * Convert HTML form to encode URI string * @param form * @param asObject * @returns {*} */ util.formData = function (form, asObject) { var obj = {}, str = ''; for (var i = 0; i < form.length; i++) { var f = form[i]; if (f.type == 'submit' || f.type == 'button') continue; if ((f.type == 'radio' || f.type == 'checkbox') && f.checked == false) continue; var fName = f.nodeName.toLowerCase(); if (fName == 'input' || fName == 'select' || fName == 'textarea') { obj[f.name] = f.value; str += ((str == '') ? '' : '&') + f.name + '=' + encodeURIComponent(f.value); } } return (asObject === true) ? obj : str; }; /** * HTML string convert to DOM Elements Object * @param data * @returns {*} */ util.toNode = function (data) { var parser = new DOMParser(); var node = parser.parseFromString(data, "text/xml"); console.log(node); if (typeof node == 'object' && node.firstChild.nodeType == Node.ELEMENT_NODE) return node.firstChild; else return false; }; /** * Removes duplicate values from an array * @param arr * @returns {Array} */ util.uniqueArr = function (arr) { var tmp = []; for (var i = 0; i < arr.length; i++) { if (tmp.indexOf(arr[i]) == "-1") tmp.push(arr[i]); } return tmp; }; /** * Reads entire file into a string, synchronously * This function uses XmlHttpRequest and cannot retrieve resource from different domain. * @param url * @returns {*|string|null|string} */ util.fileGetContents = function (url) { var req = null; try { req = new ActiveXObject("Msxml2.XMLHTTP"); } catch (e) { try { req = new ActiveXObject("Microsoft.XMLHTTP"); } catch (e) { try { req = new XMLHttpRequest(); } catch (e) { } } } if (req == null) throw new Error('XMLHttpRequest not supported'); req.open("GET", url, false); req.send(null); return req.responseText; }; /** * Calculates the position and size of elements. 
* * @param elem * @returns {{y: number, x: number, width: number, height: number}} */ util.getPosition = function (elem) { var top = 0, left = 0; if (elem.getBoundingClientRect) { var box = elem.getBoundingClientRect(); var body = document.body; var docElem = document.documentElement; var scrollTop = window.pageYOffset || docElem.scrollTop || body.scrollTop; var scrollLeft = window.pageXOffset || docElem.scrollLeft || body.scrollLeft; var clientTop = docElem.clientTop || body.clientTop || 0; var clientLeft = docElem.clientLeft || body.clientLeft || 0; top = box.top + scrollTop - clientTop; left = box.left + scrollLeft - clientLeft; return {y: Math.round(top), x: Math.round(left), width: elem.offsetWidth, height: elem.offsetHeight}; } else { //fallback to naive approach while (elem) { top = top + parseInt(elem.offsetTop, 10); left = left + parseInt(elem.offsetLeft, 10); elem = elem.offsetParent; } return {x: left, y: top, width: elem.offsetWidth, height: elem.offsetHeight}; } }; /** * Returns the coordinates of the mouse on any element * @param event * @param element * @returns {{x: number, y: number}} */ util.getMousePosition = function (event, element) { var positions = {x: 0, y: 0}; element = element || document.body; if (element instanceof HTMLElement && event instanceof MouseEvent) { if (element.getBoundingClientRect) { var rect = element.getBoundingClientRect(); positions.x = event.clientX - rect.left; positions.y = event.clientY - rect.top; } else { positions.x = event.pageX - element.offsetLeft; positions.y = event.pageY - element.offsetTop; } } return positions; }; /** * Creator of styles, return style-element or style-text. * * <pre>var style = createStyle('body','font-size:10px'); *style.add('body','font-size:10px') // added style *style.add( {'background-color':'red'} ) // added style *style.getString() // style-text *style.getObject() // style-element</pre> * * @param selector name of selector styles * @param property string "display:object" or object {'background-color':'red'} * @returns {*} return object with methods : getString(), getObject(), add() */ util.createStyle = function (selector, property) { var o = { content: '', getString: function () { return '<style rel="stylesheet">' + "\n" + o.content + "\n" + '</style>'; }, getObject: function () { var st = document.createElement('style'); st.setAttribute('rel', 'stylesheet'); st.textContent = o.content; return st; }, add: function (select, prop) { if (typeof prop === 'string') { o.content += select + "{" + ( (prop.substr(-1) == ';') ? prop : prop + ';' ) + "}"; } else if (typeof prop === 'object') { o.content += select + "{"; for (var key in prop) o.content += key + ':' + prop[key] + ';'; o.content += "}"; } return this; } }; return o.add(selector, property); }; /** * Create new NodeElement * @param tag element tag name 'p, div, h3 ... 
other' * @param attrs object with attributes key=value * @param inner text, html or NodeElement * @returns {Element} */ util.createElement = function (tag, attrs, inner) { var elem = document.createElement(tag); if (typeof attrs === 'object') { for (var key in attrs) elem.setAttribute(key, attrs[key]); } if (typeof inner === 'string') { elem.innerHTML = inner; } else if (typeof inner === 'object') { elem.appendChild(elem); } return elem; }; /** * Returns a random integer between min, max, if not specified the default of 0 to 100 * @param min * @param max * @returns {number} */ util.rand = function (min, max) { min = min || 0; max = max || 100; return Math.floor(Math.random() * (max - min + 1) + min); }; /** * Returns random string color, HEX format * @returns {string} */ util.randColor = function () { var letters = '0123456789ABCDEF'.split(''), color = '#'; for (var i = 0; i < 6; i++) color += letters[Math.floor(Math.random() * 16)]; return color; }; /** * Converts degrees to radians * @param deg * @returns {number} */ util.degreesToRadians = function (deg) { return (deg * Math.PI) / 180; }; /** * Converts radians to degrees * @param rad * @returns {number} */ util.radiansToDegrees = function (rad) { return (rad * 180) / Math.PI; }; /** * The calculation of the distance between points * The point is an object with properties `x` and `y` {x:100,y:100} * @param point1 * @param point2 * @returns {number} */ util.distanceBetween = function (point1, point2) {<|fim▁hole|> return Math.sqrt(dx * dx + dy * dy); }; /** * Encode URI params * @param data Object key=value * @returns {*} query string */ util.encodeData = function (data) { if (typeof data === 'string') return data; if (typeof data !== 'object') return ''; var convertData = []; Object.keys(data).forEach(function (key) { convertData.push(key + '=' + encodeURIComponent(data[key])); }); return convertData.join('&'); }; /** * Parse URI Request data into object * @param url * @returns {{}} */ util.parseGet = function (url) { url = url || document.location; var params = {}; var parser = document.createElement('a'); parser.href = url; if (parser.search.length > 1) { parser.search.substr(1).split('&').forEach(function (part) { var item = part.split('='); params[item[0]] = decodeURIComponent(item[1]); }); } return params; }; /** * Parse Url string/location into object * @param url * @returns {{}} */ util.parseUrl = function (url) { url = url || document.location; var params = {}; var parser = document.createElement('a'); parser.href = url; params.protocol = parser.protocol; params.host = parser.host; params.hostname = parser.hostname; params.port = parser.port; params.pathname = parser.pathname; params.hash = parser.hash; params.search = parser.search; params.get = util.parseGet(parser.search); return params; }; util.each = function (data, callback) { if (util.isArr(data)) { for (var i = 0; i < data.length; i++) callback.call(null, data[i]); } else if (util.isObj(data)) { for (var k in data) callback.call(null, k, data[k]); } else return false; }; util.ucfirst = function (string) { return string && string[0].toUpperCase() + string.slice(1); }; util.node2html = function (element) { var container = document.createElement("div"); container.appendChild(element.cloneNode(true)); return container.innerHTML; }; util.html2node = function (string) { var i, fragment = document.createDocumentFragment(), container = document.createElement("div"); container.innerHTML = string; while (i = container.firstChild) fragment.appendChild(i); return 
fragment.childNodes.length === 1 ? fragment.firstChild : fragment; }; util.base64encode = function (str) { var b64chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; var b64encoded = ''; var chr1, chr2, chr3; var enc1, enc2, enc3, enc4; for (var i = 0; i < str.length;) { chr1 = str.charCodeAt(i++); chr2 = str.charCodeAt(i++); chr3 = str.charCodeAt(i++); enc1 = chr1 >> 2; enc2 = ((chr1 & 3) << 4) | (chr2 >> 4); enc3 = isNaN(chr2) ? 64 : (((chr2 & 15) << 2) | (chr3 >> 6)); enc4 = isNaN(chr3) ? 64 : (chr3 & 63); b64encoded += b64chars.charAt(enc1) + b64chars.charAt(enc2) + b64chars.charAt(enc3) + b64chars.charAt(enc4); } return b64encoded; }; util.base64decode = function (str) { var b64chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; var b64decoded = ''; var chr1, chr2, chr3; var enc1, enc2, enc3, enc4; str = str.replace(/[^a-z0-9\+\/\=]/gi, ''); for (var i = 0; i < str.length;) { enc1 = b64chars.indexOf(str.charAt(i++)); enc2 = b64chars.indexOf(str.charAt(i++)); enc3 = b64chars.indexOf(str.charAt(i++)); enc4 = b64chars.indexOf(str.charAt(i++)); chr1 = (enc1 << 2) | (enc2 >> 4); chr2 = ((enc2 & 15) << 4) | (enc3 >> 2); chr3 = ((enc3 & 3) << 6) | enc4; b64decoded = b64decoded + String.fromCharCode(chr1); if (enc3 < 64) { b64decoded += String.fromCharCode(chr2); } if (enc4 < 64) { b64decoded += String.fromCharCode(chr3); } } return b64decoded; }; /** * Cross-browser function for the character of the event keypress: * @param event event.type must keypress * @returns {*} */ util.getChar = function (event) { if (event.which == null) { if (event.keyCode < 32) return null; return String.fromCharCode(event.keyCode) } if (event.which != 0 && event.charCode != 0) { if (event.which < 32) return null; return String.fromCharCode(event.which); } return null; }; util.Date = function () { }; util.Date.time = function (date) { "use strict"; return date instanceof Date ? date.getTime() : (new Date).getTime(); }; /** * Add days to some date * @param day number of days. 0.04 - 1 hour, 0.5 - 12 hour, 1 - 1 day * @param startDate type Date, start date * @returns {*} type Date */ util.Date.addDays = function (day, startDate) { var date = startDate ? new Date(startDate) : new Date(); date.setTime(date.getTime() + (day * 86400000)); return date; }; util.Date.daysBetween = function (date1, date2) { var ONE_DAY = 86400000, date1_ms = date1.getTime(), date2_ms = date2.getTime(); return Math.round((Math.abs(date1_ms - date2_ms)) / ONE_DAY) }; util.Storage = function (name, value) { if (!name) { return false; } else if (value === undefined) { return util.Storage.get(name); } else if (!value) { return util.Storage.remove(name); } else { return util.Storage.set(name, value); } }; util.Storage.set = function (name, value) { try { value = JSON.stringify(value) } catch (error) { } return window.localStorage.setItem(name, value); }; util.Storage.get = function (name) { var value = window.localStorage.getItem(name); if (value) try { value = JSON.parse(value) } catch (error) { } return value; }; util.Storage.remove = function (name) { return window.localStorage.removeItem(name); }; util.Storage.key = function (name) { return window.localStorage.key(key); }; // when invoked, will empty all keys out of the storage. util.Storage.clear = function () { return window.localStorage.clear(); }; // returns an integer representing the number of data items stored in the Storage object. 
util.Storage.length = function () { return window.localStorage.length; }; /** * возвращает cookie с именем name, если есть, если нет, то undefined * @param name * @param value */ util.Cookie = function (name, value) { "use strict"; if (value === undefined) { return util.Cookie.get(name); } else if (value === false || value === null) { util.Cookie.delete(name); } else { util.Cookie.set(name, value); } }; util.Cookie.get = function (name) { var matches = document.cookie.match(new RegExp( "(?:^|; )" + name.replace(/([\.$?*|{}\(\)\[\]\\\/\+^])/g, '\\$1') + "=([^;]*)" )); return matches ? decodeURIComponent(matches[1]) : undefined; }; /** * * @param name * @param value * @param {{}} options {expires: 0, path: '/', domain: 'site.com', secure: false} * expires - ms, Date, -1, 0 */ util.Cookie.set = function (name, value, options) { options = options || {}; var expires = options.expires; if (typeof expires == "number" && expires) { var d = new Date(); d.setTime(d.getTime() + expires * 1000); expires = options.expires = d; } if (expires && expires.toUTCString) { options.expires = expires.toUTCString(); } value = encodeURIComponent(value); var updatedCookie = name + "=" + value; for (var propName in options) { updatedCookie += "; " + propName; var propValue = options[propName]; if (propValue !== true) { updatedCookie += "=" + propValue; } } document.cookie = updatedCookie; }; util.Cookie.delete = util.Cookie.remove = function (name, option) { "use strict"; option = typeof option === 'object' ? option : {}; option.expires = -1; util.Cookie.set(name, "", option); }; util.getURLParameter = function (name) { var reg = (RegExp(name + '=' + '(.+?)(&|$)').exec(location.search) || [, null])[1]; return reg === null ? undefined : decodeURI(reg); }; /** * An asynchronous for-each loop * * @param {Array} array The array to loop through * * @param {function} done Callback function (when the loop is finished or an error occurs) * * @param {function} iterator * The logic for each iteration. Signature is `function(item, index, next)`. * Call `next()` to continue to the next item. Call `next(Error)` to throw an error and cancel the loop. * Or don't call `next` at all to break out of the loop. */ util.asyncForEach = function (array, done, iterator) { var i = 0; next(); function next(err) { if (err) done(err); else if (i >= array.length) done(); else if (i < array.length) { var item = array[i++]; setTimeout(function () { iterator(item, i - 1, next); }, 0); } } }; /** * Calls the callback in a given interval until it returns true * @param {function} callback * @param {number} interval in milliseconds */ util.waitFor = function (callback, interval) { var internalCallback = function () { if (callback() !== true) { setTimeout(internalCallback, interval); } }; internalCallback(); }; /** * Remove item from array * @param item * @param stack * @returns {Array} */ util.rmInArray = function (item, stack) { var newStack = []; for (var i = 0; i < stack.length; i++) { if (stack[i] && stack[i] != item) newStack.push(stack[i]); } return newStack; }; /** * @param text * @returns {string|void|XML} */ util.toTranslit = function (text) { return text.replace(/([а-яё])|([\s_-])|([^a-z\d])/gi, function (all, ch, space, words, i) { if (space || words) return space ? '-' : ''; var code = ch.charCodeAt(0), index = code == 1025 || code == 1105 ? 0 : code > 1071 ? 
code - 1071 : code - 1039, t = ['yo', 'a', 'b', 'v', 'g', 'd', 'e', 'zh', 'z', 'i', 'y', 'k', 'l', 'm', 'n', 'o', 'p', 'r', 's', 't', 'u', 'f', 'h', 'c', 'ch', 'sh', 'shch', '', 'y', '', 'e', 'yu', 'ya']; return t[index]; }); }; window.Util = util; })(window);<|fim▁end|>
var dx = point2.x - point1.x; var dy = point2.y - point1.y;
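That completion is the standard Euclidean distance between the two points; the equivalent computation in Python, for reference:

import math

def distance_between(point1, point2):
    # Euclidean distance, exactly what the completed function computes.
    return math.hypot(point2["x"] - point1["x"], point2["y"] - point1["y"])

assert distance_between({"x": 0, "y": 0}, {"x": 3, "y": 4}) == 5.0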
<|file_name|>queue.rs<|end_file_name|><|fim▁begin|>use collections::vec::Vec; /// A FIFO Queue pub struct Queue<T> { /// The queue as a vector pub vec: Vec<T>, } impl<T> Queue<T> { /// Create new queue pub fn new() -> Self { Queue { vec: Vec::new() } } /// Push element to queue pub fn push(&mut self, value: T) { self.vec.push(value); } /// Pop the element at the front of the queue (FIFO order) pub fn pop(&mut self) -> Option<T> { if !self.vec.is_empty() { Some(self.vec.remove(0)) } else { None } } /// Get the length of the queue pub fn len(&self) -> usize { self.vec.len()<|fim▁hole|> impl<T> Clone for Queue<T> where T: Clone { fn clone(&self) -> Self { Queue { vec: self.vec.clone() } } }<|fim▁end|>
} }
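The completion closes len() and the impl block. Note that this queue pops from the front with vec.remove(0), which shifts every remaining element and is O(n); a ring buffer (Rust's VecDeque, or Python's collections.deque as sketched here) gives O(1) pops at both ends:

from collections import deque

queue = deque()
queue.append(1)              # push to the back
queue.append(2)
assert queue.popleft() == 1  # pop from the front in O(1)
assert len(queue) == 1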
<|file_name|>Cleaner.py<|end_file_name|><|fim▁begin|># vim: ts=4:sw=4:expandtab # BleachBit # Copyright (C) 2008-2017 Andrew Ziem # https://www.bleachbit.org # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Perform (or assist with) cleaning operations. """ from __future__ import absolute_import, print_function from bleachbit import _, expanduser, expandvars from bleachbit.FileUtilities import children_in_directory from bleachbit.Options import options from bleachbit import Command, FileUtilities, Memory, Special,GuiBasic import glob import logging import os.path import re import sys import warnings import subprocess if 'posix' == os.name: from bleachbit import Unix elif 'nt' == os.name: from bleachbit import Windows # Suppress GTK warning messages while running in CLI #34 warnings.simplefilter("ignore", Warning) try: import gtk HAVE_GTK = True except: HAVE_GTK = False # a module-level variable for holding cleaners backends = {} class Cleaner: """Base class for a cleaner""" def __init__(self): self.actions = [] self.id = None self.description = None self.name = None self.options = {} self.running = [] self.warnings = {} def add_action(self, option_id, action): """Register 'action' (instance of class Action) to be executed for ''option_id'. 
The actions must implement list_files and other_cleanup()""" self.actions += ((option_id, action), ) def add_option(self, option_id, name, description): """Register option (such as 'cache')""" self.options[option_id] = (name, description) def add_running(self, detection_type, pathname): """Add a way to detect this program is currently running""" self.running += ((detection_type, pathname), ) def auto_hide(self): """Return boolean whether it is OK to automatically hide this cleaner""" for (option_id, __name) in self.get_options(): try: for cmd in self.get_commands(option_id): for dummy in cmd.execute(False): return False for ds in self.get_deep_scan(option_id): if isinstance(ds, dict): return False except Exception as e: logger = logging.getLogger(__name__) logger.exception('exception in auto_hide(), cleaner=%s, option=%s', self.name, option_id) return True def get_commands(self, option_id): """Get list of Command instances for option 'option_id'""" for action in self.actions: if option_id == action[0]: for cmd in action[1].get_commands(): yield cmd if option_id not in self.options: raise RuntimeError("Unknown option '%s'" % option_id) def get_deep_scan(self, option_id): """Get dictionary used to build a deep scan""" for action in self.actions: if option_id == action[0]: for ds in action[1].get_deep_scan(): yield ds if option_id not in self.options: raise RuntimeError("Unknown option '%s'" % option_id) def get_description(self): """Brief description of the cleaner""" return self.description def get_id(self): """Return the unique name of this cleaner""" return self.id def get_name(self): """Return the human name of this cleaner""" return self.name def get_option_descriptions(self): """Yield the names and descriptions of each option in a 2-tuple""" if self.options: for key in sorted(self.options.keys()): yield (self.options[key][0], self.options[key][1]) def get_options(self): """Return user-configurable options in 2-tuple (id, name)""" if self.options: for key in sorted(self.options.keys()): yield (key, self.options[key][0]) def get_warning(self, option_id): """Return a warning as string.""" if option_id in self.warnings: return self.warnings[option_id] else: return None def is_running(self): """Return whether the program is currently running""" resp_cli="" logger = logging.getLogger(__name__) for running in self.running: test = running[0] pathname = running[1] if 'exe' == test and 'posix' == os.name: if Unix.is_running(pathname): #print "debug: process '%s' is running" % pathname logger.debug("Debug: process '%s' is running", pathname) if options.get("close_run"): if not subprocess.mswindows: #print "debug: Closing process '%s'" % pathname if "--preset" in sys.argv: resp_cli = raw_input("Do you Want BleachBit to Close " + pathname + " y/n : ") else: resp = GuiBasic.message_dialog(None,"Do you Want BleachBit to Close " + pathname,gtk.MESSAGE_WARNING, gtk.BUTTONS_YES_NO) if gtk.RESPONSE_YES == resp or resp_cli.lower() == "y": # user cancelled, so don't toggle option logger.debug("Debug: Closing process '%s'",pathname) subprocess.check_output(["killall", "-9", pathname]) if not Unix.is_running(pathname): logger.debug("Debug: Closing process '%s' successful",pathname) return False return True elif 'exe' == test and 'nt' == os.name: if Windows.is_process_running(pathname): #print "debug: process '%s' is running" % pathname logger.debug("Debug: process '%s' is running", pathname) if options.get("close_run"): if subprocess.mswindows: #print "debug: Closing process '%s'" % pathname if "--preset" in 
sys.argv: resp_cli = raw_input("Do you Want BleachBit to Close " + pathname + " y/n : ") else: resp = GuiBasic.message_dialog(None,"Do you Want BleachBit to Close " + pathname,gtk.MESSAGE_WARNING, gtk.BUTTONS_YES_NO) if gtk.RESPONSE_YES == resp or resp_cli.lower() == "y": logger.debug("debug: Closing process '%s'",pathname) subprocess.check_output(["taskkill", "/IM", pathname]) if not Windows.is_process_running(pathname): logger.debug("debug: Closing process '%s' successful",pathname) return False logger.debug("process '%s' is running", pathname) return True elif 'exe' == test and 'nt' == os.name: if Windows.is_process_running(pathname): logger.debug("process '%s' is running", pathname) return True elif 'pathname' == test: expanded = expanduser(expandvars(pathname)) for globbed in glob.iglob(expanded): if os.path.exists(globbed): logger.debug("file '%s' exists indicating '%s' is running", self.name) return True else: raise RuntimeError( "Unknown running-detection test '%s'" % test) return False def is_usable(self): """Return whether the cleaner is usable (has actions)""" return len(self.actions) > 0 def set_warning(self, option_id, description): """Set a warning to be displayed when option is selected interactively""" self.warnings[option_id] = description class Firefox(Cleaner): """Mozilla Firefox""" def __init__(self): Cleaner.__init__(self) self.add_option('backup', _('Backup files'), _( 'Delete the backup files')) self.add_option('cache', _('Cache'), _( 'Delete the web cache, which reduces time to display revisited pages')) self.add_option('cookies', _('Cookies'), _( 'Delete cookies, which contain information such as web site preferences, authentication, and tracking identification')) self.add_option( 'crash_reports', _('Crash reports'), _('Delete the files')) # TRANSLATORS: DOM = Document Object Model. 
self.add_option('dom', _('DOM Storage'), _('Delete HTML5 cookies')) self.add_option('download_history', _( 'Download history'), _('List of files downloaded')) self.add_option('forms', _('Form history'), _( 'A history of forms entered in web sites and in the Search bar')) self.add_option('session_restore', _('Session restore'), _( 'Loads the initial session after the browser closes or crashes')) self.add_option('site_preferences', _( 'Site preferences'), _('Settings for individual sites')) self.add_option('passwords', _('Passwords'), _( 'A database of usernames and passwords as well as a list of sites that should not store passwords')) self.set_warning( 'passwords', _('This option will delete your saved passwords.')) self.add_option( 'url_history', _('URL history'), _('List of visited web pages')) self.add_option('vacuum', _('Vacuum'), _( 'Clean database fragmentation to reduce space and improve speed without removing any data')) if 'posix' == os.name: self.profile_dir = "~/.mozilla/firefox*/*.default*/" self.add_running('exe', 'firefox') self.add_running('exe', 'firefox-bin') self.add_running('pathname', self.profile_dir + 'lock') elif 'nt' == os.name: self.profile_dir = "$USERPROFILE\\Application Data\\Mozilla\\Firefox\\Profiles\\*.default*\\" self.add_running('exe', 'firefox.exe') self.description = _("Web browser") self.id = 'firefox' self.name = "Firefox" def get_commands(self, option_id): files = [] # backup files if 'backup' == option_id: bookmark_bu_dir = os.path.join(self.profile_dir, 'bookmarkbackups') files += FileUtilities.expand_glob_join(bookmark_bu_dir, "*.json") files += FileUtilities.expand_glob_join( bookmark_bu_dir, "*.jsonlz4") # browser cache cache_base = None if 'posix' == os.name: cache_base = self.profile_dir elif 'nt' == os.name: cache_base = "$localappdata\\Mozilla\\Firefox\\Profiles\\*.default*" if 'cache' == option_id: dirs = FileUtilities.expand_glob_join(cache_base, "Cache*") dirs += FileUtilities.expand_glob_join(cache_base, "OfflineCache") if 'nt' == os.name: dirs += FileUtilities.expand_glob_join( cache_base, "jumpListCache") # Windows 8 if 'posix' == os.name: # This path is whitelisted under the System - Cache cleaner, # so it can be cleaned here. 
dirs += [expanduser('~/.cache/mozilla')] for dirname in dirs: for filename in children_in_directory(dirname, False): yield Command.Delete(filename) # Necko Predictive Network Actions # https://wiki.mozilla.org/Privacy/Reviews/Necko files += FileUtilities.expand_glob_join( self.profile_dir, "netpredictions.sqlite") # cookies if 'cookies' == option_id: files += FileUtilities.expand_glob_join( self.profile_dir, "cookies.txt") files += FileUtilities.expand_glob_join( self.profile_dir, "cookies.sqlite") # crash reports if 'posix' == os.name: crashdir = expanduser("~/.mozilla/firefox/Crash Reports") if 'nt' == os.name: crashdir = expandvars( "$USERPROFILE\\Application Data\\Mozilla\\Firefox\\Crash Reports") if 'crash_reports' == option_id: for filename in children_in_directory(crashdir, False): files += [filename] files += FileUtilities.expand_glob_join( self.profile_dir, "minidumps/*.dmp") # DOM storage if 'dom' == option_id: files += FileUtilities.expand_glob_join( self.profile_dir, "webappsstore.sqlite") # download history if 'download_history' == option_id: # Firefox version 1 files += FileUtilities.expand_glob_join( self.profile_dir, "downloads.rdf") # Firefox version 3 files += FileUtilities.expand_glob_join( self.profile_dir, "downloads.sqlite") # forms if 'forms' == option_id: files += FileUtilities.expand_glob_join( self.profile_dir, "formhistory.dat") files += FileUtilities.expand_glob_join( self.profile_dir, "formhistory.sqlite") # passwords if 'passwords' == option_id: # http://kb.mozillazine.org/Password_Manager files += FileUtilities.expand_glob_join( self.profile_dir, "signons.txt") files += FileUtilities.expand_glob_join( self.profile_dir, "signons[2-3].txt") files += FileUtilities.expand_glob_join( self.profile_dir, "signons.sqlite") files += FileUtilities.expand_glob_join( self.profile_dir, "logins.json") # session restore if 'session_restore' == option_id: # Names include sessionstore.js, sessionstore.bak, # sessionstore.bak-20140715214327, sessionstore-1.js files += FileUtilities.expand_glob_join( self.profile_dir, "sessionstore*.js") files += FileUtilities.expand_glob_join( self.profile_dir, "sessionstore.bak*") ss_bu_dir = os.path.join(self.profile_dir, 'sessionstore-backups') files += FileUtilities.expand_glob_join( ss_bu_dir, 'previous.js') files += FileUtilities.expand_glob_join( ss_bu_dir, 'upgrade.js-20*') files += FileUtilities.expand_glob_join( ss_bu_dir, 'recovery.js') files += FileUtilities.expand_glob_join( ss_bu_dir, 'recovery.bak') # site-specific preferences if 'site_preferences' == option_id: files += FileUtilities.expand_glob_join( self.profile_dir, "content-prefs.sqlite") # URL history if 'url_history' == option_id: # Firefox version 1 files += FileUtilities.expand_glob_join( self.profile_dir, "history.dat") # Firefox 21 on Windows if 'nt' == os.name: files += FileUtilities.expand_glob_join( cache_base, "thumbnails/*.png") # see also function other_cleanup() # finish for filename in files: yield Command.Delete(filename) # URL history if 'url_history' == option_id: for path in FileUtilities.expand_glob_join(self.profile_dir, "places.sqlite"): yield Command.Function(path, Special.delete_mozilla_url_history, _('Delete the usage history')) # vacuum if 'vacuum' == option_id: paths = [] paths += FileUtilities.expand_glob_join( self.profile_dir, "*.sqlite") if not cache_base == self.profile_dir: paths += FileUtilities.expand_glob_join(cache_base, "*.sqlite") for path in paths: yield Command.Function(path, FileUtilities.vacuum_sqlite3, _("Vacuum")) class 
OpenOfficeOrg(Cleaner): """Delete OpenOffice.org cache""" def __init__(self): Cleaner.__init__(self) self.options = {} self.add_option('cache', _('Cache'), _('Delete the cache')) self.add_option('recent_documents', _('Most recently used'), _( "Delete the list of recently used documents")) self.id = 'openofficeorg' self.name = 'OpenOffice.org' self.description = _("Office suite") # reference: http://katana.oooninja.com/w/editions_of_openoffice.org if 'posix' == os.name: self.prefixes = ["~/.ooo-2.0", "~/.openoffice.org2", "~/.openoffice.org2.0", "~/.openoffice.org/3"] self.prefixes += ["~/.ooo-dev3"] if 'nt' == os.name: self.prefixes = [ "$APPDATA\\OpenOffice.org\\3", "$APPDATA\\OpenOffice.org2"] def get_commands(self, option_id): # paths for which to run expand_glob_join egj = [] if 'recent_documents' == option_id: egj.append( "user/registry/data/org/openoffice/Office/Histories.xcu") egj.append( "user/registry/cache/org.openoffice.Office.Histories.dat") if 'recent_documents' == option_id and not 'cache' == option_id: egj.append("user/registry/cache/org.openoffice.Office.Common.dat") for egj_ in egj: for prefix in self.prefixes: for path in FileUtilities.expand_glob_join(prefix, egj_): if 'nt' == os.name: path = os.path.normpath(path) if os.path.lexists(path): yield Command.Delete(path) if 'cache' == option_id: dirs = [] for prefix in self.prefixes:<|fim▁hole|> dirs += FileUtilities.expand_glob_join( prefix, "user/registry/cache/") for dirname in dirs: if 'nt' == os.name: dirname = os.path.normpath(dirname) for filename in children_in_directory(dirname, False): yield Command.Delete(filename) if 'recent_documents' == option_id: for prefix in self.prefixes: for path in FileUtilities.expand_glob_join(prefix, "user/registry/data/org/openoffice/Office/Common.xcu"): if os.path.lexists(path): yield Command.Function(path, Special.delete_ooo_history, _('Delete the usage history')) # ~/.openoffice.org/3/user/registrymodifications.xcu # Apache OpenOffice.org 3.4.1 from openoffice.org on Ubuntu 13.04 # %AppData%\OpenOffice.org\3\user\registrymodifications.xcu # Apache OpenOffice.org 3.4.1 from openoffice.org on Windows XP for path in FileUtilities.expand_glob_join(prefix, "user/registrymodifications.xcu"): if os.path.lexists(path): yield Command.Function(path, Special.delete_office_registrymodifications, _('Delete the usage history')) class System(Cleaner): """Clean the system in general""" def __init__(self): Cleaner.__init__(self) # # options for Linux and BSD # if 'posix' == os.name: # TRANSLATORS: desktop entries are .desktop files in Linux that # make up the application menu (the menu that shows BleachBit, # Firefox, and others. The .desktop files also associate file # types, so clicking on an .html file in Nautilus brings up # Firefox. # More information: # http://standards.freedesktop.org/menu-spec/latest/index.html#introduction self.add_option('desktop_entry', _('Broken desktop files'), _( 'Delete broken application menu entries and file associations')) self.add_option('cache', _('Cache'), _('Delete the cache')) # TRANSLATORS: Localizations are files supporting specific # languages, so applications appear in Spanish, etc. self.add_option('localizations', _('Localizations'), _( 'Delete files for unwanted languages')) self.set_warning( 'localizations', _("Configure this option in the preferences.")) # TRANSLATORS: 'Rotated logs' refers to old system log files. 
# Linux systems often have a scheduled job to rotate the logs # which means compress all except the newest log and then delete # the oldest log. You could translate this 'old logs.' self.add_option( 'rotated_logs', _('Rotated logs'), _('Delete old system logs')) self.add_option('recent_documents', _('Recent documents list'), _( 'Delete the list of recently used documents')) self.add_option('trash', _('Trash'), _('Empty the trash')) # # options just for Linux # if sys.platform.startswith('linux'): self.add_option('memory', _('Memory'), # TRANSLATORS: 'free' means 'unallocated' _('Wipe the swap and free memory')) self.set_warning( 'memory', _('This option is experimental and may cause system problems.')) # # options just for Microsoft Windows # if 'nt' == os.name: self.add_option('logs', _('Logs'), _('Delete the logs')) self.add_option( 'memory_dump', _('Memory dump'), _('Delete the file memory.dmp')) self.add_option('muicache', 'MUICache', _('Delete the cache')) # TRANSLATORS: Prefetch is Microsoft Windows jargon. self.add_option('prefetch', _('Prefetch'), _('Delete the cache')) self.add_option( 'recycle_bin', _('Recycle bin'), _('Empty the recycle bin')) # TRANSLATORS: 'Update' is a noun, and 'Update uninstallers' is an option to delete # the uninstallers for software updates. self.add_option('updates', _('Update uninstallers'), _( 'Delete uninstallers for Microsoft updates including hotfixes, service packs, and Internet Explorer updates')) # # options for GTK+ # if HAVE_GTK: self.add_option('clipboard', _('Clipboard'), _( 'The desktop environment\'s clipboard used for copy and paste operations')) # # options common to all platforms # # TRANSLATORS: "Custom" is an option allowing the user to specify which # files and folders will be erased. self.add_option('custom', _('Custom'), _( 'Delete user-specified files and folders')) # TRANSLATORS: 'free' means 'unallocated' self.add_option('free_disk_space', _('Free disk space'), # TRANSLATORS: 'free' means 'unallocated' _('Overwrite free disk space to hide deleted files')) self.set_warning('free_disk_space', _('This option is very slow.')) self.add_option( 'tmp', _('Temporary files'), _('Delete the temporary files')) self.description = _("The system in general") self.id = 'system' self.name = _("System") def get_commands(self, option_id): # This variable will collect fully expanded file names, and # at the end of this function, they will be checked they exist # and processed through Command.Delete(). 
files = [] # cache if 'posix' == os.name and 'cache' == option_id: dirname = expanduser("~/.cache/") for filename in children_in_directory(dirname, True): if self.whitelisted(filename): continue files += [filename] # custom if 'custom' == option_id: for (c_type, c_path) in options.get_custom_paths(): if 'file' == c_type: files += [c_path] elif 'folder' == c_type: files += [c_path] for path in children_in_directory(c_path, True): files += [path] else: raise RuntimeError( 'custom folder has invalid type %s' % c_type) # menu menu_dirs = ['~/.local/share/applications', '~/.config/autostart', '~/.gnome/apps/', '~/.gnome2/panel2.d/default/launchers', '~/.gnome2/vfolders/applications/', '~/.kde/share/apps/RecentDocuments/', '~/.kde/share/mimelnk', '~/.kde/share/mimelnk/application/ram.desktop', '~/.kde2/share/mimelnk/application/', '~/.kde2/share/applnk'] if 'posix' == os.name and 'desktop_entry' == option_id: for dirname in menu_dirs: for filename in [fn for fn in children_in_directory(dirname, False) if fn.endswith('.desktop')]: if Unix.is_broken_xdg_desktop(filename): yield Command.Delete(filename) # unwanted locales if 'posix' == os.name and 'localizations' == option_id: for path in Unix.locales.localization_paths(locales_to_keep=options.get_languages()): if os.path.isdir(path): for f in FileUtilities.children_in_directory(path, True): yield Command.Delete(f) yield Command.Delete(path) # Windows logs if 'nt' == os.name and 'logs' == option_id: paths = ( '$ALLUSERSPROFILE\\Application Data\\Microsoft\\Dr Watson\\*.log', '$ALLUSERSPROFILE\\Application Data\\Microsoft\\Dr Watson\\user.dmp', '$LocalAppData\\Microsoft\\Windows\\WER\\ReportArchive\\*\\*', '$LocalAppData\\Microsoft\\Windows\WER\\ReportQueue\\*\\*', '$programdata\\Microsoft\\Windows\\WER\\ReportArchive\\*\\*', '$programdata\\Microsoft\\Windows\\WER\\ReportQueue\\*\\*', '$localappdata\\Microsoft\\Internet Explorer\\brndlog.bak', '$localappdata\\Microsoft\\Internet Explorer\\brndlog.txt', '$windir\\*.log', '$windir\\imsins.BAK', '$windir\\OEWABLog.txt', '$windir\\SchedLgU.txt', '$windir\\ntbtlog.txt', '$windir\\setuplog.txt', '$windir\\REGLOCS.OLD', '$windir\\Debug\\*.log', '$windir\\Debug\\Setup\\UpdSh.log', '$windir\\Debug\\UserMode\\*.log', '$windir\\Debug\\UserMode\\ChkAcc.bak', '$windir\\Debug\\UserMode\\userenv.bak', '$windir\\Microsoft.NET\Framework\*\*.log', '$windir\\pchealth\\helpctr\\Logs\\hcupdate.log', '$windir\\security\\logs\\*.log', '$windir\\security\\logs\\*.old', '$windir\\SoftwareDistribution\\*.log', '$windir\\SoftwareDistribution\\DataStore\\Logs\\*', '$windir\\system32\\TZLog.log', '$windir\\system32\\config\\systemprofile\\Application Data\\Microsoft\\Internet Explorer\\brndlog.bak', '$windir\\system32\\config\\systemprofile\\Application Data\\Microsoft\\Internet Explorer\\brndlog.txt', '$windir\\system32\\LogFiles\\AIT\\AitEventLog.etl.???', '$windir\\system32\\LogFiles\\Firewall\\pfirewall.log*', '$windir\\system32\\LogFiles\\Scm\\SCM.EVM*', '$windir\\system32\\LogFiles\\WMI\\Terminal*.etl', '$windir\\system32\\LogFiles\\WMI\\RTBackup\EtwRT.*etl', '$windir\\system32\\wbem\\Logs\\*.lo_', '$windir\\system32\\wbem\\Logs\\*.log', ) for path in paths: expanded = expandvars(path) for globbed in glob.iglob(expanded): files += [globbed] # memory if sys.platform.startswith('linux') and 'memory' == option_id: yield Command.Function(None, Memory.wipe_memory, _('Memory')) # memory dump # how to manually create this file # http://www.pctools.com/guides/registry/detail/856/ if 'nt' == os.name and 'memory_dump' == option_id: 
fname = expandvars('$windir\\memory.dmp') if os.path.exists(fname): files += [fname] for fname in glob.iglob(expandvars('$windir\\Minidump\\*.dmp')): files += [fname] # most recently used documents list if 'posix' == os.name and 'recent_documents' == option_id: files += [expanduser("~/.recently-used")] # GNOME 2.26 (as seen on Ubuntu 9.04) will retain the list # in memory if it is simply deleted, so it must be shredded # (or at least truncated). # # GNOME 2.28.1 (Ubuntu 9.10) and 2.30 (10.04) do not re-read # the file after truncation, but do re-read it after # shredding. # # https://bugzilla.gnome.org/show_bug.cgi?id=591404 def gtk_purge_items(): """Purge GTK items""" gtk.RecentManager().purge_items() yield 0 for pathname in ["~/.recently-used.xbel", "~/.local/share/recently-used.xbel"]: pathname = expanduser(pathname) if os.path.lexists(pathname): yield Command.Shred(pathname) if HAVE_GTK: # Use the Function to skip when in preview mode yield Command.Function(None, gtk_purge_items, _('Recent documents list')) if 'posix' == os.name and 'rotated_logs' == option_id: for path in Unix.rotated_logs(): yield Command.Delete(path) # temporary files if 'posix' == os.name and 'tmp' == option_id: dirnames = ['/tmp', '/var/tmp'] for dirname in dirnames: for path in children_in_directory(dirname, True): is_open = FileUtilities.openfiles.is_open(path) ok = not is_open and os.path.isfile(path) and \ not os.path.islink(path) and \ FileUtilities.ego_owner(path) and \ not self.whitelisted(path) if ok: yield Command.Delete(path) # temporary files if 'nt' == os.name and 'tmp' == option_id: dirname = expandvars( "$USERPROFILE\\Local Settings\\Temp\\") # whitelist the folder %TEMP%\Low but not its contents # https://bugs.launchpad.net/bleachbit/+bug/1421726 low = os.path.join(dirname, 'low').lower() for filename in children_in_directory(dirname, True): if not low == filename.lower(): yield Command.Delete(filename) dirname = expandvars("$windir\\temp\\") for filename in children_in_directory(dirname, True): yield Command.Delete(filename) # trash if 'posix' == os.name and 'trash' == option_id: dirname = expanduser("~/.Trash") for filename in children_in_directory(dirname, False): yield Command.Delete(filename) # fixme http://www.ramendik.ru/docs/trashspec.html # http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html # ~/.local/share/Trash # * GNOME 2.22, Fedora 9 # * KDE 4.1.3, Ubuntu 8.10 dirname = expanduser("~/.local/share/Trash/files") for filename in children_in_directory(dirname, True): yield Command.Delete(filename) dirname = expanduser("~/.local/share/Trash/info") for filename in children_in_directory(dirname, True): yield Command.Delete(filename) dirname = expanduser("~/.local/share/Trash/expunged") # [email protected] tells me that the trash # backend puts files in here temporary, but in some situations # the files are stuck. for filename in children_in_directory(dirname, True): yield Command.Delete(filename) # clipboard if HAVE_GTK and 'clipboard' == option_id: def clear_clipboard(): gtk.gdk.threads_enter() clipboard = gtk.clipboard_get() clipboard.set_text("") gtk.gdk.threads_leave() return 0 yield Command.Function(None, clear_clipboard, _('Clipboard')) # overwrite free space shred_drives = options.get_list('shred_drives') if 'free_disk_space' == option_id and shred_drives: for pathname in shred_drives: # TRANSLATORS: 'Free' means 'unallocated.' 
# %s expands to a path such as C:\ or /tmp/ display = _("Overwrite free disk space %s") % pathname def wipe_path_func(): for ret in FileUtilities.wipe_path(pathname, idle=True): # Yield control to GTK idle because this process # is very slow. Also display progress. yield ret yield 0 yield Command.Function(None, wipe_path_func, display) # MUICache if 'nt' == os.name and 'muicache' == option_id: keys = ( 'HKCU\\Software\\Microsoft\\Windows\\ShellNoRoam\\MUICache', 'HKCU\\Software\\Classes\\Local Settings\\Software\\Microsoft\\Windows\\Shell\\MuiCache') for key in keys: yield Command.Winreg(key, None) # prefetch if 'nt' == os.name and 'prefetch' == option_id: for path in glob.iglob(expandvars('$windir\\Prefetch\\*.pf')): yield Command.Delete(path) # recycle bin if 'nt' == os.name and 'recycle_bin' == option_id: # This method allows shredding recycled_any = False for path in Windows.get_recycle_bin(): recycled_any = True yield Command.Delete(path) # If there were any files deleted, Windows XP will show the # wrong icon for the recycle bin indicating it is not empty. # The icon will be incorrect until logging in to Windows again # or until it is emptied using the Windows API call for emptying # the recycle bin. # Windows 10 refreshes the recycle bin icon when the user # opens the recycle bin folder. # This is a hack to refresh the icon. def empty_recycle_bin_func(): import tempfile tmpdir = tempfile.mkdtemp() Windows.move_to_recycle_bin(tmpdir) try: Windows.empty_recycle_bin(None, True) except: logging.getLogger(__name__).info('error in empty_recycle_bin()', exc_info=True) yield 0 # Using the Function Command prevents emptying the recycle bin # when in preview mode. if recycled_any: yield Command.Function(None, empty_recycle_bin_func, _('Empty the recycle bin')) # Windows Updates if 'nt' == os.name and 'updates' == option_id: for wu in Windows.delete_updates(): yield wu # return queued files for filename in files: if os.path.lexists(filename): yield Command.Delete(filename) def whitelisted(self, pathname): """Return boolean whether file is whitelisted""" regexes = [ '^/tmp/.X0-lock$', '^/tmp/.truecrypt_aux_mnt.*/(control|volume)$', '^/tmp/.vbox-[^/]+-ipc/lock$', '^/tmp/.wine-[0-9]+/server-.*/lock$', '^/tmp/gconfd-[^/]+/lock/ior$', '^/tmp/fsa/', # fsarchiver '^/tmp/kde-', '^/tmp/kdesudo-', '^/tmp/ksocket-', '^/tmp/orbit-[^/]+/bonobo-activation-register[a-z0-9-]*.lock$', '^/tmp/orbit-[^/]+/bonobo-activation-server-[a-z0-9-]*ior$', '^/tmp/pulse-[^/]+/pid$', '^/var/tmp/kdecache-', '^' + expanduser('~/.cache/wallpaper/'), # Clean Firefox cache from Firefox cleaner (LP#1295826) '^' + expanduser('~/.cache/mozilla'), # Clean Google Chrome cache from Google Chrome cleaner (LP#656104) '^' + expanduser('~/.cache/google-chrome'), '^' + expanduser('~/.cache/gnome-control-center/'), # iBus Pinyin # https://bugs.launchpad.net/bleachbit/+bug/1538919 '^' + expanduser('~/.cache/ibus/')] for regex in regexes: if re.match(regex, pathname) is not None: return True return False def register_cleaners(): """Register all known cleaners: system, CleanerML, and Winapp2""" global backends # wipe out any registrations # Because this is a global variable, cannot use backends = {} backends.clear() # initialize "hard coded" (non-CleanerML) backends backends["firefox"] = Firefox() backends["openofficeorg"] = OpenOfficeOrg() backends["system"] = System() # register CleanerML cleaners from bleachbit import CleanerML CleanerML.load_cleaners() # register Winapp2.ini cleaners if 'nt' == os.name: from bleachbit import Winapp 
Winapp.load_cleaners() def create_simple_cleaner(paths): """Shred arbitrary files (used in CLI and GUI)""" cleaner = Cleaner() cleaner.add_option(option_id='files', name='', description='') cleaner.name = _("System") # shows up in progress bar from bleachbit import Action class CustomFileAction(Action.ActionProvider): action_key = '__customfileaction' def get_commands(self): for path in paths: if not isinstance(path, (str, unicode)): raise RuntimeError( 'expected path as string but got %s' % str(path)) if not os.path.isabs(path): path = os.path.abspath(path) if os.path.isdir(path): for child in children_in_directory(path, True): yield Command.Shred(child) yield Command.Shred(path) else: yield Command.Shred(path) provider = CustomFileAction(None) cleaner.add_action('files', provider) return cleaner def create_wipe_cleaner(path): """Wipe free disk space of arbitrary paths (used in GUI)""" cleaner = Cleaner() cleaner.add_option( option_id='free_disk_space', name='', description='') cleaner.name = '' # create a temporary cleaner object display = _("Overwrite free disk space %s") % path def wipe_path_func(): for ret in FileUtilities.wipe_path(path, idle=True): yield ret yield 0 from bleachbit import Action class CustomWipeAction(Action.ActionProvider): action_key = '__customwipeaction' def get_commands(self): yield Command.Function(None, wipe_path_func, display) provider = CustomWipeAction(None) cleaner.add_action('free_disk_space', provider) return cleaner<|fim▁end|>
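Cleaner.py is built around one pattern: add_option registers a user-visible option, add_action binds an action provider to that option, and get_commands lazily yields command objects for whichever option is requested. A minimal, self-contained Python sketch of that pattern, with illustrative names rather than the BleachBit API:

class MiniCleaner:
    def __init__(self):
        self.actions = []  # (option_id, action) pairs
        self.options = {}  # option_id -> (name, description)

    def add_option(self, option_id, name, description):
        self.options[option_id] = (name, description)

    def add_action(self, option_id, action):
        self.actions.append((option_id, action))

    def get_commands(self, option_id):
        # Lazily yield the commands of every action bound to this option.
        if option_id not in self.options:
            raise RuntimeError("Unknown option '%s'" % option_id)
        for bound_id, action in self.actions:
            if bound_id == option_id:
                for command in action():
                    yield command

cleaner = MiniCleaner()
cleaner.add_option("cache", "Cache", "Delete the cache")
cleaner.add_action("cache", lambda: iter(["delete /tmp/a", "delete /tmp/b"]))
assert list(cleaner.get_commands("cache")) == ["delete /tmp/a", "delete /tmp/b"]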
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub use self::circular::Circular2d;<|fim▁hole|>pub use self::sunflower::SunflowerSeed2d; mod circular; mod sunflower;<|fim▁end|>
<|file_name|>newTests.js<|end_file_name|><|fim▁begin|>"use strict" const should = require('should') const rewire = require('rewire') const IC = rewire('../commands/new.js') describe('New command',() => { let mockFS = { outputFile : (filename,content) => { console.log(`mock writing to ${filename}`); return { then : () => { return { catch : () => {} }} } } } let mockFindADRDir = ( callback,startFrom,notFoundHandler) => { callback('.') } let mockEditorCommand = "mockEditor" let mockPropUtil = { parse : (file,opts,cb) => { cb(undefined,{editor : mockEditorCommand}) } } let dummyLaunchEditor = _ => {} let commonMocks = { fs : mockFS , propUtil : mockPropUtil , launchEditorForADR : dummyLaunchEditor , writeADR : (adrFilename,newADR) => { console.log(`Pretending to write to ${adrFilename}`)} } function modifiedCommonMocks(specificMocks) { let copy = {} for (var k in commonMocks) copy[k] = commonMocks[k] for (var j in specificMocks) copy[j] = specificMocks[j] return copy; } it("Should fail if passed an invalid title - that can't be used as a filename", () => { let revert = IC.__set__({ findADRDir : (startFrom, callback,notFoundHandler) => { callback('.') } , withAllADRFiles : (callback) => { callback(['1-adr1.md','2-adr2.md'])} }) let block = () => {IC(["bla","###"])} block.should.throw() revert() }) it ("Should assign the next number for the new ADR - one higher than the last available ADR", () => { let testTitle = "test" let mocksWithHighestNumber = n => { return { findADRDir : mockFindADRDir , withAllADRFiles : callback => {callback(['1-adr1.md', n + '-adr2.md'])} , adrContent : (num,title,date) => { num.should.eql(n+1)} } } var revert = IC.__set__(modifiedCommonMocks(mocksWithHighestNumber(2))) IC([testTitle]) revert() revert = IC.__set__(modifiedCommonMocks(mocksWithHighestNumber(5))) IC(["test"]) revert(); }) it("Should use the title given as title parts when creating the new ADR content", () => { let testTitle = "test" var revert = IC.__set__(modifiedCommonMocks({ findADRDir : mockFindADRDir , withAllADRFiles : (callback) => { callback(['1-adr1.md'])} , adrContent : (num,title,date) => {<|fim▁hole|> title.should.eql(testTitle) } })) IC([testTitle]) revert(); let adrWithSeveralParts = ["adr","part","2"] revert = IC.__set__(modifiedCommonMocks({ findADRDir : mockFindADRDir , withAllADRFiles : (callback) => { callback(['1-adr1.md'])} , adrContent : (num,title,date) => { console.log(`Title mock got: ${title}`) title.should.eql(adrWithSeveralParts.join(' ')) } })) IC(adrWithSeveralParts) revert(); }) it("should attempt writing the content to a file", function() { let testTitle = "test" var revert = IC.__set__(modifiedCommonMocks({ findADRDir : mockFindADRDir , withAllADRFiles : (callback) => { callback(['1-adr1.md'])} , common : { writeTextFileAndNotifyUser : (filename, content, msg) => { filename.should.startWith('2') //default file name scheme starts with the expected ID content.should.match(new RegExp(testTitle)) //The title should be somewhere in the content msg.should.startWith('Writing') } } })) IC([testTitle]) revert() }) })<|fim▁end|>
<|file_name|>BasicAuthCallBackHandler.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.entitlement.filter.callback; import org.apache.commons.codec.binary.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException; <|fim▁hole|>public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler { private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class); public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException { String authHeaderEn = null; if (!(request.getHeader("Authorization") == null || request.getHeader("Authorization").equals("null"))) { authHeaderEn = request.getHeader("Authorization"); String[] tempArr = authHeaderEn.split(" "); if (tempArr.length == 2) { String authHeaderDc = new String(Base64.decodeBase64(tempArr[1].getBytes())); tempArr = authHeaderDc.split(":"); if (tempArr.length == 2) { setUserName(tempArr[0]); return; // username extracted; do not fall through to the failure path } } throw new EntitlementFilterException("Unable to retrieve username from Authorization header"); } } }<|fim▁end|>
import javax.servlet.http.HttpServletRequest;
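For clarity, a small Python sketch of the parsing the constructor above performs: split the Authorization header on a space, base64-decode the second token, and take the text before the first colon as the username. The explicit 'Basic' scheme check is an addition here, not present in the Java original:

    import base64

    def username_from_basic_auth(header):
        scheme, _, token = header.partition(' ')
        if scheme != 'Basic' or not token:
            raise ValueError('unable to retrieve username from Authorization header')
        user, sep, _password = base64.b64decode(token).decode().partition(':')
        if not sep:
            raise ValueError('unable to retrieve username from Authorization header')
        return user

    token = base64.b64encode(b'alice:secret').decode()
    print(username_from_basic_auth('Basic ' + token))  # alice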
<|file_name|>function-declaration.js<|end_file_name|><|fim▁begin|>function foo() { console.log('foo');<|fim▁hole|><|fim▁end|>
}
<|file_name|>protocol.py<|end_file_name|><|fim▁begin|><|fim▁hole|>__params__ = {'la': 32, 'lb': 32, 'da': 10} def protocol(client, server, params): la = params['la'] lb = params['lb'] da = params["da"] server.a = UnsignedVec(bitlen=la, dim=da).input(src=driver, desc="a") server.b = Unsigned(bitlen=lb).input(src=driver, desc="b") client.a <<= server.a client.b <<= server.b client.c = client.a * client.b client.c.output(dest=driver, desc="c")<|fim▁end|>
# -*- coding: utf-8 -*-
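Stripped of the TASTY transport, the function the protocol above computes is an elementwise scalar multiplication with inputs bounded by their declared bit lengths. A plain-Python sketch of those cleartext semantics (the secure two-party machinery itself is deliberately not modeled):

    def cleartext_protocol(a, b, la=32, lb=32):
        # c[i] = a[i] * b, mirroring client.c = client.a * client.b
        assert all(0 <= x < 2 ** la for x in a) and 0 <= b < 2 ** lb
        return [x * b for x in a]

    print(cleartext_protocol([3, 5, 7], 4))  # [12, 20, 28]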
<|file_name|>assign_wizard.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- ############################################################################### # # ODOO (ex OpenERP) # Open Source Management Solution # Copyright (C) 2001-2015 Micronaet S.r.l. (<https://micronaet.com>) # Developer: Nicola Riolini @thebrush (<https://it.linkedin.com/in/thebrush>) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero General Public License for more details.<|fim▁hole|># You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### import os import sys import logging import openerp import openerp.addons.decimal_precision as dp from openerp.osv import fields, osv, expression, orm from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta from openerp import SUPERUSER_ID from openerp import tools from openerp.tools.translate import _ from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP, float_compare) _logger = logging.getLogger(__name__) class SaleOrderLine(orm.Model): """ Model name: SaleOrderLine """ _inherit = 'sale.order.line' def restore_stock_status_user_value( self, cr, uid, no_inventory_status, context=None): ''' Update with previous value ''' return self.pool.get('res.users').write( cr, uid, [uid], { 'no_inventory_status': no_inventory_status, }, context=context) def return_view_assign_wizard(self, cr, uid, ids, context=None): ''' Open wizard view: ''' wiz_pool = self.pool.get('sale.order.line.assign.stock.wizard') # Activate stock status: user_pool = self.pool.get('res.users') user = user_pool.browse(cr, uid, uid, context=context) no_inventory_status = user.no_inventory_status user_pool.write(cr, uid, [uid], { 'no_inventory_status': False, }, context=context) # --------------------------------------------------------------------- # Check data: # --------------------------------------------------------------------- # A. Check previsional order: line = self.browse(cr, uid, ids, context=context)[0] order = line.order_id if order.previsional: self.restore_stock_status_user_value( cr, uid, no_inventory_status, context=context) raise osv.except_osv( _('Errore'), _('''Ordine previsionale, non permessa una assegnazione da magazzino in quando viene fatto per caricare il magazzino ''')) # B. State of order: if order.state not in ('manual', 'progress'): self.restore_stock_status_user_value( cr, uid, no_inventory_status, context=context) raise osv.except_osv( _('Errore'), _('''Ordine non nel corretto stato: solo gli ordini attivi non chiusi possono avere assegnazioni da magazzino. ''')) # C. 
Available in stock: product = line.product_id available = product.mx_net_mrp_qty - product.mx_mrp_b_locked # To reenter if this product has assigned if available + line.mx_assigned_qty <= 0.0: self.restore_stock_status_user_value( cr, uid, no_inventory_status, context=context) raise osv.except_osv( _(u'Errore'), _(u'Il prodotto %s non ha disponibilità a magazzino!' % ( product.default_code or product.name or '?' )), ) # D. Remain positive: oc_qty = line.product_uom_qty delivery_qty = line.delivered_qty assigned = line.mx_assigned_qty # current to_assign = oc_qty # all ordered maked = 0.0 warning = '' if 'product_uom_maked_sync_qty' in line._columns: maked = line.product_uom_maked_sync_qty # XXX if yet production use wait the production? if line.mrp_id: warning = 'PRESENTE UNA PRODUZIONE COLLEGATA' if maked: to_assign = oc_qty - maked # remain to produce warning += ' CON MATERIALE PRECEDENTEMENTE CARICATO' if warning: warning += '!!!' if to_assign <= 0: self.restore_stock_status_user_value( cr, uid, no_inventory_status, context=context) raise osv.except_osv( _(u'Errore'), _(u'Al prodotto %s non servono assegnazioni di magazzino!' % ( product.default_code or product.name or '?' )), ) # XXX To remove assign I cannot add this check!!! #elif abs(to_assign - assigned) <= 0.01: # approx check # self.restore_stock_status_user_value( # cr, uid, no_inventory_status, context=context) # raise osv.except_osv( # _(u'Errore'), # _(u'Al prodotto %s sono già assegnati %s!' % ( # product.default_code or product.name or '?', # assigned, # )), # ) # --------------------------------------------------------------------- # Create record for wizard and open: # --------------------------------------------------------------------- # Default assignement: if to_assign >= (available + assigned): new_assigned_qty = available + assigned else: new_assigned_qty = to_assign wiz_id = wiz_pool.create(cr, uid, { 'new_assigned_qty': new_assigned_qty, 'line_id': ids[0], 'status': ''' OC originale: <b>[ %s ]</b> - Prodotte: <b>[ %s ]</b> - Consegnate: <b>[ %s ]</b><br/><br/> <i>Disponibili a magazzino: <b>[ %s ]</b> + Assegnati in precedenza <b>[ %s ]</b> = Disponibili <b>[ %s ]</b> <br/> </i> <font color="red"><b>%s</b></font> ''' % ( oc_qty, maked, delivery_qty, available, assigned, available + assigned, warning, ) }, context=context) # Get and return correct view: model_pool = self.pool.get('ir.model.data') view_id = model_pool.get_object_reference( cr, uid, 'inventory_status_assign_wizard', 'sale_order_line_assign_stock_wizard_view')[1] self.restore_stock_status_user_value( cr, uid, no_inventory_status, context=context) return { 'type': 'ir.actions.act_window', 'name': _('Assegna q. 
magazzino'), 'view_type': 'form', 'view_mode': 'form', 'res_id': wiz_id, 'res_model': 'sale.order.line.assign.stock.wizard', 'view_id': view_id, 'views': [(view_id, 'form')], 'domain': [], 'context': context, 'target': 'new', 'nodestroy': False, } class SaleOrderLineAssignStockWizard(orm.TransientModel): ''' Wizard for stock wizard ''' _name = 'sale.order.line.assign.stock.wizard' # ------------------------------------------------------------------------- # Wizard button event: # ------------------------------------------------------------------------- def action_remove_qty(self, cr, uid, ids, context=None): ''' Event for button done ''' if context is None: context = {} # Remove assignement: line_pool = self.pool.get('sale.order.line') wiz_browse = self.browse(cr, uid, ids, context=context)[0] line_id = wiz_browse.line_id return line_pool.write(cr, uid, line_id.id, { 'mx_assigned_qty': 0, }, context=context) def action_assign_qty(self, cr, uid, ids, context=None): ''' Event for button done ''' if context is None: context = {} line_pool = self.pool.get('sale.order.line') wiz_browse = self.browse(cr, uid, ids, context=context)[0] # Parameters: line_id = wiz_browse.line_id new_assigned_qty = wiz_browse.new_assigned_qty # Update new assignement: return line_pool.write(cr, uid, line_id.id, { 'mx_assigned_qty': new_assigned_qty, }, context=context) _columns = { 'line_id': fields.many2one( 'sale.order.line', 'Sale line'), 'status': fields.text('Stato riga'), 'new_assigned_qty': fields.float('Nuova assegnazione', digits=(16, 2)), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
#
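The default quantity proposed by return_view_assign_wizard reduces to a clamp: assign what the order line still needs, but never more than free stock plus what the line already holds. A standalone sketch of that rule:

    def default_assignment(oc_qty, maked, available, assigned):
        to_assign = oc_qty - maked     # ordered quantity not yet produced
        usable = available + assigned  # free stock plus this line's prior share
        return min(to_assign, usable)

    print(default_assignment(oc_qty=10, maked=4, available=3, assigned=2))  # 5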
<|file_name|>PathDressupDogbone.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # *************************************************************************** # * Copyright (c) 2014 Yorik van Havre <[email protected]> * # * * # * This program is free software; you can redistribute it and/or modify * # * it under the terms of the GNU Lesser General Public License (LGPL) * # * as published by the Free Software Foundation; either version 2 of * # * the License, or (at your option) any later version. * # * for detail see the LICENCE text file. * # * * # * This program is distributed in the hope that it will be useful, * # * but WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # * GNU Library General Public License for more details. * # * * # * You should have received a copy of the GNU Library General Public * # * License along with this program; if not, write to the Free Software * # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * # * USA * # * * # *************************************************************************** from __future__ import print_function import FreeCAD import Path import PathScripts.PathDressup as PathDressup import PathScripts.PathGeom as PathGeom import PathScripts.PathLog as PathLog import PathScripts.PathUtil as PathUtil import PathScripts.PathUtils as PathUtils import math from PySide import QtCore # lazily loaded modules from lazy_loader.lazy_loader import LazyLoader DraftGeomUtils = LazyLoader('DraftGeomUtils', globals(), 'DraftGeomUtils') Part = LazyLoader('Part', globals(), 'Part') LOG_MODULE = PathLog.thisModule() PathLog.setLevel(PathLog.Level.NOTICE, LOG_MODULE) #PathLog.trackModule(LOG_MODULE) # Qt translation handling def translate(context, text, disambig=None): return QtCore.QCoreApplication.translate(context, text, disambig) movecommands = ['G0', 'G00', 'G1', 'G01', 'G2', 'G02', 'G3', 'G03'] movestraight = ['G1', 'G01'] movecw = ['G2', 'G02'] moveccw = ['G3', 'G03'] movearc = movecw + moveccw def debugMarker(vector, label, color=None, radius=0.5): if PathLog.getLevel(LOG_MODULE) == PathLog.Level.DEBUG: obj = FreeCAD.ActiveDocument.addObject("Part::Sphere", label) obj.Label = label obj.Radius = radius obj.Placement = FreeCAD.Placement(vector, FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), 0)) if color: obj.ViewObject.ShapeColor = color def debugCircle(vector, r, label, color=None): if PathLog.getLevel(LOG_MODULE) == PathLog.Level.DEBUG: obj = FreeCAD.ActiveDocument.addObject("Part::Cylinder", label) obj.Label = label obj.Radius = r obj.Height = 1 obj.Placement = FreeCAD.Placement(vector, FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), 0)) obj.ViewObject.Transparency = 90 if color: obj.ViewObject.ShapeColor = color def addAngle(a1, a2): a = a1 + a2 while a <= -math.pi: a += 2*math.pi while a > math.pi: a -= 2*math.pi return a def anglesAreParallel(a1, a2): an1 = addAngle(a1, 0) an2 = addAngle(a2, 0) if an1 == an2: return True if an1 == addAngle(an2, math.pi): return True return False def getAngle(v): a = v.getAngle(FreeCAD.Vector(1, 0, 0)) if v.y < 0: return -a return a def pointFromCommand(cmd, pt, X='X', Y='Y', Z='Z'): x = cmd.Parameters.get(X, pt.x) y = cmd.Parameters.get(Y, pt.y) z = cmd.Parameters.get(Z, pt.z) return FreeCAD.Vector(x, y, z) def edgesForCommands(cmds, startPt): edges = [] lastPt = startPt for cmd in cmds: if cmd.Name in movecommands: pt = pointFromCommand(cmd, lastPt) if cmd.Name in movestraight: 
edges.append(Part.Edge(Part.LineSegment(lastPt, pt))) elif cmd.Name in movearc: center = lastPt + pointFromCommand(cmd, FreeCAD.Vector(0, 0, 0), 'I', 'J', 'K') A = lastPt - center B = pt - center d = -B.x * A.y + B.y * A.x if d == 0: # we're dealing with half a circle here angle = getAngle(A) + math.pi/2 if cmd.Name in movecw: angle -= math.pi else: C = A + B angle = getAngle(C) R = (lastPt - center).Length ptm = center + FreeCAD.Vector(math.cos(angle), math.sin(angle), 0) * R edges.append(Part.Edge(Part.Arc(lastPt, ptm, pt))) lastPt = pt return edges class Style(object): # pylint: disable=no-init Dogbone = 'Dogbone' Tbone_H = 'T-bone horizontal' Tbone_V = 'T-bone vertical' Tbone_L = 'T-bone long edge' Tbone_S = 'T-bone short edge' All = [Dogbone, Tbone_H, Tbone_V, Tbone_L, Tbone_S] class Side(object): # pylint: disable=no-init Left = 'Left' Right = 'Right' All = [Left, Right] @classmethod def oppositeOf(cls, side): if side == cls.Left: return cls.Right if side == cls.Right: return cls.Left return None class Incision(object): # pylint: disable=no-init Fixed = 'fixed' Adaptive = 'adaptive' Custom = 'custom' All = [Adaptive, Fixed, Custom] class Smooth(object): # pylint: disable=no-init Neither = 0 In = 1 Out = 2 InAndOut = In | Out # Chord # A class to represent the start and end point of a path command. If the underlying # Command is a rotate command the receiver does represent a chord in the geometric # sense of the word. If the underlying command is a straight move then the receiver # represents the actual move. # This implementation really only deals with paths in the XY plane. Z is assumed to # be constant in all calculated results. # Instances of Chord are generally considered immutable and all movement member # functions return new instances. 
class Chord (object): def __init__(self, start=None, end=None): if not start: start = FreeCAD.Vector() if not end: end = FreeCAD.Vector() self.Start = start self.End = end def __str__(self): return "Chord([%g, %g, %g] -> [%g, %g, %g])" % (self.Start.x, self.Start.y, self.Start.z, self.End.x, self.End.y, self.End.z) def moveTo(self, newEnd): return Chord(self.End, newEnd) def moveToParameters(self, params): x = params.get('X', self.End.x) y = params.get('Y', self.End.y) z = params.get('Z', self.End.z) return self.moveTo(FreeCAD.Vector(x, y, z)) def moveBy(self, x, y, z): return self.moveTo(self.End + FreeCAD.Vector(x, y, z)) def move(self, distance, angle): dx = distance * math.cos(angle) dy = distance * math.sin(angle) return self.moveBy(dx, dy, 0) def asVector(self): return self.End - self.Start def asDirection(self): return self.asVector().normalize() def asLine(self): return Part.LineSegment(self.Start, self.End) def asEdge(self): return Part.Edge(self.asLine()) def getLength(self): return self.asVector().Length def getDirectionOfVector(self, B): A = self.asDirection() # if the 2 vectors are identical, they head in the same direction PathLog.debug(" {}.getDirectionOfVector({})".format(A, B)) if PathGeom.pointsCoincide(A, B): return 'Straight' d = -A.x*B.y + A.y*B.x if d < 0: return Side.Left<|fim▁hole|> return 'Back' def getDirectionOf(self, chordOrVector): if type(chordOrVector) is Chord: return self.getDirectionOfVector(chordOrVector.asDirection()) return self.getDirectionOfVector(chordOrVector.normalize()) def getAngleOfVector(self, ref): angle = self.asVector().getAngle(ref) # unfortunately they never figure out the sign :( # positive angles go up, so when the reference vector is left # then the receiver must go down if self.getDirectionOfVector(ref) == Side.Left: return -angle return angle def getAngle(self, refChordOrVector): if type(refChordOrVector) is Chord: return self.getAngleOfVector(refChordOrVector.asDirection()) return self.getAngleOfVector(refChordOrVector.normalize()) def getAngleXY(self): return self.getAngle(FreeCAD.Vector(1, 0, 0)) def commandParams(self, f): params = {"X": self.End.x, "Y": self.End.y, "Z": self.End.z} if f: params['F'] = f return params def g1Command(self, f): return Path.Command("G1", self.commandParams(f)) def arcCommand(self, cmd, center, f): params = self.commandParams(f) d = center - self.Start params['I'] = d.x params['J'] = d.y params['K'] = 0 return Path.Command(cmd, params) def g2Command(self, center, f): return self.arcCommand("G2", center, f) def g3Command(self, center, f): return self.arcCommand("G3", center, f) def isAPlungeMove(self): return not PathGeom.isRoughly(self.End.z, self.Start.z) def isANoopMove(self): PathLog.debug("{}.isANoopMove(): {}".format(self, PathGeom.pointsCoincide(self.Start, self.End))) return PathGeom.pointsCoincide(self.Start, self.End) def foldsBackOrTurns(self, chord, side): direction = chord.getDirectionOf(self) PathLog.info(" - direction = %s/%s" % (direction, side)) return direction == 'Back' or direction == side def connectsTo(self, chord): return PathGeom.pointsCoincide(self.End, chord.Start) class Bone(object): def __init__(self, boneId, obj, lastCommand, inChord, outChord, smooth, F): self.obj = obj self.boneId = boneId self.lastCommand = lastCommand self.inChord = inChord self.outChord = outChord self.smooth = smooth self.smooth = Smooth.Neither self.F = F # initialized later self.cDist = None self.cAngle = None self.tAngle = None self.cPt = None def angle(self): if self.cAngle is None: baseAngle = 
self.inChord.getAngleXY() turnAngle = self.outChord.getAngle(self.inChord) theta = addAngle(baseAngle, (turnAngle - math.pi)/2) if self.obj.Side == Side.Left: theta = addAngle(theta, math.pi) self.tAngle = turnAngle self.cAngle = theta return self.cAngle def distance(self, toolRadius): if self.cDist is None: self.angle() # make sure the angles are initialized self.cDist = toolRadius / math.cos(self.tAngle/2) return self.cDist def corner(self, toolRadius): if self.cPt is None: self.cPt = self.inChord.move(self.distance(toolRadius), self.angle()).End return self.cPt def location(self): return (self.inChord.End.x, self.inChord.End.y) def locationZ(self): return (self.inChord.End.x, self.inChord.End.y, self.inChord.End.z) def adaptiveLength(self, boneAngle, toolRadius): theta = self.angle() distance = self.distance(toolRadius) # there is something weird happening if the boneAngle came from a horizontal/vertical t-bone # for some reason pi/2 is not equal to pi/2 if math.fabs(theta - boneAngle) < 0.00001: # moving directly towards the corner PathLog.debug("adaptive - on target: %.2f - %.2f" % (distance, toolRadius)) return distance - toolRadius PathLog.debug("adaptive - angles: corner=%.2f bone=%.2f diff=%.12f" % (theta/math.pi, boneAngle/math.pi, theta - boneAngle)) # The bones root and end point form a triangle with the intersection of the tool path # with the toolRadius circle around the bone end point. # In case the math looks questionable, look for "triangle ssa" # c = distance # b = self.toolRadius # beta = fabs(boneAngle - theta) beta = math.fabs(addAngle(boneAngle, -theta)) # pylint: disable=invalid-unary-operand-type D = (distance / toolRadius) * math.sin(beta) if D > 1: # no intersection PathLog.debug("adaptive - no intersection - no bone") return 0 gamma = math.asin(D) alpha = math.pi - beta - gamma if PathGeom.isRoughly(0.0, math.sin(beta)): # it is not a good idea to divide by 0 length = 0.0 else: length = toolRadius * math.sin(alpha) / math.sin(beta) if D < 1 and toolRadius < distance: # there exists a second solution beta2 = beta gamma2 = math.pi - gamma alpha2 = math.pi - beta2 - gamma2 length2 = toolRadius * math.sin(alpha2) / math.sin(beta2) length = min(length, length2) PathLog.debug("adaptive corner=%.2f * %.2f˚ -> bone=%.2f * %.2f˚" % (distance, theta, length, boneAngle)) return length class ObjectDressup(object): def __init__(self, obj, base): # Tool Properties obj.addProperty("App::PropertyLink", "Base", "Base", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "The base path to modify")) obj.addProperty("App::PropertyEnumeration", "Side", "Dressup", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "The side of path to insert bones")) obj.Side = [Side.Left, Side.Right] obj.Side = Side.Right obj.addProperty("App::PropertyEnumeration", "Style", "Dressup", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "The style of bones")) obj.Style = Style.All obj.Style = Style.Dogbone obj.addProperty("App::PropertyIntegerList", "BoneBlacklist", "Dressup", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "Bones that aren't dressed up")) obj.BoneBlacklist = [] obj.setEditorMode('BoneBlacklist', 2) # hide this one obj.addProperty("App::PropertyEnumeration", "Incision", "Dressup", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "The algorithm to determine the bone length")) obj.Incision = Incision.All obj.Incision = Incision.Adaptive obj.addProperty("App::PropertyFloat", "Custom", "Dressup", QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "Dressup length if Incision == custom")) obj.Custom = 0.0 
obj.Proxy = self obj.Base = base # initialized later self.boneShapes = None self.toolRadius = 0 self.dbg = None self.locationBlacklist = None self.shapes = None self.boneId = None self.bones = None def onDocumentRestored(self, obj): obj.setEditorMode('BoneBlacklist', 2) # hide this one def __getstate__(self): return None def __setstate__(self, state): return None def theOtherSideOf(self, side): if side == Side.Left: return Side.Right return Side.Left # Answer true if a dogbone could be on either end of the chord, given its command def canAttachDogbone(self, cmd, chord): return cmd.Name in movestraight and not chord.isAPlungeMove() and not chord.isANoopMove() def shouldInsertDogbone(self, obj, inChord, outChord): return outChord.foldsBackOrTurns(inChord, self.theOtherSideOf(obj.Side)) def findPivotIntersection(self, pivot, pivotEdge, edge, refPt, d, color): # pylint: disable=unused-argument PathLog.track("(%.2f, %.2f)^%.2f - [(%.2f, %.2f), (%.2f, %.2f)]" % (pivotEdge.Curve.Center.x, pivotEdge.Curve.Center.y, pivotEdge.Curve.Radius, edge.Vertexes[0].Point.x, edge.Vertexes[0].Point.y, edge.Vertexes[1].Point.x, edge.Vertexes[1].Point.y)) ppt = None pptDistance = 0 for pt in DraftGeomUtils.findIntersection(edge, pivotEdge, dts=False): # debugMarker(pt, "pti.%d-%s.in" % (self.boneId, d), color, 0.2) distance = (pt - refPt).Length PathLog.debug(" --> (%.2f, %.2f): %.2f" % (pt.x, pt.y, distance)) if not ppt or pptDistance < distance: ppt = pt pptDistance = distance if not ppt: tangent = DraftGeomUtils.findDistance(pivot, edge) if tangent: PathLog.debug("Taking tangent as intersect %s" % tangent) ppt = pivot + tangent else: PathLog.debug("Taking chord start as intersect %s" % edge.Vertexes[0].Point) ppt = edge.Vertexes[0].Point # debugMarker(ppt, "ptt.%d-%s.in" % (self.boneId, d), color, 0.2) PathLog.debug(" --> (%.2f, %.2f)" % (ppt.x, ppt.y)) return ppt def pointIsOnEdge(self, point, edge): param = edge.Curve.parameter(point) return edge.FirstParameter <= param <= edge.LastParameter def smoothChordCommands(self, bone, inChord, outChord, edge, wire, corner, smooth, color=None): if smooth == 0: PathLog.info(" No smoothing requested") return [bone.lastCommand, outChord.g1Command(bone.F)] d = 'in' refPoint = inChord.Start if smooth == Smooth.Out: d = 'out' refPoint = outChord.End if DraftGeomUtils.areColinear(inChord.asEdge(), outChord.asEdge()): PathLog.info(" straight edge %s" % d) return [outChord.g1Command(bone.F)] pivot = None pivotDistance = 0 PathLog.info("smooth: (%.2f, %.2f)-(%.2f, %.2f)" % (edge.Vertexes[0].Point.x, edge.Vertexes[0].Point.y, edge.Vertexes[1].Point.x, edge.Vertexes[1].Point.y)) for e in wire.Edges: self.dbg.append(e) if type(e.Curve) == Part.LineSegment or type(e.Curve) == Part.Line: PathLog.debug(" (%.2f, %.2f)-(%.2f, %.2f)" % (e.Vertexes[0].Point.x, e.Vertexes[0].Point.y, e.Vertexes[1].Point.x, e.Vertexes[1].Point.y)) else: PathLog.debug(" (%.2f, %.2f)^%.2f" % (e.Curve.Center.x, e.Curve.Center.y, e.Curve.Radius)) for pt in DraftGeomUtils.findIntersection(edge, e, True, findAll=True): if not PathGeom.pointsCoincide(pt, corner) and self.pointIsOnEdge(pt, e): # debugMarker(pt, "candidate-%d-%s" % (self.boneId, d), color, 0.05) PathLog.debug(" -> candidate") distance = (pt - refPoint).Length if not pivot or pivotDistance > distance: pivot = pt pivotDistance = distance else: PathLog.debug(" -> corner intersect") if pivot: # debugCircle(pivot, self.toolRadius, "pivot.%d-%s" % (self.boneId, d), color) pivotEdge = Part.Edge(Part.Circle(pivot, FreeCAD.Vector(0, 0, 1), 
self.toolRadius)) t1 = self.findPivotIntersection(pivot, pivotEdge, inChord.asEdge(), inChord.End, d, color) t2 = self.findPivotIntersection(pivot, pivotEdge, outChord.asEdge(), inChord.End, d, color) commands = [] if not PathGeom.pointsCoincide(t1, inChord.Start): PathLog.debug(" add lead in") commands.append(Chord(inChord.Start, t1).g1Command(bone.F)) if bone.obj.Side == Side.Left: PathLog.debug(" add g3 command") commands.append(Chord(t1, t2).g3Command(pivot, bone.F)) else: PathLog.debug(" add g2 command center=(%.2f, %.2f) -> from (%2f, %.2f) to (%.2f, %.2f" % (pivot.x, pivot.y, t1.x, t1.y, t2.x, t2.y)) commands.append(Chord(t1, t2).g2Command(pivot, bone.F)) if not PathGeom.pointsCoincide(t2, outChord.End): PathLog.debug(" add lead out") commands.append(Chord(t2, outChord.End).g1Command(bone.F)) # debugMarker(pivot, "pivot.%d-%s" % (self.boneId, d), color, 0.2) # debugMarker(t1, "pivot.%d-%s.in" % (self.boneId, d), color, 0.1) # debugMarker(t2, "pivot.%d-%s.out" % (self.boneId, d), color, 0.1) return commands PathLog.info(" no pivot found - straight command") return [inChord.g1Command(bone.F), outChord.g1Command(bone.F)] def inOutBoneCommands(self, bone, boneAngle, fixedLength): corner = bone.corner(self.toolRadius) bone.tip = bone.inChord.End # in case there is no bone PathLog.debug("corner = (%.2f, %.2f)" % (corner.x, corner.y)) # debugMarker(corner, 'corner', (1., 0., 1.), self.toolRadius) length = fixedLength if bone.obj.Incision == Incision.Custom: length = bone.obj.Custom if bone.obj.Incision == Incision.Adaptive: length = bone.adaptiveLength(boneAngle, self.toolRadius) if length == 0: PathLog.info("no bone after all ..") return [bone.lastCommand, bone.outChord.g1Command(bone.F)] # track length for marker visuals self.length = max(self.length, length) boneInChord = bone.inChord.move(length, boneAngle) boneOutChord = boneInChord.moveTo(bone.outChord.Start) # debugCircle(boneInChord.Start, self.toolRadius, 'boneStart') # debugCircle(boneInChord.End, self.toolRadius, 'boneEnd') bone.tip = boneInChord.End if bone.smooth == 0: return [bone.lastCommand, boneInChord.g1Command(bone.F), boneOutChord.g1Command(bone.F), bone.outChord.g1Command(bone.F)] # reconstruct the corner and convert to an edge offset = corner - bone.inChord.End iChord = Chord(bone.inChord.Start + offset, bone.inChord.End + offset) oChord = Chord(bone.outChord.Start + offset, bone.outChord.End + offset) iLine = iChord.asLine() oLine = oChord.asLine() cornerShape = Part.Shape([iLine, oLine]) # construct a shape representing the cut made by the bone vt0 = FreeCAD.Vector(0, self.toolRadius, 0) vt1 = FreeCAD.Vector(length, self.toolRadius, 0) vb0 = FreeCAD.Vector(0, -self.toolRadius, 0) vb1 = FreeCAD.Vector(length, -self.toolRadius, 0) vm2 = FreeCAD.Vector(length + self.toolRadius, 0, 0) boneBot = Part.LineSegment(vb1, vb0) boneLid = Part.LineSegment(vb0, vt0) boneTop = Part.LineSegment(vt0, vt1) # what we actually want is an Arc - but findIntersect only returns the coincident if one exists # which really sucks because that's the one we're probably not interested in .... 
boneArc = Part.Arc(vt1, vm2, vb1) # boneArc = Part.Circle(FreeCAD.Vector(length, 0, 0), FreeCAD.Vector(0,0,1), self.toolRadius) boneWire = Part.Shape([boneTop, boneArc, boneBot, boneLid]) boneWire.rotate(FreeCAD.Vector(0, 0, 0), FreeCAD.Vector(0, 0, 1), boneAngle * 180 / math.pi) boneWire.translate(bone.inChord.End) self.boneShapes = [cornerShape, boneWire] bone.inCommands = self.smoothChordCommands(bone, bone.inChord, boneInChord, Part.Edge(iLine), boneWire, corner, bone.smooth & Smooth.In, (1., 0., 0.)) bone.outCommands = self.smoothChordCommands(bone, boneOutChord, bone.outChord, Part.Edge(oLine), boneWire, corner, bone.smooth & Smooth.Out, (0., 1., 0.)) return bone.inCommands + bone.outCommands def dogbone(self, bone): boneAngle = bone.angle() length = self.toolRadius * 0.41422 # 0.41422 = 2/sqrt(2) - 1 + (a tiny bit) return self.inOutBoneCommands(bone, boneAngle, length) def tboneHorizontal(self, bone): angle = bone.angle() boneAngle = 0 if math.fabs(angle) > math.pi/2: boneAngle = math.pi return self.inOutBoneCommands(bone, boneAngle, self.toolRadius) def tboneVertical(self, bone): angle = bone.angle() boneAngle = math.pi/2 if PathGeom.isRoughly(angle, math.pi) or angle < 0: boneAngle = -boneAngle return self.inOutBoneCommands(bone, boneAngle, self.toolRadius) def tboneEdgeCommands(self, bone, onIn): if onIn: boneAngle = bone.inChord.getAngleXY() else: boneAngle = bone.outChord.getAngleXY() if Side.Right == bone.outChord.getDirectionOf(bone.inChord): boneAngle = boneAngle - math.pi/2 else: boneAngle = boneAngle + math.pi/2 onInString = 'out' if onIn: onInString = 'in' PathLog.debug("tboneEdge boneAngle[%s]=%.2f (in=%.2f, out=%.2f)" % (onInString, boneAngle/math.pi, bone.inChord.getAngleXY()/math.pi, bone.outChord.getAngleXY()/math.pi)) return self.inOutBoneCommands(bone, boneAngle, self.toolRadius) def tboneLongEdge(self, bone): inChordIsLonger = bone.inChord.getLength() > bone.outChord.getLength() return self.tboneEdgeCommands(bone, inChordIsLonger) def tboneShortEdge(self, bone): inChordIsShorter = bone.inChord.getLength() < bone.outChord.getLength() return self.tboneEdgeCommands(bone, inChordIsShorter) def boneIsBlacklisted(self, bone): blacklisted = False parentConsumed = False if bone.boneId in bone.obj.BoneBlacklist: blacklisted = True elif bone.location() in self.locationBlacklist: bone.obj.BoneBlacklist.append(bone.boneId) blacklisted = True elif hasattr(bone.obj.Base, 'BoneBlacklist'): parentConsumed = bone.boneId not in bone.obj.Base.BoneBlacklist blacklisted = parentConsumed if blacklisted: self.locationBlacklist.add(bone.location()) return (blacklisted, parentConsumed) # Generate commands necessary to execute the dogbone def boneCommands(self, bone, enabled): if enabled: if bone.obj.Style == Style.Dogbone: return self.dogbone(bone) if bone.obj.Style == Style.Tbone_H: return self.tboneHorizontal(bone) if bone.obj.Style == Style.Tbone_V: return self.tboneVertical(bone) if bone.obj.Style == Style.Tbone_L: return self.tboneLongEdge(bone) if bone.obj.Style == Style.Tbone_S: return self.tboneShortEdge(bone) else: return [bone.lastCommand, bone.outChord.g1Command(bone.F)] def insertBone(self, bone): PathLog.debug(">----------------------------------- %d --------------------------------------" % bone.boneId) self.boneShapes = [] blacklisted, inaccessible = self.boneIsBlacklisted(bone) enabled = not blacklisted self.bones.append((bone.boneId, bone.locationZ(), enabled, inaccessible)) self.boneId = bone.boneId if False and PathLog.getLevel(LOG_MODULE) == PathLog.Level.DEBUG and 
bone.boneId > 2: commands = self.boneCommands(bone, False) else: commands = self.boneCommands(bone, enabled) bone.commands = commands self.shapes[bone.boneId] = self.boneShapes PathLog.debug("<----------------------------------- %d --------------------------------------" % bone.boneId) return commands def removePathCrossing(self, commands, bone1, bone2): commands.append(bone2.lastCommand) bones = bone2.commands if True and hasattr(bone1, "outCommands") and hasattr(bone2, "inCommands"): inEdges = edgesForCommands(bone1.outCommands, bone1.tip) outEdges = edgesForCommands(bone2.inCommands, bone2.inChord.Start) for i in range(len(inEdges)): e1 = inEdges[i] for j in range(len(outEdges)-1, -1, -1): e2 = outEdges[j] cutoff = DraftGeomUtils.findIntersection(e1, e2) for pt in cutoff: # debugCircle(e1.Curve.Center, e1.Curve.Radius, "bone.%d-1" % (self.boneId), (1.,0.,0.)) # debugCircle(e2.Curve.Center, e2.Curve.Radius, "bone.%d-2" % (self.boneId), (0.,1.,0.)) if PathGeom.pointsCoincide(pt, e1.valueAt(e1.LastParameter)) or PathGeom.pointsCoincide(pt, e2.valueAt(e2.FirstParameter)): continue # debugMarker(pt, "it", (0.0, 1.0, 1.0)) # 1. remove all redundant commands commands = commands[:-(len(inEdges) - i)] # 2., correct where c1 ends c1 = bone1.outCommands[i] c1Params = c1.Parameters c1Params.update({'X': pt.x, 'Y': pt.y, 'Z': pt.z}) c1 = Path.Command(c1.Name, c1Params) commands.append(c1) # 3. change where c2 starts, this depends on the command itself c2 = bone2.inCommands[j] if c2.Name in movearc: center = e2.Curve.Center offset = center - pt c2Params = c2.Parameters c2Params.update({'I': offset.x, 'J': offset.y, 'K': offset.z}) c2 = Path.Command(c2.Name, c2Params) bones = [c2] bones.extend(bone2.commands[j+1:]) else: bones = bone2.commands[j:] # there can only be the one ... 
return commands, bones return commands, bones def execute(self, obj, forReal=True): if not obj.Base: return if forReal and not obj.Base.isDerivedFrom("Path::Feature"): return if not obj.Base.Path: return if not obj.Base.Path.Commands: return self.setup(obj, False) commands = [] # the dressed commands lastChord = Chord() # the last chord lastCommand = None # the command that generated the last chord lastBone = None # track last bone for optimizations oddsAndEnds = [] # track chords that are connected to plunges - in case they form a loop boneId = 1 self.bones = [] self.locationBlacklist = set() self.length = 0 # boneIserted = False for (i, thisCommand) in enumerate(obj.Base.Path.Commands): # if i > 14: # if lastCommand: # commands.append(lastCommand) # lastCommand = None # commands.append(thisCommand) # continue PathLog.info("%3d: %s" % (i, thisCommand)) if thisCommand.Name in movecommands: thisChord = lastChord.moveToParameters(thisCommand.Parameters) thisIsACandidate = self.canAttachDogbone(thisCommand, thisChord) if thisIsACandidate and lastCommand and self.shouldInsertDogbone(obj, lastChord, thisChord): PathLog.info(" Found bone corner: {}".format(lastChord.End)) bone = Bone(boneId, obj, lastCommand, lastChord, thisChord, Smooth.InAndOut, thisCommand.Parameters.get('F')) bones = self.insertBone(bone) boneId += 1 if lastBone: PathLog.info(" removing potential path crossing") # debugMarker(thisChord.Start, "it", (1.0, 0.0, 1.0)) commands, bones = self.removePathCrossing(commands, lastBone, bone) commands.extend(bones[:-1]) lastCommand = bones[-1] lastBone = bone elif lastCommand and thisChord.isAPlungeMove(): PathLog.info(" Looking for connection in odds and ends") haveNewLastCommand = False for chord in (chord for chord in oddsAndEnds if lastChord.connectsTo(chord)): if self.shouldInsertDogbone(obj, lastChord, chord): PathLog.info(" and there is one") PathLog.debug(" odd/end={} last={}".format(chord, lastChord)) bone = Bone(boneId, obj, lastCommand, lastChord, chord, Smooth.In, lastCommand.Parameters.get('F')) bones = self.insertBone(bone) boneId += 1 if lastBone: PathLog.info(" removing potential path crossing") # debugMarker(chord.Start, "it", (0.0, 1.0, 1.0)) commands, bones = self.removePathCrossing(commands, lastBone, bone) commands.extend(bones[:-1]) lastCommand = bones[-1] haveNewLastCommand = True if not haveNewLastCommand: commands.append(lastCommand) lastCommand = None commands.append(thisCommand) lastBone = None elif thisIsACandidate: PathLog.info(" is a candidate, keeping for later") if lastCommand: commands.append(lastCommand) lastCommand = thisCommand lastBone = None elif thisChord.isANoopMove(): PathLog.info(" ignoring and dropping noop move") continue else: PathLog.info(" nope") if lastCommand: commands.append(lastCommand) lastCommand = None commands.append(thisCommand) lastBone = None if lastChord.isAPlungeMove() and thisIsACandidate: PathLog.info(" adding to odds and ends") oddsAndEnds.append(thisChord) lastChord = thisChord else: if thisCommand.Name[0] != '(': PathLog.info(" Clean slate") if lastCommand: commands.append(lastCommand) lastCommand = None lastBone = None commands.append(thisCommand) # for cmd in commands: # PathLog.debug("cmd = '%s'" % cmd) path = Path.Path(commands) obj.Path = path def setup(self, obj, initial): PathLog.info("Here we go ... 
") if initial: if hasattr(obj.Base, "BoneBlacklist"): # dressing up a bone dressup obj.Side = obj.Base.Side else: PathLog.info("Default side = right") # otherwise dogbones are opposite of the base path's side side = Side.Right if hasattr(obj.Base, 'Side') and obj.Base.Side == 'Inside': PathLog.info("inside -> side = left") side = Side.Left else: PathLog.info("not inside -> side stays right") if hasattr(obj.Base, 'Direction') and obj.Base.Direction == 'CCW': PathLog.info("CCW -> switch sides") side = Side.oppositeOf(side) else: PathLog.info("CW -> stay on side") obj.Side = side self.toolRadius = 5 tc = PathDressup.toolController(obj.Base) if tc is None or tc.ToolNumber == 0: self.toolRadius = 5 else: tool = tc.Proxy.getTool(tc) # PathUtils.getTool(obj, tc.ToolNumber) if not tool or float(tool.Diameter) == 0: self.toolRadius = 5 else: self.toolRadius = float(tool.Diameter) / 2 self.shapes = {} self.dbg = [] def boneStateList(self, obj): state = {} # If the receiver was loaded from file, then it never generated the bone list. if not hasattr(self, 'bones'): self.execute(obj) for (nr, loc, enabled, inaccessible) in self.bones: item = state.get((loc[0], loc[1])) if item: item[2].append(nr) item[3].append(loc[2]) else: state[(loc[0], loc[1])] = (enabled, inaccessible, [nr], [loc[2]]) return state class Marker(object): def __init__(self, pt, r, h): if PathGeom.isRoughly(h, 0): h = 0.1 self.pt = pt self.r = r self.h = h self.sep = coin.SoSeparator() self.pos = coin.SoTranslation() self.pos.translation = (pt.x, pt.y, pt.z + h / 2) self.rot = coin.SoRotationXYZ() self.rot.axis = self.rot.X self.rot.angle = math.pi / 2 self.cyl = coin.SoCylinder() self.cyl.radius = r self.cyl.height = h # self.cyl.removePart(self.cyl.TOP) # self.cyl.removePart(self.cyl.BOTTOM) self.material = coin.SoMaterial() self.sep.addChild(self.pos) self.sep.addChild(self.rot) self.sep.addChild(self.material) self.sep.addChild(self.cyl) self.lowlight() def setSelected(self, selected): if selected: self.highlight() else: self.lowlight() def highlight(self): self.material.diffuseColor = self.color(1) self.material.transparency = 0.45 def lowlight(self): self.material.diffuseColor = self.color(0) self.material.transparency = 0.75 def color(self, id): if id == 1: return coin.SbColor(.9, .9, .5) return coin.SbColor(.9, .5, .9) class TaskPanel(object): DataIds = QtCore.Qt.ItemDataRole.UserRole DataKey = QtCore.Qt.ItemDataRole.UserRole + 1 DataLoc = QtCore.Qt.ItemDataRole.UserRole + 2 def __init__(self, viewProvider, obj): self.viewProvider = viewProvider self.obj = obj self.form = FreeCADGui.PySideUic.loadUi(":/panels/DogboneEdit.ui") self.s = None FreeCAD.ActiveDocument.openTransaction(translate("Path_DressupDogbone", "Edit Dogbone Dress-up")) self.height = 10 self.markers = [] def reject(self): FreeCAD.ActiveDocument.abortTransaction() FreeCADGui.Control.closeDialog() FreeCAD.ActiveDocument.recompute() FreeCADGui.Selection.removeObserver(self.s) self.cleanup() def accept(self): self.getFields() FreeCAD.ActiveDocument.commitTransaction() FreeCADGui.ActiveDocument.resetEdit() FreeCADGui.Control.closeDialog() FreeCAD.ActiveDocument.recompute() FreeCADGui.Selection.removeObserver(self.s) FreeCAD.ActiveDocument.recompute() self.cleanup() def cleanup(self): self.viewProvider.showMarkers(False) for m in self.markers: self.viewProvider.switch.removeChild(m.sep) self.markers = [] def getFields(self): self.obj.Style = str(self.form.styleCombo.currentText()) self.obj.Side = str(self.form.sideCombo.currentText()) self.obj.Incision = 
str(self.form.incisionCombo.currentText()) self.obj.Custom = self.form.custom.value() blacklist = [] for i in range(0, self.form.bones.count()): item = self.form.bones.item(i) if item.checkState() == QtCore.Qt.CheckState.Unchecked: blacklist.extend(item.data(self.DataIds)) self.obj.BoneBlacklist = sorted(blacklist) self.obj.Proxy.execute(self.obj) def updateBoneList(self): itemList = [] for loc, (enabled, inaccessible, ids, zs) in PathUtil.keyValueIter(self.obj.Proxy.boneStateList(self.obj)): lbl = '(%.2f, %.2f): %s' % (loc[0], loc[1], ','.join(str(id) for id in ids)) item = QtGui.QListWidgetItem(lbl) if enabled: item.setCheckState(QtCore.Qt.CheckState.Checked) else: item.setCheckState(QtCore.Qt.CheckState.Unchecked) flags = QtCore.Qt.ItemFlag.ItemIsSelectable if not inaccessible: flags |= QtCore.Qt.ItemFlag.ItemIsEnabled | QtCore.Qt.ItemFlag.ItemIsUserCheckable item.setFlags(flags) item.setData(self.DataIds, ids) item.setData(self.DataKey, ids[0]) item.setData(self.DataLoc, loc) itemList.append(item) self.form.bones.clear() markers = [] for item in sorted(itemList, key=lambda item: item.data(self.DataKey)): self.form.bones.addItem(item) loc = item.data(self.DataLoc) r = max(self.obj.Proxy.length, 1) markers.append(Marker(FreeCAD.Vector(loc[0], loc[1], min(zs)), r, max(1, max(zs) - min(zs)))) for m in self.markers: self.viewProvider.switch.removeChild(m.sep) for m in markers: self.viewProvider.switch.addChild(m.sep) self.markers = markers def updateUI(self): customSelected = self.obj.Incision == Incision.Custom self.form.custom.setEnabled(customSelected) self.form.customLabel.setEnabled(customSelected) self.updateBoneList() if PathLog.getLevel(LOG_MODULE) == PathLog.Level.DEBUG: for obj in FreeCAD.ActiveDocument.Objects: if obj.Name.startswith('Shape'): FreeCAD.ActiveDocument.removeObject(obj.Name) PathLog.info('object name %s' % self.obj.Name) if hasattr(self.obj.Proxy, "shapes"): PathLog.info("showing shapes attribute") for shapes in self.obj.Proxy.shapes.values(): for shape in shapes: Part.show(shape) else: PathLog.info("no shapes attribute found") def updateModel(self): self.getFields() self.updateUI() FreeCAD.ActiveDocument.recompute() def setupCombo(self, combo, text, items): if items and len(items) > 0: for i in range(combo.count(), -1, -1): combo.removeItem(i) combo.addItems(items) index = combo.findText(text, QtCore.Qt.MatchFixedString) if index >= 0: combo.setCurrentIndex(index) def setFields(self): self.setupCombo(self.form.styleCombo, self.obj.Style, Style.All) self.setupCombo(self.form.sideCombo, self.obj.Side, Side.All) self.setupCombo(self.form.incisionCombo, self.obj.Incision, Incision.All) self.form.custom.setMinimum(0.0) self.form.custom.setDecimals(3) self.form.custom.setValue(self.obj.Custom) self.updateUI() def open(self): self.s = SelObserver() # install the function mode resident FreeCADGui.Selection.addObserver(self.s) def setupUi(self): self.setFields() # now that the form is filled, setup the signal handlers self.form.styleCombo.currentIndexChanged.connect(self.updateModel) self.form.sideCombo.currentIndexChanged.connect(self.updateModel) self.form.incisionCombo.currentIndexChanged.connect(self.updateModel) self.form.custom.valueChanged.connect(self.updateModel) self.form.bones.itemChanged.connect(self.updateModel) self.form.bones.itemSelectionChanged.connect(self.updateMarkers) self.viewProvider.showMarkers(True) def updateMarkers(self): index = self.form.bones.currentRow() for i, m in enumerate(self.markers): m.setSelected(i == index) class SelObserver(object): 
def __init__(self): import PathScripts.PathSelection as PST PST.eselect() def __del__(self): import PathScripts.PathSelection as PST PST.clear() def addSelection(self, doc, obj, sub, pnt): # pylint: disable=unused-argument FreeCADGui.doCommand('Gui.Selection.addSelection(FreeCAD.ActiveDocument.' + obj + ')') FreeCADGui.updateGui() class ViewProviderDressup(object): def __init__(self, vobj): self.vobj = vobj self.obj = None def attach(self, vobj): self.obj = vobj.Object if self.obj and self.obj.Base: for i in self.obj.Base.InList: if hasattr(i, "Group"): group = i.Group for g in group: if g.Name == self.obj.Base.Name: group.remove(g) i.Group = group # FreeCADGui.ActiveDocument.getObject(obj.Base.Name).Visibility = False self.switch = coin.SoSwitch() vobj.RootNode.addChild(self.switch) def showMarkers(self, on): sw = coin.SO_SWITCH_ALL if on else coin.SO_SWITCH_NONE self.switch.whichChild = sw def claimChildren(self): return [self.obj.Base] def setEdit(self, vobj, mode=0): # pylint: disable=unused-argument FreeCADGui.Control.closeDialog() panel = TaskPanel(self, vobj.Object) FreeCADGui.Control.showDialog(panel) panel.setupUi() return True def __getstate__(self): return None def __setstate__(self, state): return None def onDelete(self, arg1=None, arg2=None): '''this makes sure that the base operation is added back to the project and visible''' # pylint: disable=unused-argument if arg1.Object and arg1.Object.Base: FreeCADGui.ActiveDocument.getObject(arg1.Object.Base.Name).Visibility = True job = PathUtils.findParentJob(arg1.Object) if job: job.Proxy.addOperation(arg1.Object.Base, arg1.Object) arg1.Object.Base = None return True def Create(base, name='DogboneDressup'): ''' Create(obj, name='DogboneDressup') ... dresses the given PathProfile/PathContour object with dogbones. ''' obj = FreeCAD.ActiveDocument.addObject('Path::FeaturePython', name) dbo = ObjectDressup(obj, base) job = PathUtils.findParentJob(base) job.Proxy.addOperation(obj, base) if FreeCAD.GuiUp: obj.ViewObject.Proxy = ViewProviderDressup(obj.ViewObject) obj.Base.ViewObject.Visibility = False dbo.setup(obj, True) return obj class CommandDressupDogbone(object): # pylint: disable=no-init def GetResources(self): return {'Pixmap': 'Path_Dressup', 'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "Dogbone Dress-up"), 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_DressupDogbone", "Creates a Dogbone Dress-up object from a selected path")} def IsActive(self): if FreeCAD.ActiveDocument is not None: for o in FreeCAD.ActiveDocument.Objects: if o.Name[:3] == "Job": return True return False def Activated(self): # check that the selection contains exactly what we want selection = FreeCADGui.Selection.getSelection() if len(selection) != 1: FreeCAD.Console.PrintError(translate("Path_DressupDogbone", "Please select one path object")+"\n") return baseObject = selection[0] if not baseObject.isDerivedFrom("Path::Feature"): FreeCAD.Console.PrintError(translate("Path_DressupDogbone", "The selected object is not a path")+"\n") return # everything ok! 
FreeCAD.ActiveDocument.openTransaction(translate("Path_DressupDogbone", "Create Dogbone Dress-up")) FreeCADGui.addModule('PathScripts.PathDressupDogbone') FreeCADGui.doCommand("PathScripts.PathDressupDogbone.Create(FreeCAD.ActiveDocument.%s)" % baseObject.Name) FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() if FreeCAD.GuiUp: import FreeCADGui from PySide import QtGui from pivy import coin FreeCADGui.addCommand('Path_DressupDogbone', CommandDressupDogbone()) FreeCAD.Console.PrintLog("Loading DressupDogbone... done\n")<|fim▁end|>
if d > 0: return Side.Right # at this point the only direction left is backwards
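The 0.41422 constant in ObjectDressup.dogbone falls out of right-angle geometry: the corner lies at distance r/cos(45 deg), i.e. r*sqrt(2), from the bone's start, so the cutter must travel r*(sqrt(2) - 1) past its own radius to clear it. A quick numeric check:

    import math

    def dogbone_length(tool_radius):
        # 90-degree inside corner: r / cos(45 deg) - r == r * (sqrt(2) - 1)
        return tool_radius * (math.sqrt(2) - 1)

    print(round(dogbone_length(1.0), 5))    # 0.41421, vs. the hard-coded 0.41422
    print(round(dogbone_length(3.175), 3))  # ~1.315 for a 1/4-inch end mill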
<|file_name|>JoinGame.java<|end_file_name|><|fim▁begin|>package edu.hm.gamedev.server.packets.client2server; import org.codehaus.jackson.annotate.JsonCreator; import org.codehaus.jackson.annotate.JsonProperty; import edu.hm.gamedev.server.packets.Packet; import edu.hm.gamedev.server.packets.Type; public class JoinGame extends Packet { private final String gameName; @JsonCreator public JoinGame(@JsonProperty("gameName") String gameName) { super(Type.JOIN_GAME); this.gameName = gameName; } public String getGameName() { return gameName; } @Override public String toString() { return "JoinGame{" + "gameName='" + gameName + '\'' + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false;<|fim▁hole|> } JoinGame joinGame = (JoinGame) o; if (gameName != null ? !gameName.equals(joinGame.gameName) : joinGame.gameName != null) { return false; } return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (gameName != null ? gameName.hashCode() : 0); return result; } }<|fim▁end|>
} if (!super.equals(o)) { return false;
<|file_name|>ex_1.rs<|end_file_name|><|fim▁begin|>/* Copyright © 2013 Free Software Foundation, Inc. See licensing in LICENSE file File: examples/ex_1.rs Author: Jesse 'Jeaye' Wilkerson Description: Simple "Hello, world" example. */ extern crate ncurses;<|fim▁hole|>fn main() { /* If your locale env is unicode, you should use `setlocale`. */ // let locale_conf = LcCategory::all; // setlocale(locale_conf, "zh_CN.UTF-8"); // if your locale is like mine (zh_CN.UTF-8). /* Start ncurses. */ initscr(); /* Print to the back buffer. */ printw("Hello, world!"); /* Print some unicode (Chinese) string. */ // printw("Great Firewall dislikes VPN protocol.\nGFW 不喜欢 VPN 协议。"); /* Update the screen. */ refresh(); /* Wait for a key press. */ getch(); /* Terminate ncurses. */ endwin(); }<|fim▁end|>
use ncurses::*;
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>""" Tests for Calendar Sync views. """ import ddt from django.test import TestCase from django.urls import reverse from openedx.features.calendar_sync.api import SUBSCRIBE, UNSUBSCRIBE from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory TEST_PASSWORD = 'test' @ddt.ddt class TestCalendarSyncView(SharedModuleStoreTestCase, TestCase): """Tests for the calendar sync view.""" @classmethod def setUpClass(cls): """ Set up any course data """<|fim▁hole|> def setUp(self): super(TestCalendarSyncView, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments self.user = self.create_user_for_course(self.course) self.client.login(username=self.user.username, password=TEST_PASSWORD) self.calendar_sync_url = reverse('openedx.calendar_sync', args=[self.course.id]) @ddt.data( # Redirect on successful subscribe [{'tool_data': "{{'toggle_data': '{}'}}".format(SUBSCRIBE)}, 302, ''], # Redirect on successful unsubscribe [{'tool_data': "{{'toggle_data': '{}'}}".format(UNSUBSCRIBE)}, 302, ''], # 422 on unknown toggle_data [{'tool_data': "{{'toggle_data': '{}'}}".format('gibberish')}, 422, 'Toggle data was not provided or had unknown value.'], # 422 on no toggle_data [{'tool_data': "{{'random_data': '{}'}}".format('gibberish')}, 422, 'Toggle data was not provided or had unknown value.'], # 422 on no tool_data [{'nonsense': "{{'random_data': '{}'}}".format('gibberish')}, 422, 'Tool data was not provided.'], ) @ddt.unpack def test_course_dates_fragment(self, data, expected_status_code, contained_text): response = self.client.post(self.calendar_sync_url, data) assert response.status_code == expected_status_code assert contained_text in str(response.content)<|fim▁end|>
super(TestCalendarSyncView, cls).setUpClass() cls.course = CourseFactory.create()
<|file_name|>auth.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # @first_date 20160129 # @date 20160129 # @version 0.0 """auth for Users API """ from flask import abort from flask.views import MethodView from flask.ext.login import login_required, current_user from sqlalchemy.exc import IntegrityError from webargs.flaskparser import use_args from . import users_bp from ..mixins import RestfulViewMixin from ...models.users import User from ...schemas.users import SignupSchema, LoginSchema, ResetPasswordSchema from ...error_handlers import user_errors class SignupView(RestfulViewMixin, MethodView): @use_args(SignupSchema, locations=('json',)) def post(self, args): user = User(**args) try: user.add() except IntegrityError as err: err.data = user_errors.USER_ERR_1001_REGISTERED_ACC raise return self.get_response(status=201) class LoginView(RestfulViewMixin, MethodView): @use_args(LoginSchema, locations=('json',)) def post(self, args): user = User.authenticate(**args) if not user: abort(401) key = user.login() # It will return key return self.get_response({"key": key}, status=200) class LogoutView(RestfulViewMixin, MethodView): decorators = (login_required,)<|fim▁hole|> def post(self): user = current_user user.logout() return self.get_response(status=200) class ResetPasswordView(RestfulViewMixin, MethodView): decorators = (login_required,) @use_args(ResetPasswordSchema, locations=('json',)) def put(self, args): user = current_user if not user.check_password(args['old_password']): abort(401) user.set_password(args['new_password']) user.update() return self.get_response(status=200) # Url patterns: To register views in blueprint users_bp.add_url_rule('/signup', view_func=SignupView.as_view('signup')) users_bp.add_url_rule('/login', view_func=LoginView.as_view('login')) users_bp.add_url_rule('/logout', view_func=LogoutView.as_view('logout')) users_bp.add_url_rule('/reset_password', view_func=ResetPasswordView.as_view('reset-password'))<|fim▁end|>
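A hedged client-side walkthrough of the endpoints registered above. The base URL depends on where users_bp is mounted, the signup/login field names are assumptions standing in for whatever SignupSchema and LoginSchema define, and the way the returned key is presented on later requests is likewise an assumption; only old_password/new_password and the {'key': ...} response shape come straight from the code:

    import requests  # any HTTP client would do

    BASE = 'http://localhost:5000'  # hypothetical mount point for users_bp

    creds = {'email': '[email protected]', 'password': 'old-pw'}  # field names assumed
    requests.post(BASE + '/signup', json=creds)
    key = requests.post(BASE + '/login', json=creds).json()['key']

    headers = {'Authorization': key}  # presentation scheme assumed
    requests.put(BASE + '/reset_password', headers=headers,
                 json={'old_password': 'old-pw', 'new_password': 'new-pw'})
    requests.post(BASE + '/logout', headers=headers)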
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|>You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package registrytest provides tests for Registry implementations // for storing Minions, Pods, Schedulers and Services. package registrytest // import "k8s.io/kubernetes/pkg/registry/registrytest"<|fim▁end|>
you may not use this file except in compliance with the License.
<|file_name|>Main.js<|end_file_name|><|fim▁begin|>Ext.define('CustomIcons.view.Main', { extend: 'Ext.tab.Panel', xtype: 'main', requires: [ 'Ext.TitleBar', 'Ext.Video' ], config: { tabBarPosition: 'bottom', items: [ { title: 'Welcome', iconCls: 'headphones', styleHtmlContent: true, scrollable: true, items: { docked: 'top', xtype: 'titlebar', title: 'Welcome to Sencha Touch 2' }, html: [ "You've just generated a new Sencha Touch 2 project. What you're looking at right now is the ", "contents of <a target='_blank' href=\"app/view/Main.js\">app/view/Main.js</a> - edit that file ", "and refresh to change what's rendered here." ].join("") }, { title: 'Get Started', iconCls: 'facebook2', items: [ { docked: 'top', xtype: 'titlebar', title: 'Getting Started' },<|fim▁hole|> } ] } ] } });<|fim▁end|>
{ xtype: 'video', url: 'http://av.vimeo.com/64284/137/87347327.mp4?token=1330978144_f9b698fea38cd408d52a2393240c896c', posterUrl: 'http://b.vimeocdn.com/ts/261/062/261062119_640.jpg'
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import * from django_de.apps.authors.models import Author urlpatterns = patterns('django.views.generic.list_detail', (r'^$', 'object_list', dict( queryset = Author.objects.order_by('name', 'slug'), template_object_name = 'author', allow_empty=True,<|fim▁hole|> ) )<|fim▁end|>
),
<|file_name|>watch-tree-unix.js<|end_file_name|><|fim▁begin|>var path = require("path"); var fs = require("fs"); var wt = require("./walk-tree"); function createEvent(dirs, event, dir, fileName) { var fullPath = path.join(dir, fileName); var exists = dirs.some(function (d) { return d === fullPath; }); var statObj; function stat() { if (statObj) { return statObj; } if (!fullPath) { statObj = { isDirectory: function () { return false; } }; } else { try { statObj = fs.statSync(fullPath); } catch (e) { statObj = { isDirectory: function () { return false; }, deleted: true }; } } return statObj; } return { name: fullPath, isDirectory: function () { return stat().isDirectory(); }, isMkdir: function () { return this.isDirectory() && !exists; }, isDelete: function () { return !!stat().deleted; }, isModify: function () { return !this.isDelete() && !this.isMkdir(); } }; } var addWatch; function watch(state, dir, options, callback) { return fs.watch(dir, function (event, fileName) { var e = createEvent(state.dirs, event, dir, fileName); if (e.isDirectory() && e.isMkdir()) { addWatch(state, e.name, options, callback); } if (!wt.isExcluded(e.name, options.exclude) && typeof callback === "function") { callback(e); } }); } function addWatch(state, dir, options, callback) { state.dirs = state.dirs || []; state.dirs.push(dir); state.watches = state.watches || []; state.watches.push(watch(state, dir, options, callback)); } function watchTree(dir, options, callback) { var opt = options, cb = callback; if (arguments.length === 2 && typeof opt === "function") { cb = opt; opt = {}; } var state = {}; opt = opt || {}; opt.exclude = wt.excludeRegExes(opt.exclude); addWatch(state, dir, opt, cb); wt.walkTree(dir, opt, function (err, dir) { if (err) { return; } addWatch(state, dir, opt, cb); });<|fim▁hole|> } }; } exports.watchTree = watchTree;<|fim▁end|>
return { end: function () { state.watches.forEach(function (w) { w.close(); });
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/* Package scope provides context objects for the sharing of scope across goroutines. This context object provides a number of utilities for coordinating concurrent work, in addition to sharing data. Lifecycle Contexts are nodes in a tree. A context is born either by forking from an existing context (becoming a child of that node in the tree), or a new tree is started by calling New(). A context can be terminated at any time. This is usually done by calling the Terminate() or Cancel() method. Termination is associated with an error value (which may be nil if one wants to indicate success). When a node in the tree is terminated, that termination is propagated down to all its unterminated descendents. For example, here is how one might fan out a search: // Fan out queries. for _, q := range queries { go func() { a, err := q.Run(ctx.Fork()) if err != nil { answers <- nil } else { answers <- a } }() } // Receive answers (or failures). for answer := range answers { if answer != nil { ctx.Cancel() // tell outstanding queries to give up return answer, nil } } return nil, fmt.Errorf("all queries failed") Contexts can be terminated at any time. You can even fork a context with a deadline: ctx := scope.New() result, err := Search(ctx.ForkWithTimeout(5 * time.Second), queries) if err == scope.TimedOut { // one or more backends timed out, have the caller back off } There is a termination channel, Done(), available if you want to interrupt your work when a context is terminated: // Wait for 10 seconds or termination incurred from another goroutine, // whichever occurs first. select { case <-ctx.Done(): return ctx.Err() case <-timer.After(10*time.Second): return nil } You can also spot-check for termination with a call to the Alive() method. for ctx.Alive() { readChunk() } Data Sharing Contexts provide a data store for key value pairs, shared across the entire scope. When a context is forked, the child context shares the same data map as its parent. This data store maps blank interfaces to blank interfaces, in the exact same manner as http://www.gorillatoolkit.org/pkg/context. This means you must use type assertions at runtime. To keep this reasonably safe, it's recommended to define and use your own unexported type for all keys maintained by your package. type myKey int const ( loggerKey myKey = iota dbKey // etc. ) func SetLogger(ctx scope.Context, logger *log.Logger) { ctx.Set(loggerKey, logger) } func GetLogger(ctx scope.Context) logger *log.Logger) { return ctx.Get(loggerKey).(*log.Logger) } The shared data store is managed in a copy-on-write fashion as the tree branches. When a context is forked, the child maintains a pointer to the parent's data map. When Set() is called on the child, the original map is duplicated for the child, and the update is only applied to the child's map. Common WaitGroup Each context provides a WaitGroup() method, which returns the same pointer across the entire tree. You can use this to spin off background tasks and then wait for them before you completely shut down the scope. ctx.WaitGroup().Add(1) go func() { doSomeThing(ctx) ctx.WaitGroup().Done() }() ctx.WaitGroup().Wait() Breakpoints Contexts provide an optional feature to facilitate unit testing, called breakpoints. A breakpoint is identified by a list of hashable values. Production code can pass this list to the Check() method to synchronize and allow for an error to be injected. 
Test code can register a breakpoint with Breakpoint(), which returns a channel of errors. The test can receive from this channel to synchronize with the entry of the corresponding Check() call, and then write back an error to synchronize with the exit. func Get(ctx scope.Context, url string) (*http.Response, error) { if err := ctx.Check("http.Get", url); err != nil { return nil, err } return http.Get(url) } func TestGetError(t *testing.T) { ctx := scope.New() ctrl := ctx.Breakpoint("http.Get", "http://google.com") testErr := fmt.Errorf("test error") go func() { <-ctrl ctrl <- testErr }() if err := Get(ctx, "http://google.com"); err != testErr { t.Fail()<|fim▁hole|>*/ package scope<|fim▁end|>
} }
<|file_name|>TCPSerServer.java<|end_file_name|><|fim▁begin|>// Week 5 - Task 7 import java.io.PrintWriter; import java.io.BufferedReader; import java.io.InputStreamReader; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.io.*; public class TCPSerServer { private static ServerSocket servSock; private static final int PORT = 1234; public static void main(String[] args) { System.out.println("!!!Opening port...\n"); try { servSock = new ServerSocket(PORT); } catch(IOException e) { System.out.println("Unable to attach to port!"); System.exit(1); } do { run(); }while (true); } private static void run() { Socket link = null; try{ link = servSock.accept(); PrintWriter out = new PrintWriter(link.getOutputStream(),true); ObjectInputStream istream = new ObjectInputStream (link.getInputStream()); Person p = null; while(true){ try{ p = (Person)istream.readObject(); System.out.println("SERVER - Received: New object.\n"); System.out.println("SERVER - Received: Person name=" + p.getName()); System.out.println("SERVER - Received: Person age=" + p.getAge()); System.out.println("SERVER - Received: Person address=" + p.getAddress()); out.println("Person object received."); } catch (Exception e) { System.out.println("Exception in run"); System.out.println("\n* Closing connection... *"); break; } } }<|fim▁hole|> catch(IOException e) { e.printStackTrace(); } finally { try { System.out.println("\n* Closing connection... *"); link.close(); } catch(IOException e) { System.out.println("Unable to disconnect!"); System.exit(1); } } } }<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2011, 2012, 2014, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License<|fim▁hole|>"""Knowledge database models.""" import os from invenio_base.globals import cfg from invenio.ext.sqlalchemy import db from invenio.ext.sqlalchemy.utils import session_manager from invenio_collections.models import Collection from invenio.utils.text import slugify from sqlalchemy.dialects import mysql from sqlalchemy.event import listens_for from sqlalchemy.orm.collections import attribute_mapped_collection from sqlalchemy.schema import Index class KnwKB(db.Model): """Represent a KnwKB record.""" KNWKB_TYPES = { 'written_as': 'w', 'dynamic': 'd', 'taxonomy': 't', } __tablename__ = 'knwKB' id = db.Column(db.MediumInteger(8, unsigned=True), nullable=False, primary_key=True, autoincrement=True) _name = db.Column(db.String(255), server_default='', unique=True, name="name") _description = db.Column(db.Text, nullable=False, name="description", default="") _kbtype = db.Column(db.Char(1), nullable=True, default='w', name="kbtype") slug = db.Column(db.String(255), unique=True, nullable=False, default="") # Enable or disable the access from REST API is_api_accessible = db.Column(db.Boolean, default=True, nullable=False) @db.hybrid_property def name(self): """Get name.""" return self._name @name.setter def name(self, value): """Set name and generate the slug.""" self._name = value # generate slug if not self.slug: self.slug = KnwKB.generate_slug(value) @db.hybrid_property def description(self): """Get description.""" return self._description @description.setter def description(self, value): """Set description.""" # TEXT in mysql don't support default value # @see http://bugs.mysql.com/bug.php?id=21532 self._description = value or '' @db.hybrid_property def kbtype(self): """Get kbtype.""" return self._kbtype @kbtype.setter def kbtype(self, value): """Set kbtype.""" if value is None: # set the default value return # or set one of the available values kbtype = value[0] if len(value) > 0 else 'w' if kbtype not in ['t', 'd', 'w']: raise ValueError('unknown type "{value}", please use one of \ following values: "taxonomy", "dynamic" or \ "written_as"'.format(value=value)) self._kbtype = kbtype def is_dynamic(self): """Return true if the type is dynamic.""" return self._kbtype == 'd' def to_dict(self): """Return a dict representation of KnwKB.""" mydict = {'id': self.id, 'name': self.name, 'description': self.description, 'kbtype': self.kbtype} if self.kbtype == 'd': mydict.update((self.kbdefs.to_dict() if self.kbdefs else {}) or {}) return mydict def get_kbr_items(self, searchkey="", searchvalue="", searchtype='s'): """ Return dicts of 'key' and 'value' from a knowledge base. 
:param kb_name the name of the knowledge base :param searchkey search using this key :param searchvalue search using this value :param searchtype s=substring, e=exact, sw=startswith :return a list of dictionaries [{'key'=>x, 'value'=>y},..] """ import warnings warnings.warn("The function is deprecated. Please use the " "`KnwKBRVAL.query_kb_mappings()` instead. " "E.g. [kval.to_dict() for kval in " "KnwKBRVAL.query_kb_mappings(kb_id).all()]") if searchtype == 's' and searchkey: searchkey = '%' + searchkey + '%' if searchtype == 's' and searchvalue: searchvalue = '%' + searchvalue + '%' if searchtype == 'sw' and searchvalue: # startswith searchvalue = searchvalue + '%' if not searchvalue: searchvalue = '%' if not searchkey: searchkey = '%' kvals = KnwKBRVAL.query.filter( KnwKBRVAL.id_knwKB.like(self.id), KnwKBRVAL.m_value.like(searchvalue), KnwKBRVAL.m_key.like(searchkey)).all() return [kval.to_dict() for kval in kvals] def get_kbr_values(self, searchkey="", searchvalue="", searchtype='s'): """ Return dicts of 'key' and 'value' from a knowledge base. :param kb_name the name of the knowledge base :param searchkey search using this key :param searchvalue search using this value :param searchtype s=substring, e=exact, sw=startswith :return a list of dictionaries [{'key'=>x, 'value'=>y},..] """ import warnings warnings.warn("The function is deprecated. Please use the " "`KnwKBRVAL.query_kb_mappings()` instead. " "E.g. [(kval.m_value,) for kval in " "KnwKBRVAL.query_kb_mappings(kb_id).all()]") # prepare filters if searchtype == 's': searchkey = '%' + searchkey + '%' if searchtype == 's' and searchvalue: searchvalue = '%' + searchvalue + '%' if searchtype == 'sw' and searchvalue: # startswith searchvalue = searchvalue + '%' if not searchvalue: searchvalue = '%' # execute query return db.session.execute( db.select([KnwKBRVAL.m_value], db.and_(KnwKBRVAL.id_knwKB.like(self.id), KnwKBRVAL.m_value.like(searchvalue), KnwKBRVAL.m_key.like(searchkey)))) @session_manager def set_dyn_config(self, field, expression, collection=None): """Set dynamic configuration.""" if self.kbdefs: # update self.kbdefs.output_tag = field self.kbdefs.search_expression = expression self.kbdefs.collection = collection db.session.merge(self.kbdefs) else: # insert self.kbdefs = KnwKBDDEF(output_tag=field, search_expression=expression, collection=collection) @staticmethod def generate_slug(name): """Generate a slug for the knowledge. :param name: text to slugify :return: slugified text """ slug = slugify(name) i = KnwKB.query.filter(db.or_( KnwKB.slug.like(slug), KnwKB.slug.like(slug + '-%'), )).count() return slug + ('-{0}'.format(i) if i > 0 else '') @staticmethod def exists(kb_name): """Return True if a kb with the given name exists. :param kb_name: the name of the knowledge base :return: True if kb exists """ return KnwKB.query_exists(KnwKB.name.like(kb_name)) @staticmethod def query_exists(filters): """Return True if a kb with the given filters exists. 
E.g: KnwKB.query_exists(KnwKB.name.like('FAQ')) :param filters: filter for sqlalchemy :return: True if kb exists """ return db.session.query( KnwKB.query.filter( filters).exists()).scalar() def get_filename(self): """Construct the file name for taxonomy knoledge.""" return cfg['CFG_WEBDIR'] + "/kbfiles/" \ + str(self.id) + ".rdf" @listens_for(KnwKB, 'after_delete') def del_kwnkb(mapper, connection, target): """Remove taxonomy file.""" if(target.kbtype == KnwKB.KNWKB_TYPES['taxonomy']): # Delete taxonomy file if os.path.isfile(target.get_filename()): os.remove(target.get_filename()) class KnwKBDDEF(db.Model): """Represent a KnwKBDDEF record.""" __tablename__ = 'knwKBDDEF' id_knwKB = db.Column(db.MediumInteger(8, unsigned=True), db.ForeignKey(KnwKB.id), nullable=False, primary_key=True) id_collection = db.Column(db.MediumInteger(unsigned=True), db.ForeignKey(Collection.id), nullable=True) output_tag = db.Column(db.Text, nullable=True) search_expression = db.Column(db.Text, nullable=True) kb = db.relationship( KnwKB, backref=db.backref('kbdefs', uselist=False, cascade="all, delete-orphan"), single_parent=True) collection = db.relationship( Collection, backref=db.backref('kbdefs')) def to_dict(self): """Return a dict representation of KnwKBDDEF.""" return {'field': self.output_tag, 'expression': self.search_expression, 'coll_id': self.id_collection, 'collection': self.collection.name if self.collection else None} class KnwKBRVAL(db.Model): """Represent a KnwKBRVAL record.""" __tablename__ = 'knwKBRVAL' m_key = db.Column(db.String(255), nullable=False, primary_key=True, index=True) m_value = db.Column( db.Text().with_variant(mysql.TEXT(30), 'mysql'), nullable=False) id_knwKB = db.Column( db.MediumInteger( 8, unsigned=True), db.ForeignKey( KnwKB.id), nullable=False, server_default='0', primary_key=True) kb = db.relationship( KnwKB, backref=db.backref( 'kbrvals', cascade="all, delete-orphan", collection_class=attribute_mapped_collection("m_key"))) @staticmethod def query_kb_mappings(kbid, sortby="to", key="", value="", match_type="s"): """Return a list of all mappings from the given kb, ordered by key. If key given, give only those with left side (mapFrom) = key. If value given, give only those with right side (mapTo) = value. :param kb_name: knowledge base name. if "", return all :param sortby: the sorting criteria ('from' or 'to') :param key: return only entries where key matches this :param value: return only entries where value matches this :param match_type: s=substring, e=exact, sw=startswith """ # query query = KnwKBRVAL.query.filter( KnwKBRVAL.id_knwKB == kbid) # filter if len(key) > 0: if match_type == "s": key = "%" + key + "%" elif match_type == "sw": key = key + "%" else: key = '%' if len(value) > 0: if match_type == "s": value = "%" + value + "%" elif match_type == "sw": value = value + "%" else: value = '%' query = query.filter( KnwKBRVAL.m_key.like(key), KnwKBRVAL.m_value.like(value)) # order by if sortby == "from": query = query.order_by(KnwKBRVAL.m_key) else: query = query.order_by(KnwKBRVAL.m_value) return query def to_dict(self): """Return a dict representation of KnwKBRVAL.""" # FIXME remove 'id' dependency from invenio modules return {'id': self.m_key + "_" + str(self.id_knwKB), 'key': self.m_key, 'value': self.m_value, 'kbid': self.kb.id if self.kb else None, 'kbname': self.kb.name if self.kb else None} Index('ix_knwKBRVAL_m_value', KnwKBRVAL.m_value, mysql_length=30) __all__ = ('KnwKB', 'KnwKBDDEF', 'KnwKBRVAL')<|fim▁end|>
# along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
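The deprecation warnings embedded in the two get_kbr_* methods above name KnwKBRVAL.query_kb_mappings() as their replacement. A minimal migration sketch following those warnings; the knowledge-base name 'FAQ' and the search key 'host' are placeholders, and a working application/database session is assumed:

kb = KnwKB.query.filter(KnwKB.name == 'FAQ').one()
# Old (deprecated): kb.get_kbr_items(searchkey='host')
# New: query the mapping rows directly and serialize each one yourself.
mappings = [kval.to_dict()
            for kval in KnwKBRVAL.query_kb_mappings(kb.id, key='host', match_type='s').all()]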
<|file_name|>net_vlan.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2017, Ansible by Red Hat, inc # # This file is part of Ansible by Red Hat # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'core'} DOCUMENTATION = """ --- module: net_vlan version_added: "2.4" author: "Ricardo Carrillo Cruz (@rcarrillocruz)" short_description: Manage VLANs on network devices description: - This module provides declarative management of VLANs on network devices. options: name: description: - Name of the VLAN. vlan_id: description: - ID of the VLAN. interfaces: description: - List of interfaces the VLAN should be configured on. collection: description: List of VLANs definitions. purge:<|fim▁hole|> - Purge VLANs not defined in the collections parameter. default: no state: description: - State of the VLAN configuration. default: present choices: ['present', 'absent', 'active', 'suspend'] """ EXAMPLES = """ - name: configure VLAN ID and name net_vlan: vlan_id: 20 name: test-vlan - name: remove configuration net_vlan: state: absent - name: configure VLAN state net_vlan: vlan_id: state: suspend """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always type: list sample: - vlan 20 - name test-vlan rpc: description: load-configuration RPC send to the device returned: C(rpc) is returned only for junos device when configuration is changed on device type: string sample: "<vlans><vlan><name>test-vlan-4</name></vlan></vlans>" """<|fim▁end|>
description:
<|file_name|>AgendamentoResource.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> */ package br.com.stylehair.servicos; import br.com.stylehair.dao.AgendamentoDAO; import br.com.stylehair.entity.Agendamento; import com.google.gson.Gson; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.ejb.Stateless; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.UriInfo; /** * * @author vinicius */ @Stateless @Path("agendamento") public class AgendamentoResource { @PersistenceContext(unitName = "StyleHairPU") private EntityManager em; private Gson gson = new Gson(); @Context private UriInfo context; @GET @Produces("application/json") public String getJson() { AgendamentoDAO dao = new AgendamentoDAO(em); List<Agendamento> agendamentos; agendamentos = dao.buscarTodosAgendamentos(); return gson.toJson(agendamentos); } @GET @Path("{agendamentoId}") @Produces("application/json") public String getAgendamento(@PathParam("agendamentoId") String id){ System.out.println("pegando o cliente"); Long n = Long.parseLong(id); System.out.println(n); AgendamentoDAO dao = new AgendamentoDAO(em); Agendamento agend = dao.consultarPorId(Agendamento.class, Long.parseLong(id)); return gson.toJson(agend); } @GET @Path("{buscardata}/{dia}/{mes}/{ano}") @Produces("application/json") public String getAgendamentoPorData(@PathParam("dia") String dia,@PathParam("mes") String mes,@PathParam("ano") String ano ) { AgendamentoDAO dao = new AgendamentoDAO(em); List<Agendamento> agendamentos; SimpleDateFormat dateFormat_hora = new SimpleDateFormat("HH:mm"); Date data = new Date(); String horaAtual = dateFormat_hora.format(data); System.out.println("hora Atual" + horaAtual); Date d1 = new Date(); SimpleDateFormat dateFormataData = new SimpleDateFormat("dd/MM/yyyy"); String dataHoje = dateFormataData.format(d1); System.out.println("dataHoje ----" + dataHoje + "-------- " + dia+"/"+mes+"/"+ano ); if(dataHoje.equalsIgnoreCase(dia+"/"+mes+"/"+ano)){ agendamentos = dao.buscarAgendamentoPorData(dia+"/"+mes+"/"+ano + " ",horaAtual); return gson.toJson(agendamentos); } agendamentos = dao.buscarAgendamentoPorData(dia+"/"+mes+"/"+ano + " ","08:00"); return gson.toJson(agendamentos); } @POST @Consumes("application/json") @Produces("application/json") public String salvarAgendamento(String agendamento) throws Exception{ Agendamento ag1 = gson.fromJson(agendamento, Agendamento.class); AgendamentoDAO dao = new AgendamentoDAO(em); return gson.toJson(dao.salvar(ag1)); } @PUT @Consumes("application/json") public void atualizarAgendamento(String agendamento) throws Exception { salvarAgendamento(agendamento); } }<|fim▁end|>
* To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor.
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import subprocess import sys import setup_util import os <|fim▁hole|> try: subprocess.check_call("mvn clean compile assembly:single", shell=True, cwd="netty", stderr=errfile, stdout=logfile) subprocess.Popen("java -jar netty-example-0.1-jar-with-dependencies.jar".rsplit(" "), cwd="netty/target", stderr=errfile, stdout=logfile) return 0 except subprocess.CalledProcessError: return 1 def stop(logfile, errfile): if os.name == 'nt': subprocess.check_call("wmic process where \"CommandLine LIKE '%netty-example%'\" call terminate", stderr=errfile, stdout=logfile) else: p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) out, err = p.communicate() for line in out.splitlines(): if 'netty-example' in line: pid = int(line.split(None, 2)[1]) os.kill(pid, 9) return 0<|fim▁end|>
def start(args, logfile, errfile):
<|file_name|>ParamsUserNew.java<|end_file_name|><|fim▁begin|>package com.syncano.android.lib.modules.users; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; import com.syncano.android.lib.modules.Params; import com.syncano.android.lib.modules.Response; /** * Params to create new user. */ public class ParamsUserNew extends Params { /** Name of user */ @Expose @SerializedName(value = "user_name") private String userName; /** Nickname of user */ @Expose private String nick; /** Avatar base64 for user */ @Expose private String avatar; /** User's password. */ @Expose @SerializedName(value = "password") private String password; /** * @param userName * User name defining user. Can be <code>null</code>. */ public ParamsUserNew(String userName) { setUserName(userName); } @Override public String getMethodName() { return "user.new"; } public Response instantiateResponse() { return new ResponseUserNew(); } /** * @return user name */ public String getUserName() { return userName; } /** * Sets user name * * @param user_name * user name */ public void setUserName(String userName) { this.userName = userName; } /** * @return user nickname */ public String getNick() { return nick; } /** * Sets user nickname * * @param nick * nickname */ public void setNick(String nick) { this.nick = nick; } /** * @return avatar base64 */ public String getAvatar() { return avatar; } /** * Sets avatar base64 * * @param avatar * avatar base64 */ public void setAvatar(String avatar) { this.avatar = avatar; } /** * @return password */ public String getPassword() { return password; } /** * @param Sets * user password<|fim▁hole|> */ public void setPassword(String password) { this.password = password; } }<|fim▁end|>
<|file_name|>stats.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Statistical functions. use bigint::prelude::*; /// Discretised histogram. #[derive(Debug, PartialEq)] pub struct Histogram { /// Bounds of each bucket. pub bucket_bounds: Vec<U256>, /// Count within each bucket. pub counts: Vec<u64> } impl Histogram { /// Histogram of a sorted corpus if it at least spans the buckets. Bounds are left closed. pub fn new(corpus: &[U256], bucket_number: usize) -> Option<Histogram> { if corpus.len() < 1 { return None; } let corpus_end = corpus.last().expect("there is at least 1 element; qed").clone(); let corpus_start = corpus.first().expect("there is at least 1 element; qed").clone(); trace!(target: "stats", "Computing histogram from {} to {} with {} buckets.", corpus_start, corpus_end, bucket_number); // Bucket needs to be at least 1 wide. let bucket_size = { // Round up to get the entire corpus included. let raw_bucket_size = (corpus_end - corpus_start + bucket_number.into()) / bucket_number.into(); if raw_bucket_size == 0.into() { 1.into() } else { raw_bucket_size } }; let mut bucket_end = corpus_start + bucket_size; let mut bucket_bounds = vec![corpus_start; bucket_number + 1]; let mut counts = vec![0; bucket_number]; let mut corpus_i = 0; // Go through the corpus adding to buckets. for bucket in 0..bucket_number { while corpus.get(corpus_i).map_or(false, |v| v < &bucket_end) { // Initialized to size bucket_number above; iterates up to bucket_number; qed counts[bucket] += 1; corpus_i += 1; } // Initialized to size bucket_number + 1 above; iterates up to bucket_number; subscript is in range; qed bucket_bounds[bucket + 1] = bucket_end; bucket_end = bucket_end + bucket_size; } Some(Histogram { bucket_bounds: bucket_bounds, counts: counts }) } } #[cfg(test)] mod tests { use bigint::prelude::U256; use super::Histogram; #[test] fn check_histogram() { let hist = Histogram::new(slice_into![643,689,1408,2000,2296,2512,4250,4320,4842,4958,5804,6065,6098,6354,7002,7145,7845,8589,8593,8895], 5).unwrap(); let correct_bounds: Vec<U256> = vec_into![643, 2294, 3945, 5596, 7247, 8898]; assert_eq!(Histogram { bucket_bounds: correct_bounds, counts: vec![4,2,4,6,4] }, hist); } #[test] fn smaller_data_range_than_bucket_range() { assert_eq!( Histogram::new(slice_into![1, 2, 2], 3), Some(Histogram { bucket_bounds: vec_into![1, 2, 3, 4], counts: vec![1, 2, 0] }) ); } #[test] fn data_range_is_not_multiple_of_bucket_range() { assert_eq!( Histogram::new(slice_into![1, 2, 5], 2), Some(Histogram { bucket_bounds: vec_into![1, 4, 7], counts: vec![2, 1] }) ); } #[test] fn data_range_is_multiple_of_bucket_range() { assert_eq!( Histogram::new(slice_into![1, 2, 6], 2),<|fim▁hole|> #[test] fn none_when_too_few_data() { assert!(Histogram::new(slice_into![], 1).is_none()); } }<|fim▁end|>
Some(Histogram { bucket_bounds: vec_into![1, 4, 7], counts: vec![2, 1] }) ); }
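The bucket-width rule in Histogram::new above is the subtle part: the raw width is over-rounded so the last corpus element still falls below the final left-closed bound, then clamped to a minimum width of 1. A Python re-sketch of the same logic, written here only to illustrate the algorithm, reproduces the data_range_is_not_multiple_of_bucket_range test:

def histogram(corpus, buckets):
    # Python illustration of Histogram::new; corpus must be sorted ascending.
    if not corpus:
        return None
    start, end = corpus[0], corpus[-1]
    # Over-round so that end < last bound (buckets are left-closed); at least 1 wide.
    size = max((end - start + buckets) // buckets, 1)
    bounds = [start + i * size for i in range(buckets + 1)]
    counts = [0] * buckets
    i = 0
    for b in range(buckets):
        while i < len(corpus) and corpus[i] < bounds[b + 1]:
            counts[b] += 1
            i += 1
    return bounds, counts

assert histogram([1, 2, 5], 2) == ([1, 4, 7], [2, 1])  # mirrors the Rust test above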
<|file_name|>reboot_cam.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys def usage(): print "{0} <feed>".format(os.path.basename(__file__)) if __name__ == '__main__': kmotion_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(kmotion_dir) from core.camera_lost import CameraLost<|fim▁hole|> cam_lost = CameraLost(kmotion_dir, feed) if cam_lost.reboot_camera(): sys.exit() else: usage() sys.exit(1)<|fim▁end|>
feed = '' if len(sys.argv) > 1: feed = sys.argv[1]
<|file_name|>aws-ecs-service_deploymentconfiguration.go<|end_file_name|><|fim▁begin|>package cloudformation // AWSECSService_DeploymentConfiguration AWS CloudFormation Resource (AWS::ECS::Service.DeploymentConfiguration) // See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-service-deploymentconfiguration.html type AWSECSService_DeploymentConfiguration struct { // MaximumPercent AWS CloudFormation Property // Required: false // See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-service-deploymentconfiguration.html#cfn-ecs-service-deploymentconfiguration-maximumpercent MaximumPercent int `json:"MaximumPercent,omitempty"` // MinimumHealthyPercent AWS CloudFormation Property // Required: false // See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-service-deploymentconfiguration.html#cfn-ecs-service-deploymentconfiguration-minimumhealthypercent MinimumHealthyPercent int `json:"MinimumHealthyPercent,omitempty"` } // AWSCloudFormationType returns the AWS CloudFormation resource type func (r *AWSECSService_DeploymentConfiguration) AWSCloudFormationType() string { return "AWS::ECS::Service.DeploymentConfiguration"<|fim▁hole|><|fim▁end|>
}
<|file_name|>feed_parse_extractTranslasiSanusiMe.py<|end_file_name|><|fim▁begin|>def extractTranslasiSanusiMe(item): ''' Parser for 'translasi.sanusi.me' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [<|fim▁hole|> ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False<|fim▁end|>
('PRC', 'PRC', 'translated'),
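For context on how a parser like extractTranslasiSanusiMe is driven: the item argument is a feed entry with at least 'title' and 'tags' keys, while extractVolChapterFragmentPostfix and buildReleaseMessageWithType come from the surrounding scraper framework (their signatures are inferred from the call sites above, not defined in this file). A hypothetical invocation:

item = {'title': 'PRC Chapter 12', 'tags': ['PRC']}  # invented sample entry
release = extractTranslasiSanusiMe(item)
# None  -> no vol/chapter parsed from the title, or the title is a preview
# False -> no tag in tagmap matched
# else  -> a formatted release message for the matched series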
<|file_name|>ws.go<|end_file_name|><|fim▁begin|>package ui import ( "io" "net/http" "path" "sync" "github.com/gorilla/websocket" ) const ( // Determines the chunking size of messages used by gorilla MaxMessageSize = 1024 ) var ( upgrader = &websocket.Upgrader{ReadBufferSize: 1024, WriteBufferSize: MaxMessageSize} ) // UIChannel represents a data channel to/from the UI. UIChannel will have one // underlying websocket connection for each connected browser window. All // messages from any browser window are available via In and all messages sent // to Out will be published to all browser windows. type UIChannel struct { URL string In <-chan []byte Out chan<- []byte in chan []byte out chan []byte nextId int conns map[int]*wsconn m sync.Mutex } type ConnectFunc func(write func([]byte) error) error // NewChannel establishes a new channel to the UI at the given path. When the UI // connects to this path, we will establish a websocket to the UI to carry // messages for this UIChannel. The given onConnect function is called anytime // that the UI connects. func NewChannel(p string, onConnect ConnectFunc) *UIChannel { c := newUIChannel(path.Join(uiaddr, p)) r.HandleFunc(p, func(resp http.ResponseWriter, req *http.Request) { log.Tracef("Got connection to %v", c.URL) var err error if req.Method != "GET" { http.Error(resp, "Method not allowed", 405) return } // Upgrade with a HTTP request returns a websocket connection ws, err := upgrader.Upgrade(resp, req, nil) if err != nil { log.Errorf("Unable to upgrade %v to websocket: %v", p, err) return } log.Tracef("Upgraded to websocket at %v", c.URL) c.m.Lock() if onConnect != nil { err = onConnect(func(b []byte) error { log.Tracef("Writing initial message: %q", b) return ws.WriteMessage(websocket.TextMessage, b) }) if err != nil { log.Errorf("Error processing onConnect, disconnecting websocket: %v", err)<|fim▁hole|> log.Debugf("Error closing WebSockets connection: %s", err) } c.m.Unlock() return } } c.nextId += 1 conn := &wsconn{ id: c.nextId, c: c, ws: ws, } c.conns[conn.id] = conn c.m.Unlock() log.Tracef("About to read from connection to %v", c.URL) conn.read() }) return c } func newUIChannel(url string) *UIChannel { in := make(chan []byte, 100) out := make(chan []byte) c := &UIChannel{ URL: url, In: in, in: in, Out: out, out: out, nextId: 0, conns: make(map[int]*wsconn), } go c.write() return c } func (c *UIChannel) write() { defer func() { log.Tracef("Closing all websockets to %v", c.URL) c.m.Lock() for _, conn := range c.conns { if err := conn.ws.Close(); err != nil { log.Debugf("Error closing WebSockets connection: %v", err) } delete(c.conns, conn.id) } c.m.Unlock() }() for msg := range c.out { c.m.Lock() for _, conn := range c.conns { err := conn.ws.WriteMessage(websocket.TextMessage, msg) if err != nil { log.Debugf("Error writing to UI %v for: %v", err, c.URL) delete(c.conns, conn.id) } } c.m.Unlock() } } func (c *UIChannel) Close() { log.Tracef("Closing channel") close(c.out) } // wsconn ties a websocket.Conn to a UIChannel type wsconn struct { id int c *UIChannel ws *websocket.Conn } func (c *wsconn) read() { for { _, b, err := c.ws.ReadMessage() log.Tracef("Read message: %q", b) if err != nil { if err != io.EOF { log.Debugf("Error reading from UI: %v", err) } if err := c.ws.Close(); err != nil { log.Debugf("Error closing WebSockets connection: %v", err) } return } log.Tracef("Sending to channel...") c.c.in <- b } }<|fim▁end|>
if err := ws.Close(); err != nil {
<|file_name|>car.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- import pigpio class Car(object): PINS = ['left_pin', 'right_pin', 'forward_pin', 'backward_pin', 'enable_moving', 'enable_turning'] def __init__(self, left_pin, right_pin, forward_pin, backward_pin, enable_moving, enable_turning, start_power=65): self._left_pin = left_pin self._right_pin = right_pin self._forward_pin = forward_pin self._backward_pin = backward_pin self._enable_moving = enable_moving self._enable_turning = enable_turning self._setup_gpio() self._moving_pwm_started = False self._power = start_power def _setup_gpio(self): self._pi = pigpio.pi() self._pi.set_mode(self._left_pin, pigpio.OUTPUT) self._pi.set_mode(self._right_pin, pigpio.OUTPUT) self._pi.set_mode(self._forward_pin, pigpio.OUTPUT) self._pi.set_mode(self._backward_pin, pigpio.OUTPUT) self._pi.set_mode(self._enable_moving, pigpio.OUTPUT) self._pi.set_mode(self._enable_turning, pigpio.OUTPUT) self._pi.set_PWM_range(self._enable_moving, 100) self._pi.set_PWM_frequency(self._enable_moving, 100) # channel, frequency def turn_left(self): self._pi.write(self._enable_turning, True) self._pi.write(self._right_pin, False) self._pi.write(self._left_pin, True) def turn_right(self): self._pi.write(self._enable_turning, True) self._pi.write(self._left_pin, False) self._pi.write(self._right_pin, True) def straight(self): self._pi.write(self._left_pin, False) self._pi.write(self._right_pin, False) self._pi.write(self._enable_turning, False) def move_forward(self): self._pi.write(self._backward_pin, False) self._pi.write(self._forward_pin, True) self._start_moving_pwm() def move_backward(self): self._pi.write(self._forward_pin, False) self._pi.write(self._backward_pin, True)<|fim▁hole|> if self._power + change_value > 100: self._power = 100 else: self._power += change_value self._change_power() def slower(self, change_value=15): if self._power - change_value < 30: self._power = 30 else: self._power -= change_value self._change_power() def stop_moving(self): self._pi.set_PWM_dutycycle(self._enable_moving, 0) self._pi.write(self._backward_pin, False) self._pi.write(self._forward_pin, False) self._moving_pwm_started = False def stop(self): self.stop_moving() self._pi.write(self._left_pin, False) self._pi.write(self._right_pin, False) self._pi.write(self._enable_turning, False) def _start_moving_pwm(self): if self._moving_pwm_started: return self._pi.set_PWM_dutycycle(self._enable_moving, self._power) self._moving_pwm_started = True def _change_power(self): self._pi.set_PWM_dutycycle(self._enable_moving, self._power)<|fim▁end|>
self._start_moving_pwm() def faster(self, change_value=15):
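A minimal driving sketch for the Car class above. The BCM pin numbers are placeholders chosen for illustration only, and a running pigpiod daemon is assumed:

import time

car = Car(left_pin=17, right_pin=27, forward_pin=22, backward_pin=23,
          enable_moving=24, enable_turning=25)  # pins are illustrative
car.move_forward()  # starts the drive PWM at start_power (default 65)
car.faster()        # +15 duty cycle, capped at 100
car.turn_left()
time.sleep(2)
car.straight()
car.stop()          # zeroes the drive PWM and releases all direction pins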
<|file_name|>gobgp.go<|end_file_name|><|fim▁begin|>// -*- coding: utf-8 -*- // Copyright (C) 2019 Nippon Telegraph and Telephone Corporation. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. // See the License for the specific language governing permissions and // limitations under the License. package mkpb import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) const ( GobgpConfFile = "gobgp.conf" GobgpdConfFile = "gobgpd.conf" ) type GobgpCmd struct { *Command gobgpConfFile string gobgpdConfFile string } func NewGobgpCmd() *GobgpCmd { return &GobgpCmd{ Command: NewCommand(), gobgpConfFile: GobgpConfFile, gobgpdConfFile: GobgpdConfFile, } } func (c *GobgpCmd) setConfigFlags(cmd *cobra.Command) *cobra.Command { cmd.Flags().StringVarP(&c.gobgpConfFile, "gobgp-conf", "", GobgpConfFile, "gobgpd service config") cmd.Flags().StringVarP(&c.gobgpdConfFile, "gobgpd-conf", "", GobgpdConfFile, "gobgpd config") return c.Command.setConfigFlags(cmd) } <|fim▁hole|> } if err := c.createGoBGPdConf(playbookName); err != nil { return err } return c.createGoBGPConf(playbookName) } func (c *GobgpCmd) createGoBGPConf(playbookName string) error { opt := c.optionConfig() path := c.filesPath(playbookName, c.gobgpConfFile) f, err := createFile(path, c.overwrite, func(backup string) { log.Debugf("%s backup", backup) }) if err != nil { return err } defer f.Close() log.Debugf("%s created.", path) t := NewPlaybookGoBGPConf() t.APIAddr = opt.GoBGPAPIAddr t.APIPort = opt.GoBGPAPIPort return t.Execute(f) } func (c *GobgpCmd) createGoBGPdConf(playbookName string) error { opt := c.optionConfig() g := c.globalConfig() r, err := c.routerConfig(playbookName) if err != nil { return err } path := c.filesPath(playbookName, c.gobgpdConfFile) f, err := createFile(path, c.overwrite, func(backup string) { log.Debugf("%s backup", backup) }) if err != nil { return err } defer f.Close() log.Debugf("%s created.", path) zapiEnabled := func() bool { if g.Vpn { return r.NodeID != 0 // VPN-MIC => false, VPN-RIC => true } return opt.GoBGPZAPIEnable }() t := NewPlaybookGoBGPdConf() t.RouterID = g.ReID t.AS = opt.GoBGPAs t.ZAPIVersion = opt.GoBGPZAPIVersion t.ZAPIEnable = zapiEnabled return t.Execute(f) } func NewGoBGPCommand() *cobra.Command { rootCmd := &cobra.Command{ Use: "gobgp", Short: "gobgp command.", } gobgp := NewGobgpCmd() rootCmd.AddCommand(gobgp.setConfigFlags( &cobra.Command{ Use: "create <playbook name>", Short: "Crate new gobgp config file.", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { if err := gobgp.readConfig(); err != nil { return err } return gobgp.createConf(args[0]) }, }, )) return rootCmd }<|fim▁end|>
func (c *GobgpCmd) createConf(playbookName string) error { if err := c.mkDirAll(playbookName); err != nil { return err
<|file_name|>AmazonHoneycodeClientBuilder.java<|end_file_name|><|fim▁begin|>/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.honeycode; import javax.annotation.Generated; import com.amazonaws.ClientConfigurationFactory; import com.amazonaws.annotation.NotThreadSafe; import com.amazonaws.client.builder.AwsSyncClientBuilder; import com.amazonaws.client.AwsSyncClientParams; /** * Fluent builder for {@link com.amazonaws.services.honeycode.AmazonHoneycode}. Use of the builder is preferred over * using constructors of the client class. **/ @NotThreadSafe @Generated("com.amazonaws:aws-java-sdk-code-generator") public final class AmazonHoneycodeClientBuilder extends AwsSyncClientBuilder<AmazonHoneycodeClientBuilder, AmazonHoneycode> { private static final ClientConfigurationFactory CLIENT_CONFIG_FACTORY = new ClientConfigurationFactory(); /** * @return Create new instance of builder with all defaults set. */ public static AmazonHoneycodeClientBuilder standard() { return new AmazonHoneycodeClientBuilder(); } /** * @return Default client using the {@link com.amazonaws.auth.DefaultAWSCredentialsProviderChain} and<|fim▁hole|> */ public static AmazonHoneycode defaultClient() { return standard().build(); } private AmazonHoneycodeClientBuilder() { super(CLIENT_CONFIG_FACTORY); } /** * Construct a synchronous implementation of AmazonHoneycode using the current builder configuration. * * @param params * Current builder configuration represented as a parameter object. * @return Fully configured implementation of AmazonHoneycode. */ @Override protected AmazonHoneycode build(AwsSyncClientParams params) { return new AmazonHoneycodeClient(params); } }<|fim▁end|>
* {@link com.amazonaws.regions.DefaultAwsRegionProviderChain} chain
<|file_name|>test_gridmap0.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """Unit tests for gridmap classes""" # ---------------------------------- # Bjørn Ådlandsvik <[email protected]> # Institute of Marine Research # ---------------------------------- import sys from math import pi import unittest import numpy as np sys.path = ['..'] + sys.path # import from developing version import gridmap # ------------------------------------ class test_PolarStereographic0(unittest.TestCase): """Test some analytic properties of the polar stereographic map""" xp, yp, dx, ylon = 418.25, 257.25, 10000.0, 58.0 map0 = gridmap.PolarStereographic(xp, yp, dx, ylon) map1 = gridmap.PolarStereographic(xp, yp, dx, ylon, ellipsoid=gridmap.WGS84) # Move the first two to test_Interface def test_scalar(self): """Should return a scalar for scalar input""" pass def test_vector(self): """Return arrays of the same shape as the input""" def test_north_pole_forward(self): """The coordinates of the North Pole are xp, yp""" lon, lat = 17.2, 90.0 # sphere x0, y0 = self.map0.ll2grid(lon, lat) self.assertEqual((x0, y0), (self.xp, self.yp)) # WGS84 x1, y1 = self.map1.ll2grid(lon, lat)<|fim▁hole|> # Should raise an exception # sphere lon0, lat0 = self.map0.grid2ll(self.xp, self.yp) # WGS84 lon1, lat1 = self.map1.grid2ll(self.xp, self.yp) def test_ylon(self): """lon = ylon <=> x = xp""" # lon = ylon => x = xp lon, lat = self.ylon, 72.3 # sphere x0, y0 = self.map0.ll2grid(lon, lat) self.assertEqual(x0, self.xp) # WGS84 x1, y1 = self.map1.ll2grid(lon, lat) self.assertEqual(x1, self.xp) # x = xp => y = ylon x, y = self.xp, 222.222 # sphere lon0, lat0 = self.map0.grid2ll(x, y) self.assertAlmostEqual(lon0, self.ylon, places=13) # WGS84 lon1, lat1 = self.map1.grid2ll(x, y) self.assertAlmostEqual(lon1, self.ylon, places=13) # x = xp, => angle = 0 x, y = self.xp, 222.222 # sphere angle0 = self.map0.angle(x, y) self.assertEqual(angle0, 0.0) # WGS84 angle1 = self.map1.angle(x, y) self.assertEqual(angle1, 0.0) def test_inverse(self): """grid2ll and ll2grid are inverse""" lon, lat = 5.323333, 60.3925 # Bergen # sphere: ll -> xy -> ll x0, y0 = self.map0.ll2grid(lon, lat) lon0, lat0 = self.map0.grid2ll(x0, y0) self.assertAlmostEqual(lon0, lon, places=14) self.assertEqual(lat0, lat) # WGS84: ll -> xy -> ll x1, y1 = self.map1.ll2grid(lon, lat) lon1, lat1 = self.map1.grid2ll(x1, y1) self.assertAlmostEqual(lon1, lon, places=14) self.assertAlmostEqual(lat1, lat, places=10) x, y = 200.0, 133.12345 # "Arbitrary" # sphere xy -> ll -> xy lon0, lat0 = self.map0.grid2ll(x, y) x0, y0 = self.map0.ll2grid(lon0, lat0) self.assertAlmostEqual(x0, x, places=12) self.assertAlmostEqual(y0, y, places=12) # WGS84: xy -> ll -> xy lon1, lat1 = self.map1.grid2ll(x, y) x1, y1 = self.map1.ll2grid(lon1, lat1) self.assertAlmostEqual(x1, x, places=9) self.assertAlmostEqual(y1, y, places=9) def test_angle(self): """angle = ylon - lon [rad]""" lon, lat = 5.323333, 60.3925 # Bergen angle = (self.ylon - lon)*pi/180 # sphere x0, y0 = self.map0.ll2grid(lon, lat) angle0 = self.map0.angle(x0, y0) self.assertAlmostEqual(angle0, angle, places=15) # WGS84 x1, y1 = self.map1.ll2grid(lon, lat) angle1 = self.map1.angle(x1, y1) self.assertAlmostEqual(angle1, angle, places=15) def test_scale(self): """scale = 1 at 60 deg""" lon, lat = -10.0, 60.0 # sphere x0, y0 = self.map0.ll2grid(lon, lat) scale0 = self.map0.map_scale(x0, y0) self.assertAlmostEqual(scale0, 1.0, places=15) # WGS84 x1, y1 = self.map1.ll2grid(lon, lat) scale1 = self.map1.map_scale(x1, y1) 
self.assertAlmostEqual(scale1, 1.0, places=12) if __name__ == '__main__': unittest.main()<|fim▁end|>
self.assertEqual((x1, y1), (self.xp, self.yp)) def test_north_pole_backward(self): """Longitude is not defined at the North Pole"""
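The invariants asserted by the test class can also be spot-checked interactively, assuming the in-development gridmap package is importable as in the module above; the grid point (200.0, 133.12345) is arbitrary, exactly as in test_inverse:

import gridmap

m = gridmap.PolarStereographic(418.25, 257.25, 10000.0, 58.0)
x, y = m.ll2grid(58.0, 72.3)   # lon == ylon should land on the column x == xp
assert abs(x - 418.25) < 1e-10
lon, lat = m.grid2ll(200.0, 133.12345)
x2, y2 = m.ll2grid(lon, lat)   # grid2ll and ll2grid should be mutually inverse
assert abs(x2 - 200.0) < 1e-9 and abs(y2 - 133.12345) < 1e-9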
<|file_name|>CoreutilsLimits.java<|end_file_name|><|fim▁begin|>/* * Core Utils - Common Utilities. * Copyright 2015-2016 GRyCAP (Universitat Politecnica de Valencia) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * This product combines work with different licenses. See the "NOTICE" text * file for details on the various modules and licenses. * * The "NOTICE" text file is part of the distribution. Any derivative works * that you distribute must include a readable copy of the "NOTICE" text file. */ package es.upv.grycap.coreutils.common; import com.google.common.collect.Range; /** * Hard-coded configuration limits. * @author Erik Torres * @since 0.2.0 */ public interface CoreutilsLimits { public static final int NUM_AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors(); public static final Range<Long> TRY_LOCK_TIMEOUT_RANGE = Range.closed(1l, 2000l); public static final Range<Integer> MAX_POOL_SIZE_RANGE = Range.closed(Math.min(2, NUM_AVAILABLE_PROCESSORS), Math.max(128, NUM_AVAILABLE_PROCESSORS)); public static final Range<Long> KEEP_ALIVE_TIME_RANGE = Range.closed(60000l, 3600000l); public static final Range<Long> WAIT_TERMINATION_TIMEOUT_RANGE = Range.closed(1000l, 60000l); }<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # EForge project management system, Copyright © 2010, Element43 # # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # from django.db import models from django.core.urlresolvers import reverse from eforge.models import Project from eforge.update.models import Update, register_update_type from eforge.vcs import project_repository class Revision(models.Model): id_no = models.AutoField(primary_key=True) id = models.CharField(max_length=40, db_index=True) project = models.ForeignKey(Project) parents = models.ManyToManyField('self', related_name='children') date = models.DateTimeField() @property def vcs_revision(self): """ Revision object from the VCS plugin """ if not getattr(self, '_vcs_revision', None): self._vcs_revision = project_repository(self.project).revision(self.id) return self._vcs_revision class Update: @classmethod def user(self, revision): return revision.author_user @classmethod def project(self, revision): return revision.project @classmethod def summary(self, revision): return 'Revision %s' % revision.short_id @classmethod def description(self, revision): return revision.message @classmethod<|fim▁hole|> def url(self, revision): return reverse('browse-revision', args=[revision.project.slug, revision.id]) @classmethod def date(self, revision): return revision.date register_update_type(Revision) def _proxy_property(name): def _proxy(self): return getattr(self.vcs_revision, name) setattr(Revision, name, property(_proxy)) _proxy_property('short_id') _proxy_property('author_email') _proxy_property('author_name') _proxy_property('author_user') _proxy_property('message') _proxy_property('short_message') _proxy_property('root')<|fim▁end|>
<|file_name|>reader.hpp<|end_file_name|><|fim▁begin|>#ifndef OSMIUM_IO_READER_HPP #define OSMIUM_IO_READER_HPP /* This file is part of Osmium (http://osmcode.org/libosmium). Copyright 2013-2015 Jochen Topf <[email protected]> and others (see README). Boost Software License - Version 1.0 - August 17th, 2003 Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following: The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include <atomic> #include <cerrno> #include <cstdlib> #include <fcntl.h> #include <memory> #include <string> #include <system_error> #include <thread> #include <utility> #ifndef _WIN32 # include <sys/wait.h> #endif #ifndef _MSC_VER # include <unistd.h> #endif #include <osmium/io/compression.hpp> #include <osmium/io/detail/input_format.hpp> #include <osmium/io/detail/read_thread.hpp> #include <osmium/io/detail/read_write.hpp> #include <osmium/io/file.hpp> #include <osmium/io/header.hpp> #include <osmium/memory/buffer.hpp> #include <osmium/osm/entity_bits.hpp> #include <osmium/thread/util.hpp> #include <osmium/thread/queue.hpp> namespace osmium { namespace io { /** * This is the user-facing interface for reading OSM files. Instantiate * an object of this class with a file name or osmium::io::File object * and then call read() on it in a loop until it returns an invalid * Buffer. */ class Reader { osmium::io::File m_file; osmium::osm_entity_bits::type m_read_which_entities; std::atomic<bool> m_input_done; int m_childpid; osmium::thread::Queue<std::string> m_input_queue; std::unique_ptr<osmium::io::Decompressor> m_decompressor; std::future<bool> m_read_future; std::unique_ptr<osmium::io::detail::InputFormat> m_input; #ifndef _WIN32 /** * Fork and execute the given command in the child. * A pipe is created between the child and the parent. * The child writes to the pipe, the parent reads from it. * This function never returns in the child. * * @param command Command to execute in the child. * @param filename Filename to give to command as argument. * @returns File descriptor of pipe in the parent. * @throws std::system_error if a system call fails. 
*/ static int execute(const std::string& command, const std::string& filename, int* childpid) { int pipefd[2]; if (pipe(pipefd) < 0) { throw std::system_error(errno, std::system_category(), "opening pipe failed"); } pid_t pid = fork(); if (pid < 0) { throw std::system_error(errno, std::system_category(), "fork failed"); } if (pid == 0) { // child // close all file descriptors except one end of the pipe for (int i=0; i < 32; ++i) { if (i != pipefd[1]) { ::close(i); } } if (dup2(pipefd[1], 1) < 0) { // put end of pipe as stdout/stdin exit(1); } ::open("/dev/null", O_RDONLY); // stdin ::open("/dev/null", O_WRONLY); // stderr // hack: -g switches off globbing in curl which allows [] to be used in file names // this is important for XAPI URLs // in theory this execute() function could be used for other commands, but it is // only used for curl at the moment, so this is okay. if (::execlp(command.c_str(), command.c_str(), "-g", filename.c_str(), nullptr) < 0) { exit(1); } } // parent *childpid = pid; ::close(pipefd[1]); return pipefd[0]; } #endif /** * Open File for reading. Handles URLs or normal files. URLs * are opened by executing the "curl" program (which must be installed) * and reading from its output. * * @returns File descriptor of open file or pipe. * @throws std::system_error if a system call fails. */ static int open_input_file_or_url(const std::string& filename, int* childpid) { std::string protocol = filename.substr(0, filename.find_first_of(':')); if (protocol == "http" || protocol == "https" || protocol == "ftp" || protocol == "file") { #ifndef _WIN32 return execute("curl", filename, childpid); #else throw std::runtime_error("Reading OSM files from the network currently not supported on Windows."); #endif } else { return osmium::io::detail::open_for_reading(filename); } } public: /** * Create new Reader object. * * @param file The file we want to open. * @param read_which_entities Which OSM entities (nodes, ways, relations, and/or changesets) * should be read from the input file. It can speed the read up * significantly if objects that are not needed anyway are not * parsed. */ explicit Reader(const osmium::io::File& file, osmium::osm_entity_bits::type read_which_entities = osmium::osm_entity_bits::all) : m_file(file), m_read_which_entities(read_which_entities), m_input_done(false), m_childpid(0), m_input_queue(20, "raw_input"), // XXX m_decompressor(m_file.buffer() ? osmium::io::CompressionFactory::instance().create_decompressor(file.compression(), m_file.buffer(), m_file.buffer_size()) : osmium::io::CompressionFactory::instance().create_decompressor(file.compression(), open_input_file_or_url(m_file.filename(), &m_childpid))),<|fim▁hole|> explicit Reader(const std::string& filename, osmium::osm_entity_bits::type read_types = osmium::osm_entity_bits::all) : Reader(osmium::io::File(filename), read_types) { } explicit Reader(const char* filename, osmium::osm_entity_bits::type read_types = osmium::osm_entity_bits::all) : Reader(osmium::io::File(filename), read_types) { } Reader(const Reader&) = delete; Reader& operator=(const Reader&) = delete; ~Reader() { try { close(); } catch (...) { } } /** * Close down the Reader. A call to this is optional, because the * destructor of Reader will also call this. But if you don't call * this function first, the destructor might throw an exception * which is not good. * * @throws Some form of std::runtime_error when there is a problem. */ void close() { // Signal to input child process that it should wrap up. 
m_input_done = true; m_input->close(); #ifndef _WIN32 if (m_childpid) { int status; pid_t pid = ::waitpid(m_childpid, &status, 0); #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wold-style-cast" if (pid < 0 || !WIFEXITED(status) || WEXITSTATUS(status) != 0) { throw std::system_error(errno, std::system_category(), "subprocess returned error"); } #pragma GCC diagnostic pop m_childpid = 0; } #endif osmium::thread::wait_until_done(m_read_future); } /** * Get the header data from the file. */ osmium::io::Header header() const { return m_input->header(); } /** * Reads the next buffer from the input. An invalid buffer signals * end-of-file. After end-of-file all read() calls will return an * invalid buffer. An invalid buffer is also always returned if * osmium::osm_entity_bits::nothing was set when the Reader was * constructed. * * @returns Buffer. * @throws Some form of std::runtime_error if there is an error. */ osmium::memory::Buffer read() { // If an exception happened in the input thread, re-throw // it in this (the main) thread. osmium::thread::check_for_exception(m_read_future); if (m_read_which_entities == osmium::osm_entity_bits::nothing || m_input_done) { // If the caller didn't want anything but the header, it will // always get an empty buffer here. return osmium::memory::Buffer(); } // m_input->read() can return an invalid buffer to signal EOF, // or a valid buffer with or without data. A valid buffer // without data is not an error, it just means we have to get // keep getting the next buffer until there is one with data. while (true) { osmium::memory::Buffer buffer = m_input->read(); if (!buffer) { m_input_done = true; return buffer; } if (buffer.committed() > 0) { return buffer; } } } /** * Has the end of file been reached? This is set after the last * data has been read. It is also set by calling close(). */ bool eof() const { return m_input_done; } }; // class Reader /** * Read contents of the given file into a buffer in one go. Takes * the same arguments as any of the Reader constructors. * * The buffer can take up quite a lot of memory, so don't do this * unless you are working with small OSM files and/or have lots of * RAM. */ template <class... TArgs> osmium::memory::Buffer read_file(TArgs&&... args) { osmium::memory::Buffer buffer(1024*1024, osmium::memory::Buffer::auto_grow::yes); Reader reader(std::forward<TArgs>(args)...); while (osmium::memory::Buffer read_buffer = reader.read()) { buffer.add_buffer(read_buffer); buffer.commit(); } return buffer; } } // namespace io } // namespace osmium #endif // OSMIUM_IO_READER_HPP<|fim▁end|>
m_read_future(std::async(std::launch::async, detail::ReadThread(m_input_queue, m_decompressor.get(), m_input_done))), m_input(osmium::io::detail::InputFormatFactory::instance().create_input(m_file, m_read_which_entities, m_input_queue)) { }
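// Editor's note: a minimal usage sketch for the Reader API defined above; it is
// an illustration added here, not part of the original file, and the input file
// name is a placeholder. It relies only on the constructors, read() and close()
// shown in this header (Buffer's boolean conversion is used the same way in
// read_file() above).
#include <osmium/io/reader.hpp>
#include <osmium/memory/buffer.hpp>

int main() {
    osmium::io::Reader reader{"input.osm"};
    while (osmium::memory::Buffer buffer = reader.read()) {
        // an invalid buffer ends the loop, so this buffer has committed data
    }
    reader.close(); // close explicitly so a failing curl subprocess throws here,
                    // not in the destructor (which swallows exceptions)
}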
<|file_name|>Event.ts<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////////////////<|fim▁hole|>// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of the Egret nor the // names of its contributors may be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS // OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, // OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // ////////////////////////////////////////////////////////////////////////////////////// module dragonBones { /** * @class dragonBones.Event * @classdesc * Event */ export class Event extends egret.Event { /** * Creates an Event instance * @param type The type of the event */ public constructor(type:string, bubbles:boolean = false, cancelable:boolean = false) { super(type, bubbles, cancelable) } } }<|fim▁end|>
// // Copyright (c) 2014-2015, Egret Technology Inc. // All rights reserved.
<|file_name|>BootState.js<|end_file_name|><|fim▁begin|>var Phaser = Phaser || {}; var Mst = Mst || {}; Mst.BootState = function () { "use strict"; Phaser.State.call(this); }; Mst.BootState.prototype = Object.create(Phaser.State.prototype); Mst.BootState.prototype.constructor = Mst.BootState; Mst.BootState.prototype.init = function (map_int, usr_id) { "use strict"; var d = new Date(); var n = d.getTime(); this.core_file = "assets/maps/core.json"; this.quest_file = "assets/maps/quest.json"; this.map_int = map_int; this.map_file = "map.php?time="+n+"&uid="+usr_id+"&mapi="+map_int; //this.map_file = "assets/maps/map"+map_int+".json?time="+n+"&uid="+usr_id+"&mapi="+map_int; console.log(this.map_file); this.usr_id = usr_id; }; Mst.BootState.prototype.preload = function () { "use strict"; if (this.usr_id > 0) { this.load.text("core", this.core_file); this.load.text("quest", this.quest_file); this.load.text("map", this.map_file); } else { var a = this.load.image("login", "assets/images/loader2.png"); //console.log(a); } }; Mst.BootState.prototype.create = function () { "use strict"; var map_text, map_data, core_text, core_data, root_data, quest_text, quest_data; if (this.usr_id > 0) { map_text = this.game.cache.getText("map"); var n = map_text.lastIndexOf(">"); if (n > -1) { map_text = map_text.substring(n + 1); } //console.log(map_text); map_data = JSON.parse(map_text); console.log(map_data); core_text = this.game.cache.getText("core"); core_data = JSON.parse(core_text); quest_text = this.game.cache.getText("quest"); quest_data = JSON.parse(quest_text); console.log(quest_data); } root_data = { map_int: this.map_int, usr_id: this.usr_id }; console.log("Boot State"); console.log(root_data); <|fim▁hole|>};<|fim▁end|>
this.game.state.start("LoadingState", true, false, core_data, map_data, root_data, quest_data);
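// Editor's note: a short usage sketch, not part of the source file, showing how
// BootState might be registered and launched with Phaser 2; the game size, the
// map id (1) and the user id (42) are placeholder assumptions. The two trailing
// arguments of state.start() arrive in BootState.init() as map_int and usr_id.
var game = new Phaser.Game(800, 600, Phaser.AUTO, 'game');
game.state.add('BootState', Mst.BootState);
game.state.start('BootState', true, false, 1, 42);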
<|file_name|>player.rs<|end_file_name|><|fim▁begin|>// MIT License // // Copyright (c) 2017 Franziska Becker, René Warking // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. use std::collections::HashMap; use chess::logic::{Color, Board, Position}; /// Types the player can have #[derive(Debug, Copy, Clone, PartialEq)] pub enum PlayerType { Human, Dumb, Smart } /// The player #[derive(Debug)] pub struct Player { ptype: PlayerType, color: Color, castling: [bool; 3], pub figures: HashMap<String, Vec<Position>> } impl Player { /// Returns an instance of a Player with the given PlayerType pub fn new(p: PlayerType, c: Color) -> Self { match c { Color::Black => Player::create_black_player(p, c), _ => Player::create_white_player(p, c), } }<|fim▁hole|> fn create_black_player(p: PlayerType, c: Color) -> Self { let mut f = HashMap::with_capacity(16); let mut pos = Vec::new(); // Pawns for bla in 0..8 { pos.push(Position::new(bla, 6)) } f.insert("pawn".to_string(), pos); // King f.insert("king".to_string(), vec![Position::new(4, 7)]); // Queen f.insert("queen".to_string(), vec![Position::new(3, 7)]); // Bishops f.insert("bishop".to_string(), vec![Position::new(2, 7), Position::new(5, 7)]); // Knights f.insert("knight".to_string(), vec![Position::new(1, 7), Position::new(6, 7)]); // Rooks f.insert("rook".to_string(), vec![Position::new(0, 7), Position::new(7, 7)]); Player { ptype: p, color: c, figures: f, castling: [true, true, true] } } /// Create a new white player fn create_white_player(p: PlayerType, c: Color) -> Self { let mut f = HashMap::with_capacity(16); let mut pos = Vec::new(); // Pawns for bla in 0..8 { pos.push(Position::new(bla, 1)) } f.insert("pawn".to_string(), pos); // King f.insert("king".to_string(), vec![Position::new(4, 0)]); // Queen f.insert("queen".to_string(), vec![Position::new(3, 0)]); // Bishops f.insert("bishop".to_string(), vec![Position::new(2, 0), Position::new(5, 0)]); // Knights f.insert("knight".to_string(), vec![Position::new(1, 0), Position::new(6, 0)]); // Rooks f.insert("rook".to_string(), vec![Position::new(0, 0), Position::new(7, 0)]); Player { ptype: p, color: c, figures: f, castling: [true, true, true] } } /// Return player color pub fn color(&self) -> Color { self.color } /// Return player type pub fn ptype(&self) -> PlayerType { self.ptype } /// Set player type pub fn set_ptype(&mut self, p: PlayerType) { self.ptype = p; } pub fn upgrade_pawn(&mut self, pos: Position) { self.capture("pawn".to_string(), pos); let mut found = false; if let Some(mut positions) = 
self.figures.get_mut("queen") { positions.push(pos); found = true; } if !found { self.figures.insert("queen".to_string(), vec![pos]); } } /// Return the player's king which should always be there because one /// cannot actually 'capture' a king pub fn king(&self) -> Position { self.figures.get("king").unwrap()[0] } /// Returns a vector of possible moves for all figures of the player pub fn get_possible_moves(&mut self, board: &mut Board, opponent: &mut Player) -> Vec<(Position, Position)> { let mut moves = Vec::new(); for v in self.figures.values() { for i in 0..v.len() { for outer in 0..8 { for inner in 0..8 { let try = Position::new(inner, outer); if board.is_move_valid(v[i], try, &mut self.clone(), opponent) { moves.push((v[i], try)); } } } } } moves } /// If the player is an AI this returns a valid move pub fn get_ai_move(&self, board: &Board, other: &Player) -> (Position, Position) { return super::ai::get_move(board, self, other); } /// Move a figure from 'before' to 'after' pub fn move_figure(&mut self, before: Position, after: Position) { for mut v in self.figures.values_mut() { for i in 0..v.len() { if v[i] == before { v[i] = after; return } } } unreachable!() } /// Capture a figure pub fn capture(&mut self, name: String, pos: Position) { let mut delete = false; if let Some(mut positions) = self.figures.get_mut(&name) { for i in 0..positions.len() { if positions[i] == pos && positions.len() > 1 { positions.remove(i); break; } else if positions[i] == pos { delete = true; break; } } } else { unreachable!() } if delete { self.figures.remove(&name); } } /// Reverse a capture pub fn reverse_capture(&mut self, name: String, pos: Position) { let mut found = false; if let Some(mut v) = self.figures.get_mut(&name) { v.push(pos); found = true; } if !found { self.figures.insert(name, vec![pos]); } } /// Returns whether the player's king can be saved from checkmate in one move pub fn can_king_be_saved(&mut self, board: &mut Board, two: &mut Player) -> bool { self.get_possible_moves(board, two).len() > 0 } } impl ::std::fmt::Display for Player { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { for (name, v) in self.figures.iter() { try!(write!(f, "{}: {:?}\n", name, v)); } write!(f, "\n") } } impl ::std::clone::Clone for Player { fn clone(&self) -> Self { let mut f = HashMap::new(); for (name, pos) in self.figures.iter() { f.insert(name.clone(), pos.clone()); } Player{ figures: f, color: self.color, ptype: self.ptype, castling: self.castling } } fn clone_from(&mut self, source: &Self) { self.figures.clear(); self.color = source.color; self.ptype = source.ptype; for (name, pos) in source.figures.iter() { self.figures.insert(name.clone(), pos.clone()); } } }<|fim▁end|>
/// Create a new black player
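// Editor's note: a minimal sketch (an assumption, not from the source) showing
// how the Player API above could be exercised. Board setup and move validation
// are omitted; move_figure() only updates the piece's recorded position.
fn main() {
    let mut white = Player::new(PlayerType::Human, Color::White);
    // advance the pawn on file 4 from rank 1 to rank 3 (0-indexed, as above)
    white.move_figure(Position::new(4, 1), Position::new(4, 3));
    println!("{}", white); // the Display impl lists each figure with its positions
}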
<|file_name|>lda_preprocessing.py<|end_file_name|><|fim▁begin|>from optparse import OptionParser import re import os import sys import numpy as np from ..util import dirs from ..util import file_handling as fh from ..preprocessing import data_splitting as ds from ..feature_extractors.vocabulary_with_counts import VocabWithCounts def main(): usage = "%prog project" parser = OptionParser(usage=usage) parser.add_option('-v', dest='vocab_size', default=10000, help='Vocabulary size (most frequent words): default=%default') parser.add_option('--seed', dest='seed', default=42, help='Random seed: default=%default') #parser.add_option('--boolarg', action="store_true", dest="boolarg", default=False, # help='Keyword argument: default=%default') (options, args) = parser.parse_args() project_name = args[0] dirs.make_base_dir(project_name) vocab_size = int(options.vocab_size) suffixes = {"'s", "n't"} pronouns = {"i", 'you', 'he', 'his', 'she', 'her', 'hers', 'it', 'its', 'we', 'you', 'your', 'they', 'them', 'their'} determiners = {'a', 'an', 'the', 'this', 'that', 'these', 'those'} prepositions = {'at', 'by', 'for', 'from', 'in', 'into', 'of', 'on', 'than', 'to', 'with'} transitional = {'and', 'also', 'as', 'but', 'if', 'or', 'then'} common_verbs = {'are', 'be', 'been', 'had', 'has', 'have', 'is', 'said', 'was', 'were'} stopwords = suffixes.union(pronouns).union(determiners).union(prepositions).union(transitional).union(common_verbs) print "Removing %d stopwords:" % len(stopwords) for s in stopwords: print s # set random seed np.random.seed(int(options.seed)) # read in data dirs.make_base_dir(project_name) sentences = fh.read_json(dirs.get_processed_text_file()) all_documents = sentences.keys() documents = list(set(all_documents)) # create a vocabulary and fill it with the tokenized documents tokenized, vocab = tokenize(sentences, documents, stopwords=stopwords) print "Most common words in corpus:" most_common = vocab.most_common(50) most_common.sort() for v in most_common: print v # set vocabulary size and prune tokens print "Pruning vocabulary" vocab.prune(n_words=vocab_size) n_words = 0 for k in documents: tokens = [t for t in tokenized[k] if t in vocab.token2index] n_words += len(tokens) tokenized[k] = tokens n_documents = len(documents) n_vocab = len(vocab) print n_documents, "documents" print n_vocab, "word types" print n_words, "word tokens" # create the count matrices vocab_assignments = np.zeros(n_words, dtype=int) # vocab index of the ith word #topic_assignments = np.zeros(n_words, dtype=int) # topic of the ith word doc_assignments = np.zeros(n_words, dtype=int) # document of the ith word count = 0 for d_i, d in enumerate(documents): tokens = tokenized[d] for t in tokens: v_index = vocab.get_index(t) assert v_index >= 0 #w_topic = np.random.randint(n_topics) vocab_assignments[count] = v_index #topic_assignments[count] = w_topic doc_assignments[count] = d_i #topic_counts[w_topic] += 1 #vocab_topics[v_index, w_topic] += 1 #doc_topics[d_i, w_topic] += 1 count += 1<|fim▁hole|> assert count == n_words output_filename = os.path.join(dirs.lda_dir, 'word_num.json') fh.write_to_json(list(vocab_assignments), output_filename, sort_keys=False) output_filename = os.path.join(dirs.lda_dir, 'word_doc.json') fh.write_to_json(list(doc_assignments), output_filename, sort_keys=False) output_filename = os.path.join(dirs.lda_dir, 'vocab.json') fh.write_to_json(vocab.index2token, output_filename, sort_keys=False) output_filename = os.path.join(dirs.lda_dir, 'documents.json') fh.write_to_json(documents, 
output_filename, sort_keys=False) # just exit after writing data def tokenize(sentences, documents_to_tokenize, stopwords=set()): print "Tokenizing" vocab = VocabWithCounts('', add_oov=False) tokenized = {} for k in documents_to_tokenize: text = sentences[k].lower() text = re.sub('\d', '#', text) tokens = text.split() tokens = [t for t in tokens if re.search('[a-zA-Z]', t)] tokens = [t for t in tokens if t not in stopwords] vocab.add_tokens(tokens) tokenized[k] = tokens return tokenized, vocab if __name__ == '__main__': main()<|fim▁end|>
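# Editor's note: a small sketch (not part of the source) of how tokenize() above
# can be called on its own; "sentences" maps document keys to raw text, mirroring
# the structure read from dirs.get_processed_text_file() in main().
sentences = {'doc1': 'The quick brown fox', 'doc2': 'jumps over 2 lazy dogs'}
tokenized, vocab = tokenize(sentences, ['doc1', 'doc2'], stopwords={'the'})
print tokenized['doc1']  # ['quick', 'brown', 'fox'] - lowercased, stopwords removed
print len(vocab)         # number of word types counted by VocabWithCounts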
<|file_name|>backend.js<|end_file_name|><|fim▁begin|>'use strict'; describe('Service: backend', function () { // load the service's module beforeEach(module('yeomanIonicAngularPhonegapSeedApp')); // instantiate service var backend; beforeEach(inject(function(_backend_) { backend = _backend_; })); it('should do something', function () {<|fim▁hole|>});<|fim▁end|>
expect(!!backend).toBe(true); });
<|file_name|>IntegrationEngine.java<|end_file_name|><|fim▁begin|>package pt.uminho.sysbio.biosynthframework.integration.model; import pt.uminho.sysbio.biosynth.integration.io.dao.neo4j.MetaboliteMajorLabel; <|fim▁hole|>}<|fim▁end|>
public interface IntegrationEngine { public IntegrationMap<String, MetaboliteMajorLabel> integrate(IntegrationMap<String, MetaboliteMajorLabel> imap);
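// Editor's note: a hypothetical implementing class (not from the source), added
// only to illustrate the contract above: an engine receives an IntegrationMap
// and returns a (possibly enriched) IntegrationMap.
package pt.uminho.sysbio.biosynthframework.integration.model;

import pt.uminho.sysbio.biosynth.integration.io.dao.neo4j.MetaboliteMajorLabel;

public class IdentityIntegrationEngine implements IntegrationEngine {
  @Override
  public IntegrationMap<String, MetaboliteMajorLabel> integrate(IntegrationMap<String, MetaboliteMajorLabel> imap) {
    return imap; // no-op: pass the map through unchanged
  }
}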
<|file_name|>Frames.py<|end_file_name|><|fim▁begin|>from pyjamas.ui.Sink import Sink, SinkInfo from pyjamas.ui.Frame import Frame class Frames(Sink): def __init__(self): Sink.__init__(self) self.frame=Frame(self.baseURL() + "rembrandt/LaMarcheNocturne.html") self.frame.setWidth("100%") self.frame.setHeight("48em") self.initWidget(self.frame) <|fim▁hole|> return SinkInfo("Frames", text, Frames)<|fim▁end|>
def init(): text="If you need to include multiple pages of good ol' static HTML, it's easy to do using the <code>Frame</code> class."
<|file_name|>crawl.py<|end_file_name|><|fim▁begin|>from scrapy.commands.crawl import Command from scrapy.exceptions import UsageError class CustomCrawlCommand(Command): def run(self, args, opts): if len(args) < 1: raise UsageError() elif len(args) > 1: raise UsageError("running 'scrapy crawl' with more than one spider is no longer supported")<|fim▁hole|> # added new code spider_settings_path = self.settings.getdict('SPIDER_SETTINGS', {}).get(spname, None) if spider_settings_path is not None: self.settings.setmodule(spider_settings_path, priority='cmdline') # end crawler = self.crawler_process.create_crawler() spider = crawler.spiders.create(spname, **opts.spargs) crawler.crawl(spider) self.crawler_process.start()<|fim▁end|>
spname = args[0]
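# Editor's note: a hypothetical settings.py fragment (the names are placeholders,
# not from the source) showing what the SPIDER_SETTINGS dict read by the command
# above might look like: it maps a spider name to a settings module path that
# settings.setmodule() can import for that one spider.
SPIDER_SETTINGS = {
    'myspider': 'myproject.settings_per_spider.myspider',
}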
<|file_name|>DedicatedServer.java<|end_file_name|><|fim▁begin|>package net.minecraft.server.dedicated; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; import net.minecraft.command.ICommandSender; import net.minecraft.command.ServerCommand; import net.minecraft.crash.CrashReport; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.logging.ILogAgent; import net.minecraft.logging.LogAgent; import net.minecraft.network.NetworkListenThread; import net.minecraft.network.rcon.IServer; import net.minecraft.network.rcon.RConThreadMain; import net.minecraft.network.rcon.RConThreadQuery; import net.minecraft.profiler.PlayerUsageSnooper; import net.minecraft.server.MinecraftServer; import net.minecraft.server.dedicated.CallableServerType; import net.minecraft.server.dedicated.CallableType; import net.minecraft.server.dedicated.DedicatedPlayerList; import net.minecraft.server.dedicated.DedicatedServerCommandThread; import net.minecraft.server.dedicated.DedicatedServerListenThread; import net.minecraft.server.dedicated.DedicatedServerSleepThread; import net.minecraft.server.dedicated.PropertyManager; import net.minecraft.server.gui.MinecraftServerGui; import net.minecraft.server.management.ServerConfigurationManager; import net.minecraft.util.ChunkCoordinates; import net.minecraft.util.CryptManager; import net.minecraft.util.MathHelper; import net.minecraft.world.EnumGameType; import net.minecraft.world.World; import net.minecraft.world.WorldSettings; import net.minecraft.world.WorldType; public class DedicatedServer extends MinecraftServer implements IServer { private final List field_71341_l = Collections.synchronizedList(new ArrayList()); private final ILogAgent field_98131_l; private RConThreadQuery field_71342_m; private RConThreadMain field_71339_n; private PropertyManager field_71340_o; private boolean field_71338_p; private EnumGameType field_71337_q; private NetworkListenThread field_71336_r; private boolean field_71335_s; public DedicatedServer(File p_i1508_1_) { super(p_i1508_1_); this.field_98131_l = new LogAgent("Minecraft-Server", (String)null, (new File(p_i1508_1_, "server.log")).getAbsolutePath()); new DedicatedServerSleepThread(this); } protected boolean func_71197_b() throws IOException { DedicatedServerCommandThread var1 = new DedicatedServerCommandThread(this); var1.setDaemon(true); var1.start(); this.func_98033_al().func_98233_a("Starting minecraft server version 1.6.4"); if(Runtime.getRuntime().maxMemory() / 1024L / 1024L < 512L) { this.func_98033_al().func_98236_b("To start the server with more ram, launch it as \"java -Xmx1024M -Xms1024M -jar minecraft_server.jar\""); } this.func_98033_al().func_98233_a("Loading properties"); this.field_71340_o = new PropertyManager(new File("server.properties"), this.func_98033_al()); if(this.func_71264_H()) { this.func_71189_e("127.0.0.1"); } else { this.func_71229_d(this.field_71340_o.func_73670_a("online-mode", true)); this.func_71189_e(this.field_71340_o.func_73671_a("server-ip", "")); } this.func_71251_e(this.field_71340_o.func_73670_a("spawn-animals", true)); this.func_71257_f(this.field_71340_o.func_73670_a("spawn-npcs", true)); this.func_71188_g(this.field_71340_o.func_73670_a("pvp", true)); this.func_71245_h(this.field_71340_o.func_73670_a("allow-flight", false)); 
this.func_71269_o(this.field_71340_o.func_73671_a("texture-pack", "")); this.func_71205_p(this.field_71340_o.func_73671_a("motd", "A Minecraft Server")); this.func_104055_i(this.field_71340_o.func_73670_a("force-gamemode", false)); this.func_143006_e(this.field_71340_o.func_73669_a("player-idle-timeout", 0)); if(this.field_71340_o.func_73669_a("difficulty", 1) < 0) { this.field_71340_o.func_73667_a("difficulty", Integer.valueOf(0)); } else if(this.field_71340_o.func_73669_a("difficulty", 1) > 3) { this.field_71340_o.func_73667_a("difficulty", Integer.valueOf(3)); } this.field_71338_p = this.field_71340_o.func_73670_a("generate-structures", true); int var2 = this.field_71340_o.func_73669_a("gamemode", EnumGameType.SURVIVAL.func_77148_a()); this.field_71337_q = WorldSettings.func_77161_a(var2); this.func_98033_al().func_98233_a("Default game type: " + this.field_71337_q); InetAddress var3 = null; if(this.func_71211_k().length() > 0) { var3 = InetAddress.getByName(this.func_71211_k()); } if(this.func_71215_F() < 0) { this.func_71208_b(this.field_71340_o.func_73669_a("server-port", 25565)); } this.func_98033_al().func_98233_a("Generating keypair"); this.func_71253_a(CryptManager.func_75891_b()); this.func_98033_al().func_98233_a("Starting Minecraft server on " + (this.func_71211_k().length() == 0?"*":this.func_71211_k()) + ":" + this.func_71215_F()); try { this.field_71336_r = new DedicatedServerListenThread(this, var3, this.func_71215_F()); } catch (IOException var16) { this.func_98033_al().func_98236_b("**** FAILED TO BIND TO PORT!"); this.func_98033_al().func_98231_b("The exception was: {0}", new Object[]{var16.toString()}); this.func_98033_al().func_98236_b("Perhaps a server is already running on that port?"); return false; } if(!this.func_71266_T()) { this.func_98033_al().func_98236_b("**** SERVER IS RUNNING IN OFFLINE/INSECURE MODE!"); this.func_98033_al().func_98236_b("The server will make no attempt to authenticate usernames. 
Beware."); this.func_98033_al().func_98236_b("While this makes the game possible to play without internet access, it also opens up the ability for hackers to connect with any username they choose."); this.func_98033_al().func_98236_b("To change this, set \"online-mode\" to \"true\" in the server.properties file."); } this.func_71210_a(new DedicatedPlayerList(this)); long var4 = System.nanoTime(); if(this.func_71270_I() == null) { this.func_71261_m(this.field_71340_o.func_73671_a("level-name", "world")); } String var6 = this.field_71340_o.func_73671_a("level-seed", ""); String var7 = this.field_71340_o.func_73671_a("level-type", "DEFAULT"); String var8 = this.field_71340_o.func_73671_a("generator-settings", ""); long var9 = (new Random()).nextLong(); if(var6.length() > 0) { try { long var11 = Long.parseLong(var6); if(var11 != 0L) { var9 = var11; } } catch (NumberFormatException var15) { var9 = (long)var6.hashCode(); } } WorldType var17 = WorldType.func_77130_a(var7); if(var17 == null) { var17 = WorldType.field_77137_b; } this.func_71191_d(this.field_71340_o.func_73669_a("max-build-height", 256)); this.func_71191_d((this.func_71207_Z() + 8) / 16 * 16); this.func_71191_d(MathHelper.func_76125_a(this.func_71207_Z(), 64, 256)); this.field_71340_o.func_73667_a("max-build-height", Integer.valueOf(this.func_71207_Z())); this.func_98033_al().func_98233_a("Preparing level \"" + this.func_71270_I() + "\""); this.func_71247_a(this.func_71270_I(), this.func_71270_I(), var9, var17, var8); long var12 = System.nanoTime() - var4; String var14 = String.format("%.3fs", new Object[]{Double.valueOf((double)var12 / 1.0E9D)}); this.func_98033_al().func_98233_a("Done (" + var14 + ")! For help, type \"help\" or \"?\""); if(this.field_71340_o.func_73670_a("enable-query", false)) { this.func_98033_al().func_98233_a("Starting GS4 status listener"); this.field_71342_m = new RConThreadQuery(this); this.field_71342_m.func_72602_a(); } if(this.field_71340_o.func_73670_a("enable-rcon", false)) { this.func_98033_al().func_98233_a("Starting remote control listener"); this.field_71339_n = new RConThreadMain(this); this.field_71339_n.func_72602_a(); } return true; } public boolean func_71225_e() { return this.field_71338_p; } public EnumGameType func_71265_f() { return this.field_71337_q; } public int func_71232_g() { return this.field_71340_o.func_73669_a("difficulty", 1); } public boolean func_71199_h() { return this.field_71340_o.func_73670_a("hardcore", false); } protected void func_71228_a(CrashReport p_71228_1_) { while(this.func_71278_l()) { this.func_71333_ah(); try { Thread.sleep(10L); } catch (InterruptedException var3) { var3.printStackTrace(); } } } public CrashReport func_71230_b(CrashReport p_71230_1_) { p_71230_1_ = super.func_71230_b(p_71230_1_); p_71230_1_.func_85056_g().func_71500_a("Is Modded", new CallableType(this)); p_71230_1_.func_85056_g().func_71500_a("Type", new CallableServerType(this)); return p_71230_1_; } protected void func_71240_o() { System.exit(0); } protected void func_71190_q() { super.func_71190_q(); this.func_71333_ah(); } public boolean func_71255_r() { return this.field_71340_o.func_73670_a("allow-nether", true); } public boolean func_71193_K() { return this.field_71340_o.func_73670_a("spawn-monsters", true); } public void func_70000_a(PlayerUsageSnooper p_70000_1_) { p_70000_1_.func_76472_a("whitelist_enabled", Boolean.valueOf(this.func_71334_ai().func_72383_n())); p_70000_1_.func_76472_a("whitelist_count", Integer.valueOf(this.func_71334_ai().func_72388_h().size())); 
super.func_70000_a(p_70000_1_); } public boolean func_70002_Q() { return this.field_71340_o.func_73670_a("snooper-enabled", true); } public void func_71331_a(String p_71331_1_, ICommandSender p_71331_2_) { this.field_71341_l.add(new ServerCommand(p_71331_1_, p_71331_2_)); } public void func_71333_ah() { while(!this.field_71341_l.isEmpty()) { ServerCommand var1 = (ServerCommand)this.field_71341_l.remove(0); this.func_71187_D().func_71556_a(var1.field_73701_b, var1.field_73702_a); } } public boolean func_71262_S() { return true; } public DedicatedPlayerList func_71334_ai() { return (DedicatedPlayerList)super.func_71203_ab(); } public NetworkListenThread func_71212_ac() { return this.field_71336_r; } <|fim▁hole|> } public String func_71330_a(String p_71330_1_, String p_71330_2_) { return this.field_71340_o.func_73671_a(p_71330_1_, p_71330_2_); } public boolean func_71332_a(String p_71332_1_, boolean p_71332_2_) { return this.field_71340_o.func_73670_a(p_71332_1_, p_71332_2_); } public void func_71328_a(String p_71328_1_, Object p_71328_2_) { this.field_71340_o.func_73667_a(p_71328_1_, p_71328_2_); } public void func_71326_a() { this.field_71340_o.func_73668_b(); } public String func_71329_c() { File var1 = this.field_71340_o.func_73665_c(); return var1 != null?var1.getAbsolutePath():"No settings file"; } public boolean func_71279_ae() { return this.field_71335_s; } public String func_71206_a(EnumGameType p_71206_1_, boolean p_71206_2_) { return ""; } public boolean func_82356_Z() { return this.field_71340_o.func_73670_a("enable-command-block", false); } public int func_82357_ak() { return this.field_71340_o.func_73669_a("spawn-protection", super.func_82357_ak()); } public boolean func_96290_a(World p_96290_1_, int p_96290_2_, int p_96290_3_, int p_96290_4_, EntityPlayer p_96290_5_) { if(p_96290_1_.field_73011_w.field_76574_g != 0) { return false; } else if(this.func_71334_ai().func_72376_i().isEmpty()) { return false; } else if(this.func_71334_ai().func_72353_e(p_96290_5_.func_70005_c_())) { return false; } else if(this.func_82357_ak() <= 0) { return false; } else { ChunkCoordinates var6 = p_96290_1_.func_72861_E(); int var7 = MathHelper.func_76130_a(p_96290_2_ - var6.field_71574_a); int var8 = MathHelper.func_76130_a(p_96290_4_ - var6.field_71573_c); int var9 = Math.max(var7, var8); return var9 <= this.func_82357_ak(); } } public ILogAgent func_98033_al() { return this.field_98131_l; } public int func_110455_j() { return this.field_71340_o.func_73669_a("op-permission-level", 4); } public void func_143006_e(int p_143006_1_) { super.func_143006_e(p_143006_1_); this.field_71340_o.func_73667_a("player-idle-timeout", Integer.valueOf(p_143006_1_)); this.func_71326_a(); } // $FF: synthetic method public ServerConfigurationManager func_71203_ab() { return this.func_71334_ai(); } @SideOnly(Side.SERVER) public void func_120011_ar() { MinecraftServerGui.func_120016_a(this); this.field_71335_s = true; } }<|fim▁end|>
public int func_71327_a(String p_71327_1_, int p_71327_2_) { return this.field_71340_o.func_73669_a(p_71327_1_, p_71327_2_);
<|file_name|>http404check.js<|end_file_name|><|fim▁begin|>angular.module('piwikApp').factory('http404CheckInterceptor', function($q) { function isClientError(rejection) { if (rejection.status === 500) { return true; } return rejection.status >= 400 && rejection.status < 408; } return { <|fim▁hole|> rejection.config.url && -1 !== rejection.config.url.indexOf('.html') && -1 !== rejection.config.url.indexOf('plugins')) { var posEndUrl = rejection.config.url.indexOf('.html') + 5; var url = rejection.config.url.substr(0, posEndUrl); var message = 'Please check your server configuration. You may want to whitelist "*.html" files from the "plugins" directory.'; message += ' The HTTP status code is ' + rejection.status + ' for URL "' + url + '"'; var UI = require('piwik/UI'); var notification = new UI.Notification(); notification.show(message, { title: 'Failed to load HTML file:', context: 'error', id: 'Network_HtmlFileLoadingError' }); } return $q.reject(rejection); } }; }); angular.module('piwikApp').config(['$httpProvider',function($httpProvider) { $httpProvider.interceptors.push('http404CheckInterceptor'); }]);<|fim▁end|>
'responseError': function(rejection) { if (rejection && isClientError(rejection) && rejection.config &&
<|file_name|>researchoutcomeindicatorviews.py<|end_file_name|><|fim▁begin|>from csacompendium.indicators.models import ResearchOutcomeIndicator from csacompendium.utils.pagination import APILimitOffsetPagination from csacompendium.utils.permissions import IsOwnerOrReadOnly from csacompendium.utils.viewsutils import DetailViewUpdateDelete, get_http_request from rest_framework.filters import DjangoFilterBackend<|fim▁hole|> def research_outcome_indicator_views(): """ Research outcome indicator views :return: All research outcome indicator views :rtype: Object """ class ResearchOutcomeIndicatorCreateAPIView(CreateAPIView): """ Creates a single record. """ queryset = ResearchOutcomeIndicator.objects.all() permission_classes = [IsAuthenticated] def get_serializer_class(self): """ Gets serializer class :return: Research outcome indicator object :rtype: Object """ model_type, url_parameter, user = get_http_request(self.request, slug=False) create_research_outcome_indicator_serializer = research_outcome_indicator_serializers[ 'create_research_outcome_indicator_serializer' ] return create_research_outcome_indicator_serializer(model_type, url_parameter, user) class ResearchOutcomeIndicatorListAPIView(ListAPIView): """ API list view. Gets all records API. """ queryset = ResearchOutcomeIndicator.objects.all() serializer_class = research_outcome_indicator_serializers['ResearchOutcomeIndicatorListSerializer'] filter_backends = (DjangoFilterBackend,) filter_class = ResearchOutcomeIndicatorListFilter pagination_class = APILimitOffsetPagination class ResearchOutcomeIndicatorDetailAPIView(DetailViewUpdateDelete): """ Creates, deletes and updates a record. """ queryset = ResearchOutcomeIndicator.objects.all() serializer_class = research_outcome_indicator_serializers['ResearchOutcomeIndicatorDetailSerializer'] permission_classes = [IsAuthenticated, IsAdminUser] lookup_field = 'pk' return { 'ResearchOutcomeIndicatorCreateAPIView': ResearchOutcomeIndicatorCreateAPIView, 'ResearchOutcomeIndicatorListAPIView': ResearchOutcomeIndicatorListAPIView, 'ResearchOutcomeIndicatorDetailAPIView': ResearchOutcomeIndicatorDetailAPIView }<|fim▁end|>
from rest_framework.generics import CreateAPIView, ListAPIView from rest_framework.permissions import IsAuthenticated, IsAdminUser from .filters import ResearchOutcomeIndicatorListFilter from csacompendium.indicators.api.serializers import research_outcome_indicator_serializers
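# Editor's note: a hypothetical urls.py fragment (regexes and names are
# assumptions, not from the source) wiring up the view classes returned by the
# research_outcome_indicator_views() factory above.
from django.conf.urls import url
from .views import research_outcome_indicator_views

views = research_outcome_indicator_views()

urlpatterns = [
    url(r'^$', views['ResearchOutcomeIndicatorListAPIView'].as_view(), name='list'),
    url(r'^create/$', views['ResearchOutcomeIndicatorCreateAPIView'].as_view(), name='create'),
    url(r'^(?P<pk>[\w-]+)/$', views['ResearchOutcomeIndicatorDetailAPIView'].as_view(), name='detail'),
]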
<|file_name|>connect_nodes_response.rs<|end_file_name|><|fim▁begin|>use libflo_action_std::{ Action, action_str_id, NumberOrString }; use string; #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] pub struct ConnectNodesResponse { #[serde(rename = "type")] pub action_type: NumberOrString, pub start_connector_index: usize, pub finish_connector_index: usize, } impl ConnectNodesResponse { pub fn new(start_connector_index: usize, finish_connector_index: usize) -> Self { ConnectNodesResponse { action_type: action_str_id(string::module(), string::connect_nodes_response_action()), start_connector_index: start_connector_index,<|fim▁hole|> } } impl Action for ConnectNodesResponse { fn get_type(&self) -> &NumberOrString { &self.action_type } }<|fim▁end|>
finish_connector_index: finish_connector_index, }
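// Editor's note: a small sketch (not from the source) constructing the response
// and serialising it; the Serialize derive above supports this, but the
// serde_json dependency is an assumption for illustration only.
fn main() {
    let response = ConnectNodesResponse::new(0, 1);
    let json = serde_json::to_string(&response).unwrap();
    println!("{}", json);
}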
<|file_name|>string-matches-pattern.test.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai'; import stringMatchesPattern, { _convertPatternToRegExpString } from '../../src/utils/string-matches-pattern'; describe('_convertPatternToRegExpString', () => { function test(rawPattern: string, expected: string): void { const result = _convertPatternToRegExpString(rawPattern); if (result !== expected) { expect.fail(`Expected "${rawPattern}" to become "${expected}" but it was "${result}"`); } } it('works with a pattern that is simply single wildcard', () => { test('*', '.+'); }); it('works with a pattern that starts with a wildcard', () => { test('*/log/dmesg', '.+/log/dmesg'); }); it('works with a pattern that has multiple wildcards', () => { test('**', '.+'); test('/var/**/*dmesg', '/var/.+/.+dmesg'); }); it('escapes RegExp special characters', () => { test('/var/log/*/something.log', '/var/log/.+/something\\.log'); test('foo[0]{x}*bar', 'foo\\[0\\]\\{x\\}.+bar'); test('foo\\bar*.log', 'foo\\\\bar.+\\.log'); test('1 + 1 = *', '1 \\+ 1 = .+'); test('1 ? 1 = *', '1 \\? 1 = .+'); test('1 ^ 1 = *', '1 \\^ 1 = .+'); test('1 $ 1 = *', '1 \\$ 1 = .+'); test('1 (x) 1 = *', '1 \\(x\\) 1 = .+'); test('1 | 1 = *', '1 \\| 1 = .+'); }); }); describe('stringMatchesPattern', () => { it('returns true for exact matches', () => { expect(stringMatchesPattern('some/pattern', 'some/pattern')).to.eql(true); }); it('returns false for non-matches when there are no wildcards', () => { expect(stringMatchesPattern('some/pattern', 'some/value')).to.eql(false); }); it('returns true for * matches mid-string', () => { expect(stringMatchesPattern('/var/log/*/something.log', '/var/log/folder/something.log')).to.eql(true); });<|fim▁hole|> }); it('returns true for multiple * matches', () => { expect(stringMatchesPattern('/var/log/*/*.log', '/var/log/folder/something.log')).to.eql(true); expect(stringMatchesPattern('/var/*/folder/*.log', '/var/log/folder/something.log')).to.eql(true); expect(stringMatchesPattern('/*/*/*/*.log', '/var/log/folder/something.log')).to.eql(true); // This type of pattern matching should not be confused with bash expansion or // similar globs. That is, a star does not match *only* a single "unit", as there is // no defined unit (in this case, it appears as if folders are units if you're // thinking of this as a bash expansion). A wildcard is equivalent to `.*` in a // regexp. expect(stringMatchesPattern('/*/*.log', '/var/log/folder/something.log')).to.eql(true); }); it('returns true for * matches at the beginning of the string', () => { // See comment above about not comparing this to bash expansion expect(stringMatchesPattern('*.log', '/var/log/folder/something.log')).to.eql(true); }); it('returns true for multiple * matches, including one at the end of the string', () => { expect(stringMatchesPattern('/var/*/folder/*', '/var/log/folder/something.log')).to.eql(true); }); it('returns true for multiple * matches, including one at the beginning of the string', () => { expect(stringMatchesPattern('*/folder/*.log', '/var/log/folder/something.log')).to.eql(true); }); it('returns true for multiple * matches, including ones at the beginning and end of the string', () => { expect(stringMatchesPattern('*/folder/*', '/var/log/folder/something.log')).to.eql(true); }); it('returns true when there are multiple next-pattern-part matches after a wildcard', () => { // The `*` needs to act as a greedy operator. 
That is, it can't just look at the // first post-wildcard match (in this case, ":e:f:g") because there may be another // match for the next part of the pattern later in the string. In this case, it // can't stop at the first ":e:f:g" because that's not the end of the string (and // there's no trailing wildcard on the pattern), so the pattern would not match ... // but the pattern SHOULD match because it does start with "a:b:c:", end with // ":e:f:g", and have characters in between. expect(stringMatchesPattern('a:b:c:*:e:f:g', 'a:b:c:d:e:f:g:a:b:c:d:e:f:g')).to.eql(true); }); it('returns true when the entire pattern is a single wildcard and the value is at least one char', () => { expect(stringMatchesPattern('*', 'a:b:c:d:e:f:g:a:b:c:d:e:f:g')).to.eql(true); expect(stringMatchesPattern('*', '/var/log/folder/something.log')).to.eql(true); expect(stringMatchesPattern('*', 'a')).to.eql(true); }); it('returns false when the entire pattern is a single wildcard and the value is an empty string', () => { // because wildcards require a match of at least one character expect(stringMatchesPattern('*', '')).to.eql(false); }); it('returns false when the value does not match the pattern', () => { // TODO: insert more tests }); });<|fim▁end|>
it('returns true for * matches at the end of the string', () => { expect(stringMatchesPattern('/var/log/*', '/var/log/folder/something.log')).to.eql(true);
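// Editor's note: one possible implementation of the helpers exercised above,
// written as a sketch that satisfies exactly these tests; the real module may
// differ. Special RegExp characters are escaped first, then each run of '*'
// becomes a single greedy '.+' (so a wildcard must match at least one char).
export function _convertPatternToRegExpString(rawPattern: string): string {
  return rawPattern
    .replace(/[.[\]{}()^$+?|\\]/g, '\\$&') // escape everything except '*'
    .replace(/\*+/g, '.+');                // '*' and '**' both become '.+'
}

export default function stringMatchesPattern(pattern: string, value: string): boolean {
  // anchor both ends so the whole value must match the whole pattern
  return new RegExp(`^${_convertPatternToRegExpString(pattern)}$`).test(value);
}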
<|file_name|>fold-navigator.js<|end_file_name|><|fim▁begin|>'use babel'; //showSearch import FoldNavigatorView from './fold-navigator-view'; import fuzzaldrinPlus from 'fuzzaldrin-plus'; import _ from 'lodash'; import { CompositeDisposable } from 'atom'; export default { config: { autofold: { title: 'Autofold on open', description: 'Autofold all folds when you open a document. config.cson key : autofold', type: 'boolean', default: false }, keepFolding: { title: 'Keep code folded.', description: 'Every time you click on one of the fold navigator elements, all code folds will be folded before the selected fold opens. Useful if you want to keep your code folded all the time. Note that if you are using ctrl-alt-cmd-up/down keys to navigate it will not close the folds. Also you can temporarily enable/disable this behaviour by holding down the option key while you click. config.cson key : keepFolding', type: 'boolean', default: false }, showLineNumbers: { title: 'Show line number.', description: 'Show line numbers in fold navigator. config.cson key : showLineNumbers', type: 'boolean', default: true }, indentationCharacter: { title: 'Indentation character.', description: 'The character used for indentation in the fold navigator panel. config.cson key : indentationCharacter', type: 'string', default: 'x' }, maxLineContentLength: { title: 'Maximum line length.', description: 'Fold Navigator will take the line the fold is on, but if the line is longer than this many characters it will truncate it. config.cson key : maxLineContentLength', type: 'integer', default: 60 }, minLineLength: { title: 'Minimum line length.', description: 'Sometimes the fold falls on a line which contains very little information. Typically comments like /** are meaningless. If the line content is less than this many characters, use the next line for the fold description. config.cson key : minLineLength', type: 'integer', default: 6 }, keepFoldingAllTime: { title: 'Keep code folded even on shortcuts.', description: 'It will fold all folds before opening a new one. config.cson key : keepFoldingAllTime', type: 'boolean', default: false }, autoScrollFoldNavigatorPanel: { title: 'Auto scroll fold navigator panel.', description: 'Scrolls the fold navigator panel to the active fold. config.cson key : autoScrollFoldNavigatorPanel', type: 'boolean', default: true }, unfoldAllSubfolds: { title: 'Unfold all subfolds.', description: 'When a fold is selected/active all subfolds will be unfolded as well. When you have lots of subfolds to open this can be sluggish. config.cson key : unfoldAllSubfolds', type: 'boolean', default: true }, maxFoldLevel: { title: 'Maximum fold level fold navigator will list.', description: 'There is probably not much use in listing every single fold. With this option you can limit the fold level depth we list on the panel, hopefully giving you a better overview of the code. config.cson key : maxFoldLevel', type: 'integer', default: 10, }, whenMatchedUsePreviousLine: { title: 'Previous line should be used for description.', description: 'Comma-separated values. If the content of the line matches any of these values, the previous line is going to be used for the fold description.
This is so that we avoid listing just a single bracket, for example, which would be pretty meaningless.', type: 'string', default: '{,{ ', }, log: { title: 'Turn on logging', description: 'It might help to sort out mysterious bugs.', type: 'boolean', default: false, }, }, activate(state) { //console.log(arguments.callee.name); /* this.settings = null; */ this.settings = null; this.iniSettings(); /* this.lines2fold = []; Key is the line number and the value is the line number of the last fold looped through in the document. Currently the fold endings are not observed; maybe I should change this in the future. Note that we will work with the line numbers displayed and not the actual line number which can be 0 */ this.lines2fold = []; /* this.folds = []; array of row numbers where the folds are */ this.folds = []; /* this.visibleFolds = []; same as this.folds but limited by this.settings.maxFoldLevel */ this.visibleFolds = []; /* this.foldObjects = {}; we need this to be able to navigate levels, an example below, see this.parse; it should really be a new Map(); { line: i, children: [], parent: parent, indentation: indentLevel, content: '', } */ this.foldObjects = {}; /* exactly the same as this.foldObjects but came later because of fuzzaldrin-plus which only seems to work with arrays as far as I can tell */ this.foldObjectsArray = []; /* this.foldLevels = {}; row numbers of the folds organised by level, useful for the commands which fold/unfold levels */ this.foldLevels = {}; /* this.history = []; only used as a short term log so that we can navigate fold level down */ this.history = []; /* this.activeFold line number of the fold which we are on; this is what gets highlighted on the fold navigator panel item */ this.activeFold = null; // subscriptions this.subscriptions = new CompositeDisposable(); this.onDidChangeCursorPositionSubscription = null; this.foldNavigatorView = new FoldNavigatorView(state.foldNavigatorViewState); // when the active pane item changes parse code and change content of navigator panel atom.workspace.observeActivePaneItem((pane) => { this.observeActivePaneItem(pane) }); // parse content of editor each time it stops changing atom.workspace.observeTextEditors((editor) => { this.observeTextEditors(editor) }); // attach onclick event to the fold navigator lines this.navigatorElementOnClick(); this.registerCommands(); /* this.panel = null; foldnavigator panel */ this.panel = null; this.addNavigatorPanel(); /* this.searchModal */ this.searchModal = null; this.searchModalElement = null; this.searchModalInput = null; this.searchModalItems = null; this.addSearchModal(); }, // observe text editors coming and going observeTextEditors(editor) { //console.log(arguments.callee.name); if (this.settings && this.settings.autofold && editor){ editor.foldAll(); } }, // every time the active pane changes this will get called observeActivePaneItem(pane) { //console.log(arguments.callee.name); var editor = atom.workspace.getActiveTextEditor(); var listener; var editorView; if (!editor) return; //dispose of previous subscription if (this.onDidChangeCursorPositionSubscription) { this.onDidChangeCursorPositionSubscription.dispose(); } // follow cursor in fold navigator; register subscription so that we can remove it this.onDidChangeCursorPositionSubscription = editor.onDidChangeCursorPosition( _.debounce((event) => this.onDidChangeCursorPosition(event), 500) ); //dispose of previous subscription if (this.onDidStopChangingSubscription) { this.onDidStopChangingSubscription.dispose(); } // if document
changed subscription this.onDidStopChangingSubscription = editor.onDidStopChanging( _.debounce((event) => this.parse(editor), 500) ); this.parse(editor); }, clearSearchModal() { if (!this.searchModal) return; this.searchModalItems.innerHTML = ''; this.searchModalInput.value = ''; }, alignEditorToSearch() { var selectedArr = this.searchModalItems.getElementsByClassName('fold-navigator-search-modal-item-selected'); var selected = selectedArr[0]; var editor = atom.workspace.getActiveTextEditor(); if (selected && selected.dataset.row >= 0) { this.moveCursor(selected.dataset.row, false); } }, searchModalOnClick(event) { var row; var editor = atom.workspace.getActiveTextEditor(); var clicked = null; this.hideSearch(); if (event.target.matches('.fold-navigator-search-modal-item')) { clicked = event.target; } else if (event.target.matches('.fold-navigator-indentation') && event.target.parentNode && event.target.parentNode.matches('.fold-navigator-search-modal-item')) { clicked = event.target.parentNode; } if (!clicked) return; row = clicked.dataset.row; //problem if (!row) return; this.moveCursor(row, false); }, addSearchModal() { this.searchModalElement = document.createElement('div'); this.searchModalElement.classList.add('fold-navigator-search-modal'); this.searchModalElement.classList.add('native-key-bindings'); this.searchModalInput = document.createElement('input'); this.searchModalItems = document.createElement('div'); this.searchModalElement.appendChild(this.searchModalInput); this.searchModalElement.appendChild(this.searchModalItems); this.searchModal = atom.workspace.addModalPanel({ item: this.searchModalElement, visible: false }); // on blur this.searchModalInput.addEventListener('blur', (event) => { // delay hiding because the on click event won't fire otherwise. WHAT an ugly way to solve it :) setTimeout(() => { this.hideSearch(); }, 200); }); // on click this.searchModalElement.addEventListener('click', (event) => { this.searchModalOnClick(event); }, true); // on input this.searchModalInput.addEventListener('input', () => { this.searchModalItems.innerHTML = ''; var query = this.searchModalInput.value; if (!query || query.length < 1) return; var filteredItems = fuzzaldrinPlus.filter(this.foldObjectsArray, query, { key: 'content' }); var html = ''; filteredItems.forEach((item, index) => { let selected = ' fold-navigator-search-modal-item-selected'; if (index > 0) { selected = ''; } //let html2add = '<div id="' + id + '" class="' + classList + '" data-row="' + i + '">' + gutter + lineNumberSpan + indentHtml + content.innerHTML + '</div>'; let indentHtml = ''; for (let j = 0; j < item.indentation; j++) { indentHtml += '<span class="fold-navigator-indentation">' + this.settings.indentationCharacter + '</span>'; } html += '<div class="fold-navigator-search-modal-item fold-navigator-item-indent-' + item.indentation + selected + '" data-row="' + item.line + '">' + indentHtml + item.content + '</div>'; }); this.searchModalItems.innerHTML = html; //var matches = fuzzaldrinPlus.match(displayName, this.searchModalInput.value); //filterQuery box from text input //items ??
// { key: @getFilterKey() } }); this.searchModalElement.addEventListener('keydown', (e) => { var sc = 'fold-navigator-search-modal-item-selected'; if (e.keyCode === 38 || e.keyCode === 40 || e.keyCode == 13 || e.keyCode == 27) { var items = this.searchModalItems.getElementsByClassName('fold-navigator-search-modal-item'); if (!items) return; // remove selected var selectedArr = this.searchModalItems.getElementsByClassName(sc); var selected = selectedArr[0]; if (selected) { selected.classList.remove(sc); } var first = items[0] ? items[0] : false; var last = items[items.length - 1] ? items[items.length - 1] : false; var next = null; if (e.keyCode === 38) { // up if (selected) { next = selected.previousElementSibling; } } else if (e.keyCode === 40) { // down if (selected) { next = selected.nextElementSibling; } } else if (e.keyCode === 27) { // esc this.hideSearch(); } else if (e.keyCode == 13) { // enter if (selected) { if (selected.dataset.row >= 0) { let editor = atom.workspace.getActiveTextEditor(); this.moveCursor(selected.dataset.row, false); this.hideSearch(); } } } // end of list or nothing selected if (!next) { if (e.keyCode === 38) next = last; else { next = first; } } if (next) { next.classList.add(sc); } } }); <|fim▁hole|> //var matches = fuzzaldrinPlus.match(displayName, filterQuery) }, hideSearch() { this.searchModal.hide(); let editor = atom.workspace.getActiveTextEditor(); if (editor) atom.views.getView(editor).focus(); }, showSearch() { var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; this.searchModal.show(); this.searchModalInput.focus(); this.searchModalInput.select(); }, toggleSearch() { var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if (this.searchModal.isVisible()) { this.searchModal.hide(); atom.views.getView(editor).focus(); } else { this.searchModal.show(); this.searchModalInput.focus(); this.searchModalInput.select(); } }, onDidChangeCursorPosition(event) { //console.log(arguments.callee.name); this.selectRow(event.newBufferPosition.row); }, addNavigatorPanel() { //console.log(arguments.callee.name); var element = this.foldNavigatorView.getElement(); if (atom.config.get('tree-view.showOnRightSide')) { this.panel = atom.workspace.addLeftPanel({ item: element, visible: false }); } else { this.panel = atom.workspace.addRightPanel({ item: element, visible: false }); } }, iniSettings() { //console.log(arguments.callee.name); var editor = atom.workspace.getActiveTextEditor(); var languageSettings = null; if (editor) { let scope = editor.getGrammar().scopeName; languageSettings = atom.config.get('fold-navigator', { 'scope': [scope] }); } this.settings = atom.config.get('fold-navigator'); if (languageSettings){ Object.assign(this.settings, languageSettings); } // parse the comma separated string whenMatchedUsePreviousLine if(this.settings.whenMatchedUsePreviousLine && this.settings.whenMatchedUsePreviousLine.trim() != ''){ this.settings.whenMatchedUsePreviousLineArray = this.settings.whenMatchedUsePreviousLine.split(','); if(this.settings.whenMatchedUsePreviousLineArray.constructor !== Array){ this.settings.whenMatchedUsePreviousLineArray = null; } } }, registerCommands() { //"ctrl-alt-cmd-up": "fold-navigator:previousFoldAtCurrentLevel", //"ctrl-alt-cmd-down": "fold-navigator:nextFoldAtCurrentLevel", //"ctrl-alt-cmd-up": "fold-navigator:previousFold", //"ctrl-alt-cmd-down": "fold-navigator:nextFold", // //console.log(arguments.callee.name); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggle': () => this.toggle() }));
this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:open': () => this.open() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:close': () => this.close() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:previousFold': () => this.previousFold() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:nextFold': () => this.nextFold() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:moveLevelUp': () => this.moveLevelUp() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:moveLevelDown': () => this.moveLevelDown() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:nextFoldAtCurrentLevel': () => this.nextFoldAtCurrentLevel() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:previousFoldAtCurrentLevel': () => this.previousFoldAtCurrentLevel() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldSubfolds': () => this.unfoldSubfoldsPublic() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldActive': () => this.foldActivePublic() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldAtLevel1': () => this.unfoldAtLevel1() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldAtLevel2': () => this.unfoldAtLevel2() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldAtLevel3': () => this.unfoldAtLevel3() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldAtLevel4': () => this.unfoldAtLevel4() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:unfoldAtLevel5': () => this.unfoldAtLevel5() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldAtLevel1': () => this.foldAtLevel1() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldAtLevel2': () => this.foldAtLevel2() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldAtLevel3': () => this.foldAtLevel3() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldAtLevel4': () => this.foldAtLevel4() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:foldAtLevel5': () => this.foldAtLevel5() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleFoldsLevel1': () => this.toggleFoldsLevel1() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleFoldsLevel2': () => this.toggleFoldsLevel2() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleFoldsLevel3': () => this.toggleFoldsLevel3() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleFoldsLevel4': () => this.toggleFoldsLevel4() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleFoldsLevel5': () => this.toggleFoldsLevel5() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleSearch': () => this.toggleSearch() })); this.subscriptions.add(atom.commands.add('atom-workspace', { 'fold-navigator:toggleActiveFold': () => this.toggleActiveFold() })); }, previousFold() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } var folds = this.visibleFolds; if 
(!folds || folds.length === 0) return; //console.log(arguments.callee.name); this.clearHistory(); var fold = this.foldNavigatorView.getActiveFold(); var index = folds.indexOf(fold); var previous; var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if (index !== 0) { previous = folds[index - 1]; } else { previous = folds[folds.length - 1]; } if (previous || previous === 0) { this.moveCursor(previous); } }, nextFold() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } //console.log(arguments.callee.name); this.clearHistory(); var folds = this.visibleFolds; if (!folds || folds.length === 0) return; var fold = this.foldNavigatorView.getActiveFold(); var index = folds.indexOf(fold); var next; var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if (folds.length !== (index + 1)) { next = folds[index + 1]; } else { next = folds[0]; } if (next || next === 0) { this.moveCursor(next); } }, previousFoldAtCurrentLevel() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } //console.log(arguments.callee.name); this.clearHistory(); var fold = this.foldNavigatorView.getActiveFold(); var previous; var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; var indentation = 0; if (fold || fold === 0) { indentation = editor.indentationForBufferRow(fold); } var level = this.getLevel(indentation); if (!level) return; var index = level.indexOf(fold); if (index !== 0) { previous = level[index - 1]; } else { previous = level[level.length - 1]; } if (previous || previous === 0) { this.moveCursor(previous); } }, nextFoldAtCurrentLevel() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } this.clearHistory(); //console.log(arguments.callee.name); var fold = this.foldNavigatorView.getActiveFold(); var next; var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; var indentation = 0; if (fold || fold === 0) { indentation = editor.indentationForBufferRow(fold); } var level = this.getLevel(indentation); if (!level) return; var index = level.indexOf(fold); if (level.length !== (index + 1)) { next = level[index + 1]; } else { next = level[0]; } if (next || next === 0) { this.moveCursor(next); } }, moveLevelUp() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } var fold = this.foldNavigatorView.getActiveFold(); var foldObj = this.foldObjects[fold] ? this.foldObjects[fold] : false; var editor = atom.workspace.getActiveTextEditor(); var parent; if (!editor || !foldObj) { return; } parent = this.getParentFold(foldObj); if ((parent || parent === 0) && parent !== 'root') { this.moveCursor(parent); this.addToHistory(fold); } }, moveLevelDown() { if (this.searchModal.isVisible()) { this.alignEditorToSearch(); return; } var fold = this.foldNavigatorView.getActiveFold(); var foldObj = this.foldObjects[fold] ? 
this.foldObjects[fold] : false; var editor = atom.workspace.getActiveTextEditor(); var child; if (!editor || !foldObj || foldObj.children.length === 0) { return; } child = this.getLastFromHistory(); // check if the last item in history actually belongs to this parent if (child === undefined || foldObj.children.indexOf(child) === -1) child = foldObj.children[0]; if (child || child === 0) { this.moveCursor(child); } }, getParentFold(foldObj) { if (!foldObj) { return false; } // badly indented/formatted code - there must be a parent, so return the previous fold as the next best candidate for the parent if (foldObj.parent === 'root' && foldObj.indentation > 0) { let index = this.folds.indexOf(foldObj.line); let prev = this.folds[index - 1]; if (prev || prev === 0) return prev; return false; } return foldObj.parent; }, addToHistory(fold) { var maxSize = 10; if (!this.history) { this.history = []; } else if (this.history.length > maxSize) { this.history.shift(); } this.history.push(fold); }, getLastFromHistory() { if (!this.history) { return undefined; } return this.history.pop(); }, clearHistory() { if (!this.history) return; this.history.length = 0; }, // gets all folds at indentation level getLevel(level) { //console.log(arguments.callee.name); return this.foldLevels[level]; }, parse(editor) { //console.log(arguments.callee.name); if (!editor) return; // initialize this.iniSettings(); this.clearSearchModal(); this.foldNavigatorView.clearContent(); this.clearHistory(); this.lines2fold = []; this.folds = []; this.visibleFolds = []; this.foldObjects = {}; this.foldObjectsArray = []; // we need this because fuzzaldrin-plus can only search arrays, not objects this.foldLevels = {}; var numberOfRows = editor.getLastBufferRow(); var html = ""; var currentFold = null; var temporarilyLastParent = []; //loop through the lines of the active editor for (var i = 0; numberOfRows > i; i++) { if (editor.isFoldableAtBufferRow(i)) { let indentLevel = editor.indentationForBufferRow(i); let indentHtml = ""; let lineNumberSpan = ""; let lineContent = ""; let lineContentTrimmed = ""; let classList = "fold-navigator-item"; let id = ""; let gutter = '<span class="fold-navigator-gutter"></span>'; let content = ''; let parent; // add this line to folds this.folds.push(i); // add this line to foldLevels if (!this.foldLevels.hasOwnProperty(indentLevel)) { this.foldLevels[indentLevel] = []; } this.foldLevels[indentLevel].push(i); // chop the array down - it cannot be larger than the current indentLevel temporarilyLastParent.length = parseInt(indentLevel); parent = 'root'; if (temporarilyLastParent[indentLevel - 1] || temporarilyLastParent[indentLevel - 1] === 0) parent = temporarilyLastParent[indentLevel - 1]; if (this.foldObjects[parent]) { this.foldObjects[parent]['children'].push(i); } this.foldObjects[i] = { line: i, children: [], parent: parent, indentation: indentLevel, content: '', }; //temporarilyLastParent temporarilyLastParent[indentLevel] = i; for (let j = 0; j < indentLevel; j++) { indentHtml += '<span class="fold-navigator-indentation">' + this.settings.indentationCharacter + '</span>'; } lineContent = editor.lineTextForBufferRow(i); lineContentTrimmed = lineContent.trim(); // check if the content of the string matches one of those values when the previous line's content should be used instead // see issue here https://github.com/turigeza/fold-navigator/issues/12 if(this.settings.whenMatchedUsePreviousLineArray && this.settings.whenMatchedUsePreviousLineArray.indexOf(lineContentTrimmed) !== -1){ //&& i !== 0
lineContent = editor.lineTextForBufferRow(i - 1); }else if (lineContentTrimmed.length < this.settings.minLineLength) { // check if the line is longer than the minimum in the settings; if not, grab the next line instead lineContent = editor.lineTextForBufferRow(i + 1); } // default it to a string; lineTextForBufferRow seems to return undefined sometimes, most likely only when the first row is { if(!lineContent){ lineContent = ''; } // check if line is too long if (lineContent.length > this.settings.maxLineContentLength) { lineContent = lineContent.substring(0, this.settings.maxLineContentLength) + '...'; } /* maybe in the future we should check for lines which are too short and grab the next row */ if (this.settings.showLineNumbers) { lineNumberSpan = '<span class="fold-navigator-line-number ">' + (i + 1) + '</span>'; } id = 'fold-navigator-item-' + i; classList += ' fold-navigator-item-' + i; classList += ' fold-navigator-item-indent-' + indentLevel; // escape html // add content to navigator if (indentLevel <= this.settings.maxFoldLevel) { currentFold = i; content = document.createElement('div'); content.appendChild(document.createTextNode(lineContent)); html += '<div id="' + id + '" class="' + classList + '" data-row="' + i + '">' + gutter + lineNumberSpan + indentHtml + content.innerHTML + '</div>'; this.foldObjects[i]['content'] = lineContent.trim(); this.foldObjectsArray.push(this.foldObjects[i]); this.visibleFolds.push(i); } } // add this fold to the line2fold lookup array this.lines2fold[i] = currentFold; } this.foldNavigatorView.setContent(html); this.selectRow(editor.getCursorBufferPosition().row); }, /* called every time onCursorChange */ selectRow(row) { //console.log(arguments.callee.name); var fold = this.lines2fold[row]; var line = this.foldNavigatorView.selectFold(fold); // autoscroll navigator panel if ((line) && !this.wasItOnClick && this.settings.autoScrollFoldNavigatorPanel) { line.scrollIntoViewIfNeeded(false); if(this.settings.log){ console.log(line); } } if (line) { this.wasItOnClick = false; } }, // not yet used idea stolen from tree view resizeStarted() { document.onmousemove = () => { this.resizePanel() }; document.onmouseup = () => { this.resizeStopped() }; }, // not yet used idea stolen from tree view resizeStopped() { document.onmousemove = null; document.onmouseup = null; }, // not yet used idea stolen from tree view resizePanel(d) { var pageX = d.pageX; var which = d.which; if (which !== 1) { return this.resizeStopped(); } }, toggle() { return (this.panel.isVisible() ?
this.panel.hide() : this.panel.show()); }, open() { var editor = atom.workspace.getActiveTextEditor(); return this.panel.show(); }, close() { return this.panel.hide(); }, moveCursor(row, wasItOnClick = false) { //console.log(arguments.callee.name); this.wasItOnClick = wasItOnClick; // setCursorBufferPosition dies if row is string row = parseInt(row); var editor = atom.workspace.getActiveTextEditor(); if (!editor || row < 0) return; //editor.unfoldBufferRow(row); if (this.settings.keepFoldingAllTime && !wasItOnClick) { editor.foldAll(); } this.unfoldSubfolds(row); editor.setCursorBufferPosition([row, 0]); editor.scrollToCursorPosition({ center: true }); }, navigatorElementOnClick() { //console.log(arguments.callee.name); var element = this.foldNavigatorView.getElement(); element.onclick = (event) => { var clicked = null; var row; var editor = atom.workspace.getActiveTextEditor(); if (event.target.matches('.fold-navigator-item')) { clicked = event.target; } else if (event.target.matches('.fold-navigator-indentation') && event.target.parentNode && event.target.parentNode.matches('.fold-navigator-item')) { clicked = event.target.parentNode; } if (!clicked) return; row = clicked.dataset.row; //problem if (!row) return; if (editor && ((this.settings.keepFolding && !event.metaKey) || (!this.settings.keepFolding && event.metaKey))) { // fold all code before anything else sadly this triggers a lot of onDidChangeCursorPosition events editor.foldAll(); } this.moveCursor(row, true); }; }, foldActivePublic() { //console.log(arguments.callee.name); var fold = this.foldNavigatorView.getActiveFold(); var editor = atom.workspace.getActiveTextEditor(); if ((!fold && fold !== 0) || !editor) return; editor.foldBufferRow(fold); }, unfoldSubfoldsPublic() { //console.log(arguments.callee.name); this.unfoldSubfolds(false, false, true); }, unfoldSubfolds(row = false, editor = false, force = false) { //console.log(arguments.callee.name); var fold = (row || row === 0) ? row : this.foldNavigatorView.getActiveFold(); if (!fold && fold !== 0) return; var foldObj = this.foldObjects[fold]; var editor = editor ? 
editor : atom.workspace.getActiveTextEditor(); if (!foldObj || !editor) return; editor.unfoldBufferRow(fold); if (!this.settings.unfoldAllSubfolds && !force) return; if (foldObj.children.length > 0) { foldObj.children.forEach( (value) => { this.unfoldSubfolds(value, editor) } ); } }, toggleActiveFold() { //console.log(arguments.callee.name); var fold = this.foldNavigatorView.getActiveFold(); var editor = atom.workspace.getActiveTextEditor(); if ((!fold && fold !== 0) || !editor) return; if (editor.isFoldedAtBufferRow(fold)) { this.unfoldSubfolds(fold, editor, true); } else { editor.foldBufferRow(fold); } }, unfoldAtLevel(level) { var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if ([1, 2, 3, 4, 5].indexOf(level) < 0) return; var lev = this.getLevel(level - 1); if (lev) { lev.forEach((fold) => { editor.unfoldBufferRow(fold); }); editor.scrollToCursorPosition({ center: true }); } }, foldAtLevel(level) { var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if ([1, 2, 3, 4, 5].indexOf(level) < 0) return; var lev = this.getLevel(level - 1); if (lev) { lev.forEach((fold) => { editor.foldBufferRow(fold); }); editor.scrollToCursorPosition({ center: true }); } }, toggleFoldsLevel(level) { var editor = atom.workspace.getActiveTextEditor(); if (!editor) return; if ([1, 2, 3, 4, 5].indexOf(level) < 0) return; var lev = this.getLevel(level - 1); if (!lev) return; var first = lev[0]; if (!first && first !== 0) return; if (editor.isFoldedAtBufferRow(first)) { this.unfoldAtLevel(level); } else { this.foldAtLevel(level); } }, unfoldAtLevel1() { this.unfoldAtLevel(1); }, unfoldAtLevel2() { this.unfoldAtLevel(2); }, unfoldAtLevel3() { this.unfoldAtLevel(3); }, unfoldAtLevel4() { this.unfoldAtLevel(4); }, unfoldAtLevel5() { this.unfoldAtLevel(5); }, foldAtLevel1() { this.foldAtLevel(1); }, foldAtLevel2() { this.foldAtLevel(2); }, foldAtLevel3() { this.foldAtLevel(3); }, foldAtLevel4() { this.foldAtLevel(4); }, foldAtLevel5() { this.foldAtLevel(5); }, toggleFoldsLevel1() { this.toggleFoldsLevel(1); }, toggleFoldsLevel2() { this.toggleFoldsLevel(2); }, toggleFoldsLevel3() { this.toggleFoldsLevel(3); }, toggleFoldsLevel4() { this.toggleFoldsLevel(4); }, toggleFoldsLevel5() { this.toggleFoldsLevel(5); }, startTime() { this.time = new Date(); }, showTime(text) { console.log(text); console.log(new Date() - this.time); }, deactivate() { //console.log(arguments.callee.name); this.panel.destroy(); this.subscriptions.dispose(); this.foldNavigatorView.destroy(); if (this.onDidChangeCursorPositionSubscription) { this.onDidChangeCursorPositionSubscription.dispose(); } if (this.onDidStopChangingSubscription) { this.onDidStopChangingSubscription.dispose(); } //delete(this.searchModalElement); //delete(this.searchModalItems); //delete(this.searchModalInput); if (this.searchModal) { this.searchModal.destroy(); } }, /* we don't need this */ serialize() {} };<|fim▁end|>
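The parse() routine above resolves each fold's parent with an indentation stack: temporarilyLastParent is truncated to the current indent level, the entry one level up (if any) becomes the parent, and the current row is then recorded at its own level. A minimal Python sketch of that same technique (names are illustrative, not part of the package):

```python
def build_fold_tree(rows):
    # rows: (buffer_row, indent_level) pairs in document order,
    # one per foldable line -- mirrors parse() above.
    fold_objects = {}
    last_parent = []  # last fold row seen at each indentation level
    for row, indent in rows:
        del last_parent[indent:]  # a new fold closes all deeper levels
        parent = 'root'
        if indent > 0 and len(last_parent) >= indent and last_parent[indent - 1] is not None:
            parent = last_parent[indent - 1]
        if parent in fold_objects:
            fold_objects[parent]['children'].append(row)
        fold_objects[row] = {'line': row, 'children': [],
                             'parent': parent, 'indentation': indent}
        last_parent += [None] * (indent + 1 - len(last_parent))
        last_parent[indent] = row
    return fold_objects

# build_fold_tree([(0, 0), (3, 1), (7, 1), (12, 0)]) gives folds 3 and 7
# the parent 0, which is what getParentFold()/moveLevelDown() expect.
```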
<|file_name|>queue_alt.rs<|end_file_name|><|fim▁begin|>/*! Heterogeneous Queue (alternative) This version is hand-written (no macros) but has a simpler architecture that allows implicit consumption by destructuring on assignment. # Example ```rust use heterogene::queue_alt::{Q0,Q1,Q2}; let q = (); let q = q.append(1u32); let q = q.append('c'); let (num, q) = q; let (ch, q) = q; println!("Queue-alt: {} {} {:?}", num, ch, q); ``` */ pub trait Q0 { fn append<T1>(self, t1: T1) -> (T1,()); } impl Q0 for () { fn append<T1>(self, t1: T1) -> (T1,()) { (t1,()) } } pub trait Q1<T1> { fn append<T2>(self, t2: T2) -> (T1,(T2,())); } impl<T1> Q1<T1> for (T1,()) { fn append<T2>(self, t2: T2) -> (T1,(T2,())) { let (t1,_) = self; (t1,(t2,())) } } pub trait Q2<T1,T2> { fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,()))); } impl<T1,T2> Q2<T1,T2> for (T1,(T2,())) {<|fim▁hole|> let (t1,(t2,_)) = self; (t1,(t2,(t3,()))) } }<|fim▁end|>
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,()))) {
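For comparison, the same nested-pair encoding can be mimicked dynamically in Python. This is only an illustration of the data shape; Python cannot enforce the per-length trait bounds that Q0/Q1/Q2 provide:

```python
def q_append(q, item):
    # () is the empty queue; a non-empty queue is (head, rest)
    if q == ():
        return (item, ())
    head, rest = q
    return (head, q_append(rest, item))

q = ()
q = q_append(q, 1)
q = q_append(q, 'c')
num, q = q  # consume by destructuring, as in the Rust doc example
ch, q = q
print("Queue-alt: %s %s %r" % (num, ch, q))
```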
<|file_name|>database.py<|end_file_name|><|fim▁begin|>#!/bin/env python # -*- coding: utf-8; -*- # # (c) 2016 FABtotum, http://www.fabtotum.com # # This file is part of FABUI. #<|fim▁hole|># (at your option) any later version. # # FABUI is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with FABUI. If not, see <http://www.gnu.org/licenses/>. # Import standard python module # Import external modules # Import internal modules<|fim▁end|>
# FABUI is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or
<|file_name|>dockerhub_adapter.go<|end_file_name|><|fim▁begin|>// // Copyright (c) 2017 Red Hat, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // Red Hat trademarks are not licensed under Apache License, Version 2. // No permission is granted to use or replicate Red Hat trademarks that // are incorporated in this software or its documentation. // package adapters import ( "bytes" "context" "encoding/json" "fmt" "io/ioutil" "net/http" logging "github.com/op/go-logging" "github.com/openshift/ansible-service-broker/pkg/apb" ) const dockerhubName = "docker.io" const dockerHubLoginURL = "https://hub.docker.com/v2/users/login/" const dockerHubRepoImages = "https://hub.docker.com/v2/repositories/%v/?page_size=100"<|fim▁hole|>const dockerHubManifestURL = "https://registry.hub.docker.com/v2/%v/manifests/%v" // DockerHubAdapter - Docker Hub Adapter type DockerHubAdapter struct { Config Configuration Log *logging.Logger } // DockerHubImage - Image from a dockerhub registry. type DockerHubImage struct { Name string `json:"name"` Namespace string `json:"namespace"` } // DockerHubImageResponse - Image response for dockerhub. type DockerHubImageResponse struct { Count int `json:"count"` Results []*DockerHubImage `json:"results"` Next string `json:"next"` } // RegistryName - Retrieve the registry name func (r DockerHubAdapter) RegistryName() string { return dockerhubName } // GetImageNames - retrieve the images func (r DockerHubAdapter) GetImageNames() ([]string, error) { r.Log.Debug("DockerHubAdapter::GetImages") r.Log.Debug("BundleSpecLabel: %s", BundleSpecLabel) r.Log.Debug("Loading image list for org: [ %s ]", r.Config.Org) token, err := r.getDockerHubToken() if err != nil { r.Log.Errorf("unable to generate docker hub token - %v", err) return nil, err } channel := make(chan string) ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Initial call to getNextImages; this will fan out to retrieve all the values. imageResp, err := r.getNextImages(ctx, r.Config.Org, token, fmt.Sprintf(dockerHubRepoImages, r.Config.Org), channel, cancelFunc) // if there was an issue with the first call, return the error if err != nil { return nil, err } // If there are no results in the first call, close the channel as nothing will get loaded. if len(imageResp.Results) == 0 { r.Log.Info("canceled retrieval as no items in org") close(channel) } var apbData []string counter := 1 for imageData := range channel { apbData = append(apbData, imageData) if counter < imageResp.Count { counter++ } else { close(channel) } } // check to see if the context had an error if ctx.Err() != nil { r.Log.Errorf("encountered an error while loading images, we may not have all the APBs in the catalog - %v", ctx.Err()) return apbData, ctx.Err() } return apbData, nil } // FetchSpecs - retrieve the spec for the image names.
func (r DockerHubAdapter) FetchSpecs(imageNames []string) ([]*apb.Spec, error) { specs := []*apb.Spec{} for _, imageName := range imageNames { spec, err := r.loadSpec(imageName) if err != nil { r.Log.Errorf("unable to retrieve spec data for image - %v", err) return specs, err } if spec != nil { specs = append(specs, spec) } } return specs, nil } // getDockerHubToken - will retrieve the docker hub token. func (r DockerHubAdapter) getDockerHubToken() (string, error) { type Payload struct { Username string `json:"username"` Password string `json:"password"` } type TokenResponse struct { Token string `json:"token"` } data := Payload{ Username: r.Config.User, Password: r.Config.Pass, } payloadBytes, err := json.Marshal(data) if err != nil { return "", err } body := bytes.NewReader(payloadBytes) req, err := http.NewRequest("POST", dockerHubLoginURL, body) if err != nil { return "", err } req.Header.Set("Content-Type", "application/json") resp, err := http.DefaultClient.Do(req) if err != nil { return "", err } defer resp.Body.Close() jsonToken, err := ioutil.ReadAll(resp.Body) tokenResp := TokenResponse{} err = json.Unmarshal(jsonToken, &tokenResp) if err != nil { return "", err } return tokenResp.Token, nil } // getNextImages - will follow the next URL using go routines. func (r DockerHubAdapter) getNextImages(ctx context.Context, org, token, url string, ch chan<- string, cancelFunc context.CancelFunc) (*DockerHubImageResponse, error) { req, err := http.NewRequest("GET", url, nil) if err != nil { r.Log.Errorf("unable to get next images for url: %v - %v", url, err) cancelFunc() close(ch) return nil, err } req.Header.Set("Authorization", fmt.Sprintf("JWT %v", token)) resp, err := http.DefaultClient.Do(req) if err != nil { r.Log.Errorf("unable to get next images for url: %v - %v", url, err) cancelFunc() close(ch) return nil, err } defer resp.Body.Close() imageList, err := ioutil.ReadAll(resp.Body) iResp := DockerHubImageResponse{} err = json.Unmarshal(imageList, &iResp) if err != nil { r.Log.Errorf("unable to get next images for url: %v - %v", url, err) cancelFunc() close(ch) return &iResp, err } // Keep getting the images if iResp.Next != "" { r.Log.Debugf("getting next page of results - %v", iResp.Next) // Fan out calls to get the next images. 
go r.getNextImages(ctx, org, token, iResp.Next, ch, cancelFunc) } for _, imageName := range iResp.Results { r.Log.Debugf("Trying to load %v/%v", imageName.Namespace, imageName.Name) go func(image *DockerHubImage) { select { case <-ctx.Done(): r.Log.Debugf( "loading images failed due to context err - %v name - %v", ctx.Err(), image.Name) return default: ch <- fmt.Sprintf("%v/%v", image.Namespace, image.Name) } }(imageName) } return &iResp, nil } func (r DockerHubAdapter) loadSpec(imageName string) (*apb.Spec, error) { if r.Config.Tag == "" { r.Config.Tag = "latest" } req, err := http.NewRequest("GET", fmt.Sprintf(dockerHubManifestURL, imageName, r.Config.Tag), nil) if err != nil { return nil, err } token, err := getBearerToken(imageName) if err != nil { return nil, err } req.Header.Set("Authorization", fmt.Sprintf("Bearer %v", token)) return imageToSpec(r.Log, req, r.Config.Tag) } func getBearerToken(imageName string) (string, error) { response, err := http.Get(fmt.Sprintf( "https://auth.docker.io/token?service=registry.docker.io&scope=repository:%v:pull", imageName)) if err != nil { return "", err } defer response.Body.Close() t := struct { Token string `json:"token"` }{} err = json.NewDecoder(response.Body).Decode(&t) if err != nil { return "", err } return t.Token, nil }<|fim▁end|>
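A rough Python equivalent of the flow above, kept deliberately simple: it assumes only the endpoints shown in the Go constants, fetches a JWT, then walks the paginated repository list sequentially instead of fanning out with goroutines (the requests library is assumed to be available):

```python
import requests

def list_repo_images(org, username, password):
    # Log in for a JWT, mirroring getDockerHubToken() above.
    resp = requests.post("https://hub.docker.com/v2/users/login/",
                         json={"username": username, "password": password})
    resp.raise_for_status()
    token = resp.json()["token"]
    # Follow the paginated "next" links, mirroring getNextImages() above.
    url = "https://hub.docker.com/v2/repositories/%s/?page_size=100" % org
    names = []
    while url:
        page = requests.get(url, headers={"Authorization": "JWT %s" % token})
        page.raise_for_status()
        data = page.json()
        names.extend("%s/%s" % (r["namespace"], r["name"]) for r in data["results"])
        url = data.get("next")  # None on the last page ends the loop
    return names
```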
<|file_name|>0003_alter_email_max_length.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.db import models, migrations from social.utils import setting_name EMAIL_LENGTH = getattr(settings, setting_name('EMAIL_LENGTH'), 254) class Migration(migrations.Migration): dependencies = [ ('default', '0002_add_related_name'), ]<|fim▁hole|> name='email', field=models.EmailField(max_length=EMAIL_LENGTH), ), ]<|fim▁end|>
operations = [ migrations.AlterField( model_name='code',
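The max_length here is resolved at import time from Django settings, so a deployment can override it before running the migration. A hypothetical settings.py override (the prefixed name is what setting_name('EMAIL_LENGTH') conventionally resolves to in python-social-auth):

```python
# settings.py -- illustrative override, not part of the migration itself
SOCIAL_AUTH_EMAIL_LENGTH = 190  # e.g. to fit a unique utf8mb4 index on MySQL
```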
<|file_name|>requirement_configuration.py<|end_file_name|><|fim▁begin|># Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import from pex.fetcher import URLFetcher from pex.network_configuration import NetworkConfiguration from pex.requirements import Constraint, parse_requirement_file, parse_requirement_strings from pex.typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Iterable, List, Optional import attr # vendor:skip from pex.requirements import ParsedRequirement else: from pex.third_party import attr @attr.s(frozen=True) class RequirementConfiguration(object): requirements = attr.ib(default=None) # type: Optional[Iterable[str]] requirement_files = attr.ib(default=None) # type: Optional[Iterable[str]] constraint_files = attr.ib(default=None) # type: Optional[Iterable[str]] def parse_requirements(self, network_configuration=None): # type: (Optional[NetworkConfiguration]) -> Iterable[ParsedRequirement] parsed_requirements = [] # type: List[ParsedRequirement] if self.requirements:<|fim▁hole|> parsed_requirements.extend( requirement_or_constraint for requirement_or_constraint in parse_requirement_file( requirement_file, is_constraints=False, fetcher=fetcher ) if not isinstance(requirement_or_constraint, Constraint) ) return parsed_requirements def parse_constraints(self, network_configuration=None): # type: (Optional[NetworkConfiguration]) -> Iterable[Constraint] parsed_constraints = [] # type: List[Constraint] if self.constraint_files: fetcher = URLFetcher(network_configuration=network_configuration) for constraint_file in self.constraint_files: parsed_constraints.extend( requirement_or_constraint for requirement_or_constraint in parse_requirement_file( constraint_file, is_constraints=True, fetcher=fetcher ) if isinstance(requirement_or_constraint, Constraint) ) return parsed_constraints<|fim▁end|>
parsed_requirements.extend(parse_requirement_strings(self.requirements)) if self.requirement_files: fetcher = URLFetcher(network_configuration=network_configuration) for requirement_file in self.requirement_files:
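Putting RequirementConfiguration together, a small illustrative sketch; the file names are made up, the import path is assumed from the file name, and it relies on the pex.requirements parsers behaving as used above:

```python
from pex.requirement_configuration import RequirementConfiguration  # assumed path

config = RequirementConfiguration(
    requirements=["requests>=2.25"],
    requirement_files=["requirements.txt"],
    constraint_files=["constraints.txt"],
)
parsed = config.parse_requirements()     # ParsedRequirement objects, constraints filtered out
pins = config.parse_constraints()        # Constraint objects only
```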
<|file_name|>handshake.py<|end_file_name|><|fim▁begin|># This file is part of Scapy # Copyright (C) 2007, 2008, 2009 Arnaud Ebalard # 2015, 2016, 2017 Maxence Tury # This program is published under a GPLv2 license """ TLS handshake fields & logic. This module covers the handshake TLS subprotocol, except for the key exchange mechanisms which are addressed with keyexchange.py. """ from __future__ import absolute_import import math import struct from scapy.error import log_runtime, warning from scapy.fields import ByteEnumField, ByteField, EnumField, Field, \ FieldLenField, IntField, PacketField, PacketListField, ShortField, \ StrFixedLenField, StrLenField, ThreeBytesField, UTCTimeField from scapy.compat import hex_bytes, orb, raw from scapy.config import conf from scapy.modules import six from scapy.packet import Packet, Raw, Padding from scapy.utils import randstring, repr_hex from scapy.layers.x509 import OCSP_Response from scapy.layers.tls.cert import Cert from scapy.layers.tls.basefields import (_tls_version, _TLSVersionField, _TLSClientVersionField) from scapy.layers.tls.extensions import (_ExtensionsLenField, _ExtensionsField, _cert_status_type, TLS_Ext_SupportedVersion_CH, # noqa: E501 TLS_Ext_SignatureAlgorithms, TLS_Ext_SupportedVersion_SH, TLS_Ext_EarlyDataIndication) from scapy.layers.tls.keyexchange import (_TLSSignature, _TLSServerParamsField, _TLSSignatureField, ServerRSAParams, SigAndHashAlgsField, _tls_hash_sig, SigAndHashAlgsLenField) from scapy.layers.tls.session import (_GenericTLSSessionInheritance, readConnState, writeConnState) from scapy.layers.tls.crypto.compression import (_tls_compression_algs, _tls_compression_algs_cls, Comp_NULL, _GenericComp, _GenericCompMetaclass) from scapy.layers.tls.crypto.suites import (_tls_cipher_suites, _tls_cipher_suites_cls, _GenericCipherSuite, _GenericCipherSuiteMetaclass) ############################################################################### # Generic TLS Handshake message # ############################################################################### _tls_handshake_type = {0: "hello_request", 1: "client_hello", 2: "server_hello", 3: "hello_verify_request", 4: "session_ticket", 6: "hello_retry_request", 8: "encrypted_extensions", 11: "certificate", 12: "server_key_exchange", 13: "certificate_request", 14: "server_hello_done", 15: "certificate_verify", 16: "client_key_exchange", 20: "finished", 21: "certificate_url", 22: "certificate_status", 23: "supplemental_data", 24: "key_update"} class _TLSHandshake(_GenericTLSSessionInheritance): """ Inherited by other Handshake classes to get post_build(). Also used as a fallback for unknown TLS Handshake packets. """ name = "TLS Handshake Generic message" fields_desc = [ByteEnumField("msgtype", None, _tls_handshake_type), ThreeBytesField("msglen", None), StrLenField("msg", "", length_from=lambda pkt: pkt.msglen)] def post_build(self, p, pay): tmp_len = len(p) if self.msglen is None: l2 = tmp_len - 4 p = struct.pack("!I", (orb(p[0]) << 24) | l2) + p[4:] return p + pay def guess_payload_class(self, p): return conf.padding_layer def tls_session_update(self, msg_str): """ Covers both post_build- and post_dissection- context updates. 
""" self.tls_session.handshake_messages.append(msg_str) self.tls_session.handshake_messages_parsed.append(self) ############################################################################### # HelloRequest # ############################################################################### class TLSHelloRequest(_TLSHandshake): name = "TLS Handshake - Hello Request" fields_desc = [ByteEnumField("msgtype", 0, _tls_handshake_type), ThreeBytesField("msglen", None)] def tls_session_update(self, msg_str): """ Message should not be added to the list of handshake messages that will be hashed in the finished and certificate verify messages. """ return ############################################################################### # ClientHello fields # ############################################################################### class _GMTUnixTimeField(UTCTimeField): """ "The current time and date in standard UNIX 32-bit format (seconds since the midnight starting Jan 1, 1970, GMT, ignoring leap seconds)." """ def i2h(self, pkt, x): if x is not None: return x return 0 class _TLSRandomBytesField(StrFixedLenField): def i2repr(self, pkt, x): if x is None: return repr(x) return repr_hex(self.i2h(pkt, x)) class _SessionIDField(StrLenField): """ opaque SessionID<0..32>; section 7.4.1.2 of RFC 4346 """ pass class _CipherSuitesField(StrLenField): __slots__ = ["itemfmt", "itemsize", "i2s", "s2i"] islist = 1 def __init__(self, name, default, dico, length_from=None, itemfmt="!H"): StrLenField.__init__(self, name, default, length_from=length_from) self.itemfmt = itemfmt self.itemsize = struct.calcsize(itemfmt) i2s = self.i2s = {} s2i = self.s2i = {} for k in six.iterkeys(dico): i2s[k] = dico[k] s2i[dico[k]] = k def any2i_one(self, pkt, x): if (isinstance(x, _GenericCipherSuite) or isinstance(x, _GenericCipherSuiteMetaclass)): x = x.val if isinstance(x, bytes): x = self.s2i[x] return x def i2repr_one(self, pkt, x): fmt = "0x%%0%dx" % self.itemsize return self.i2s.get(x, fmt % x) def any2i(self, pkt, x): if x is None: return None if not isinstance(x, list): x = [x] return [self.any2i_one(pkt, z) for z in x] def i2repr(self, pkt, x): if x is None: return "None" tmp_len = [self.i2repr_one(pkt, z) for z in x] if len(tmp_len) == 1: tmp_len = tmp_len[0] else: tmp_len = "[%s]" % ", ".join(tmp_len) return tmp_len def i2m(self, pkt, val): if val is None: val = [] return b"".join(struct.pack(self.itemfmt, x) for x in val) def m2i(self, pkt, m): res = [] itemlen = struct.calcsize(self.itemfmt) while m: res.append(struct.unpack(self.itemfmt, m[:itemlen])[0]) m = m[itemlen:] return res def i2len(self, pkt, i): if i is None: return 0 return len(i) * self.itemsize class _CompressionMethodsField(_CipherSuitesField): def any2i_one(self, pkt, x): if (isinstance(x, _GenericComp) or isinstance(x, _GenericCompMetaclass)): x = x.val if isinstance(x, str): x = self.s2i[x] return x ############################################################################### # ClientHello # ############################################################################### class TLSClientHello(_TLSHandshake): """ TLS ClientHello, with abilities to handle extensions. The Random structure follows the RFC 5246: while it is 32-byte long, many implementations use the first 4 bytes as a gmt_unix_time, and then the remaining 28 byts should be completely random. This was designed in order to (sort of) mitigate broken RNGs. If you prefer to show the full 32 random bytes without any GMT time, just comment in/out the lines below. 
""" name = "TLS Handshake - Client Hello" fields_desc = [ByteEnumField("msgtype", 1, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSClientVersionField("version", None, _tls_version), # _TLSRandomBytesField("random_bytes", None, 32), _GMTUnixTimeField("gmt_unix_time", None), _TLSRandomBytesField("random_bytes", None, 28), FieldLenField("sidlen", None, fmt="B", length_of="sid"), _SessionIDField("sid", "", length_from=lambda pkt: pkt.sidlen), FieldLenField("cipherslen", None, fmt="!H", length_of="ciphers"), _CipherSuitesField("ciphers", None, _tls_cipher_suites, itemfmt="!H", length_from=lambda pkt: pkt.cipherslen), FieldLenField("complen", None, fmt="B", length_of="comp"), _CompressionMethodsField("comp", [0], _tls_compression_algs, itemfmt="B", length_from=lambda pkt: pkt.complen), # noqa: E501 _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: (pkt.msglen - (pkt.sidlen or 0) - # noqa: E501 (pkt.cipherslen or 0) - # noqa: E501 (pkt.complen or 0) - # noqa: E501 40))] def post_build(self, p, pay): if self.random_bytes is None: p = p[:10] + randstring(28) + p[10 + 28:] # if no ciphersuites were provided, we add a few usual, supported # ciphersuites along with the appropriate extensions if self.ciphers is None: cipherstart = 39 + (self.sidlen or 0) s = b"001ac02bc023c02fc027009e0067009c003cc009c0130033002f000a" p = p[:cipherstart] + hex_bytes(s) + p[cipherstart + 2:] if self.ext is None: ext_len = b'\x00\x2c' ext_reneg = b'\xff\x01\x00\x01\x00' ext_sn = b'\x00\x00\x00\x0f\x00\r\x00\x00\nsecdev.org' ext_sigalg = b'\x00\r\x00\x08\x00\x06\x04\x03\x04\x01\x02\x01' ext_supgroups = b'\x00\n\x00\x04\x00\x02\x00\x17' p += ext_len + ext_reneg + ext_sn + ext_sigalg + ext_supgroups return super(TLSClientHello, self).post_build(p, pay) def tls_session_update(self, msg_str): """ Either for parsing or building, we store the client_random along with the raw string representing this handshake message. """ super(TLSClientHello, self).tls_session_update(msg_str) s = self.tls_session s.advertised_tls_version = self.version self.random_bytes = msg_str[10:38] s.client_random = (struct.pack('!I', self.gmt_unix_time) + self.random_bytes) # No distinction between a TLS 1.2 ClientHello and a TLS # 1.3 ClientHello when dissecting : TLS 1.3 CH will be # parsed as TLSClientHello if self.ext: for e in self.ext: if isinstance(e, TLS_Ext_SupportedVersion_CH): s.advertised_tls_version = e.versions[0] if isinstance(e, TLS_Ext_SignatureAlgorithms): s.advertised_sig_algs = e.sig_algs class TLS13ClientHello(_TLSHandshake): """ TLS 1.3 ClientHello, with abilities to handle extensions. 
The Random structure is 32 random bytes without any GMT time """ name = "TLS 1.3 Handshake - Client Hello" fields_desc = [ByteEnumField("msgtype", 1, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSClientVersionField("version", None, _tls_version), _TLSRandomBytesField("random_bytes", None, 32), FieldLenField("sidlen", None, fmt="B", length_of="sid"), _SessionIDField("sid", "", length_from=lambda pkt: pkt.sidlen), FieldLenField("cipherslen", None, fmt="!H", length_of="ciphers"), _CipherSuitesField("ciphers", None, _tls_cipher_suites, itemfmt="!H", length_from=lambda pkt: pkt.cipherslen), FieldLenField("complen", None, fmt="B", length_of="comp"), _CompressionMethodsField("comp", [0], _tls_compression_algs, itemfmt="B", length_from=lambda pkt: pkt.complen), # noqa: E501 _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: (pkt.msglen - (pkt.sidlen or 0) - # noqa: E501 (pkt.cipherslen or 0) - # noqa: E501 (pkt.complen or 0) - # noqa: E501 40))] def post_build(self, p, pay): if self.random_bytes is None: p = p[:6] + randstring(32) + p[6 + 32:] return super(TLS13ClientHello, self).post_build(p, pay) def tls_session_update(self, msg_str): """ Either for parsing or building, we store the client_random along with the raw string representing this handshake message. """ super(TLS13ClientHello, self).tls_session_update(msg_str) s = self.tls_session if self.sidlen and self.sidlen > 0: s.sid = self.sid self.random_bytes = msg_str[10:38] s.client_random = self.random_bytes if self.ext: for e in self.ext: if isinstance(e, TLS_Ext_SupportedVersion_CH): self.tls_session.advertised_tls_version = e.versions[0] if isinstance(e, TLS_Ext_SignatureAlgorithms): s.advertised_sig_algs = e.sig_algs ############################################################################### # ServerHello # ############################################################################### class TLSServerHello(_TLSHandshake): """ TLS ServerHello, with abilities to handle extensions. The Random structure follows RFC 5246: while it is 32-byte long, many implementations use the first 4 bytes as a gmt_unix_time, and then the remaining 28 bytes should be completely random. This was designed in order to (sort of) mitigate broken RNGs. If you prefer to show the full 32 random bytes without any GMT time, just comment in/out the lines below.
""" name = "TLS Handshake - Server Hello" fields_desc = [ByteEnumField("msgtype", 2, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSVersionField("version", None, _tls_version), # _TLSRandomBytesField("random_bytes", None, 32), _GMTUnixTimeField("gmt_unix_time", None), _TLSRandomBytesField("random_bytes", None, 28), FieldLenField("sidlen", None, length_of="sid", fmt="B"), _SessionIDField("sid", "", length_from=lambda pkt: pkt.sidlen), EnumField("cipher", None, _tls_cipher_suites), _CompressionMethodsField("comp", [0], _tls_compression_algs, itemfmt="B", length_from=lambda pkt: 1), _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: (pkt.msglen - (pkt.sidlen or 0) - # noqa: E501 38))] # 40)) ] @classmethod def dispatch_hook(cls, _pkt=None, *args, **kargs): if _pkt and len(_pkt) >= 6: version = struct.unpack("!H", _pkt[4:6])[0] if version == 0x0304 or version > 0x7f00: return TLS13ServerHello return TLSServerHello def post_build(self, p, pay): if self.random_bytes is None: p = p[:10] + randstring(28) + p[10 + 28:] return super(TLSServerHello, self).post_build(p, pay) def tls_session_update(self, msg_str): """ Either for parsing or building, we store the server_random along with the raw string representing this handshake message. We also store the session_id, the cipher suite (if recognized), the compression method, and finally we instantiate the pending write and read connection states. Usually they get updated later on in the negotiation when we learn the session keys, and eventually they are committed once a ChangeCipherSpec has been sent/received. """ super(TLSServerHello, self).tls_session_update(msg_str) self.tls_session.tls_version = self.version self.random_bytes = msg_str[10:38] self.tls_session.server_random = (struct.pack('!I', self.gmt_unix_time) + self.random_bytes) self.tls_session.sid = self.sid cs_cls = None if self.cipher: cs_val = self.cipher if cs_val not in _tls_cipher_suites_cls: warning("Unknown cipher suite %d from ServerHello" % cs_val) # we do not try to set a default nor stop the execution else: cs_cls = _tls_cipher_suites_cls[cs_val] comp_cls = Comp_NULL if self.comp: comp_val = self.comp[0] if comp_val not in _tls_compression_algs_cls: err = "Unknown compression alg %d from ServerHello" % comp_val warning(err) comp_val = 0 comp_cls = _tls_compression_algs_cls[comp_val] connection_end = self.tls_session.connection_end self.tls_session.pwcs = writeConnState(ciphersuite=cs_cls, compression_alg=comp_cls, connection_end=connection_end, tls_version=self.version) self.tls_session.prcs = readConnState(ciphersuite=cs_cls, compression_alg=comp_cls, connection_end=connection_end, tls_version=self.version) _tls_13_server_hello_fields = [ ByteEnumField("msgtype", 2, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSVersionField("version", 0x0303, _tls_version), _TLSRandomBytesField("random_bytes", None, 32), FieldLenField("sidlen", None, length_of="sid", fmt="B"), _SessionIDField("sid", "", length_from=lambda pkt: pkt.sidlen), EnumField("cipher", None, _tls_cipher_suites), _CompressionMethodsField("comp", [0], _tls_compression_algs, itemfmt="B", length_from=lambda pkt: 1), _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: (pkt.msglen - 38)) ] class TLS13ServerHello(_TLSHandshake): """ TLS 1.3 ServerHello """ name = "TLS 1.3 Handshake - Server Hello" fields_desc = _tls_13_server_hello_fields def post_build(self, p, pay): if self.random_bytes is 
None: p = p[:6] + randstring(32) + p[6 + 32:] return super(TLS13ServerHello, self).post_build(p, pay) def tls_session_update(self, msg_str): """ Either for parsing or building, we store the server_random along with the raw string representing this handshake message. We also store the cipher suite (if recognized), and finally we instantiate the write and read connection states. """ super(TLS13ServerHello, self).tls_session_update(msg_str) s = self.tls_session if self.ext: for e in self.ext: if isinstance(e, TLS_Ext_SupportedVersion_SH): s.tls_version = e.version break s.server_random = self.random_bytes s.ciphersuite = self.cipher cs_cls = None if self.cipher: cs_val = self.cipher if cs_val not in _tls_cipher_suites_cls: warning("Unknown cipher suite %d from ServerHello" % cs_val) # we do not try to set a default nor stop the execution else: cs_cls = _tls_cipher_suites_cls[cs_val] connection_end = s.connection_end if connection_end == "server": s.pwcs = writeConnState(ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version) s.triggered_pwcs_commit = True elif connection_end == "client": s.prcs = readConnState(ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version) s.triggered_prcs_commit = True if s.tls13_early_secret is None: # In case the connState was not pre-initialized, we could not # compute the early secrets at the ClientHello, so we do it here. s.compute_tls13_early_secrets() s.compute_tls13_handshake_secrets()<|fim▁hole|> elif connection_end == "client": shts = s.tls13_derived_secrets["server_handshake_traffic_secret"] s.prcs.tls13_derive_keys(shts) ############################################################################### # HelloRetryRequest # ############################################################################### class TLS13HelloRetryRequest(_TLSHandshake): name = "TLS 1.3 Handshake - Hello Retry Request" fields_desc = _tls_13_server_hello_fields ############################################################################### # EncryptedExtensions # ############################################################################### class TLSEncryptedExtensions(_TLSHandshake): name = "TLS 1.3 Handshake - Encrypted Extensions" fields_desc = [ByteEnumField("msgtype", 8, _tls_handshake_type), ThreeBytesField("msglen", None), _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: pkt.msglen - 2)] def post_build_tls_session_update(self, msg_str): self.tls_session_update(msg_str) s = self.tls_session connection_end = s.connection_end # Check if the server early_data extension is present in # EncryptedExtensions message (if so, early data was accepted by the # server) early_data_accepted = False if self.ext: for e in self.ext: if isinstance(e, TLS_Ext_EarlyDataIndication): early_data_accepted = True # If the server did not accept early_data, we change prcs traffic # encryption keys. Otherwise, the keys will be updated after the # EndOfEarlyData message if connection_end == "server": if not early_data_accepted: s.prcs = readConnState(ciphersuite=type(s.wcs.ciphersuite), connection_end=connection_end, tls_version=s.tls_version) s.triggered_prcs_commit = True chts = s.tls13_derived_secrets["client_handshake_traffic_secret"] # noqa: E501 s.prcs.tls13_derive_keys(chts) s.rcs = self.tls_session.prcs s.triggered_prcs_commit = False def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) s = self.tls_session connection_end = s.connection_end # Check if the server early_data extension is present in # EncryptedExtensions message (if so, early data was accepted by the # server) early_data_accepted = False if self.ext: for e in self.ext: if isinstance(e, TLS_Ext_EarlyDataIndication): early_data_accepted = True # If the server did not accept early_data, we change pwcs traffic # encryption keys. Otherwise, the keys will be updated after the # EndOfEarlyData message if connection_end == "client": if not early_data_accepted: s.pwcs = writeConnState(ciphersuite=type(s.rcs.ciphersuite), connection_end=connection_end, tls_version=s.tls_version) s.triggered_pwcs_commit = True chts = s.tls13_derived_secrets["client_handshake_traffic_secret"] # noqa: E501 s.pwcs.tls13_derive_keys(chts) s.wcs = self.tls_session.pwcs s.triggered_pwcs_commit = False ############################################################################### # Certificate # ############################################################################### # XXX It might be appropriate to rewrite this mess with basic 3-byte FieldLenField. # noqa: E501 class _ASN1CertLenField(FieldLenField): """ This is mostly a 3-byte FieldLenField. """ def __init__(self, name, default, length_of=None, adjust=lambda pkt, x: x): self.length_of = length_of self.adjust = adjust Field.__init__(self, name, default, fmt="!I") def i2m(self, pkt, x): if x is None: if self.length_of is not None: fld, fval = pkt.getfield_and_val(self.length_of) f = fld.i2len(pkt, fval) x = self.adjust(pkt, f) return x def addfield(self, pkt, s, val): return s + struct.pack(self.fmt, self.i2m(pkt, val))[1:4] def getfield(self, pkt, s): return s[3:], self.m2i(pkt, struct.unpack(self.fmt, b"\x00" + s[:3])[0]) # noqa: E501 class _ASN1CertListField(StrLenField): islist = 1 def i2len(self, pkt, i): if i is None: return 0 return len(self.i2m(pkt, i)) def getfield(self, pkt, s): """ Extract Certs in a loop. XXX We should provide safeguards when trying to parse a Cert.
""" tmp_len = None if self.length_from is not None: tmp_len = self.length_from(pkt) lst = [] ret = b"" m = s if tmp_len is not None: m, ret = s[:tmp_len], s[tmp_len:] while m: clen = struct.unpack("!I", b'\x00' + m[:3])[0] lst.append((clen, Cert(m[3:3 + clen]))) m = m[3 + clen:] return m + ret, lst def i2m(self, pkt, i): def i2m_one(i): if isinstance(i, str): return i if isinstance(i, Cert): s = i.der tmp_len = struct.pack("!I", len(s))[1:4] return tmp_len + s (tmp_len, s) = i if isinstance(s, Cert): s = s.der return struct.pack("!I", tmp_len)[1:4] + s if i is None: return b"" if isinstance(i, str): return i if isinstance(i, Cert): i = [i] return b"".join(i2m_one(x) for x in i) def any2i(self, pkt, x): return x class _ASN1CertField(StrLenField): def i2len(self, pkt, i): if i is None: return 0 return len(self.i2m(pkt, i)) def getfield(self, pkt, s): tmp_len = None if self.length_from is not None: tmp_len = self.length_from(pkt) ret = b"" m = s if tmp_len is not None: m, ret = s[:tmp_len], s[tmp_len:] clen = struct.unpack("!I", b'\x00' + m[:3])[0] len_cert = (clen, Cert(m[3:3 + clen])) m = m[3 + clen:] return m + ret, len_cert def i2m(self, pkt, i): def i2m_one(i): if isinstance(i, str): return i if isinstance(i, Cert): s = i.der tmp_len = struct.pack("!I", len(s))[1:4] return tmp_len + s (tmp_len, s) = i if isinstance(s, Cert): s = s.der return struct.pack("!I", tmp_len)[1:4] + s if i is None: return b"" return i2m_one(i) def any2i(self, pkt, x): return x class TLSCertificate(_TLSHandshake): """ XXX We do not support RFC 5081, i.e. OpenPGP certificates. """ name = "TLS Handshake - Certificate" fields_desc = [ByteEnumField("msgtype", 11, _tls_handshake_type), ThreeBytesField("msglen", None), _ASN1CertLenField("certslen", None, length_of="certs"), _ASN1CertListField("certs", [], length_from=lambda pkt: pkt.certslen)] @classmethod def dispatch_hook(cls, _pkt=None, *args, **kargs): if _pkt: tls_session = kargs.get("tls_session", None) if tls_session and (tls_session.tls_version or 0) >= 0x0304: return TLS13Certificate return TLSCertificate def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) connection_end = self.tls_session.connection_end if connection_end == "client": self.tls_session.server_certs = [x[1] for x in self.certs] else: self.tls_session.client_certs = [x[1] for x in self.certs] class _ASN1CertAndExt(_GenericTLSSessionInheritance): name = "Certificate and Extensions" fields_desc = [_ASN1CertField("cert", ""), FieldLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", [], length_from=lambda pkt: pkt.extlen)] def extract_padding(self, s): return b"", s class _ASN1CertAndExtListField(PacketListField): def m2i(self, pkt, m): return self.cls(m, tls_session=pkt.tls_session) class TLS13Certificate(_TLSHandshake): name = "TLS 1.3 Handshake - Certificate" fields_desc = [ByteEnumField("msgtype", 11, _tls_handshake_type), ThreeBytesField("msglen", None), FieldLenField("cert_req_ctxt_len", None, fmt="B", length_of="cert_req_ctxt"), StrLenField("cert_req_ctxt", "", length_from=lambda pkt: pkt.cert_req_ctxt_len), _ASN1CertLenField("certslen", None, length_of="certs"), _ASN1CertAndExtListField("certs", [], _ASN1CertAndExt, length_from=lambda pkt: pkt.certslen)] # noqa: E501 def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) connection_end = self.tls_session.connection_end if connection_end == "client": if self.certs: sc = [x.cert[1] for x in self.certs] self.tls_session.server_certs = sc else: if self.certs: cc = 
[x.cert[1] for x in self.certs] self.tls_session.client_certs = cc ############################################################################### # ServerKeyExchange # ############################################################################### class TLSServerKeyExchange(_TLSHandshake): name = "TLS Handshake - Server Key Exchange" fields_desc = [ByteEnumField("msgtype", 12, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSServerParamsField("params", None, length_from=lambda pkt: pkt.msglen), _TLSSignatureField("sig", None, length_from=lambda pkt: pkt.msglen - len(pkt.params))] # noqa: E501 def build(self, *args, **kargs): r""" We overload build() method in order to provide a valid default value for params based on TLS session if not provided. This cannot be done by overriding i2m() because the method is called on a copy of the packet. The 'params' field is built according to key_exchange.server_kx_msg_cls which should have been set after receiving a cipher suite in a previous ServerHello. Usual cases are: - None: for RSA encryption or fixed FF/ECDH. This should never happen, as no ServerKeyExchange should be generated in the first place. - ServerDHParams: for ephemeral FFDH. In that case, the parameter to server_kx_msg_cls does not matter. - ServerECDH\*Params: for ephemeral ECDH. There are actually three classes, which are dispatched by _tls_server_ecdh_cls_guess on the first byte retrieved. The default here is b"\03", which corresponds to ServerECDHNamedCurveParams (implicit curves). When the Server\*DHParams are built via .fill_missing(), the session server_kx_privkey will be updated accordingly. """ fval = self.getfieldval("params") if fval is None: s = self.tls_session if s.pwcs: if s.pwcs.key_exchange.export: cls = ServerRSAParams(tls_session=s) else: cls = s.pwcs.key_exchange.server_kx_msg_cls(b"\x03") cls = cls(tls_session=s) try: cls.fill_missing() except Exception: if conf.debug_dissector: raise pass else: cls = Raw() self.params = cls fval = self.getfieldval("sig") if fval is None: s = self.tls_session if s.pwcs: if not s.pwcs.key_exchange.anonymous: p = self.params if p is None: p = b"" m = s.client_random + s.server_random + raw(p) cls = _TLSSignature(tls_session=s) cls._update_sig(m, s.server_key) else: cls = Raw() else: cls = Raw() self.sig = cls return _TLSHandshake.build(self, *args, **kargs) def post_dissection(self, pkt): """ While previously dissecting Server*DHParams, the session server_kx_pubkey should have been updated. XXX Add a 'fixed_dh' OR condition to the 'anonymous' test. 
""" s = self.tls_session if s.prcs and s.prcs.key_exchange.no_ske: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: useless ServerKeyExchange [%s]", pkt_info) if (s.prcs and not s.prcs.key_exchange.anonymous and s.client_random and s.server_random and s.server_certs and len(s.server_certs) > 0): m = s.client_random + s.server_random + raw(self.params) sig_test = self.sig._verify_sig(m, s.server_certs[0]) if not sig_test: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: invalid ServerKeyExchange signature [%s]", pkt_info) # noqa: E501 ############################################################################### # CertificateRequest # ############################################################################### _tls_client_certificate_types = {1: "rsa_sign", 2: "dss_sign", 3: "rsa_fixed_dh", 4: "dss_fixed_dh", 5: "rsa_ephemeral_dh_RESERVED", 6: "dss_ephemeral_dh_RESERVED", 20: "fortezza_dms_RESERVED", 64: "ecdsa_sign", 65: "rsa_fixed_ecdh", 66: "ecdsa_fixed_ecdh"} class _CertTypesField(_CipherSuitesField): pass class _CertAuthoritiesField(StrLenField): """ XXX Rework this with proper ASN.1 parsing. """ islist = 1 def getfield(self, pkt, s): tmp_len = self.length_from(pkt) return s[tmp_len:], self.m2i(pkt, s[:tmp_len]) def m2i(self, pkt, m): res = [] while len(m) > 1: tmp_len = struct.unpack("!H", m[:2])[0] if len(m) < tmp_len + 2: res.append((tmp_len, m[2:])) break dn = m[2:2 + tmp_len] res.append((tmp_len, dn)) m = m[2 + tmp_len:] return res def i2m(self, pkt, i): return b"".join(map(lambda x_y: struct.pack("!H", x_y[0]) + x_y[1], i)) def addfield(self, pkt, s, val): return s + self.i2m(pkt, val) def i2len(self, pkt, val): if val is None: return 0 else: return len(self.i2m(pkt, val)) class TLSCertificateRequest(_TLSHandshake): name = "TLS Handshake - Certificate Request" fields_desc = [ByteEnumField("msgtype", 13, _tls_handshake_type), ThreeBytesField("msglen", None), FieldLenField("ctypeslen", None, fmt="B", length_of="ctypes"), _CertTypesField("ctypes", [1, 64], _tls_client_certificate_types, itemfmt="!B", length_from=lambda pkt: pkt.ctypeslen), SigAndHashAlgsLenField("sig_algs_len", None, length_of="sig_algs"), SigAndHashAlgsField("sig_algs", [0x0403, 0x0401, 0x0201], EnumField("hash_sig", None, _tls_hash_sig), # noqa: E501 length_from=lambda pkt: pkt.sig_algs_len), # noqa: E501 FieldLenField("certauthlen", None, fmt="!H", length_of="certauth"), _CertAuthoritiesField("certauth", [], length_from=lambda pkt: pkt.certauthlen)] # noqa: E501 class TLS13CertificateRequest(_TLSHandshake): name = "TLS 1.3 Handshake - Certificate Request" fields_desc = [ByteEnumField("msgtype", 13, _tls_handshake_type), ThreeBytesField("msglen", None), FieldLenField("cert_req_ctxt_len", None, fmt="B", length_of="cert_req_ctxt"), StrLenField("cert_req_ctxt", "", length_from=lambda pkt: pkt.cert_req_ctxt_len), _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: pkt.msglen - pkt.cert_req_ctxt_len - 3)] ############################################################################### # ServerHelloDone # ############################################################################### class TLSServerHelloDone(_TLSHandshake): name = "TLS Handshake - Server Hello Done" fields_desc = [ByteEnumField("msgtype", 14, _tls_handshake_type), ThreeBytesField("msglen", None)] ############################################################################### # CertificateVerify # 
############################################################################### class TLSCertificateVerify(_TLSHandshake): name = "TLS Handshake - Certificate Verify" fields_desc = [ByteEnumField("msgtype", 15, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSSignatureField("sig", None, length_from=lambda pkt: pkt.msglen)] def build(self, *args, **kargs): sig = self.getfieldval("sig") if sig is None: s = self.tls_session m = b"".join(s.handshake_messages) if s.tls_version >= 0x0304: if s.connection_end == "client": context_string = b"TLS 1.3, client CertificateVerify" elif s.connection_end == "server": context_string = b"TLS 1.3, server CertificateVerify" m = b"\x20" * 64 + context_string + b"\x00" + s.wcs.hash.digest(m) # noqa: E501 self.sig = _TLSSignature(tls_session=s) if s.connection_end == "client": self.sig._update_sig(m, s.client_key) elif s.connection_end == "server": # should be TLS 1.3 only self.sig._update_sig(m, s.server_key) return _TLSHandshake.build(self, *args, **kargs) def post_dissection(self, pkt): s = self.tls_session m = b"".join(s.handshake_messages) if s.tls_version >= 0x0304: if s.connection_end == "client": context_string = b"TLS 1.3, server CertificateVerify" elif s.connection_end == "server": context_string = b"TLS 1.3, client CertificateVerify" m = b"\x20" * 64 + context_string + b"\x00" + s.rcs.hash.digest(m) if s.connection_end == "server": if s.client_certs and len(s.client_certs) > 0: sig_test = self.sig._verify_sig(m, s.client_certs[0]) if not sig_test: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: invalid CertificateVerify signature [%s]", pkt_info) # noqa: E501 elif s.connection_end == "client": # should be TLS 1.3 only if s.server_certs and len(s.server_certs) > 0: sig_test = self.sig._verify_sig(m, s.server_certs[0]) if not sig_test: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: invalid CertificateVerify signature [%s]", pkt_info) # noqa: E501 ############################################################################### # ClientKeyExchange # ############################################################################### class _TLSCKExchKeysField(PacketField): __slots__ = ["length_from"] holds_packet = 1 def __init__(self, name, length_from=None, remain=0): self.length_from = length_from PacketField.__init__(self, name, None, None, remain=remain) def m2i(self, pkt, m): """ The client_kx_msg may be either None, EncryptedPreMasterSecret (for RSA encryption key exchange), ClientDiffieHellmanPublic, or ClientECDiffieHellmanPublic. When either one of them gets dissected, the session context is updated accordingly. """ tmp_len = self.length_from(pkt) tbd, rem = m[:tmp_len], m[tmp_len:] s = pkt.tls_session cls = None if s.prcs and s.prcs.key_exchange: cls = s.prcs.key_exchange.client_kx_msg_cls if cls is None: return Raw(tbd) / Padding(rem) return cls(tbd, tls_session=s) / Padding(rem) class TLSClientKeyExchange(_TLSHandshake): """ This class mostly works like TLSServerKeyExchange and its 'params' field. 
""" name = "TLS Handshake - Client Key Exchange" fields_desc = [ByteEnumField("msgtype", 16, _tls_handshake_type), ThreeBytesField("msglen", None), _TLSCKExchKeysField("exchkeys", length_from=lambda pkt: pkt.msglen)] def build(self, *args, **kargs): fval = self.getfieldval("exchkeys") if fval is None: s = self.tls_session if s.prcs: cls = s.prcs.key_exchange.client_kx_msg_cls cls = cls(tls_session=s) else: cls = Raw() self.exchkeys = cls return _TLSHandshake.build(self, *args, **kargs) ############################################################################### # Finished # ############################################################################### class _VerifyDataField(StrLenField): def getfield(self, pkt, s): if pkt.tls_session.tls_version == 0x0300: sep = 36 elif pkt.tls_session.tls_version >= 0x0304: sep = pkt.tls_session.rcs.hash.hash_len else: sep = 12 return s[sep:], s[:sep] class TLSFinished(_TLSHandshake): name = "TLS Handshake - Finished" fields_desc = [ByteEnumField("msgtype", 20, _tls_handshake_type), ThreeBytesField("msglen", None), _VerifyDataField("vdata", None)] def build(self, *args, **kargs): fval = self.getfieldval("vdata") if fval is None: s = self.tls_session handshake_msg = b"".join(s.handshake_messages) con_end = s.connection_end if s.tls_version < 0x0304: ms = s.master_secret self.vdata = s.wcs.prf.compute_verify_data(con_end, "write", handshake_msg, ms) else: self.vdata = s.compute_tls13_verify_data(con_end, "write") return _TLSHandshake.build(self, *args, **kargs) def post_dissection(self, pkt): s = self.tls_session if not s.frozen: handshake_msg = b"".join(s.handshake_messages) if s.tls_version < 0x0304 and s.master_secret is not None: ms = s.master_secret con_end = s.connection_end verify_data = s.rcs.prf.compute_verify_data(con_end, "read", handshake_msg, ms) if self.vdata != verify_data: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: invalid Finished received [%s]", pkt_info) # noqa: E501 elif s.tls_version >= 0x0304: con_end = s.connection_end verify_data = s.compute_tls13_verify_data(con_end, "read") if self.vdata != verify_data: pkt_info = pkt.firstlayer().summary() log_runtime.info("TLS: invalid Finished received [%s]", pkt_info) # noqa: E501 def post_build_tls_session_update(self, msg_str): self.tls_session_update(msg_str) s = self.tls_session if s.tls_version >= 0x0304: s.pwcs = writeConnState(ciphersuite=type(s.wcs.ciphersuite), connection_end=s.connection_end, tls_version=s.tls_version) s.triggered_pwcs_commit = True if s.connection_end == "server": s.compute_tls13_traffic_secrets() elif s.connection_end == "client": s.compute_tls13_traffic_secrets_end() s.compute_tls13_resumption_secret() def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) s = self.tls_session if s.tls_version >= 0x0304: s.prcs = readConnState(ciphersuite=type(s.rcs.ciphersuite), connection_end=s.connection_end, tls_version=s.tls_version) s.triggered_prcs_commit = True if s.connection_end == "client": s.compute_tls13_traffic_secrets() elif s.connection_end == "server": s.compute_tls13_traffic_secrets_end() s.compute_tls13_resumption_secret() # Additional handshake messages ############################################################################### # HelloVerifyRequest # ############################################################################### class TLSHelloVerifyRequest(_TLSHandshake): """ Defined for DTLS, see RFC 6347. 
""" name = "TLS Handshake - Hello Verify Request" fields_desc = [ByteEnumField("msgtype", 21, _tls_handshake_type), ThreeBytesField("msglen", None), FieldLenField("cookielen", None, fmt="B", length_of="cookie"), StrLenField("cookie", "", length_from=lambda pkt: pkt.cookielen)] ############################################################################### # CertificateURL # ############################################################################### _tls_cert_chain_types = {0: "individual_certs", 1: "pkipath"} class URLAndOptionalHash(Packet): name = "URLAndOptionHash structure for TLSCertificateURL" fields_desc = [FieldLenField("urllen", None, length_of="url"), StrLenField("url", "", length_from=lambda pkt: pkt.urllen), FieldLenField("hash_present", None, fmt="B", length_of="hash", adjust=lambda pkt, x: int(math.ceil(x / 20.))), # noqa: E501 StrLenField("hash", "", length_from=lambda pkt: 20 * pkt.hash_present)] def guess_payload_class(self, p): return Padding class TLSCertificateURL(_TLSHandshake): """ Defined in RFC 4366. PkiPath structure of section 8 is not implemented yet. """ name = "TLS Handshake - Certificate URL" fields_desc = [ByteEnumField("msgtype", 21, _tls_handshake_type), ThreeBytesField("msglen", None), ByteEnumField("certchaintype", None, _tls_cert_chain_types), FieldLenField("uahlen", None, length_of="uah"), PacketListField("uah", [], URLAndOptionalHash, length_from=lambda pkt: pkt.uahlen)] ############################################################################### # CertificateStatus # ############################################################################### class ThreeBytesLenField(FieldLenField): def __init__(self, name, default, length_of=None, adjust=lambda pkt, x: x): FieldLenField.__init__(self, name, default, length_of=length_of, fmt='!I', adjust=adjust) def i2repr(self, pkt, x): if x is None: return 0 return repr(self.i2h(pkt, x)) def addfield(self, pkt, s, val): return s + struct.pack(self.fmt, self.i2m(pkt, val))[1:4] def getfield(self, pkt, s): return s[3:], self.m2i(pkt, struct.unpack(self.fmt, b"\x00" + s[:3])[0]) # noqa: E501 _cert_status_cls = {1: OCSP_Response} class _StatusField(PacketField): def m2i(self, pkt, m): idtype = pkt.status_type cls = self.cls if idtype in _cert_status_cls: cls = _cert_status_cls[idtype] return cls(m) class TLSCertificateStatus(_TLSHandshake): name = "TLS Handshake - Certificate Status" fields_desc = [ByteEnumField("msgtype", 22, _tls_handshake_type), ThreeBytesField("msglen", None), ByteEnumField("status_type", 1, _cert_status_type), ThreeBytesLenField("responselen", None, length_of="response"), _StatusField("response", None, Raw)] ############################################################################### # SupplementalData # ############################################################################### class SupDataEntry(Packet): name = "Supplemental Data Entry - Generic" fields_desc = [ShortField("sdtype", None), FieldLenField("len", None, length_of="data"), StrLenField("data", "", length_from=lambda pkt:pkt.len)] def guess_payload_class(self, p): return Padding class UserMappingData(Packet): name = "User Mapping Data" fields_desc = [ByteField("version", None), FieldLenField("len", None, length_of="data"), StrLenField("data", "", length_from=lambda pkt: pkt.len)] def guess_payload_class(self, p): return Padding class SupDataEntryUM(Packet): name = "Supplemental Data Entry - User Mapping" fields_desc = [ShortField("sdtype", None), FieldLenField("len", None, length_of="data", adjust=lambda pkt, x: x + 2), 
FieldLenField("dlen", None, length_of="data"), PacketListField("data", [], UserMappingData, length_from=lambda pkt:pkt.dlen)] def guess_payload_class(self, p): return Padding class TLSSupplementalData(_TLSHandshake): name = "TLS Handshake - Supplemental Data" fields_desc = [ByteEnumField("msgtype", 23, _tls_handshake_type), ThreeBytesField("msglen", None), ThreeBytesLenField("sdatalen", None, length_of="sdata"), PacketListField("sdata", [], SupDataEntry, length_from=lambda pkt: pkt.sdatalen)] ############################################################################### # NewSessionTicket # ############################################################################### class TLSNewSessionTicket(_TLSHandshake): """ XXX When knowing the right secret, we should be able to read the ticket. """ name = "TLS Handshake - New Session Ticket" fields_desc = [ByteEnumField("msgtype", 4, _tls_handshake_type), ThreeBytesField("msglen", None), IntField("lifetime", 0xffffffff), FieldLenField("ticketlen", None, length_of="ticket"), StrLenField("ticket", "", length_from=lambda pkt: pkt.ticketlen)] @classmethod def dispatch_hook(cls, _pkt=None, *args, **kargs): s = kargs.get("tls_session", None) if s and s.tls_version and s.tls_version >= 0x0304: return TLS13NewSessionTicket return TLSNewSessionTicket def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) if self.tls_session.connection_end == "client": self.tls_session.client_session_ticket = self.ticket class TLS13NewSessionTicket(_TLSHandshake): """ Uncomment the TicketField line for parsing a RFC 5077 ticket. """ name = "TLS 1.3 Handshake - New Session Ticket" fields_desc = [ByteEnumField("msgtype", 4, _tls_handshake_type), ThreeBytesField("msglen", None), IntField("ticket_lifetime", 0xffffffff), IntField("ticket_age_add", 0), FieldLenField("noncelen", None, fmt="B", length_of="ticket_nonce"), StrLenField("ticket_nonce", "", length_from=lambda pkt: pkt.noncelen), FieldLenField("ticketlen", None, length_of="ticket"), # TicketField("ticket", "", StrLenField("ticket", "", length_from=lambda pkt: pkt.ticketlen), _ExtensionsLenField("extlen", None, length_of="ext"), _ExtensionsField("ext", None, length_from=lambda pkt: (pkt.msglen - (pkt.ticketlen or 0) - # noqa: E501 pkt.noncelen or 0) - 13)] # noqa: E501 def post_dissection_tls_session_update(self, msg_str): self.tls_session_update(msg_str) if self.tls_session.connection_end == "client": self.tls_session.client_session_ticket = self.ticket ############################################################################### # EndOfEarlyData # ############################################################################### class TLS13EndOfEarlyData(_TLSHandshake): name = "TLS 1.3 Handshake - End Of Early Data" fields_desc = [ByteEnumField("msgtype", 5, _tls_handshake_type), ThreeBytesField("msglen", None)] ############################################################################### # KeyUpdate # ############################################################################### _key_update_request = {0: "update_not_requested", 1: "update_requested"} class TLS13KeyUpdate(_TLSHandshake): name = "TLS 1.3 Handshake - Key Update" fields_desc = [ByteEnumField("msgtype", 24, _tls_handshake_type), ThreeBytesField("msglen", None), ByteEnumField("request_update", 0, _key_update_request)] ############################################################################### # All handshake messages defined in this module # ############################################################################### 
_tls_handshake_cls = {0: TLSHelloRequest, 1: TLSClientHello, 2: TLSServerHello, 3: TLSHelloVerifyRequest, 4: TLSNewSessionTicket, 8: TLSEncryptedExtensions, 11: TLSCertificate, 12: TLSServerKeyExchange, 13: TLSCertificateRequest, 14: TLSServerHelloDone, 15: TLSCertificateVerify, 16: TLSClientKeyExchange, 20: TLSFinished, 21: TLSCertificateURL, 22: TLSCertificateStatus, 23: TLSSupplementalData} _tls13_handshake_cls = {1: TLS13ClientHello, 2: TLS13ServerHello, 4: TLS13NewSessionTicket, 5: TLS13EndOfEarlyData, 8: TLSEncryptedExtensions, 11: TLS13Certificate, 13: TLS13CertificateRequest, 15: TLSCertificateVerify, 20: TLSFinished, 24: TLS13KeyUpdate}<|fim▁end|>
if connection_end == "server": shts = s.tls13_derived_secrets["server_handshake_traffic_secret"] s.pwcs.tls13_derive_keys(shts)
<|file_name|>fonts.js<|end_file_name|><|fim▁begin|>'use strict'; // ================================== // // Load modules. // // ================================== var config = require('../config.js'); var gulp = require('gulp'); // ================================== // // Fonts // // ================================== gulp.task('fonts', function () {<|fim▁hole|> gulp.src([config.fonts.src]) .pipe(gulp.dest(config.fonts.dest)); });<|fim▁end|>
<|file_name|>latentsemantic.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (C) 2019 David Arroyo Menéndez

# Author: David Arroyo Menéndez <[email protected]><|fim▁hole|>

# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.

# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with latentsemantic; see the file LICENSE. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA,

#import pdb
import pandas as pd
#pdb.set_trace()
df = pd.read_csv('data/Reviews.csv')
print(df.head(3))
print(df['Text'].head(2))

from sklearn.feature_extraction.text import TfidfVectorizer
tfidf = TfidfVectorizer()
print(tfidf)
print(tfidf.fit(df['Text']))
X = tfidf.transform(df['Text'])
print(X)
#print(df['Text'][1].head(2))
print([X[1, tfidf.vocabulary_['peanuts']]])
print([X[1, tfidf.vocabulary_['jumbo']]])
print([X[1, tfidf.vocabulary_['error']]])

import numpy as np
df.dropna(inplace=True)
df = df[df['Score'] != 3]  # drop neutral (3-star) reviews before labelling
df['Positivity'] = np.where(df['Score'] > 3, 1, 0)
cols = ['Id', 'ProductId', 'UserId', 'ProfileName', 'HelpfulnessNumerator', 'HelpfulnessDenominator', 'Score', 'Time', 'Summary']
df.drop(cols, axis=1, inplace=True)
df.head(3)

from sklearn.model_selection import train_test_split
X = df.Text
y = df.Positivity
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0)
print("Train set has total {0} entries with {1:.2f}% negative, {2:.2f}% positive".format(len(X_train), (len(X_train[y_train == 0]) / (len(X_train)*1.))*100, (len(X_train[y_train == 1]) / (len(X_train)*1.))*100))
print("Test set has total {0} entries with {1:.2f}% negative, {2:.2f}% positive".format(len(X_test), (len(X_test[y_test == 0]) / (len(X_test)*1.))*100, (len(X_test[y_test == 1]) / (len(X_test)*1.))*100))<|fim▁end|>
# Maintainer: David Arroyo Menéndez <[email protected]>
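# latentsemantic.py above stops at the TF-IDF matrix. A minimal sketch of the
# latent-semantic step its filename implies, assuming scikit-learn's
# TruncatedSVD; the documents and component count are illustrative only.
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer

docs = ["peanuts taste great", "jumbo peanuts arrived", "labeling error on jar"]
X = TfidfVectorizer().fit_transform(docs)   # sparse document-term matrix

# Latent Semantic Analysis: project documents into a low-rank topic space.
svd = TruncatedSVD(n_components=2, random_state=0)
X_topics = svd.fit_transform(X)             # dense (n_docs, n_components)
print(X_topics.shape)
print(svd.explained_variance_ratio_)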
<|file_name|>core.rs<|end_file_name|><|fim▁begin|>// TODO: drop all fail!

use std::u32;
use std::cast;
use std::str::from_utf8_owned;
use std::rt::io::*;

use misc::*;
use zigzag::*;

pub mod wire_format {
    pub static TAG_TYPE_BITS: u32 = 3;
    pub static TAG_TYPE_MASK: u32 = (1 << TAG_TYPE_BITS) - 1;

    #[deriving(Eq, Clone)]
    pub enum WireType {
        WireTypeVarint = 0,
        WireTypeFixed64 = 1,
        WireTypeLengthDelimited = 2,
        WireTypeStartGroup = 3,
        WireTypeEndGroup = 4,
        WireTypeFixed32 = 5,
    }

    impl WireType {
        pub fn new(n: u32) -> WireType {
            match n {
                0 => WireTypeVarint,
                1 => WireTypeFixed64,
                2 => WireTypeLengthDelimited,
                3 => WireTypeStartGroup,
                4 => WireTypeEndGroup,
                5 => WireTypeFixed32,
                _ => fail!("unknown wire type")
            }
        }
    }

    pub struct Tag(u32);

    impl Tag {
        fn value(self) -> u32 {
            match self {
                Tag(value) => value
            }
        }

        pub fn make(field_number: u32, wire_type: WireType) -> Tag {
            Tag((field_number << TAG_TYPE_BITS) | (wire_type as u32))
        }

        pub fn unpack(self) -> (u32, WireType) {
            (self.field_number(), self.wire_type())
        }

        fn wire_type(self) -> WireType {
            WireType::new(self.value() & TAG_TYPE_MASK)
        }

        pub fn field_number(self) -> u32 {
            let r = self.value() >> TAG_TYPE_BITS;
            assert!(r > 0, "field number must be positive");
            r
        }
    }

    pub fn tag_unpack(tag: u32) -> (WireType, u32) {
        (Tag(tag).wire_type(), Tag(tag).field_number())
    }
}

pub struct CodedInputStream {
    buffer: ~[u8],
    buffer_size: u32,
    buffer_pos: u32,
    reader: Option<@Reader>,
    total_bytes_retired: u32,
    current_limit: u32,
    buffer_size_after_limit: u32,
}

impl CodedInputStream {
    pub fn new(reader: @Reader) -> CodedInputStream {
        CodedInputStream {
            // TODO: buffer of size 1 is used, because
            // impl Reader for FILE* (that is io::stdin()) does not stop
            // reading until buffer is full or EOF is reached.
            // This makes reading from pipe practically impossible.
            buffer: ~[0, ..1],
            buffer_size: 0,
            buffer_pos: 0,
            reader: Some(reader),
            total_bytes_retired: 0,
            current_limit: u32::max_value,
            buffer_size_after_limit: 0,
        }
    }

    fn remaining_in_buffer(&self) -> u32 {
        self.buffer_size - self.buffer_pos
    }

    fn remaining_in_buffer_slice<'a>(&'a self) -> &'a [u8] {
        self.buffer.slice(self.buffer_pos as uint, self.buffer_size as uint)
    }

    fn pos(&self) -> u32 {
        self.total_bytes_retired + self.buffer_pos
    }

    fn bytes_until_limit(&self) -> u32 {
        self.current_limit - self.pos()
    }

    // Refill buffer if buffer is empty.
    // Fails if buffer is not empty.
    // Returns false on EOF, or if limit reached.
    // Otherwise returns true.
fn refill_buffer(&mut self) -> bool { if self.buffer_pos < self.buffer_size { fail!("called when buffer is not empty"); } if self.pos() == self.current_limit { return false; } match self.reader { Some(reader) => { self.total_bytes_retired += self.buffer_size; self.buffer_pos = 0; let mut_reader: @mut Reader = unsafe { cast::transmute(reader) }; self.buffer_size = do io_error::cond.trap(|e| { if e.kind != EndOfFile { io_error::cond.raise(e); }; }).inside { mut_reader.read(self.buffer).unwrap_or(0) as u32 }; if self.buffer_size == 0 { return false; } self.recompute_buffer_size_after_limit(); true }, None => false, } } fn refill_buffer_really(&mut self) { if !self.refill_buffer() { fail!("at EOF"); } } fn recompute_buffer_size_after_limit(&mut self) { self.buffer_size += self.buffer_size_after_limit; let buffer_end = self.total_bytes_retired + self.buffer_size; if buffer_end > self.current_limit { // limit is in current buffer self.buffer_size_after_limit = buffer_end - self.current_limit; self.buffer_size -= self.buffer_size_after_limit; } else { self.buffer_size_after_limit = 0; } } pub fn push_limit(&mut self, limit: u32) -> u32 { let old_limit = self.current_limit; let new_limit = self.pos() + limit; if new_limit > old_limit { fail!("truncated message"); } self.current_limit = new_limit; self.recompute_buffer_size_after_limit(); old_limit } pub fn pop_limit(&mut self, old_limit: u32) { if self.bytes_until_limit() != 0 { fail!("must pop only at current limit") } self.current_limit = old_limit; self.recompute_buffer_size_after_limit(); } pub fn eof(&mut self) -> bool { return self.buffer_pos == self.buffer_size && !self.refill_buffer() } pub fn read_raw_byte(&mut self) -> u8 { if self.buffer_pos == self.buffer_size { self.refill_buffer_really(); } let r = self.buffer[self.buffer_pos]; self.buffer_pos += 1; r } pub fn read_raw_varint64(&mut self) -> u64 { let mut bytes: ~[u8] = ~[]; loop { let b = self.read_raw_byte(); bytes.push(b & 0x7F); if b < 0x80 { break; } } let mut r = 0u64; for i in range(0, bytes.len()) { r = (r << 7) | bytes[bytes.len() - i - 1] as u64; } r } pub fn read_raw_varint32(&mut self) -> u32 { self.read_raw_varint64() as u32 } pub fn read_raw_little_endian32(&mut self) -> u32 { let mut bytes = [0u32, ..4]; for i in range(0, 4) { bytes[i] = self.read_raw_byte() as u32; } (bytes[0] ) | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24) } pub fn read_raw_little_endian64(&mut self) -> u64 { let mut bytes = [0u64, ..8]; for i in range(0, 8) { bytes[i] = self.read_raw_byte() as u64; } (bytes[0] ) | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24) | (bytes[4] << 32) | (bytes[5] << 40) | (bytes[6] << 48) | (bytes[7] << 56) } pub fn read_tag(&mut self) -> wire_format::Tag { wire_format::Tag(self.read_raw_varint32()) } // Read tag, return it is pair (field number, wire type) pub fn read_tag_unpack(&mut self) -> (u32, wire_format::WireType) { self.read_tag().unpack() } pub fn read_double(&mut self) -> f64 { let bits = self.read_raw_little_endian64(); unsafe { cast::transmute::<u64, f64>(bits) } } pub fn read_float(&mut self) -> f32 { let bits = self.read_raw_little_endian32(); unsafe { cast::transmute::<u32, f32>(bits) } } pub fn read_int64(&mut self) -> i64 { self.read_raw_varint64() as i64 } pub fn read_int32(&mut self) -> i32 { self.read_raw_varint32() as i32 } pub fn read_uint64(&mut self) -> u64 { self.read_raw_varint64() } pub fn read_uint32(&mut self) -> u32 { self.read_raw_varint32() } pub fn read_sint64(&mut self) -> i64 { decode_zig_zag_64(self.read_uint64()) } 
    pub fn read_sint32(&mut self) -> i32 {
        decode_zig_zag_32(self.read_uint32())
    }

    pub fn read_fixed64(&mut self) -> u64 {
        self.read_raw_little_endian64()
    }

    pub fn read_fixed32(&mut self) -> u32 {
        self.read_raw_little_endian32()
    }

    pub fn read_sfixed64(&mut self) -> i64 {
        self.read_raw_little_endian64() as i64
    }

    pub fn read_sfixed32(&mut self) -> i32 {
        self.read_raw_little_endian32() as i32
    }

    pub fn read_bool(&mut self) -> bool {
        self.read_raw_varint32() != 0
    }

    pub fn skip_field(&mut self, wire_type: wire_format::WireType) {
        match wire_type {
            wire_format::WireTypeVarint => { self.read_raw_varint64(); },
            wire_format::WireTypeFixed64 => { self.read_fixed64(); },
            wire_format::WireTypeFixed32 => { self.read_fixed32(); },
            wire_format::WireTypeLengthDelimited => {
                let len = self.read_raw_varint32();
                self.skip_raw_bytes(len);
            },
            _ => fail!("unknown wire type: {:i}", wire_type as int)
        }
    }

    pub fn read_raw_bytes(&mut self, count: u32) -> ~[u8] {
        let mut r: ~[u8] = ~[];
        r.reserve(count as uint);
        while r.len() < count as uint {
            let rem = count - r.len() as u32;
            if rem <= self.remaining_in_buffer() {
                r.push_all(self.buffer.slice(self.buffer_pos as uint, (self.buffer_pos + rem) as uint));
                self.buffer_pos += rem;
            } else {
                r.push_all(self.remaining_in_buffer_slice());
                self.buffer_pos = self.buffer_size;
                self.refill_buffer_really();
            }
        }
        r
    }

    pub fn skip_raw_bytes(&mut self, count: u32) {
        self.read_raw_bytes(count);
    }

    pub fn read_bytes(&mut self) -> ~[u8] {
        let len = self.read_raw_varint32();
        self.read_raw_bytes(len)
    }

    pub fn read_string(&mut self) -> ~str {
        from_utf8_owned(self.read_bytes())
    }

    pub fn merge_message<M : Message>(&mut self, message: &mut M) {
        let len = self.read_raw_varint32();
        let old_limit = self.push_limit(len);
        message.merge_from(self);
        self.pop_limit(old_limit);
    }

    pub fn read_message<M : Message>(&mut self) -> M {
        let mut r: M = Message::new();
        self.merge_message(&mut r);
        r.check_initialized();
        r
    }
}

trait WithCodedOutputStream {
    fn with_coded_output_stream<T>(&self, cb: &fn(&mut CodedOutputStream) -> T) -> T;
}

impl WithCodedOutputStream for @Writer {
    fn with_coded_output_stream<T>(&self, cb: &fn(&mut CodedOutputStream) -> T) -> T {
        let mut os = CodedOutputStream::new(*self);
        let r = cb(&mut os);
        os.flush();
        r
    }
}

fn with_coded_output_stream_to_bytes(cb: &fn(&mut CodedOutputStream)) -> ~[u8] {
    let w = VecWriter::new();
    do (w as @Writer).with_coded_output_stream |os| {
        cb(os)
    }
    (*w.vec).to_owned()
}

trait WithCodedInputStream {
    fn with_coded_input_stream<T>(&self, cb: &fn(&mut CodedInputStream) -> T) -> T;
}

impl WithCodedInputStream for @Reader {
    fn with_coded_input_stream<T>(&self, cb: &fn(&mut CodedInputStream) -> T) -> T {
        let mut is = CodedInputStream::new(*self);
        let r = cb(&mut is);
        // reading from @Reader requires all data to be read,
        // because CodedInputStream caches data, and otherwise
        // buffer would be discarded
        assert!(is.eof());
        r
    }
}

impl<'self> WithCodedInputStream for &'self [u8] {
    fn with_coded_input_stream<T>(&self, cb: &fn(&mut CodedInputStream) -> T) -> T {
        let reader = VecReader::new(@self.to_owned());
        do (reader as @Reader).with_coded_input_stream |is| {
            cb(is)
        }
    }
}

pub struct CodedOutputStream {
    buffer: ~[u8],
    position: u32,
    writer: Option<@Writer>,
}

impl CodedOutputStream {
    pub fn new(writer: @Writer) -> CodedOutputStream {
        CodedOutputStream {
            buffer: ~[0, ..4096],
            position: 0,
            writer: Some(writer),
        }
    }

    fn refresh_buffer(&mut self) {
        let mut_writer: @mut Writer = unsafe { cast::transmute(self.writer.unwrap()) };
        mut_writer.write(self.buffer.slice(0, self.position
as uint)); self.position = 0; } pub fn flush(&mut self) { if self.writer.is_some() { self.refresh_buffer(); } } pub fn write_raw_byte(&mut self, byte: u8) { if self.position as uint == self.buffer.len() { self.refresh_buffer() } self.buffer[self.position] = byte; self.position += 1; } pub fn write_raw_bytes(&mut self, bytes: &[u8]) { self.refresh_buffer(); let mut_writer: @mut Writer = unsafe { cast::transmute(self.writer.unwrap()) }; mut_writer.write(bytes); } pub fn write_tag(&mut self, field_number: u32, wire_type: wire_format::WireType) { self.write_raw_varint32(wire_format::Tag::make(field_number, wire_type).value()); } pub fn write_raw_varint32(&mut self, value: u32) { self.write_raw_varint64(value as u64); } pub fn write_raw_varint64(&mut self, value: u64) { let mut temp = value; loop { if (temp & !0x7Fu64) == 0 { self.write_raw_byte(temp as u8); break; } else { self.write_raw_byte(((temp & 0x7F) | 0x80) as u8); temp >>= 7; } } } pub fn write_raw_little_endian32(&mut self, value: u32) { self.write_raw_byte(((value ) & 0xFF) as u8); self.write_raw_byte(((value >> 8) & 0xFF) as u8); self.write_raw_byte(((value >> 16) & 0xFF) as u8); self.write_raw_byte(((value >> 24) & 0xFF) as u8); } pub fn write_raw_little_endian64(&mut self, value: u64) { self.write_raw_byte(((value ) & 0xFF) as u8); self.write_raw_byte(((value >> 8) & 0xFF) as u8); self.write_raw_byte(((value >> 16) & 0xFF) as u8); self.write_raw_byte(((value >> 24) & 0xFF) as u8); self.write_raw_byte(((value >> 32) & 0xFF) as u8); self.write_raw_byte(((value >> 40) & 0xFF) as u8); self.write_raw_byte(((value >> 48) & 0xFF) as u8); self.write_raw_byte(((value >> 56) & 0xFF) as u8); } pub fn write_float_no_tag(&mut self, value: f32) { let bits = unsafe { cast::transmute::<f32, u32>(value) }; self.write_raw_little_endian32(bits); } pub fn write_double_no_tag(&mut self, value: f64) { let bits = unsafe { cast::transmute::<f64, u64>(value) }; self.write_raw_little_endian64(bits); } pub fn write_float(&mut self, field_number: u32, value: f32) { self.write_tag(field_number, wire_format::WireTypeFixed32); self.write_float_no_tag(value); } pub fn write_double(&mut self, field_number: u32, value: f64) { self.write_tag(field_number, wire_format::WireTypeFixed64); self.write_double_no_tag(value); } pub fn write_uint64_no_tag(&mut self, value: u64) { self.write_raw_varint64(value); } pub fn write_uint32_no_tag(&mut self, value: u32) { self.write_raw_varint32(value); } pub fn write_int64_no_tag(&mut self, value: i64) { self.write_raw_varint64(value as u64); } pub fn write_int32_no_tag(&mut self, value: i32) { self.write_raw_varint32(value as u32); } pub fn write_sint64_no_tag(&mut self, value: i64) { self.write_uint64_no_tag(encode_zig_zag_64(value)); } pub fn write_sint32_no_tag(&mut self, value: i32) { self.write_uint32_no_tag(encode_zig_zag_32(value)); } pub fn write_fixed64_no_tag(&mut self, value: u64) { self.write_raw_little_endian64(value); } pub fn write_fixed32_no_tag(&mut self, value: u32) { self.write_raw_little_endian32(value); } pub fn write_sfixed64_no_tag(&mut self, value: i64) { self.write_raw_little_endian64(value as u64); } pub fn write_sfixed32_no_tag(&mut self, value: i32) { self.write_raw_little_endian32(value as u32); } pub fn write_bool_no_tag(&mut self, value: bool) { self.write_raw_varint32(if value { 1 } else { 0 }); } pub fn write_enum_no_tag(&mut self, value: i32) { self.write_int32_no_tag(value); } pub fn write_uint64(&mut self, field_number: u32, value: u64) { self.write_tag(field_number, 
wire_format::WireTypeVarint); self.write_uint64_no_tag(value); } pub fn write_uint32(&mut self, field_number: u32, value: u32) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_uint32_no_tag(value); } pub fn write_int64(&mut self, field_number: u32, value: i64) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_int64_no_tag(value); } pub fn write_int32(&mut self, field_number: u32, value: i32) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_int32_no_tag(value); } pub fn write_sint64(&mut self, field_number: u32, value: i64) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_sint64_no_tag(value); } pub fn write_sint32(&mut self, field_number: u32, value: i32) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_sint32_no_tag(value); } pub fn write_fixed64(&mut self, field_number: u32, value: u64) { self.write_tag(field_number, wire_format::WireTypeFixed64); self.write_fixed64_no_tag(value); } pub fn write_fixed32(&mut self, field_number: u32, value: u32) { self.write_tag(field_number, wire_format::WireTypeFixed32); self.write_fixed32_no_tag(value); } pub fn write_sfixed64(&mut self, field_number: u32, value: i64) { self.write_tag(field_number, wire_format::WireTypeFixed64); self.write_sfixed64_no_tag(value); } pub fn write_sfixed32(&mut self, field_number: u32, value: i32) { self.write_tag(field_number, wire_format::WireTypeFixed32); self.write_sfixed32_no_tag(value); } pub fn write_bool(&mut self, field_number: u32, value: bool) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_bool_no_tag(value); } pub fn write_enum(&mut self, field_number: u32, value: i32) { self.write_tag(field_number, wire_format::WireTypeVarint); self.write_enum_no_tag(value); } pub fn write_bytes_no_tag(&mut self, bytes: &[u8]) { self.write_raw_varint32(bytes.len() as u32); self.write_raw_bytes(bytes); } pub fn write_string_no_tag(&mut self, s: &str) { self.write_bytes_no_tag(s.as_bytes()); } pub fn write_message_no_tag<M : Message>(&mut self, msg: &M) { msg.write_length_delimited_to(self); } pub fn write_bytes(&mut self, field_number: u32, bytes: &[u8]) { self.write_tag(field_number, wire_format::WireTypeLengthDelimited); self.write_bytes_no_tag(bytes); } pub fn write_string(&mut self, field_number: u32, s: &str) { self.write_tag(field_number, wire_format::WireTypeLengthDelimited); self.write_string_no_tag(s); } pub fn write_message<M : Message>(&mut self, field_number: u32, msg: &M) { self.write_tag(field_number, wire_format::WireTypeLengthDelimited); self.write_message_no_tag(msg); } } pub trait Message : Eq { fn new() -> Self; fn clear(&mut self); // all required fields set fn is_initialized(&self) -> bool; fn merge_from(&mut self, is: &mut CodedInputStream); fn write_to(&self, os: &mut CodedOutputStream); fn compute_sizes(&self, sizes: &mut ~[u32]) -> u32; } pub trait ProtobufEnum : Eq { fn value(&self) -> i32; } pub trait MessageUtil { // broken in 0.7 //fn parse_from_**(is: &mut Xxx) -> Self; fn write_to_writer(&self, w: @Writer); fn write_to_bytes(&self) -> ~[u8]; fn write_length_delimited_to(&self, os: &mut CodedOutputStream); fn write_length_delimited_to_writer(&self, w: @Writer); fn write_length_delimited_to_bytes(&self) -> ~[u8]; fn serialized_size(&self) -> u32; fn check_initialized(&self); } pub fn parse_from<M : Message>(is: &mut CodedInputStream) -> M { let mut r: M = Message::new(); r.merge_from(is); r.check_initialized(); r } pub fn parse_from_reader<M : Message>(reader: 
@Reader) -> M { do reader.with_coded_input_stream |is| { parse_from::<M>(is) } } pub fn parse_from_bytes<M : Message>(bytes: &[u8]) -> M { do bytes.with_coded_input_stream |is| { parse_from::<M>(is) } } pub fn parse_length_delimited_from<M : Message>(is: &mut CodedInputStream) -> M { is.read_message::<M>() } pub fn parse_length_delimited_from_reader<M : Message>(r: @Reader) -> M { // TODO: wrong: we may read length first, and then read exact number of bytes needed do r.with_coded_input_stream |is| { is.read_message::<M>() } } pub fn parse_length_delimited_from_bytes<M : Message>(bytes: &[u8]) -> M { do bytes.with_coded_input_stream |is| { is.read_message::<M>() } } impl<M : Message> MessageUtil for M { fn serialized_size(&self) -> u32 { let mut sizes = ~[]; self.compute_sizes(&mut sizes) } <|fim▁hole|> // TODO: report which fields are not initialized assert!(self.is_initialized()); } fn write_to_writer(&self, w: @Writer) { do w.with_coded_output_stream |os| { self.write_to(os); } } fn write_to_bytes(&self) -> ~[u8] { do with_coded_output_stream_to_bytes |os| { self.write_to(os) } } fn write_length_delimited_to(&self, os: &mut CodedOutputStream) { os.write_raw_varint32(self.serialized_size()); self.write_to(os); } fn write_length_delimited_to_writer(&self, w: @Writer) { do w.with_coded_output_stream |os| { self.write_length_delimited_to(os); } } fn write_length_delimited_to_bytes(&self) -> ~[u8] { do with_coded_output_stream_to_bytes |os| { self.write_length_delimited_to(os); } } } #[cfg(test)] mod test { use super::*; use std::rt::io::*; use std::rt::io::mem::*; use misc::*; use hex::*; fn test_read(hex: &str, callback: &fn(&mut CodedInputStream)) { let d = decode_hex(hex); let len = d.len(); let reader = @MemReader::new(d) as @Reader; let mut is = CodedInputStream::new(reader); assert_eq!(0, is.pos()); callback(&mut is); assert!(is.eof()); assert_eq!(len as u32, is.pos()); } #[test] fn test_input_stream_read_raw_byte() { do test_read("17") |is| { assert_eq!(23, is.read_raw_byte()); } } #[test] fn test_input_stream_read_varint() { do test_read("07") |reader| { assert_eq!(7, reader.read_raw_varint32()); } do test_read("07") |reader| { assert_eq!(7, reader.read_raw_varint64()); } do test_read("96 01") |reader| { assert_eq!(150, reader.read_raw_varint32()); } do test_read("96 01") |reader| { assert_eq!(150, reader.read_raw_varint64()); } } #[test] fn test_output_input_stream_read_float() { do test_read("95 73 13 61") |is| { assert_eq!(17e19, is.read_float()); } } #[test] fn test_input_stream_read_double() { do test_read("40 d5 ab 68 b3 07 3d 46") |is| { assert_eq!(23e29, is.read_double()); } } #[test] fn test_input_stream_skip_raw_bytes() { do test_read("") |reader| { reader.skip_raw_bytes(0); } do test_read("aa bb") |reader| { reader.skip_raw_bytes(2); } do test_read("aa bb cc dd ee ff") |reader| { reader.skip_raw_bytes(6); } } #[test] fn test_input_stream_limits() { do test_read("aa bb cc") |is| { let old_limit = is.push_limit(1); assert_eq!(1, is.bytes_until_limit()); assert_eq!(~[0xaa], is.read_raw_bytes(1)); is.pop_limit(old_limit); assert_eq!(~[0xbb, 0xcc], is.read_raw_bytes(2)); } } fn test_write(expected: &str, gen: &fn(&mut CodedOutputStream)) { let writer = VecWriter::new(); let mut os = CodedOutputStream::new(writer as @Writer); gen(&mut os); os.flush(); let r = writer.vec.to_owned(); assert_eq!(encode_hex(decode_hex(expected)), encode_hex(r)); } #[test] fn test_output_stream_write_raw_byte() { do test_write("a1") |os| { os.write_raw_byte(0xa1); } } #[test] fn 
test_output_stream_write_tag() { do test_write("08") |os| { os.write_tag(1, wire_format::WireTypeVarint); } } #[test] fn test_output_stream_write_raw_bytes() { do test_write("00 ab") |os| { os.write_raw_bytes([0x00, 0xab]); } } #[test] fn test_output_stream_write_raw_varint32() { do test_write("96 01") |os| { os.write_raw_varint32(150); } } #[test] fn test_output_stream_write_raw_varint64() { do test_write("96 01") |os| { os.write_raw_varint64(150); } } #[test] fn test_output_stream_write_raw_little_endian32() { do test_write("f1 e2 d3 c4") |os| { os.write_raw_little_endian32(0xc4d3e2f1); } } #[test] fn test_output_stream_write_float_no_tag() { do test_write("95 73 13 61") |os| { os.write_float_no_tag(17e19); } } #[test] fn test_output_stream_write_double_no_tag() { do test_write("40 d5 ab 68 b3 07 3d 46") |os| { os.write_double_no_tag(23e29); } } #[test] fn test_output_stream_write_raw_little_endian64() { do test_write("f1 e2 d3 c4 b5 a6 07 f8") |os| { os.write_raw_little_endian64(0xf807a6b5c4d3e2f1); } } }<|fim▁end|>
fn check_initialized(&self) {
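# The read_sint32/read_sint64 readers and their writers in core.rs above
# delegate to zigzag helpers imported from the zigzag module. A short Python
# sketch of the standard protobuf ZigZag mapping they implement; the names
# mirror the Rust helpers and the bit operations follow the protobuf
# encoding specification.
def encode_zig_zag_64(n):
    # Interleave signed values so small magnitudes encode to short varints:
    # 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
    return ((n << 1) ^ (n >> 63)) & 0xFFFFFFFFFFFFFFFF

def decode_zig_zag_64(n):
    # Inverse mapping back to a signed 64-bit value.
    return (n >> 1) ^ -(n & 1)

for v in (0, -1, 1, -2, 2, -(2**63), 2**63 - 1):
    assert decode_zig_zag_64(encode_zig_zag_64(v)) == v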
<|file_name|>RecordTimer.py<|end_file_name|><|fim▁begin|>import os
from enigma import eEPGCache, getBestPlayableServiceReference, \
	eServiceReference, iRecordableService, quitMainloop, eActionMap

from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.TimerSanityCheck import TimerSanityCheck

from Screens.MessageBox import MessageBox
import Screens.Standby
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML

import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference

from time import localtime, strftime, ctime, time
from bisect import insort
from sys import maxint

# ok, for descriptions etc we have:
# service reference  (to get the service name)<|fim▁hole|>

# parses an event, and gives out a (begin, end, name, duration, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
	if description:
		name = ev.getEventName()
		description = ev.getShortDescription()
		if description == "":
			description = ev.getExtendedDescription()
	else:
		name = ""
		description = ""
	begin = ev.getBeginTime()
	end = begin + ev.getDuration()
	eit = ev.getEventId()
	begin -= config.recording.margin_before.value * 60
	end += config.recording.margin_after.value * 60
	return (begin, end, name, description, eit)

class AFTEREVENT:
	NONE = 0
	STANDBY = 1
	DEEPSTANDBY = 2
	AUTO = 3

def findSafeRecordPath(dirname):
	if not dirname:
		return None
	from Components import Harddisk
	dirname = os.path.realpath(dirname)
	mountpoint = Harddisk.findMountPoint(dirname)
	if mountpoint in ('/', '/media'):
		print '[RecordTimer] media is not mounted:', dirname
		return None
	if not os.path.isdir(dirname):
		try:
			os.makedirs(dirname)
		except Exception, ex:
			print '[RecordTimer] Failed to create dir "%s":' % dirname, ex
			return None
	return dirname

def chechForRecordings():
	if NavigationInstance.instance.getRecordings():
		return True
	rec_time = NavigationInstance.instance.RecordTimer.getNextTimerTime()
	return rec_time > 0 and (rec_time - time()) < 360

# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
	wasInStandby = False
	wasInDeepStandby = False
	receiveRecordEvents = False

	@staticmethod
	def keypress(key=None, flag=1):
		if flag and (RecordTimerEntry.wasInStandby or RecordTimerEntry.wasInDeepStandby):
			RecordTimerEntry.wasInStandby = False
			RecordTimerEntry.wasInDeepStandby = False
			eActionMap.getInstance().unbindAction('', RecordTimerEntry.keypress)

	@staticmethod
	def setWasInDeepStandby():
		RecordTimerEntry.wasInDeepStandby = True
		eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)

	@staticmethod
	def setWasInStandby():
		if not RecordTimerEntry.wasInStandby:
			if not RecordTimerEntry.wasInDeepStandby:
				eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
			RecordTimerEntry.wasInDeepStandby = False
			RecordTimerEntry.wasInStandby = True

	@staticmethod
	def shutdown():
		quitMainloop(1)

	@staticmethod
	def staticGotRecordEvent(recservice, event):
		if event == iRecordableService.evEnd:
			print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
			if not chechForRecordings():
				print "No recordings busy or scheduled within 6 minutes so shutdown"
				RecordTimerEntry.shutdown() # immediate shutdown
		elif event == iRecordableService.evStart:
			print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"

	@staticmethod
	def
stopTryQuitMainloop(): print "RecordTimer.stopTryQuitMainloop" NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent) RecordTimerEntry.receiveRecordEvents = False @staticmethod def TryQuitMainloop(): if not RecordTimerEntry.receiveRecordEvents and Screens.Standby.inStandby: print "RecordTimer.TryQuitMainloop" NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent) RecordTimerEntry.receiveRecordEvents = True # send fake event.. to check if another recordings are running or # other timers start in a few seconds RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd) ################################################################# def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None, descramble = True, record_ecm = False, always_zap = False): timer.TimerEntry.__init__(self, int(begin), int(end)) if checkOldTimers == True: if self.begin < time() - 1209600: self.begin = int(time()) if self.end < self.begin: self.end = self.begin assert isinstance(serviceref, ServiceReference) if serviceref.isRecordable(): self.service_ref = serviceref else: self.service_ref = ServiceReference(None) self.eit = eit self.dontSave = False self.name = name self.description = description self.disabled = disabled self.timer = None self.__record_service = None self.start_prepare = 0 self.justplay = justplay self.always_zap = always_zap self.afterEvent = afterEvent self.dirname = dirname self.dirnameHadToFallback = False self.autoincrease = False self.autoincreasetime = 3600 * 24 # 1 day self.tags = tags or [] self.descramble = descramble self.record_ecm = record_ecm self.log_entries = [] self.resetState() def __repr__(self): return "RecordTimerEntry(name=%s, begin=%s, serviceref=%s, justplay=%s)" % (self.name, ctime(self.begin), self.service_ref, self.justplay) def log(self, code, msg): self.log_entries.append((int(time()), code, msg)) print "[TIMER]", msg def calculateFilename(self): service_name = self.service_ref.getServiceName() begin_date = strftime("%Y%m%d %H%M", localtime(self.begin)) print "begin_date: ", begin_date print "service_name: ", service_name print "name:", self.name print "description: ", self.description filename = begin_date + " - " + service_name if self.name: if config.recording.filename_composition.value == "short": filename = strftime("%Y%m%d", localtime(self.begin)) + " - " + self.name elif config.recording.filename_composition.value == "long": filename += " - " + self.name + " - " + self.description else: filename += " - " + self.name # standard if config.recording.ascii_filenames.value: filename = ASCIItranslit.legacyEncode(filename) if not self.dirname: dirname = findSafeRecordPath(defaultMoviePath()) else: dirname = findSafeRecordPath(self.dirname) if dirname is None: dirname = findSafeRecordPath(defaultMoviePath()) self.dirnameHadToFallback = True if not dirname: return None self.Filename = Directories.getRecordingFilename(filename, dirname) self.log(0, "Filename calculated as: '%s'" % self.Filename) return self.Filename def tryPrepare(self): if self.justplay: return True else: if not self.calculateFilename(): self.do_backoff() self.start_prepare = time() + self.backoff return False rec_ref = self.service_ref and self.service_ref.ref if rec_ref and rec_ref.flags & eServiceReference.isGroup: rec_ref = getBestPlayableServiceReference(rec_ref, eServiceReference()) if not rec_ref: 
self.log(1, "'get best playable service for group... record' failed") return False self.record_service = rec_ref and NavigationInstance.instance.recordService(rec_ref) if not self.record_service: self.log(1, "'record service' failed") return False if self.repeated: epgcache = eEPGCache.getInstance() queryTime=self.begin+(self.end-self.begin)/2 evt = epgcache.lookupEventTime(rec_ref, queryTime) if evt: self.description = evt.getShortDescription() if self.description == "": self.description = evt.getExtendedDescription() event_id = evt.getEventId() else: event_id = -1 else: event_id = self.eit if event_id is None: event_id = -1 prep_res=self.record_service.prepare(self.Filename + ".ts", self.begin, self.end, event_id, self.name.replace("\n", ""), self.description.replace("\n", ""), ' '.join(self.tags), self.descramble, self.record_ecm) if prep_res: if prep_res == -255: self.log(4, "failed to write meta information") else: self.log(2, "'prepare' failed: error %d" % prep_res) # we must calc nur start time before stopRecordService call because in Screens/Standby.py TryQuitMainloop tries to get # the next start time in evEnd event handler... self.do_backoff() self.start_prepare = time() + self.backoff NavigationInstance.instance.stopRecordService(self.record_service) self.record_service = None return False return True def do_backoff(self): if self.backoff == 0: self.backoff = 5 else: self.backoff *= 2 if self.backoff > 100: self.backoff = 100 self.log(10, "backoff: retry in %d seconds" % self.backoff) def activate(self): next_state = self.state + 1 self.log(5, "activating state %d" % next_state) if next_state == 1: if self.always_zap: if Screens.Standby.inStandby: self.log(5, "wakeup and zap to recording service") RecordTimerEntry.setWasInStandby() #set service to zap after standby Screens.Standby.inStandby.prev_running_service = self.service_ref.ref Screens.Standby.inStandby.paused_service = None #wakeup standby Screens.Standby.inStandby.Power() else: if RecordTimerEntry.wasInDeepStandby: RecordTimerEntry.setWasInStandby() cur_zap_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference() if cur_zap_ref and not cur_zap_ref.getPath():# we do not zap away if it is no live service Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20) self.failureCB(True) self.log(5, "zap to recording service") if next_state == self.StatePrepared: if self.tryPrepare(): self.log(6, "prepare ok, waiting for begin") # create file to "reserve" the filename # because another recording at the same time on another service can try to record the same event # i.e. cable / sat.. then the second recording needs an own extension... when we create the file # here than calculateFilename is happy if not self.justplay: open(self.Filename + ".ts", "w").close() # Give the Trashcan a chance to clean up try: Trashcan.instance.cleanIfIdle(self.Filename) except Exception, e: print "[TIMER] Failed to call Trashcan.instance.cleanIfIdle()" print "[TIMER] Error:", e # fine. it worked, resources are allocated. 
self.next_activation = self.begin self.backoff = 0 return True self.log(7, "prepare failed") if self.first_try_prepare: self.first_try_prepare = False cur_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference() if cur_ref and not cur_ref.getPath(): if Screens.Standby.inStandby: self.failureCB(True) elif not config.recording.asktozap.value: self.log(8, "asking user to zap away") Notifications.AddNotificationWithCallback(self.failureCB, MessageBox, _("A timer failed to record!\nDisable TV and try again?\n"), timeout=20, default=True) else: # zap without asking self.log(9, "zap without asking") Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20) self.failureCB(True) elif cur_ref: self.log(8, "currently running service is not a live service.. so stop it makes no sense") else: self.log(8, "currently no service running... so we dont need to stop it") return False elif next_state == self.StateRunning: # if this timer has been cancelled, just go to "end" state. if self.cancelled: return True if self.justplay: if Screens.Standby.inStandby: self.log(11, "wakeup and zap") RecordTimerEntry.setWasInStandby() #set service to zap after standby Screens.Standby.inStandby.prev_running_service = self.service_ref.ref Screens.Standby.inStandby.paused_service = None #wakeup standby Screens.Standby.inStandby.Power() else: if RecordTimerEntry.wasInDeepStandby: RecordTimerEntry.setWasInStandby() self.log(11, "zapping") NavigationInstance.instance.playService(self.service_ref.ref) return True else: self.log(11, "start recording") if RecordTimerEntry.wasInDeepStandby: RecordTimerEntry.keypress() if Screens.Standby.inStandby: #In case some plugin did put the receiver already in standby config.misc.standbyCounter.value = 0 else: Notifications.AddNotification(Screens.Standby.Standby, StandbyCounterIncrease=False) record_res = self.record_service.start() if record_res: self.log(13, "start record returned %d" % record_res) self.do_backoff() # retry self.begin = time() + self.backoff return False # Tell the trashcan we started recording. The trashcan gets events, # but cannot tell what the associated path is. Trashcan.instance.markDirty(self.Filename) return True elif next_state == self.StateEnded: old_end = self.end if self.setAutoincreaseEnd(): self.log(12, "autoincrase recording %d minute(s)" % int((self.end - old_end)/60)) self.state -= 1 return True self.log(12, "stop recording") if not self.justplay: NavigationInstance.instance.stopRecordService(self.record_service) self.record_service = None if not chechForRecordings(): if self.afterEvent == AFTEREVENT.DEEPSTANDBY or self.afterEvent == AFTEREVENT.AUTO and (Screens.Standby.inStandby or RecordTimerEntry.wasInStandby) and not config.misc.standbyCounter.value: if not Screens.Standby.inTryQuitMainloop: if Screens.Standby.inStandby: RecordTimerEntry.TryQuitMainloop() else: Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished record timer wants to shut down\nyour receiver. Shutdown now?"), timeout=20, default=True) elif self.afterEvent == AFTEREVENT.STANDBY or self.afterEvent == AFTEREVENT.AUTO and RecordTimerEntry.wasInStandby: if not Screens.Standby.inStandby: Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished record timer wants to set your\nreceiver to standby. 
Do that now?"), timeout=20, default=True) else: RecordTimerEntry.keypress() return True def setAutoincreaseEnd(self, entry = None): if not self.autoincrease: return False if entry is None: new_end = int(time()) + self.autoincreasetime else: new_end = entry.begin - 30 dummyentry = RecordTimerEntry(self.service_ref, self.begin, new_end, self.name, self.description, self.eit, disabled=True, justplay = self.justplay, afterEvent = self.afterEvent, dirname = self.dirname, tags = self.tags) dummyentry.disabled = self.disabled timersanitycheck = TimerSanityCheck(NavigationInstance.instance.RecordTimer.timer_list, dummyentry) if not timersanitycheck.check(): simulTimerList = timersanitycheck.getSimulTimerList() if simulTimerList is not None and len(simulTimerList) > 1: new_end = simulTimerList[1].begin new_end -= 30 # 30 Sekunden Prepare-Zeit lassen if new_end <= time(): return False self.end = new_end return True def sendStandbyNotification(self, answer): RecordTimerEntry.keypress() if answer: Notifications.AddNotification(Screens.Standby.Standby) def sendTryQuitMainloopNotification(self, answer): RecordTimerEntry.keypress() if answer: Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 1) def getNextActivation(self): if self.state == self.StateEnded: return self.end next_state = self.state + 1 return {self.StatePrepared: self.start_prepare, self.StateRunning: self.begin, self.StateEnded: self.end }[next_state] def failureCB(self, answer): if answer == True: self.log(13, "ok, zapped away") #NavigationInstance.instance.stopUserServices() NavigationInstance.instance.playService(self.service_ref.ref) else: self.log(14, "user didn't want to zap away, record will probably fail") def timeChanged(self): old_prepare = self.start_prepare self.start_prepare = self.begin - self.prepare_time self.backoff = 0 if int(old_prepare) != int(self.start_prepare): self.log(15, "record time changed, start prepare is now: %s" % ctime(self.start_prepare)) def gotRecordEvent(self, record, event): # TODO: this is not working (never true), please fix. (comparing two swig wrapped ePtrs) if self.__record_service.__deref__() != record.__deref__(): return self.log(16, "record event %d" % event) if event == iRecordableService.evRecordWriteError: print "WRITE ERROR on recording, disk full?" # show notification. the 'id' will make sure that it will be # displayed only once, even if more timers are failing at the # same time. (which is very likely in case of disk fullness) Notifications.AddPopup(text = _("Write error while recording. Disk full?\n"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "DiskFullMessage") # ok, the recording has been stopped. we need to properly note # that in our state, with also keeping the possibility to re-try. # TODO: this has to be done. 
elif event == iRecordableService.evStart: text = _("A record has been started:\n%s") % self.name notify = config.usage.show_message_when_recording_starts.value and not Screens.Standby.inStandby if self.dirnameHadToFallback: text = '\n'.join((text, _("Please note that the previously selected media could not be accessed and therefore the default directory is being used instead."))) notify = True if notify: Notifications.AddPopup(text = text, type = MessageBox.TYPE_INFO, timeout = 3) elif event == iRecordableService.evRecordAborted: NavigationInstance.instance.RecordTimer.removeEntry(self) # we have record_service as property to automatically subscribe to record service events def setRecordService(self, service): if self.__record_service is not None: print "[remove callback]" NavigationInstance.instance.record_event.remove(self.gotRecordEvent) self.__record_service = service if self.__record_service is not None: print "[add callback]" NavigationInstance.instance.record_event.append(self.gotRecordEvent) record_service = property(lambda self: self.__record_service, setRecordService) def createTimer(xml): begin = int(xml.get("begin")) end = int(xml.get("end")) serviceref = ServiceReference(xml.get("serviceref").encode("utf-8")) description = xml.get("description").encode("utf-8") repeated = xml.get("repeated").encode("utf-8") disabled = long(xml.get("disabled") or "0") justplay = long(xml.get("justplay") or "0") always_zap = long(xml.get("always_zap") or "0") afterevent = str(xml.get("afterevent") or "nothing") afterevent = { "nothing": AFTEREVENT.NONE, "standby": AFTEREVENT.STANDBY, "deepstandby": AFTEREVENT.DEEPSTANDBY, "auto": AFTEREVENT.AUTO }[afterevent] eit = xml.get("eit") if eit and eit != "None": eit = long(eit); else: eit = None location = xml.get("location") if location and location != "None": location = location.encode("utf-8") else: location = None tags = xml.get("tags") if tags and tags != "None": tags = tags.encode("utf-8").split(' ') else: tags = None descramble = int(xml.get("descramble") or "1") record_ecm = int(xml.get("record_ecm") or "0") name = xml.get("name").encode("utf-8") #filename = xml.get("filename").encode("utf-8") entry = RecordTimerEntry(serviceref, begin, end, name, description, eit, disabled, justplay, afterevent, dirname = location, tags = tags, descramble = descramble, record_ecm = record_ecm, always_zap = always_zap) entry.repeated = int(repeated) for l in xml.findall("log"): time = int(l.get("time")) code = int(l.get("code")) msg = l.text.strip().encode("utf-8") entry.log_entries.append((time, code, msg)) return entry class RecordTimer(timer.Timer): def __init__(self): timer.Timer.__init__(self) self.Filename = Directories.resolveFilename(Directories.SCOPE_CONFIG, "timers.xml") try: self.loadTimer() except IOError: print "unable to load timers from file!" def doActivate(self, w): # when activating a timer which has already passed, # simply abort the timer. don't run trough all the stages. if w.shouldSkip(): w.state = RecordTimerEntry.StateEnded else: # when active returns true, this means "accepted". # otherwise, the current state is kept. # the timer entry itself will fix up the delay then. if w.activate(): w.state += 1 self.timer_list.remove(w) # did this timer reached the last state? if w.state < RecordTimerEntry.StateEnded: # no, sort it into active list insort(self.timer_list, w) else: # yes. Process repeated, and re-add. 
if w.repeated: w.processRepeated() w.state = RecordTimerEntry.StateWaiting w.first_try_prepare = True self.addTimerEntry(w) else: # Remove old timers as set in config self.cleanupDaily(config.recording.keep_timers.value) insort(self.processed_timers, w) self.stateChanged(w) def isRecording(self): for timer in self.timer_list: if timer.isRunning() and not timer.justplay: return True return False def loadTimer(self): # TODO: PATH! if not Directories.fileExists(self.Filename): return try: doc = xml.etree.cElementTree.parse(self.Filename) except SyntaxError: from Tools.Notifications import AddPopup from Screens.MessageBox import MessageBox AddPopup(_("The timer file (timers.xml) is corrupt and could not be loaded."), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed") print "timers.xml failed to load!" try: import os os.rename(self.Filename, self.Filename + "_old") except (IOError, OSError): print "renaming broken timer failed" return except IOError: print "timers.xml not found!" return root = doc.getroot() # put out a message when at least one timer overlaps checkit = True for timer in root.findall("timer"): newTimer = createTimer(timer) if (self.record(newTimer, True, dosave=False) is not None) and (checkit == True): from Tools.Notifications import AddPopup from Screens.MessageBox import MessageBox AddPopup(_("Timer overlap in timers.xml detected!\nPlease recheck it!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed") checkit = False # at moment it is enough when the message is displayed one time def saveTimer(self): #root_element = xml.etree.cElementTree.Element('timers') #root_element.text = "\n" #for timer in self.timer_list + self.processed_timers: # some timers (instant records) don't want to be saved. # skip them #if timer.dontSave: #continue #t = xml.etree.cElementTree.SubElement(root_element, 'timers') #t.set("begin", str(int(timer.begin))) #t.set("end", str(int(timer.end))) #t.set("serviceref", str(timer.service_ref)) #t.set("repeated", str(timer.repeated)) #t.set("name", timer.name) #t.set("description", timer.description) #t.set("afterevent", str({ # AFTEREVENT.NONE: "nothing", # AFTEREVENT.STANDBY: "standby", # AFTEREVENT.DEEPSTANDBY: "deepstandby", # AFTEREVENT.AUTO: "auto"})) #if timer.eit is not None: # t.set("eit", str(timer.eit)) #if timer.dirname is not None: # t.set("location", str(timer.dirname)) #t.set("disabled", str(int(timer.disabled))) #t.set("justplay", str(int(timer.justplay))) #t.text = "\n" #t.tail = "\n" #for time, code, msg in timer.log_entries: #l = xml.etree.cElementTree.SubElement(t, 'log') #l.set("time", str(time)) #l.set("code", str(code)) #l.text = str(msg) #l.tail = "\n" #doc = xml.etree.cElementTree.ElementTree(root_element) #doc.write(self.Filename) list = [] list.append('<?xml version="1.0" ?>\n') list.append('<timers>\n') for timer in self.timer_list + self.processed_timers: if timer.dontSave: continue list.append('<timer') list.append(' begin="' + str(int(timer.begin)) + '"') list.append(' end="' + str(int(timer.end)) + '"') list.append(' serviceref="' + stringToXML(str(timer.service_ref)) + '"') list.append(' repeated="' + str(int(timer.repeated)) + '"') list.append(' name="' + str(stringToXML(timer.name)) + '"') list.append(' description="' + str(stringToXML(timer.description)) + '"') list.append(' afterevent="' + str(stringToXML({ AFTEREVENT.NONE: "nothing", AFTEREVENT.STANDBY: "standby", AFTEREVENT.DEEPSTANDBY: "deepstandby", AFTEREVENT.AUTO: "auto" }[timer.afterEvent])) + '"') if timer.eit is not None: 
list.append(' eit="' + str(timer.eit) + '"') if timer.dirname is not None: list.append(' location="' + str(stringToXML(timer.dirname)) + '"') if timer.tags is not None: list.append(' tags="' + str(stringToXML(' '.join(timer.tags))) + '"') list.append(' disabled="' + str(int(timer.disabled)) + '"') list.append(' justplay="' + str(int(timer.justplay)) + '"') list.append(' always_zap="' + str(int(timer.always_zap)) + '"') list.append(' descramble="' + str(int(timer.descramble)) + '"') list.append(' record_ecm="' + str(int(timer.record_ecm)) + '"') list.append('>\n') if config.recording.debug.value: for time, code, msg in timer.log_entries: list.append('<log') list.append(' code="' + str(code) + '"') list.append(' time="' + str(time) + '"') list.append('>') list.append(str(stringToXML(msg))) list.append('</log>\n') list.append('</timer>\n') list.append('</timers>\n') file = open(self.Filename + ".writing", "w") for x in list: file.write(x) file.flush() import os os.fsync(file.fileno()) file.close() os.rename(self.Filename + ".writing", self.Filename) def getNextZapTime(self): now = time() for timer in self.timer_list: if not timer.justplay or timer.begin < now: continue return timer.begin return -1 def getNextRecordingTime(self): now = time() for timer in self.timer_list: next_act = timer.getNextActivation() if timer.justplay or next_act < now: continue return next_act return -1 def getNextTimerTime(self): now = time() for timer in self.timer_list: next_act = timer.getNextActivation() if next_act < now: continue return next_act return -1 def isNextRecordAfterEventActionAuto(self): now = time() t = None for timer in self.timer_list: if timer.justplay or timer.begin < now: continue if t is None or t.begin == timer.begin: t = timer if t.afterEvent == AFTEREVENT.AUTO: return True return False def record(self, entry, ignoreTSC=False, dosave=True): #wird von loadTimer mit dosave=False aufgerufen timersanitycheck = TimerSanityCheck(self.timer_list,entry) if not timersanitycheck.check(): if ignoreTSC != True: print "timer conflict detected!" 
print timersanitycheck.getSimulTimerList() return timersanitycheck.getSimulTimerList() else: print "ignore timer conflict" elif timersanitycheck.doubleCheck(): print "ignore double timer" return None entry.timeChanged() print "[Timer] Record " + str(entry) entry.Timer = self self.addTimerEntry(entry) if dosave: self.saveTimer() return None def isInTimer(self, eventid, begin, duration, service): returnValue = None type = 0 time_match = 0 bt = None end = begin + duration refstr = str(service) for x in self.timer_list: check = x.service_ref.ref.toString() == refstr if not check: sref = x.service_ref.ref parent_sid = sref.getUnsignedData(5) parent_tsid = sref.getUnsignedData(6) if parent_sid and parent_tsid: # check for subservice sid = sref.getUnsignedData(1) tsid = sref.getUnsignedData(2) sref.setUnsignedData(1, parent_sid) sref.setUnsignedData(2, parent_tsid) sref.setUnsignedData(5, 0) sref.setUnsignedData(6, 0) check = sref.toCompareString() == refstr num = 0 if check: check = False event = eEPGCache.getInstance().lookupEventId(sref, eventid) num = event and event.getNumOfLinkageServices() or 0 sref.setUnsignedData(1, sid) sref.setUnsignedData(2, tsid) sref.setUnsignedData(5, parent_sid) sref.setUnsignedData(6, parent_tsid) for cnt in range(num): subservice = event.getLinkageService(sref, cnt) if sref.toCompareString() == subservice.toCompareString(): check = True break if check: timer_end = x.end type_offset = 0 if x.justplay: type_offset = 5 if (timer_end - x.begin) <= 1: timer_end += 60 if x.always_zap: type_offset = 10 if x.repeated != 0: if bt is None: bt = localtime(begin) et = localtime(end) bday = bt.tm_wday; begin2 = bday * 1440 + bt.tm_hour * 60 + bt.tm_min end2 = et.tm_wday * 1440 + et.tm_hour * 60 + et.tm_min if x.repeated & (1 << bday): xbt = localtime(x.begin) xet = localtime(timer_end) xbegin = bday * 1440 + xbt.tm_hour * 60 + xbt.tm_min xend = bday * 1440 + xet.tm_hour * 60 + xet.tm_min if xend < xbegin: xend += 1440 if begin2 < xbegin <= end2: if xend < end2: # recording within event time_match = (xend - xbegin) * 60 type = type_offset + 3 else: # recording last part of event time_match = (end2 - xbegin) * 60 type = type_offset + 1 elif xbegin <= begin2 <= xend: if xend < end2: # recording first part of event time_match = (xend - begin2) * 60 type = type_offset + 4 else: # recording whole event time_match = (end2 - begin2) * 60 type = type_offset + 2 else: if begin < x.begin <= end: if timer_end < end: # recording within event time_match = timer_end - x.begin type = type_offset + 3 else: # recording last part of event time_match = end - x.begin type = type_offset + 1 elif x.begin <= begin <= timer_end: if timer_end < end: # recording first part of event time_match = timer_end - begin type = type_offset + 4 else: # recording whole event time_match = end - begin type = type_offset + 2 if time_match: if type in (2,7,12): # When full recording do not look further returnValue = (time_match, [type]) break elif returnValue: if type not in returnValue[1]: returnValue[1].append(type) else: returnValue = (time_match, [type]) return returnValue def removeEntry(self, entry): print "[Timer] Remove " + str(entry) # avoid re-enqueuing entry.repeated = False # abort timer. # this sets the end time to current time, so timer will be stopped. 
entry.autoincrease = False entry.abort() if entry.state != entry.StateEnded: self.timeChanged(entry) print "state: ", entry.state print "in processed: ", entry in self.processed_timers print "in running: ", entry in self.timer_list # autoincrease instanttimer if possible if not entry.dontSave: for x in self.timer_list: if x.setAutoincreaseEnd(): self.timeChanged(x) # now the timer should be in the processed_timers list. remove it from there. self.processed_timers.remove(entry) self.saveTimer() def shutdown(self): self.saveTimer()<|fim▁end|>
# name (title) # description (description) # event data (ONLY for time adjustments etc.)
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
__author__ = 'mdavid'
<|file_name|>reducer_warehouses.js<|end_file_name|><|fim▁begin|>import { GET_WAREHOUSES_FULL_LIST, GET_WAREHOUSE, GET_COMPANIES, GET_SUPERVISORS } from '../actions/warehouses'; import humanize from 'humanize'; import Moment from 'moment'; const INITIAL_STATE = { warehousesList: [], warehouseDetail: {}, warehouseId: 0, companiesList: [], supervisorsList: [] }; export default function(state = INITIAL_STATE, action) { switch(action.type) { case GET_WAREHOUSES_FULL_LIST: return { ...state, warehousesList: action.payload.data.map(function(warehouse) { return { company: warehouse.company, supervisor: warehouse.supervisor, email: warehouse.email,<|fim▁hole|> action: warehouse.id }; }) }; case GET_WAREHOUSE: return { ...state, warehouseDetail: { company: action.payload.data[0].company_id, supervisor: action.payload.data[0].supervisor_id, name: action.payload.data[0].name, email: action.payload.data[0].email, telephone: action.payload.data[0].telephone, address: action.payload.data[0].address, tax: action.payload.data[0].tax, contact_name: action.payload.data[0].contact_name }, warehouseId: action.payload.data[0].id } case GET_COMPANIES: return { ...state, companiesList: action.payload.data.map(function(company) { return { value: company.id, label: company.name } }) } case GET_SUPERVISORS: return { ...state, supervisorsList: action.payload.data.map(function(supervisor) { return { value: supervisor.id, label: supervisor.name } }) } default: return state; } }<|fim▁end|>
telephone: warehouse.telephone, address: warehouse.address, contact_name: warehouse.contact_name,
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """The setup and build script for the python-telegram-bot library.""" import codecs import os from setuptools import setup, find_packages def requirements(): """Build the requirements list for this project""" requirements_list = [] with open('requirements.txt') as requirements: for install in requirements: requirements_list.append(install.strip()) return requirements_list<|fim▁hole|> with codecs.open('README.rst', 'r', 'utf-8') as fd: fn = os.path.join('telegram', 'version.py') with open(fn) as fh: code = compile(fh.read(), fn, 'exec') exec(code) setup(name='python-telegram-bot', version=__version__, author='Leandro Toledo', author_email='[email protected]', license='LGPLv3', url='https://python-telegram-bot.org/', keywords='python telegram bot api wrapper', description="We have made you a wrapper you can't refuse", long_description=fd.read(), packages=packages, install_requires=requirements(), extras_require={ 'json': 'ujson', 'socks': 'PySocks' }, include_package_data=True, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Communications :: Chat', 'Topic :: Internet', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ],)<|fim▁end|>
packages = find_packages(exclude=['tests*'])
<|file_name|>test_commands.py<|end_file_name|><|fim▁begin|># coding: utf-8 from django import VERSION from django.core.management import call_command from ._compat import patch CELERYD_COMMAND = 'djcelery.management.commands.celeryd.Command.handle' def test_celeryd_command(): if VERSION >= (1, 10): traceback = False else: traceback = None with patch(CELERYD_COMMAND) as handle: call_command('celeryd')<|fim▁hole|> autoreload=None, autoscale=None, beat=None, broker=None, concurrency=0, detach=None, exclude_queues=[], executable=None, gid=None, heartbeat_interval=None, hostname=None, include=[], logfile=None, loglevel='WARN', max_tasks_per_child=None, no_color=False, no_execv=False, optimization=None, pidfile=None, pool_cls='prefork', purge=False, pythonpath=None, queues=[], quiet=None, schedule_filename='celerybeat-schedule', scheduler_cls=None, send_events=False, settings=None, skip_checks=True, state_db=None, task_soft_time_limit=None, task_time_limit=None, traceback=traceback, uid=None, umask=None, verbosity=1, without_gossip=False, without_heartbeat=False, without_mingle=False, working_directory=None )<|fim▁end|>
handle.assert_called_with(
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from datetime import timedelta import logging import os import re import time from django.conf import settings from django.db import models from django.db.models import Q from django.utils.crypto import get_random_string from django.utils.encoding import python_2_unicode_compatible from django.utils.timezone import now from django.utils.translation import ugettext_lazy as _ import requests from mama_cas.compat import Session from mama_cas.exceptions import InvalidProxyCallback from mama_cas.exceptions import InvalidRequest from mama_cas.exceptions import InvalidService from mama_cas.exceptions import InvalidTicket from mama_cas.exceptions import UnauthorizedServiceProxy from mama_cas.exceptions import ValidationError from mama_cas.request import SingleSignOutRequest from mama_cas.services import get_logout_url from mama_cas.services import logout_allowed from mama_cas.services import service_allowed from mama_cas.services import proxy_allowed from mama_cas.services import proxy_callback_allowed from mama_cas.utils import add_query_params from mama_cas.utils import clean_service_url from mama_cas.utils import is_scheme_https from mama_cas.utils import match_service logger = logging.getLogger(__name__) class TicketManager(models.Manager): def create_ticket(self, ticket=None, **kwargs): """ Create a new ``Ticket``. Additional arguments are passed to the ``create()`` function. Return the newly created ``Ticket``. """ if not ticket: ticket = self.create_ticket_str() if 'service' in kwargs: kwargs['service'] = clean_service_url(kwargs['service']) if 'expires' not in kwargs: expires = now() + timedelta(seconds=self.model.TICKET_EXPIRE) kwargs['expires'] = expires t = self.create(ticket=ticket, **kwargs) logger.debug("Created %s %s" % (t.name, t.ticket)) return t def create_ticket_str(self, prefix=None): """ Generate a sufficiently opaque ticket string to ensure the ticket is not guessable. If a prefix is provided, prepend it to the string. """ if not prefix: prefix = self.model.TICKET_PREFIX return "%s-%d-%s" % (prefix, int(time.time()), get_random_string(length=self.model.TICKET_RAND_LEN)) def validate_ticket(self, ticket, service, renew=False, require_https=False): """ Given a ticket string and service identifier, validate the corresponding ``Ticket``. If validation succeeds, return the ``Ticket``. If validation fails, raise an appropriate error. If ``renew`` is ``True``, ``ServiceTicket`` validation will only succeed if the ticket was issued from the presentation of the user's primary credentials. If ``require_https`` is ``True``, ``ServiceTicket`` validation will only succeed if the service URL scheme is HTTPS. 
""" if not ticket: raise InvalidRequest("No ticket string provided") if not self.model.TICKET_RE.match(ticket): raise InvalidTicket("Ticket string %s is invalid" % ticket) try: t = self.get(ticket=ticket) except self.model.DoesNotExist: raise InvalidTicket("Ticket %s does not exist" % ticket) if t.is_consumed(): raise InvalidTicket("%s %s has already been used" % (t.name, ticket)) if t.is_expired(): raise InvalidTicket("%s %s has expired" % (t.name, ticket)) if not service: raise InvalidRequest("No service identifier provided") if require_https and not is_scheme_https(service): raise InvalidService("Service %s is not HTTPS" % service) if not service_allowed(service): raise InvalidService("Service %s is not a valid %s URL" % (service, t.name)) try: if not match_service(t.service, service): raise InvalidService("%s %s for service %s is invalid for " "service %s" % (t.name, ticket, t.service, service)) except AttributeError: pass try: if renew and not t.is_primary(): raise InvalidTicket("%s %s was not issued via primary " "credentials" % (t.name, ticket)) except AttributeError: pass logger.debug("Validated %s %s" % (t.name, ticket)) return t def delete_invalid_tickets(self): """ Delete consumed or expired ``Ticket``s that are not referenced by other ``Ticket``s. Invalid tickets are no longer valid for authentication and can be safely deleted. A custom management command is provided that executes this method on all applicable models by running ``manage.py cleanupcas``. """ for ticket in self.filter(Q(consumed__isnull=False) | Q(expires__lte=now())).order_by('-expires'): try: ticket.delete() except models.ProtectedError: pass def consume_tickets(self, user): """ Consume all valid ``Ticket``s for a specified user. This is run when the user logs out to ensure all issued tickets are no longer valid for future authentication attempts. """ for ticket in self.filter(user=user, consumed__isnull=True, expires__gt=now()): ticket.consume() @python_2_unicode_compatible class Ticket(models.Model): """ ``Ticket`` is an abstract base class implementing common methods and fields for CAS tickets. """ TICKET_EXPIRE = getattr(settings, 'MAMA_CAS_TICKET_EXPIRE', 90) TICKET_RAND_LEN = getattr(settings, 'MAMA_CAS_TICKET_RAND_LEN', 32) TICKET_RE = re.compile("^[A-Z]{2,3}-[0-9]{10,}-[a-zA-Z0-9]{%d}$" % TICKET_RAND_LEN) ticket = models.CharField(_('ticket'), max_length=255, unique=True) user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'), on_delete=models.CASCADE) expires = models.DateTimeField(_('expires')) consumed = models.DateTimeField(_('consumed'), null=True) objects = TicketManager() class Meta: abstract = True def __str__(self): return self.ticket @property def name(self): return self._meta.verbose_name def consume(self): """ Consume a ``Ticket`` by populating the ``consumed`` field with the current datetime. A consumed ``Ticket`` is invalid for future authentication attempts. """ self.consumed = now() self.save() def is_consumed(self): """ Check a ``Ticket``s consumed state, consuming it in the process. """ if self.consumed is None: self.consume() return False return True def is_expired(self): """ Check a ``Ticket``s expired state. Return ``True`` if the ticket is expired, and ``False`` otherwise. """ return self.expires <= now() class ServiceTicketManager(TicketManager): def request_sign_out(self, user): """ Send a single logout request to each service accessed by a specified user. This is called at logout when single logout is enabled. 
If requests-futures is installed, asynchronous requests will be sent. Otherwise, synchronous requests will be sent. """ session = Session() for ticket in self.filter(user=user, consumed__gte=user.last_login): ticket.request_sign_out(session=session) class ServiceTicket(Ticket): """ (3.1) A ``ServiceTicket`` is used by the client as a credential to obtain access to a service. It is obtained upon a client's presentation of credentials and a service identifier to /login. """ TICKET_PREFIX = 'ST' service = models.CharField(_('service'), max_length=255) primary = models.BooleanField(_('primary'), default=False)<|fim▁hole|> class Meta: verbose_name = _('service ticket') verbose_name_plural = _('service tickets') def is_primary(self): """ Check the credential origin for a ``ServiceTicket``. If the ticket was issued from the presentation of the user's primary credentials, return ``True``, otherwise return ``False``. """ if self.primary: return True return False def request_sign_out(self, session=requests): """ Send a POST request to the ``ServiceTicket``s logout URL to request sign-out. """ if logout_allowed(self.service): request = SingleSignOutRequest(context={'ticket': self}) url = get_logout_url(self.service) or self.service session.post(url, data={'logoutRequest': request.render_content()}) logger.info("Single sign-out request sent to %s" % url) class ProxyTicket(Ticket): """ (3.2) A ``ProxyTicket`` is used by a service as a credential to obtain access to a back-end service on behalf of a client. It is obtained upon a service's presentation of a ``ProxyGrantingTicket`` and a service identifier. """ TICKET_PREFIX = 'PT' service = models.CharField(_('service'), max_length=255) granted_by_pgt = models.ForeignKey('ProxyGrantingTicket', verbose_name=_('granted by proxy-granting ticket'), on_delete=models.CASCADE) class Meta: verbose_name = _('proxy ticket') verbose_name_plural = _('proxy tickets') class ProxyGrantingTicketManager(TicketManager): def create_ticket(self, service, pgturl, **kwargs): """ When a ``pgtUrl`` parameter is provided to ``/serviceValidate`` or ``/proxyValidate``, attempt to create a new ``ProxyGrantingTicket``. If validation succeeds, create and return the ``ProxyGrantingTicket``. If validation fails, return ``None``. 
""" pgtid = self.create_ticket_str() pgtiou = self.create_ticket_str(prefix=self.model.IOU_PREFIX) try: self.validate_callback(service, pgturl, pgtid, pgtiou) except ValidationError as e: logger.warning("%s %s" % (e.code, e)) return None else: # pgtUrl validation succeeded, so create a new PGT with the # previously generated ticket strings return super(ProxyGrantingTicketManager, self).create_ticket(ticket=pgtid, iou=pgtiou, **kwargs) def validate_callback(self, service, pgturl, pgtid, pgtiou): """Verify the provided proxy callback URL.""" if not proxy_allowed(service): raise UnauthorizedServiceProxy("%s is not authorized to use proxy authentication" % service) if not is_scheme_https(pgturl): raise InvalidProxyCallback("Proxy callback %s is not HTTPS" % pgturl) if not proxy_callback_allowed(service, pgturl): raise InvalidProxyCallback("%s is not an authorized proxy callback URL" % pgturl) # Verify that the SSL certificate is valid verify = os.environ.get('REQUESTS_CA_BUNDLE', True) try: requests.get(pgturl, verify=verify, timeout=5) except requests.exceptions.SSLError: raise InvalidProxyCallback("SSL certificate validation failed for proxy callback %s" % pgturl) except requests.exceptions.RequestException as e: raise InvalidProxyCallback(e) # Callback certificate appears valid, so send the ticket strings pgturl = add_query_params(pgturl, {'pgtId': pgtid, 'pgtIou': pgtiou}) try: response = requests.get(pgturl, verify=verify, timeout=5) except requests.exceptions.RequestException as e: raise InvalidProxyCallback(e) try: response.raise_for_status() except requests.exceptions.HTTPError as e: raise InvalidProxyCallback("Proxy callback %s returned %s" % (pgturl, e)) class ProxyGrantingTicket(Ticket): """ (3.3) A ``ProxyGrantingTicket`` is used by a service to obtain proxy tickets for obtaining access to a back-end service on behalf of a client. It is obtained upon validation of a ``ServiceTicket`` or a ``ProxyTicket``. """ TICKET_PREFIX = 'PGT' IOU_PREFIX = 'PGTIOU' TICKET_EXPIRE = getattr(settings, 'SESSION_COOKIE_AGE') iou = models.CharField(_('iou'), max_length=255, unique=True) granted_by_st = models.ForeignKey(ServiceTicket, null=True, blank=True, on_delete=models.PROTECT, verbose_name=_('granted by service ticket')) granted_by_pt = models.ForeignKey(ProxyTicket, null=True, blank=True, on_delete=models.PROTECT, verbose_name=_('granted by proxy ticket')) objects = ProxyGrantingTicketManager() class Meta: verbose_name = _('proxy-granting ticket') verbose_name_plural = _('proxy-granting tickets') def is_consumed(self): """Check a ``ProxyGrantingTicket``s consumed state.""" return self.consumed is not None<|fim▁end|>
objects = ServiceTicketManager()
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- ################################################################################ # # # Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################ import clv_insured_ext import category import clv_insured import clv_insured_card import clv_tag import res_partner import wkf import history<|fim▁end|>
# GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License #
<|file_name|>resnet.py<|end_file_name|><|fim▁begin|># Copyright 2021 DeepMind Technologies Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """ResNet model family.""" import functools import haiku as hk import jax import jax.numpy as jnp from nfnets import base class ResNet(hk.Module): """ResNetv2 Models.""" variant_dict = {'ResNet50': {'depth': [3, 4, 6, 3]}, 'ResNet101': {'depth': [3, 4, 23, 3]}, 'ResNet152': {'depth': [3, 8, 36, 3]}, 'ResNet200': {'depth': [3, 24, 36, 3]}, 'ResNet288': {'depth': [24, 24, 24, 24]}, 'ResNet600': {'depth': [50, 50, 50, 50]}, } def __init__(self, width, num_classes, variant='ResNet50', which_norm='BatchNorm', norm_kwargs=None, activation='relu', drop_rate=0.0, fc_init=jnp.zeros, conv_kwargs=None, preactivation=True, use_se=False, se_ratio=0.25, name='ResNet'): super().__init__(name=name) self.width = width self.num_classes = num_classes self.variant = variant self.depth_pattern = self.variant_dict[variant]['depth'] self.activation = getattr(jax.nn, activation) self.drop_rate = drop_rate self.which_norm = getattr(hk, which_norm) if norm_kwargs is not None: self.which_norm = functools.partial(self.which_norm, **norm_kwargs) if conv_kwargs is not None: self.which_conv = functools.partial(hk.Conv2D, **conv_kwargs) else: self.which_conv = hk.Conv2D self.preactivation = preactivation # Stem self.initial_conv = self.which_conv(16 * self.width, kernel_shape=7, stride=2, padding='SAME', with_bias=False, name='initial_conv') if not self.preactivation: self.initial_bn = self.which_norm(name='initial_bn') which_block = ResBlockV2 if self.preactivation else ResBlockV1 # Body self.blocks = [] for multiplier, blocks_per_stage, stride in zip([64, 128, 256, 512], self.depth_pattern, [1, 2, 2, 2]): for block_index in range(blocks_per_stage): self.blocks += [which_block(multiplier * self.width, use_projection=block_index == 0, stride=stride if block_index == 0 else 1, activation=self.activation, which_norm=self.which_norm, which_conv=self.which_conv, use_se=use_se, se_ratio=se_ratio)] # Head self.final_bn = self.which_norm(name='final_bn') self.fc = hk.Linear(self.num_classes, w_init=fc_init, with_bias=True) def __call__(self, x, is_training, test_local_stats=False, return_metrics=False): """Return the output of the final layer without any [log-]softmax.""" outputs = {} # Stem out = self.initial_conv(x) if not self.preactivation: out = self.activation(self.initial_bn(out, is_training, test_local_stats)) out = hk.max_pool(out, window_shape=(1, 3, 3, 1), strides=(1, 2, 2, 1), padding='SAME') if return_metrics: outputs.update(base.signal_metrics(out, 0)) # Blocks for i, block in enumerate(self.blocks): out, res_var = block(out, is_training, test_local_stats) if return_metrics: outputs.update(base.signal_metrics(out, i + 1)) outputs[f'res_avg_var_{i}'] = res_var if self.preactivation: out = self.activation(self.final_bn(out, is_training, test_local_stats)) # Pool, dropout, classify pool 
= jnp.mean(out, axis=[1, 2]) # Return pool before dropout in case we want to regularize it separately. outputs['pool'] = pool # Optionally apply dropout if self.drop_rate > 0.0 and is_training: pool = hk.dropout(hk.next_rng_key(), self.drop_rate, pool) outputs['logits'] = self.fc(pool) return outputs class ResBlockV2(hk.Module): """ResNet preac block, 1x1->3x3->1x1 with strides and shortcut downsample.""" def __init__(self, out_ch, stride=1, use_projection=False, activation=jax.nn.relu, which_norm=hk.BatchNorm, which_conv=hk.Conv2D, use_se=False, se_ratio=0.25, name=None): super().__init__(name=name) self.out_ch = out_ch self.stride = stride self.use_projection = use_projection self.activation = activation self.which_norm = which_norm self.which_conv = which_conv self.use_se = use_se self.se_ratio = se_ratio self.width = self.out_ch // 4 self.bn0 = which_norm(name='bn0') self.conv0 = which_conv(self.width, kernel_shape=1, with_bias=False, padding='SAME', name='conv0') self.bn1 = which_norm(name='bn1') self.conv1 = which_conv(self.width, stride=self.stride, kernel_shape=3, with_bias=False, padding='SAME', name='conv1') self.bn2 = which_norm(name='bn2') self.conv2 = which_conv(self.out_ch, kernel_shape=1, with_bias=False, padding='SAME', name='conv2') if self.use_projection: self.conv_shortcut = which_conv(self.out_ch, stride=stride, kernel_shape=1, with_bias=False, padding='SAME', name='conv_shortcut') if self.use_se: self.se = base.SqueezeExcite(self.out_ch, self.out_ch, self.se_ratio) def __call__(self, x, is_training, test_local_stats): bn_args = (is_training, test_local_stats) out = self.activation(self.bn0(x, *bn_args)) if self.use_projection: shortcut = self.conv_shortcut(out) else: shortcut = x out = self.conv0(out) out = self.conv1(self.activation(self.bn1(out, *bn_args))) out = self.conv2(self.activation(self.bn2(out, *bn_args))) if self.use_se:<|fim▁hole|> class ResBlockV1(ResBlockV2): """Post-Ac Residual Block.""" def __call__(self, x, is_training, test_local_stats): bn_args = (is_training, test_local_stats) if self.use_projection: shortcut = self.conv_shortcut(x) shortcut = self.which_norm(name='shortcut_bn')(shortcut, *bn_args) else: shortcut = x out = self.activation(self.bn0(self.conv0(x), *bn_args)) out = self.activation(self.bn1(self.conv1(out), *bn_args)) out = self.bn2(self.conv2(out), *bn_args) if self.use_se: out = self.se(out) * out res_avg_var = jnp.mean(jnp.var(out, axis=[0, 1, 2])) return self.activation(out + shortcut), res_avg_var<|fim▁end|>
out = self.se(out) * out # Get average residual standard deviation for reporting metrics. res_avg_var = jnp.mean(jnp.var(out, axis=[0, 1, 2])) return out + shortcut, res_avg_var
<|file_name|>slice-panic-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// except according to those terms. // Test that if a slicing expr[..] fails, the correct cleanups happen. // pretty-expanded FIXME #23616 use std::thread; struct Foo; static mut DTOR_COUNT: isize = 0; impl Drop for Foo { fn drop(&mut self) { unsafe { DTOR_COUNT += 1; } } } fn foo() { let x: &[_] = &[Foo, Foo]; &x[3..4]; } fn main() { let _ = thread::spawn(move|| foo()).join(); unsafe { assert!(DTOR_COUNT == 2); } }<|fim▁end|>
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
<|file_name|>nds-NL.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js const u = undefined; function plural(n: number): number { return 5; } export default [ 'nds-NL', [['AM', 'PM'], u, u], u, [['S', 'M', 'T', 'W', 'T', 'F', 'S'], ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'], u, u], u, [ ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'], ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', 'M12'], u ], u, [['BCE', 'CE'], u, u], 1, [6, 0], ['y-MM-dd', 'y MMM d', 'y MMMM d', 'y MMMM d, EEEE'], ['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'], ['{1} {0}', u, u, u], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'],<|fim▁hole|> 'EUR', '€', 'EUR', {'JPY': ['JP¥', '¥'], 'USD': ['US$', '$']}, 'ltr', plural ];<|fim▁end|>
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>use super::ResponseInfo; use std::net::SocketAddr; use xml_rpc::{self, rouille, Value}; use super::{Response, ResponseError}; pub struct Server { server: xml_rpc::Server, } impl Default for Server { fn default() -> Self { let mut server = xml_rpc::Server::default(); server.set_on_missing(on_missing); Server { server } } } impl Server { #[inline] pub fn register_value<T>(&mut self, name: impl Into<String>, msg: &'static str, handler: T) where T: Fn(xml_rpc::Params) -> Response<Value> + Send + Sync + 'static, { self.server.register_value(name, move |args| { let response = handler(args); let response_info = ResponseInfo::from_response(response, msg); response_info.into() }) } #[inline] pub fn bind( self, uri: &SocketAddr, ) -> xml_rpc::error::Result< xml_rpc::server::BoundServer< impl Fn(&rouille::Request) -> rouille::Response + Send + Sync + 'static, >, > { self.server.bind(uri) } } #[allow(clippy::needless_pass_by_value)] #[inline] fn on_missing(_params: xml_rpc::Params) -> xml_rpc::Response { let error_message = ResponseError::Client("Bad method requested".into());<|fim▁hole|><|fim▁end|>
let info = ResponseInfo::from_response_error(error_message); info.into() }
<|file_name|>environment.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Google Inc. # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> import threading from ggrc import db from ggrc.app import app from ggrc.models import create_db, drop_db from wsgiref.simple_server import make_server from ggrc import settings use_migrations = True def before_all(context): context.base_url = 'http://localhost:9000' create_db(use_migrations) app.debug = False app.testing = True if getattr(settings, 'MEMCACHE_MECHANISM', False) is True: from google.appengine.api import memcache from google.appengine.ext import testbed context.testbed = testbed.Testbed() context.testbed.activate() context.testbed.init_memcache_stub() context.query_count = 0 def increment_query_count(conn, clauseelement, multiparams, params): context.query_count += 1 from sqlalchemy import event event.listen(db.engine, "before_execute", increment_query_count) context.server = make_server('', 9000, app) context.thread = threading.Thread(target=context.server.serve_forever) context.thread.start()<|fim▁hole|> context.server.shutdown() context.thread.join() db.session.remove() drop_db(use_migrations) if getattr(settings, 'MEMCACHE_MECHANISM', False) is True: from google.appengine.api import memcache from google.appengine.ext import testbed context.testbed.deactivate()<|fim▁end|>
def after_all(context):
<|file_name|>timezone_request.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/timezone/timezone_request.h" #include <string> #include "base/json/json_reader.h" #include "base/metrics/histogram.h" #include "base/metrics/sparse_histogram.h" #include "base/strings/string_number_conversions.h" #include "base/strings/stringprintf.h" #include "base/time/time.h" #include "base/values.h" #include "content/public/common/geoposition.h" #include "google_apis/google_api_keys.h" #include "net/base/escape.h" #include "net/base/load_flags.h" #include "net/http/http_status_code.h" #include "net/url_request/url_fetcher.h" #include "net/url_request/url_request_context_getter.h" #include "net/url_request/url_request_status.h" namespace chromeos { namespace { const char kDefaultTimezoneProviderUrl[] = "https://maps.googleapis.com/maps/api/timezone/json?"; const char kKeyString[] = "key"; // Language parameter is unsupported for now. // const char kLanguageString[] = "language"; const char kLocationString[] = "location"; const char kSensorString[] = "sensor"; const char kTimestampString[] = "timestamp"; const char kDstOffsetString[] = "dstOffset"; const char kRawOffsetString[] = "rawOffset"; const char kTimeZoneIdString[] = "timeZoneId"; const char kTimeZoneNameString[] = "timeZoneName"; const char kStatusString[] = "status"; const char kErrorMessageString[] = "error_message"; // Sleep between timezone request retry on HTTP error. const unsigned int kResolveTimeZoneRetrySleepOnServerErrorSeconds = 5; // Sleep between timezone request retry on bad server response. const unsigned int kResolveTimeZoneRetrySleepBadResponseSeconds = 10;<|fim▁hole|>}; const StatusString2Enum statusString2Enum[] = { {"OK", TimeZoneResponseData::OK}, {"INVALID_REQUEST", TimeZoneResponseData::INVALID_REQUEST}, {"OVER_QUERY_LIMIT", TimeZoneResponseData::OVER_QUERY_LIMIT}, {"REQUEST_DENIED", TimeZoneResponseData::REQUEST_DENIED}, {"UNKNOWN_ERROR", TimeZoneResponseData::UNKNOWN_ERROR}, {"ZERO_RESULTS", TimeZoneResponseData::ZERO_RESULTS}, }; enum TimeZoneRequestEvent { // NOTE: Do not renumber these as that would confuse interpretation of // previously logged data. When making changes, also update the enum list // in tools/metrics/histograms/histograms.xml to keep it in sync. TIMEZONE_REQUEST_EVENT_REQUEST_START = 0, TIMEZONE_REQUEST_EVENT_RESPONSE_SUCCESS = 1, TIMEZONE_REQUEST_EVENT_RESPONSE_NOT_OK = 2, TIMEZONE_REQUEST_EVENT_RESPONSE_EMPTY = 3, TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED = 4, // NOTE: Add entries only immediately above this line. TIMEZONE_REQUEST_EVENT_COUNT = 5 }; enum TimeZoneRequestResult { // NOTE: Do not renumber these as that would confuse interpretation of // previously logged data. When making changes, also update the enum list // in tools/metrics/histograms/histograms.xml to keep it in sync. TIMEZONE_REQUEST_RESULT_SUCCESS = 0, TIMEZONE_REQUEST_RESULT_FAILURE = 1, TIMEZONE_REQUEST_RESULT_SERVER_ERROR = 2, TIMEZONE_REQUEST_RESULT_CANCELLED = 3, // NOTE: Add entries only immediately above this line. TIMEZONE_REQUEST_RESULT_COUNT = 4 }; // Too many requests (more than 1) mean there is a problem in implementation. 
void RecordUmaEvent(TimeZoneRequestEvent event) { UMA_HISTOGRAM_ENUMERATION( "TimeZone.TimeZoneRequest.Event", event, TIMEZONE_REQUEST_EVENT_COUNT); } void RecordUmaResponseCode(int code) { UMA_HISTOGRAM_SPARSE_SLOWLY("TimeZone.TimeZoneRequest.ResponseCode", code); } // Slow timezone resolve leads to bad user experience. void RecordUmaResponseTime(base::TimeDelta elapsed, bool success) { if (success) { UMA_HISTOGRAM_TIMES("TimeZone.TimeZoneRequest.ResponseSuccessTime", elapsed); } else { UMA_HISTOGRAM_TIMES("TimeZone.TimeZoneRequest.ResponseFailureTime", elapsed); } } void RecordUmaResult(TimeZoneRequestResult result, unsigned retries) { UMA_HISTOGRAM_ENUMERATION( "TimeZone.TimeZoneRequest.Result", result, TIMEZONE_REQUEST_RESULT_COUNT); UMA_HISTOGRAM_SPARSE_SLOWLY("TimeZone.TimeZoneRequest.Retries", retries); } // Creates the request url to send to the server. GURL TimeZoneRequestURL(const GURL& url, const content::Geoposition& geoposition, bool sensor) { std::string query(url.query()); query += base::StringPrintf( "%s=%f,%f", kLocationString, geoposition.latitude, geoposition.longitude); if (url == DefaultTimezoneProviderURL()) { std::string api_key = google_apis::GetAPIKey(); if (!api_key.empty()) { query += "&"; query += kKeyString; query += "="; query += net::EscapeQueryParamValue(api_key, true); } } if (!geoposition.timestamp.is_null()) { query += base::StringPrintf( "&%s=%ld", kTimestampString, geoposition.timestamp.ToTimeT()); } query += "&"; query += kSensorString; query += "="; query += (sensor ? "true" : "false"); GURL::Replacements replacements; replacements.SetQueryStr(query); return url.ReplaceComponents(replacements); } void PrintTimeZoneError(const GURL& server_url, const std::string& message, TimeZoneResponseData* timezone) { timezone->status = TimeZoneResponseData::REQUEST_ERROR; timezone->error_message = base::StringPrintf("TimeZone provider at '%s' : %s.", server_url.GetOrigin().spec().c_str(), message.c_str()); LOG(WARNING) << "TimeZoneRequest::GetTimeZoneFromResponse() : " << timezone->error_message; } // Parses the server response body. Returns true if parsing was successful. // Sets |*timezone| to the parsed TimeZone if a valid timezone was received, // otherwise leaves it unchanged. bool ParseServerResponse(const GURL& server_url, const std::string& response_body, TimeZoneResponseData* timezone) { DCHECK(timezone); if (response_body.empty()) { PrintTimeZoneError(server_url, "Server returned empty response", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_EMPTY); return false; } VLOG(1) << "TimeZoneRequest::ParseServerResponse() : Parsing response " << response_body; // Parse the response, ignoring comments. 
std::string error_msg; scoped_ptr<base::Value> response_value(base::JSONReader::ReadAndReturnError( response_body, base::JSON_PARSE_RFC, NULL, &error_msg)); if (response_value == NULL) { PrintTimeZoneError(server_url, "JSONReader failed: " + error_msg, timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } const base::DictionaryValue* response_object = NULL; if (!response_value->GetAsDictionary(&response_object)) { PrintTimeZoneError(server_url, "Unexpected response type : " + base::StringPrintf("%u", response_value->GetType()), timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } std::string status; if (!response_object->GetStringWithoutPathExpansion(kStatusString, &status)) { PrintTimeZoneError(server_url, "Missing status attribute.", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } bool found = false; for (size_t i = 0; i < arraysize(statusString2Enum); ++i) { if (status != statusString2Enum[i].string) continue; timezone->status = statusString2Enum[i].value; found = true; break; } if (!found) { PrintTimeZoneError( server_url, "Bad status attribute value: '" + status + "'", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } const bool status_ok = (timezone->status == TimeZoneResponseData::OK); if (!response_object->GetDoubleWithoutPathExpansion(kDstOffsetString, &timezone->dstOffset) && status_ok) { PrintTimeZoneError(server_url, "Missing dstOffset attribute.", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } if (!response_object->GetDoubleWithoutPathExpansion(kRawOffsetString, &timezone->rawOffset) && status_ok) { PrintTimeZoneError(server_url, "Missing rawOffset attribute.", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } if (!response_object->GetStringWithoutPathExpansion(kTimeZoneIdString, &timezone->timeZoneId) && status_ok) { PrintTimeZoneError(server_url, "Missing timeZoneId attribute.", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } if (!response_object->GetStringWithoutPathExpansion( kTimeZoneNameString, &timezone->timeZoneName) && status_ok) { PrintTimeZoneError(server_url, "Missing timeZoneName attribute.", timezone); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_MALFORMED); return false; } // "error_message" field is optional. Ignore result. response_object->GetStringWithoutPathExpansion(kErrorMessageString, &timezone->error_message); return true; } // Attempts to extract a position from the response. Detects and indicates // various failure cases. scoped_ptr<TimeZoneResponseData> GetTimeZoneFromResponse( bool http_success, int status_code, const std::string& response_body, const GURL& server_url) { scoped_ptr<TimeZoneResponseData> timezone(new TimeZoneResponseData); // HttpPost can fail for a number of reasons. Most likely this is because // we're offline, or there was no response. 
if (!http_success) { PrintTimeZoneError(server_url, "No response received", timezone.get()); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_EMPTY); return timezone.Pass(); } if (status_code != net::HTTP_OK) { std::string message = "Returned error code "; message += base::IntToString(status_code); PrintTimeZoneError(server_url, message, timezone.get()); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_NOT_OK); return timezone.Pass(); } if (!ParseServerResponse(server_url, response_body, timezone.get())) return timezone.Pass(); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_RESPONSE_SUCCESS); return timezone.Pass(); } } // namespace TimeZoneResponseData::TimeZoneResponseData() : dstOffset(0), rawOffset(0), status(ZERO_RESULTS) { } GURL DefaultTimezoneProviderURL() { return GURL(kDefaultTimezoneProviderUrl); } TimeZoneRequest::TimeZoneRequest( net::URLRequestContextGetter* url_context_getter, const GURL& service_url, const content::Geoposition& geoposition, bool sensor, base::TimeDelta retry_timeout) : url_context_getter_(url_context_getter), service_url_(service_url), geoposition_(geoposition), sensor_(sensor), retry_timeout_abs_(base::Time::Now() + retry_timeout), retries_(0) { } TimeZoneRequest::~TimeZoneRequest() { DCHECK(thread_checker_.CalledOnValidThread()); // If callback is not empty, request is cancelled. if (!callback_.is_null()) { RecordUmaResponseTime(base::Time::Now() - request_started_at_, false); RecordUmaResult(TIMEZONE_REQUEST_RESULT_CANCELLED, retries_); } } void TimeZoneRequest::StartRequest() { DCHECK(thread_checker_.CalledOnValidThread()); RecordUmaEvent(TIMEZONE_REQUEST_EVENT_REQUEST_START); request_started_at_ = base::Time::Now(); ++retries_; url_fetcher_.reset( net::URLFetcher::Create(request_url_, net::URLFetcher::GET, this)); url_fetcher_->SetRequestContext(url_context_getter_); url_fetcher_->SetLoadFlags(net::LOAD_BYPASS_CACHE | net::LOAD_DISABLE_CACHE | net::LOAD_DO_NOT_SAVE_COOKIES | net::LOAD_DO_NOT_SEND_COOKIES | net::LOAD_DO_NOT_SEND_AUTH_DATA); url_fetcher_->Start(); } void TimeZoneRequest::MakeRequest(TimeZoneResponseCallback callback) { callback_ = callback; request_url_ = TimeZoneRequestURL(service_url_, geoposition_, false /* sensor */); StartRequest(); } void TimeZoneRequest::Retry(bool server_error) { const base::TimeDelta delay = base::TimeDelta::FromSeconds( server_error ? kResolveTimeZoneRetrySleepOnServerErrorSeconds : kResolveTimeZoneRetrySleepBadResponseSeconds); timezone_request_scheduled_.Start( FROM_HERE, delay, this, &TimeZoneRequest::StartRequest); } void TimeZoneRequest::OnURLFetchComplete(const net::URLFetcher* source) { DCHECK_EQ(url_fetcher_.get(), source); net::URLRequestStatus status = source->GetStatus(); int response_code = source->GetResponseCode(); RecordUmaResponseCode(response_code); std::string data; source->GetResponseAsString(&data); scoped_ptr<TimeZoneResponseData> timezone = GetTimeZoneFromResponse( status.is_success(), response_code, data, source->GetURL()); const bool server_error = !status.is_success() || (response_code >= 500 && response_code < 600); url_fetcher_.reset(); DVLOG(1) << "TimeZoneRequest::OnURLFetchComplete(): timezone={" << timezone->ToStringForDebug() << "}"; const base::Time now = base::Time::Now(); const bool retry_timeout = (now >= retry_timeout_abs_); const bool success = (timezone->status == TimeZoneResponseData::OK); if (!success && !retry_timeout) { Retry(server_error); return; } RecordUmaResponseTime(base::Time::Now() - request_started_at_, success); const TimeZoneRequestResult result = (server_error ? 
TIMEZONE_REQUEST_RESULT_SERVER_ERROR : (success ? TIMEZONE_REQUEST_RESULT_SUCCESS : TIMEZONE_REQUEST_RESULT_FAILURE)); RecordUmaResult(result, retries_); TimeZoneResponseCallback callback = callback_; // Empty callback is used to identify "completed or not yet started request". callback_.Reset(); // callback.Run() usually destroys TimeZoneRequest, because this is the way // callback is implemented in TimeZoneProvider. callback.Run(timezone.Pass(), server_error); // "this" is already destroyed here. } std::string TimeZoneResponseData::ToStringForDebug() const { static const char* const status2string[] = { "OK", "INVALID_REQUEST", "OVER_QUERY_LIMIT", "REQUEST_DENIED", "UNKNOWN_ERROR", "ZERO_RESULTS", "REQUEST_ERROR" }; return base::StringPrintf( "dstOffset=%f, rawOffset=%f, timeZoneId='%s', timeZoneName='%s', " "error_message='%s', status=%u (%s)", dstOffset, rawOffset, timeZoneId.c_str(), timeZoneName.c_str(), error_message.c_str(), (unsigned)status, (status < arraysize(status2string) ? status2string[status] : "unknown")); }; } // namespace chromeos<|fim▁end|>
struct StatusString2Enum { const char* string; TimeZoneResponseData::Status value;
<|file_name|>configurable_component.rs<|end_file_name|><|fim▁begin|>use crate::components::ComponentInner; impl<M, E> printspool_config_form::Configurable<M> for ComponentInner<M, E> where M: printspool_config_form::Model, E: Default, {<|fim▁hole|> fn model(&self) -> &M { &self.model } fn model_version(&self) -> i32 { self.model_version } }<|fim▁end|>
fn id(&self) -> async_graphql::ID { format!("component-{}", self.id).into() }
<|file_name|>state_script.rs<|end_file_name|><|fim▁begin|>use crate::state::*; use std; use std::process::Command; struct StateScript { script_path: String, shared_state: SharedState, state_observer: StateObserver, } impl StateScript { fn new(script_path: &str, shared_state: SharedState) -> StateScript { let state_observer = shared_state.lock().add_observer(); StateScript { script_path: String::from(script_path), shared_state, state_observer, } } fn run_script(&self, state: StreamState, output: &str) { let result = Command::new(&self.script_path) .arg(state.as_str()) .arg(output)<|fim▁hole|> match result { Ok(status) => { if !status.success() { println!( "ERROR: {} {} failed with error code {}", self.script_path, state.as_str(), status.code().unwrap_or(0) ); } } Err(e) => println!("ERROR: Failed to run {}: {}", self.script_path, e), } } fn run(&mut self) { let mut stream_state; let mut output_name: String; { let state = self.shared_state.lock(); output_name = state.current_output().name.clone(); stream_state = state.state().stream_state; }; self.run_script(stream_state, output_name.as_str()); loop { match self.state_observer.recv() { Ok(StateChange::SelectOutput { output }) => { output_name = self.shared_state.lock().state().outputs[output] .name .clone(); } Ok(StateChange::SetStreamState { stream_state: new_stream_state, }) => { stream_state = new_stream_state; } Ok(_) => continue, Err(_) => return, }; self.run_script(stream_state, output_name.as_str()); } } } pub fn start_state_script_contoller(script_path: &str, shared_state: SharedState) { let mut c = StateScript::new(script_path, shared_state); std::thread::spawn(move || { c.run(); }); }<|fim▁end|>
.status();
<|file_name|>data-gift-service.js<|end_file_name|><|fim▁begin|>(function () { 'use strict'; angular .module("myApp.presents") .factory("dataGiftService", dataGiftService);<|fim▁hole|> var dataGiftService = { notification: notification, pleaseWaitDialog: pleaseWaitDialog }; function notification(status, msg, time) { if (angular.isUndefined(time)) time = 3000; $mdToast.show( $mdToast.simple() .textContent(msg) .position("top right") .hideDelay(time) .theme(status) ); } function pleaseWaitDialog(msg, parentEl) { if (angular.isUndefined(parentEl)) { parentEl = angular.element(document.body); } $mdDialog.show({ parent: parentEl, template: '<md-dialog class="wait-dialog" aria-label="Please wait">' + ' <md-dialog-content layout="column" layour-align="center center">' + '<h1 class="text-center">Proszę czekać</h1>' + '<md-content class="text-center">' + msg + '</md-content>'+ ' </md-dialog-content>' + '</md-dialog>' }); } return dataGiftService; } })();<|fim▁end|>
function dataGiftService($mdToast, $mdDialog) {