text
stringlengths
29
850k
# coding=utf-8 from __future__ import unicode_literals import os from django.forms import widgets from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.conf import settings HTML = ( '<div class="s3direct" data-url="{policy_url}">' ' <div class="link-controls">' ' <a class="link" target="_blank" href="{file_url}">{file_name}</a>' ' <a class="remove" href="#remove">Очистить</a>' ' </div>' ' <div class="progress-controls">' ' <div class="progress progress-striped">' ' <div class="progress-bar progress-bar-success" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100">' ' </div>' ' <div class="info"></div>' ' </div>' ' <span class="abort btn btn-danger btn-sm">Отмена</span>' ' </div>' ' <div class="form-controls">' ' <input type="hidden" value="{file_url}" id="{element_id}" name="{name}" />' ' <input type="file" class="fileinput" />' ' </div>' '</div>' ) class S3DirectEditor(widgets.TextInput): class Media: js = ( 's3direct/js/jquery-1.10.2.min.js', 's3direct/js/jquery.iframe-transport.js', 's3direct/js/jquery.ui.widget.js', 's3direct/js/jquery.fileupload.js', 's3direct/js/s3direct.js', ) css = { 'all': ( 's3direct/css/bootstrap-progress.min.css', 's3direct/css/styles.css', ) } def __init__(self, *args, **kwargs): self.upload_to = kwargs.pop('upload_to', '') super(S3DirectEditor, self).__init__(*args, **kwargs) def render(self, name, value, attrs=None): final_attrs = self.build_attrs(attrs) element_id = final_attrs.get('id') kwargs = {'upload_to': self.upload_to} policy_url = reverse('s3direct', kwargs=kwargs) file_url = value if value else '' if hasattr(file_url, 'name'): file_url = file_url.name file_name = os.path.basename(file_url) output = HTML.format(policy_url=policy_url, file_url=file_url, file_name=file_name, element_id=element_id, name=name) return mark_safe(output)
Our Pittsburgh Maker Profile series has been on extended hiatus for a while now. This area is so rife with creativity that I feel I’ve been doing it a disservice; simply highlighting one maker a month really isn’t enough. So this September I’ll be highlighting a few dozen instead! You’ll know it’s a Pittsburgh creator by the little seal above that will mark each post. I first became aware of the flora-meets-fauna art of Ashley Cecil last spring when she created a line of scarves that sold at the Carnegie Museum of Art. She paints from live observation at renowned institutions, and marrying realism with abstract modern backgrounds is her signature style. Those two things alone garner loads of interest for me, but what makes it all work so well together is Ashley’s innate understanding of color and the way she knows when to keep it reined in or go all out. She’s also working to save birdlife with an innovative window film that helps birds see the surface rather than fly into it injuring, or even worse killing, themselves. Fall is almost here! I’m so ready, and so is my desktop. Get ready for August, because it’s here whether you’re ready or not! Back-to-school, football, and PSLs are just around the corner. As you may have noticed, there’s been a lot of emphasis on the exterior of my house lately. While I’m forever futzing around with things inside, the outside deserves a little more love because it doesn’t quite match my more modern and minimal aesthetic. I’d love to make it magazine ready by updating the facade and I’m hoping James Hardie siding is the golden ticket! My place was built back in 1900, and underneath the mishmash of siding and cedar shingles lies the original yellow brick. My hope is to one day pare all of this down to one material, in one color, going in the same direction – dreams! – so I checked out James Hardie’s line of plank siding. 
The James Hardie product line offers a great collection of profiles, textures, widths, and colors with character in the tradition of American home design, while their commitment to artistry and innovation allows for timeless designs and performance. My home deserves to stand out on the block and yours does, too. I ordered a selection of their HardiePlank siding samples – North America’s #1 brand – to check out in (left to right) Evening Blue, Pearl Gray, and Iron Gray. I desperately want to do a super dark gray monochromatic facade, but the idea of how much summer heat it will trap makes me nervous, so I also grabbed a light gray similar to what’s currently on most of my home. The blue is my wildcard (SO CRAZY). While the Iron Gray and Evening Blue have a wood-like texture, the Pearl Gray is smooth. James Hardie lets you express your personality through hundreds of inspiring color combinations with a color collection featuring both national colors and regional specialty colors. Advanced technology coats surfaces, edges, and features of each siding plank uniformly while multiple layers of color are baked onto each board for a great finish and a strong bond that resists chipping, peeling, cracking, and fading for years to come. James Hardie allows you to enjoy the peace of mind that comes from a single manufacturer covering your home’s finish with a 15-year limited warranty, and siding with an industry best 30-year non-prorated warranty. While I’m looking into ideas to update the exterior of my place, I’m not currently ready financially to undertake such a big project. Instead I’m focusing on smaller things that are more affordable yet still add curb appeal. I have a solid idea of what I’m looking to do on my home, but if you need some help check out James Hardie’s site for additional design inspiration. And if you’d like to check out their siding in person be sure and order some free samples to compare – choose from textured, smooth, or beaded. 
Their fiber cement products are engineered for climate, HZ5 products resist shrinking, swelling, and cracking even after years of wet or freezing conditions while HZ10 products resist damage from hot, humid conditions, blistering sun, and more. James Hardie’s products won’t be eaten by animals or insects and have been proven more fire resistant than wood or vinyl siding. With all of that assurance you can feel good about how you dress your home. This post sponsored by James Hardie. All words and opinions are my own. Thank you for supporting the brands that help Design Crush create fresh content! Follow James Hardie on Twitter, Facebook, Pinterest, Instagram, Houzz, and YouTube.
from hfss import get_active_project

# Script: builds a two-cavity + connecting-tunnel + chip geometry in a fresh
# HFSS EM design.
# NOTE(review): assumes an HFSS session is already running so that
# get_active_project() can return it — confirm against the hfss wrapper.
project = get_active_project()
design = project.new_em_design("TestDesign")
modeler = design.modeler

# Cavity dimensions (design variables, so they stay editable inside HFSS)
bx = design.set_variable("bx", "10mm")
by = design.set_variable("by", "25mm")
bz = design.set_variable("bz", "15mm")

# Tunnel dimensions
tx = design.set_variable("tx", "10mm")
ty = design.set_variable("ty", "1mm")
tz = design.set_variable("tz", "1mm")

# Chip thickness
cz = design.set_variable('cz', ".45mm")

def create_cavity(name):
    # Draw a box with a half-cylinder cap on each end (+y and -y), united
    # into a single solid. unite() keeps the first operand, so the box
    # object is returned as the handle for the whole cavity.
    box = modeler.draw_box_center([0, 0, 0], [bx, by, bz], name=name)
    cyl1 = modeler.draw_cylinder_center([0, by/2, 0], bx/2, bz, axis='Z')
    cyl2 = modeler.draw_cylinder_center([0, -by/2, 0], bx/2, bz, axis='Z')
    modeler.unite([box, cyl1, cyl2])
    return box

cav1 = create_cavity("Cavity1")
cav2 = create_cavity("Cavity2")

# Move the cavities apart by (tx+bx)/2 each so a tx-long tunnel centered at
# the origin exactly bridges the gap between them.
modeler.translate(cav1, [(tx+bx)/2, 0, 0])
modeler.translate(cav2, [-(tx+bx)/2, 0, 0])

tunnel = modeler.draw_box_center([0, 0, 0], [tx, ty, tz], name='Tunnel')
cav = modeler.unite([cav1, cav2, tunnel])

# Sapphire chip drawn from the tunnel's lower corner, cz thick in z.
chip = modeler.draw_box_corner([-tx/2, -ty/2, -tz/2], [tx, ty, cz], name='Chip', material='sapphire')

cav1.transparency = 1.0
The Biology of the Lion King: Engineers meet Biologists, Veterinarians, and Scientists! Engineers meet Biologists, Veterinarians, and Scientists! Alan Wilson has developed a structure and motion laboratory, which focuses on the study of animal motion and behavior to improve technology. The lab is designed to learn more about the structure of animals, and how this structure allows them to move. Alan Wilson asks questions about dogs, dinosaurs, elephants, and cheetahs. His dedicated team of vets, biologists, engineers, mathematicians, and scientists work together to develop new ideas to change the way we think about robotic systems. Increasing technological sophistication makes studying animals more precise than ever before, and scientists are able to measure movement with extreme definition. As we discussed in class, wildlife is a great resource for scientific study, and engineers can use their systems as models to design other systems. The structure and motion lab uses both fieldwork and laboratory work to study animal locomotion. The entire project is a collaborative effort that combines many different fields of science. Over the last ten years, Alan has been developing microtechnology to mount on animals for data collection. This microtechnology ranges from GPS tags to aerial drone systems to follow animals in the wild. This technology needs to be quiet, lightweight, and climate proof to survive in the harsh conditions of the wild. The most interesting part of this article for me is the Cheetah Robot design, which uses the aerodynamic body structure of the cheetah as inspiration for a lightweight, free-moving field robot. In class, we marveled at the magnificent speed and grace of the cheetah. I wondered how a creature created by nature could move so quickly and effortlessly in the savannah. This lab is actually studying this animal with such precision that scientists are actually coming closer to figuring out how the cheetah runs! Wow! 
The interdisciplinary study of this laboratory is inspiring! It is so great to see different fields coming together to ask questions about their surroundings. Reading this article made me reflect on my studies as an engineer. It has been so wonderful for me to come to Cape Town with a variety of Stanford students with different academic backgrounds to share ideas, and experience a new culture together.
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
Original author of the Tk application example: Jan Bodnar
last modified: December 2010
website: www.zetcode.com

Modified and extended to be a GDB GUI for MIPS.
(C) 2014 - Rafael Ignacio Zurita <[email protected]>
Read the README.md file for this program's license.
"""

import time
import sys
import random
from subprocess import Popen, PIPE, STDOUT
from Tkinter import *
from ttk import Frame, Button, Label, Style

# To extract a file name without its directory path
import ntpath

from ScrolledText import *
import tkFileDialog
import tkMessageBox


class Mipsx(Frame):
    # Main application frame. Builds the whole GUI and wires the menu
    # commands to a gdb-multiarch child process (module global `p`),
    # driven line-by-line through its stdin/stdout pipes.

    def __init__(self, parent):
        Frame.__init__(self, parent)

        self.parent = parent
        # True while the debugged program is running on the remote target
        self.ejecucion = False

        def prox_instruccion():
            # Step one instruction ("step 1") and refresh every pane.
            p.stdin.write('step 1\n')
            mostrar_en(area4, "proximo")
            estado()
            if self.ejecucion:
                memoria()
                registros()
                listado()

        def ejecutar():
            # Run until the debugged program finishes; salida() clears
            # self.ejecucion when gdb reports "No stack".
            while self.ejecucion:
                prox_instruccion()

        def salida(w, findelinea):
            # Copy gdb output into text widget `w` until the echo of the
            # marker string `findelinea` comes back.
            w.delete("1.0", END)
            a = p.stdout.readline()
            while not findelinea in a:
                # This is to know whether execution finished.
                # TODO: it should be removed from this method. Where to put it?
                if "No stack" in a:
                    self.ejecucion = False
                    w.insert(END,'\n\nEjecucion FINALIZADA\n\n')
                a = a.replace('(gdb) ', '')
                w.insert(END,a)
                a = p.stdout.readline()

        def mostrar_en(w, findelinea):
            # Send `findelinea` as a dummy gdb command so its echo acts as an
            # end-of-output marker, then collect the pending output into `w`.
            p.stdin.write(findelinea)
            p.stdin.write('\r\n')
            salida(w, findelinea)

        def mostrar_en_depuracion():
            # Append the helper scripts' session log to the debug pane.
            file = open("/tmp/archivotemp"+PUERTOyPS+".txt")
            contents = file.read()
            #area4.delete('1.0',END)
            area4.insert(END,contents)
            file.close()

        def memoria():
            # To show the data segment, the label "memoria" must be at the
            # beginning of the user's program.
            p.stdin.write('info address memoria\n')
            p.stdin.write('infomemoria\n')
            a = p.stdout.readline()
            solicitar_seg_de_datos = ""
            while not "infomemoria" in a:
                print "a : "+a
                if "Symbol " in a:
                    # Strip gdb's wording so only the raw address remains.
                    a = a.replace('(gdb) Symbol "memoria" is at ', '')
                    a = a.replace(' in a file compiled without debugging.','')
                    solicitar_seg_de_datos = "x/40xw "+a+"\n"
                a = p.stdout.readline()
            if solicitar_seg_de_datos == "":
                # No "memoria" label: fall back to dumping around $pc.
                p.stdin.write('x/40xw $pc\n')
            else:
                p.stdin.write(solicitar_seg_de_datos)
            p.stdin.write('x/40xw main\n')
            p.stdin.write('x/128 $sp - 128\n')
            mostrar_en(area3, "memoria")

        def estado():
            # Show the current frame plus the program's standard output
            # captured in the per-session temp file.
            p.stdin.write('info frame\n')
            mostrar_en(area4, "estado")
            file = open("/tmp/archivotemp"+PUERTOyPS+".txt")
            contents = file.readline()
            while not "Remote" in contents:
                print contents
                area4.insert(END,contents)
                contents = file.readline()
            area4.insert(END,"----------------------------------------\nSalida Estandar : \n\n")
            contents = file.read()
            file.close()
            area4.insert(END,contents)

        def registros():
            p.stdin.write('info register\n')
            mostrar_en(area1, "registros")

        def listado():
            # Source listing followed by the disassembly of the current frame.
            p.stdin.write('list 1,100\n')
            # p.stdin.write('disas main \n')
            p.stdin.write('disas \n')
            mostrar_en(area2, "listado")

        def compilarparasie():
            # Assemble the editor's contents for the SIE board via helper script.
            area4.delete('1.0',END)
            area4.insert('1.0',"Compilando para la SIE ...\r\n")
            root.update_idletasks()
            p.stdin.write('detach \n')
            guardar_archivo_a_compilar()
            tub = Popen(['mipsx_compilarparasie.sh', self.archivoacompilar, PUERTOyPS], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            mostrar_en_depuracion()
            if tub.returncode == 0:
                area4.insert(END, "Compilacion para la SIE OK\n")
            else:
                area4.insert(END, "ERROR al compilar y cargar")
                mostrar_en_depuracion()

        def compilarycargar():
            # Assemble, upload to the MIPS target, attach gdb to the remote
            # gdbserver and stop at main, then refresh all panes.
            area4.delete('1.0',END)
            area4.insert('1.0',"Compilando y Cargando ...\r\n")
            root.update_idletasks()
            p.stdin.write('detach \n')
            guardar_archivo_a_compilar()
            tub = Popen(['mipsx_compilarycargar.sh', self.archivoacompilar, PUERTOyPS], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            mostrar_en_depuracion()
            if tub.returncode == 0:
                area4.insert(END, "Compilacion y carga : OK\n")
                # ejecutable = self.archivoactual+".elf"
                # ejecutable = ntpath.basename(ejecutable)
                ejecutable = self.archivoacompilar+".elf"
                ejecutable = ntpath.basename(ejecutable)

                # We connect to the gdbserver
                # ip_mips="10.0.15.232"
                # ip_mips="192.168.0.71"
                ip_mips="10.0.15.50"
                #comando='target extended-remote '+ip_mips+':'+PUERTOyPS+'\n'
                comando='target remote '+ip_mips+':'+PUERTOyPS+'\n'
                p.stdin.write(comando)
                # gdbfile = 'set remote exec-file /tmp/'+ejecutable+'\n'
                # p.stdin.write(gdbfile)
                # We answer "y"es to reloading
                p.stdin.write('y \n')
                # We open the executable file with gdb
                gdbfile = 'file /tmp/'+ejecutable+'\n'
                p.stdin.write(gdbfile)
                # We answer "y"es to reloading
                p.stdin.write('y \n')
                p.stdin.write('delete \n')
                p.stdin.write('y \n')
                p.stdin.write('break main\n')
                # p.stdin.write('run\n')
                p.stdin.write('continue\n')
                self.ejecucion = True
                mostrar_en(area4,"estado")
                memoria()
                registros()
                listado()
            else:
                area4.insert(END, "ERROR al compilar y cargar")
                mostrar_en_depuracion()

        # Random TCP port, also used as the per-session file-name suffix so
        # concurrent sessions do not clash in /tmp.
        PUERTOyPS=str( random.randrange(4000,8000+1) )
        # PUERTOyPS="4567"

        self.parent.title("Mipsx - GUI for gdb multiarch")
        self.style = Style()
        self.style.theme_use("default")

        self.pack(fill=BOTH, expand=1)

        # To expand when the windows change size
        for i in range(3):
            self.columnconfigure(i, weight=1)
        for i in range(20):
            self.rowconfigure(i, weight=1)

        # --- GUI layout: labels + text areas in a 2-column grid ---
        lbl = Label(self, text="Registros GDB en MIPS - MR3020")
        lbl.grid(row=1,column=2, sticky=W, pady=4, padx=5)
        area1 = Text(self,height=12,width=80)
        area1.grid(row=2, column=2, columnspan=1, rowspan=5, sticky=E+W+S+N)

        lbl = Label(self, text="Programa en Assembler y Programa Binario Decodificado (disassemble)")
        lbl.grid(row=7, column=2, pady=1, padx=1, sticky=W+N+E+S)
        area2 = Text(self, height=6,width=80)
        area2.grid(row=8, column=2, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N)

        lbl = Label(self, text='Memoria - Segmento de datos (debe existir la etiqueta "memoria") - Segmento de texto - Pila')
        lbl.grid(row=13, column=2, pady=1, padx=1, sticky=W+N+E+S)
        area3 = Text(self,height=15,width=80)
        area3.grid(row=14, column=2, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N)

        lbl4 = Label(self, text="Mensajes de Depuracion")
        lbl4.grid(row=13, column=0, pady=1, padx=1, sticky=W+N+E+S)
        area4 = Text(self,height=8,width=60)
        area4.grid(row=14, column=0, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N)

        lbl = Label(self, text="Editor del Programa")
        lbl.grid(row=1,column=0, sticky=W, pady=4, padx=5)
        area5 = ScrolledText(self,height=20,width=60)
        area5.grid(row=2, column=0, columnspan=1, rowspan=10, padx=1, sticky=E+W+S+N)

        # Global variables
        archivoactual = "hello.s"
        archivoacompilar = "hello.s"
        archivotemp = "/tmp/archivotemp"+PUERTOyPS+".txt"
        # ip_mips = "10.0.15.232"
        ip_mips = "10.0.15.50"
        # ip_mips = "192.168.0.71"

        # When opening a file we want a clean work area
        def limpiar_areas():
            area4.delete('1.0',END)
            area3.delete('1.0',END)
            area2.delete('1.0',END)
            area1.delete('1.0',END)

        def abrir_en_editor(archivo):
            # Load `archivo` into the editor pane and remember it as current.
            fd = open(archivo)
            contents = fd.read()
            area5.delete('1.0',END)
            area5.insert('1.0',contents)
            fd.close()
            self.archivoactual = archivo
            print self.archivoactual

        def open_command():
            FILEOPENOPTIONS = dict(defaultextension='*.s', filetypes=[('Archivo assembler','*.s'), ('Todos los archivos','*.*')])
            file = tkFileDialog.askopenfile(parent=root,mode='rb',title='Select a file', **FILEOPENOPTIONS)
            if file != None:
                limpiar_areas()
                abrir_en_editor(file.name)

        def guardar_archivo_a_compilar():
            # Dump the editor contents to the per-session /tmp source file
            # (and a copy in the working directory) for the helper scripts.
            self.archivoacompilar = "/tmp/archivo"+PUERTOyPS+".s"
            tub = Popen(['rm', self.archivoacompilar], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            tub = Popen(['touch', self.archivoacompilar], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            tmp = open(self.archivoacompilar, "w")
            if tmp != None:
                data = area5.get('1.0', END+'-1c')
                tmp.write(data)
                tmp.close()
            archivotmppwd = "archivo"+PUERTOyPS+".s"
            tub = Popen(['cp', self.archivoacompilar, archivotmppwd], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]

        def save_command():
            file = tkFileDialog.asksaveasfile(mode='w')
            if file != None:
                # slice off the last character from get, as an extra return is added
                data = area5.get('1.0', END+'-1c')
                file.write(data)
                file.close()
                self.archivoactual = file.name
                print self.archivoactual

        def exit_command():
            if tkMessageBox.askokcancel("Quit", "Do you really want to quit?"):
                root.destroy()

        def about_command():
            label = tkMessageBox.showinfo("Acerca de", "MIPSX - GUI for gdb multiarch\n\nEntorno de desarrollo en lenguaje assembler arquitectura MIPS\nEste programa ensabla, genera el programa ejecutable, y lo ejecuta en modo debug en una maquina MIPS real\n\nCopyright 2014 Rafael Ignacio Zurita\n\nFacultad de Informatica\nUniversidad Nacional del Comahue\n\nThis program is free software; you can redistribute it and/or modify it under the terms of the GPL v2")

        def dummy():
            print "I am a Dummy Command, I will be removed in the next step"

        def no_hacer_nada():
            print "nada por hacer"

        def archivo_sin_guardar():
            # Return True when the editor holds unsaved changes AND the user
            # refuses to discard them; False means it is safe to quit.
            data = area5.get('1.0', END+'-1c')
            fd = open(self.archivoactual)
            contents = fd.read()
            fd.close()
            if data == contents:
                return False
            res = tkMessageBox.askquestion("Confirmar", "Archivo sin guardar\nEsta seguro de finalizar el programa?", icon='warning')
            if res == 'yes':
                return False
            return True

        def salir():
            # Clean session files, stop the remote gdbserver, and quit.
            if archivo_sin_guardar():
                return
            tmp = "/tmp/archivo"+PUERTOyPS+".s"
            tmp2 = "archivo"+PUERTOyPS+".s"
            tmp3 = "/tmp/archivo"+PUERTOyPS+".s.elf"
            tmp4 = "/tmp/archivotemp"+PUERTOyPS+".txt"
            tub = Popen(['rm', tmp, tmp2, tmp3, tmp4], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            tmp2 = "/tmp/archivo"+PUERTOyPS+".s.o"
            ip_mips = "10.0.15.50"
            tub = Popen(['mipsx_finalizar_gdbserver.sh', ip_mips, PUERTOyPS, tmp, tmp2, tmp3, tmp4], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            streamdata = tub.communicate()[0]
            # ip_mips = "10.0.15.232"
            # ip_mips = "192.168.0.71"
            # killgdbserver = Popen(['sshpass', '-p', clave, 'ssh', '-o', 'StrictHostKeyChecking=no', '-l', 'root', ip_mips, comando], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            quit()

        # --- Menu bar wiring ---
        menu = Menu(root)
        root.config(menu=menu)
        filemenu = Menu(menu)
        menu.add_cascade(label="Archivo", menu=filemenu)
        filemenu.add_command(label="Nuevo", command=dummy)
        filemenu.add_command(label="Abrir...", command=open_command)
        filemenu.add_command(label="Guardar...", command=save_command)
        filemenu.add_separator()
        filemenu.add_command(label="Salir", command=salir)
        menu.add_command(label="Run", command=ejecutar)
        menu.add_command(label="Next", command=prox_instruccion)
        menu.add_command(label="Breakpoint", command=no_hacer_nada)
        menu.add_command(label="Compilar y Cargar", command=compilarycargar)
        menu.add_command(label="Compilar para SIE", command=compilarparasie)
        helpmenu = Menu(menu)
        menu.add_cascade(label="Ayuda", menu=helpmenu)
        helpmenu.add_command(label="Acerca de...", command=about_command)
        menu.add_command(label="Salir", command=salir)

        abrir_en_editor("hello.s")

        # So that closing the window also removes the temp files
        root.protocol("WM_DELETE_WINDOW", salir)


def main():
    root.mainloop()


if __name__ == '__main__':
    # `p` (the gdb child) and `root` are module globals used by Mipsx above.
    p = Popen(['gdb-multiarch'], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
    root = Tk()
    # To expand when the windows change size
    root.columnconfigure(0,weight=1)
    root.rowconfigure(0, weight=1)
    app = Mipsx(root)
    main()
The Canadian Credit Information Service (CCIS) provides furniture manufacturers with the best credit information on Canadian and American retailers’ payment patterns. The service is included in the Association’s membership fee. Should you require further information, please contact Yves Daigle, the Business Development and Programs Director, at [email protected] or at 514 730-4555.
import numpy as np
from sklearn import linear_model
from sknn.mlp import Regressor, Layer
import learning.mdp
from projectfiles.random_deck_generator import RandomDeckGenerator
from hearthbreaker.engine import Deck, card_lookup, Game
from hearthbreaker.agents import *
import projectfiles.util
from projectfiles.feature_extract import *


class HearthstoneMDP(learning.mdp.MDP):
    """MDP over hearthbreaker Game states.

    States are Game objects; an "action" is represented directly as the
    successor Game state proposed by the strategy object.
    """

    def __init__(self, strategy):
        self.strategy = strategy

    def start_state(self):
        # Both players get copies of the same randomly generated deck;
        # pre_game() runs hearthbreaker's pre-game setup.
        generator = RandomDeckGenerator()
        deck1 = generator.generate()
        deck2 = deck1.copy()

        game = Game([deck1, deck2], [RandomAgent(), RandomAgent()])
        game.pre_game()
        return game

    def is_end_state(self, state):
        return state.game_ended

    def getActions(self, state):
        # An "action" is actually parametrized directly by the state corresponding
        # to the current player's actions. The strategy object enumerates a list of
        # possible actions
        return self.strategy.getActions(state.copy())

    def getRandomAction(self, state):
        return self.strategy.getRandomAction(state.copy())

    def getBestAction(self, state, heuristic):
        return self.strategy.getBestAction(state.copy(), heuristic)

    def getSuccAndReward(self, state, next_action):
        # Reward is nonzero only on terminal transitions. Winner is compared
        # by player name because next_state is a fresh copy.
        next_state = next_action.copy()
        reward = 0.0
        if next_state.game_ended:
            if next_state.winner is None:
                reward = self.getReward("tie")
            elif state.current_player.name == next_state.winner.name:
                reward = self.getReward("win")
            else:
                reward = self.getReward("lose")
        return (next_state, reward)

    def getReward(self, event):
        return {"win" : 10, "lose" : -8, "tie" : 3}[event]

    def getDiscount(self):
        return 0.8


class Model:
    """Base class for afterstate value models: scores a (state, next_state)
    transition. Calling the model passes the candidate successor state as
    the "action"."""

    def __init__(self):
        pass

    def __call__(self, state, action):
        # the action is a state!
        next_state = action
        return self.eval(state, next_state)

    def eval(self, state, next_state):
        raise NotImplementedError("")

    def update(self, state, next_state, delta):
        # Shared debug trace; subclasses perform the actual weight update
        # after delegating here. ("enemy_next_heatlh" typo is in the
        # original output label and is kept as-is.)
        assert(state.current_player.name == next_state.current_player.name)
        print("curplay", state.current_player.name, \
            "health", state.current_player.hero.health, \
            "my_next_health", next_state.current_player.hero.health, \
            "enemy_health", state.current_player.opponent.hero.health, \
            "enemy_next_heatlh", next_state.current_player.opponent.hero.health, \
            "delta", delta)


class LinearModel(Model):
    # Linear value model: weights come from initial_weights or from the
    # feature extractor's own initializer.
    def __init__(self, feature_extractor, initial_weights = None):
        self.feature_extractor = feature_extractor
        self.weights = initial_weights if initial_weights is not None else feature_extractor.get_initial()


class StatePairLinearModel(LinearModel):
    # Takes a feature extractor that expects TWO state arguments
    def __init__(self, feature_extractor, initial_weights = None):
        super().__init__(feature_extractor, initial_weights)
        assert(isinstance(self.feature_extractor, StatePairFeatureExtractor))

    def eval(self, state, next_state):
        assert(state.current_player.name == next_state.current_player.name)
        if isinstance(self.feature_extractor, StateFeatureExtractor):
            # Single-state extractor: score the feature difference.
            return np.dot(self.weights, self.feature_extractor(next_state) - self.feature_extractor(state))
        else:
            assert(isinstance(self.feature_extractor, StatePairFeatureExtractor))
            return np.dot(self.weights, self.feature_extractor(state, next_state))

    def update(self, state, next_state, delta):
        super().update(state, next_state, delta)
        phi = self.feature_extractor(state, next_state)
        self.weights += delta * phi
        # self.feature_extractor.debug(self.weights)


class FinalStateLinearModel(LinearModel):
    # Takes a feature extractor that expects ONE state argument
    def __init__(self, feature_extractor, initial_weights = None):
        super().__init__(feature_extractor, initial_weights)
        assert(isinstance(self.feature_extractor, StateFeatureExtractor))

    def eval(self, state, next_state):
        # if next_state.current_player_win(): return 1e9
        # if next_state.current_player_lose(): return -1e9
        return np.dot(self.weights, self.feature_extractor(next_state))

    def train(self, dataset):
        # Batch fit: dataset is a pre-built (X, y) pair; the learned
        # regression coefficients replace the online weights.
        clf = linear_model.LinearRegression()
        X, y = dataset
        # X = [self.feature_extractor(state) for state, value in dataset]
        # y = [value for state, value in dataset]
        clf.fit(X, y)
        self.weights = clf.coef_
        # print(self.weights)

    def update(self, state, next_state, delta):
        super().update(state, next_state, delta)
        phi = self.feature_extractor(next_state)
        self.weights += delta * phi
        # self.feature_extractor.debug(weights)


class StateDifferenceLinearModel(LinearModel):
    # Linear model over the feature difference phi(next_state) - phi(state).
    def __init__(self, feature_extractor, initial_weights = None):
        super().__init__(feature_extractor, initial_weights)
        assert(isinstance(self.feature_extractor, StateFeatureExtractor))

    def eval(self, state, next_state):
        return np.dot(self.weights, self.feature_extractor(next_state) - self.feature_extractor(state))

    def update(self, state, next_state, delta):
        super().update(state, next_state, delta)
        phi = self.feature_extractor(state)
        next_phi = self.feature_extractor(next_state)
        # self.feature_extractor.debug(next_phi - phi)
        self.weights += delta * (next_phi - phi)
        self.feature_extractor.debug(self.weights)


class BasicHeuristicModel(Model):
    # Hand-written heuristic: board presence + hand size + hero durability,
    # evaluated on the successor state only.
    def __init__(self):
        super().__init__()

    def eval(self, state_1, state_2):
        def score(player):
            score = 0
            for i in player.minions:
                score += i.calculate_attack()
                score += i.health
            score += len(player.hand) * 2
            score += player.hero.health + player.hero.armor
            return score
        return score(state_2.current_player) - score(state_2.other_player)


class FinalStateNeuralModel(Model):
    # Neural value model over features of the successor state (sknn MLP).
    def __init__(self, feature_extractor, nn = None):
        self.feature_extractor = feature_extractor
        self.nn = nn if nn is not None else self.get_initial()
        # self.train()

    def get_initial(self):
        return Regressor(
            layers=[
                Layer("Rectifier", units=100),
                # Layer("Sigmoid", units = 200),
                # Layer("Tanh", units = 100)
                Layer("Linear")],
            learning_rate=0.001,
            n_iter=10,
            f_stable = 0.1)

    def eval(self, state, next_state):
        # Terminal states get hard-coded extreme values.
        if next_state.current_player_win(): return 1e9
        if next_state.current_player_lose(): return -1e9
        vec = np.array(self.feature_extractor(next_state))
        return self.nn.predict(np.ndarray(shape = (1, len(vec)), buffer = vec))
        # return np.dot(self.weights, self.feature_extractor(next_state))

    def train(self, dataset):
        X, y = dataset
        # X = np.array([self.feature_extractor(state) for state, value in dataset])
        # y = [value for state, value in dataset]
        self.nn.fit(X, y)

    # def train(self):
    #     Data = open("data.txt", "r")
    #     Tmp = Data.read().splitlines()
    #     training_set = []
    #     for i in Tmp:
    #         c = i.split(" ")
    #         for j in range(0, len(c)):
    #             c[j] = float(c[j])
    #         training_set.append(c)
    #     X = []
    #     y = []
    #     for data_point in training_set:
    #         X.append(data_point[0:-1])
    #         y.append(data_point[-1])
    #     for i in X:
    #         if (len(i) != 38):
    #             print(i)
    #     X = np.ndarray(shape = (len(y), len(X[0])), buffer = np.array(X))
    #     y = np.ndarray(shape = (len(y), 1), buffer = np.array(y))
    #     self.nn.fit(X, y)
    #     print("Learning from data size: " + str(len(y)))
    #     Data.close()


class DeepNeuralModel(FinalStateNeuralModel):
    # Same as FinalStateNeuralModel but with a deeper network.
    def get_initial(self):
        return Regressor(
            layers=[
                Layer("Rectifier", units=100),
                Layer("Sigmoid", units = 200),
                Layer("Tanh", units = 100),
                Layer("Linear")],
            learning_rate=0.001,
            n_iter=10,
            f_stable = 0.1)
Couleur De Peinture Pour Wc #12 - Chat Coloriage 224 Gommettes En Couleur was published on May 31, 2018 at 5:28 pm. If you would like to use this image as your desktop background, you may click the download link below, or you can just right click on the picture above, then select "Save Image As" to download the Couleur De Peinture Pour Wc #12 - Chat Coloriage 224 Gommettes En Couleur, or choose the "Set Desktop Background As" option if your internet browser has that capability. If you could not find the most outstanding picture you are looking for, go to the "Search Column" at top right or browse other picture wallpapers as you like. This photo has an image size of 45kB and a resolution of 800x565. Couleur De Peinture Pour Wc #12 - Chat Coloriage 224 Gommettes En Couleur has been seen by 31 users and it is a picture from Couleur De Peinture Pour Wc.
# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-12 02:59 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='About', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.CharField(max_length=1000)), ], ), migrations.CreateModel( name='Education', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('institution', models.CharField(max_length=100)), ('dates', models.CharField(max_length=100)), ('degree', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Expericence', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('dates', models.CharField(max_length=100)), ('position', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Portfolio', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.CharField(max_length=1000)), ('site', models.CharField(max_length=500)), ('repo', models.CharField(max_length=500)), ], ), ]
Ready-to-eat, single-serving soups now benefit from greatly extended shelf life thanks to ITC and Verstraete’s innovative Naturcrem IML packaging, using an in mould label (IML) with oxygen barrier. Traditionally these products can start to go bad once the soup comes into contact with oxygen. The new packaging radically reduces that risk, says the company. Injection moulder ITC Packaging and Verstraete IML were approached by Spanish food producer Dulcesol to provide an innovative solution to its single-serving soup range. The challenge lies in getting the industrially produced soup to consumers in an optimum condition, so as natural, fresh, and full flavoured as possible, it claimed. Extended storage time and an aesthetically distinctive pack were the criteria. The labels also had to withstand pasteurization. Verstraete IML utilized a combination of specific inks, a special lacquer, and novel pasteurization-resistant oxygen barrier film.
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""Day 10 of AdventOfCode.com: What's common in between digits and nuclear decay?"""
import os
from itertools import groupby


def look_and_say(numbers):
    """Perform one look-and-say iteration.

    Each maximal run of a repeated character is replaced by its length
    followed by the character, e.g. '111' -> '31' and '1211' -> '111221'.

    Fixes the original hand-rolled loop, which returned '0' for an empty
    input (the trailing flush appended count=0 with an empty digit);
    an empty string now maps to an empty string.

    :param numbers: string of digits (any characters work; runs are
        compared by equality)
    :return: the look-and-say transform of *numbers*
    """
    # groupby yields (char, run) pairs for each maximal run — exactly the
    # run-length encoding look-and-say needs.
    return "".join(str(sum(1 for _ in run)) + char
                   for char, run in groupby(numbers))


def _main():
    # Guarded entry point so importing this module no longer reads the
    # puzzle input as a side effect; command-line behavior is unchanged.
    with open(os.path.dirname(os.path.realpath('__file__')) + "/input/day10.txt", "r") as datafile:
        data = datafile.read().replace('\n', '')
    print(0, len(data))
    for i in range(0, 79):
        data = look_and_say(data)
        print(i + 1, len(data))


if __name__ == "__main__":
    _main()
High-performance mini-tower case for gamers and PC enthusiasts. Solidly constructed chassis interior (0.5mm thickness). Front-panel I/O: Power / Reset / MIC & HD Audio / USB2.0*2 / USB3.0*1. Supports high-end CPU coolers with a maximum height of up to 150mm.
# $Id: __init__.py 1945 2006-03-05 01:06:37Z cpbotha $

# importing this module shouldn't directly cause other large imports
# do large imports in the init() hook so that you can call back to the
# ModuleManager progress handler methods.

"""matplotlib_kit package driver file.

Inserts the following modules in sys.modules: matplotlib, pylab.

@author: Charl P. Botha <http://cpbotha.net/>
"""

import os
import re
import sys
import types

# you have to define this
# Filled in by init() from matplotlib.__version__; empty until then.
VERSION = ''


def init(theModuleManager, pre_import=True):
    """Import matplotlib + pylab, select the WXAgg backend, and report
    progress via theModuleManager.

    theModuleManager: presumably a DeVIDE ModuleManager -- this function
        only relies on its get_appdir() and setProgress() methods.
    pre_import: NOTE(review): currently unused -- confirm whether callers
        depend on it before removing.
    """

    if hasattr(sys, 'frozen') and sys.frozen:
        # matplotlib supports py2exe by checking for matplotlibdata in the appdir
        # but this is only done on windows (and therefore works for our windows
        # installer builds). On non-windows, we have to stick it in the env
        # to make sure that MPL finds its datadir (only if we're frozen)
        mpldir = os.path.join(theModuleManager.get_appdir(), 'matplotlibdata')
        os.environ['MATPLOTLIBDATA'] = mpldir

    # import the main module itself
    # this doesn't import numerix yet...
    # bound via "global" so the package namespace exposes the module.
    global matplotlib
    import matplotlib

    # use WX + Agg backend (slower, but nicer that WX)
    # must be called before pylab is imported below.
    matplotlib.use('WXAgg')
    # interactive mode: user can use pylab commands from any introspection
    # interface, changes will be made immediately and matplotlib cooperates
    # nicely with main WX event loop
    matplotlib.interactive(True)

    # with matplotlib 1.0.1 we can't do this anymore.
    # makes sure we use the numpy backend
    #from matplotlib import rcParams
    #rcParams['numerix'] = 'numpy'

    theModuleManager.setProgress(25, 'Initialising matplotlib_kit: config')

    # @PATCH:
    # this is for the combination numpy 1.0.4 and matplotlib 0.91.2
    # matplotlib/numerix/ma/__init__.py:
    # . normal installation fails on "from numpy.ma import *", so "from
    #   numpy.core.ma import *" is done, thus bringing in e.g. getmask
    # . pyinstaller binaries for some or other reason succeed on
    #   "from numpy.ma import *" (no exception raised), therefore do
    #   not do "from numpy.core.ma import *", and therefore things like
    #   getmask are not imported.
    # solution:
    # we make sure that "from numpy.ma import *" actually brings in
    # numpy.core.ma by importing that and associating the module
    # binding to the global numpy.ma.
    #if hasattr(sys, 'frozen') and sys.frozen:
    #    import numpy.core.ma
    #    sys.modules['numpy.ma'] = sys.modules['numpy.core.ma']

    # import the pylab interface, make sure it's available from this namespace
    global pylab
    import pylab

    theModuleManager.setProgress(90, 'Initialising matplotlib_kit: pylab')

    # build up VERSION
    global VERSION
    VERSION = '%s' % (matplotlib.__version__,)

    theModuleManager.setProgress(100, 'Initialising matplotlib_kit: complete')
My laboratory utilizes high-resolution nuclear magnetic resonance (NMR) spectroscopy and other biophysical and biochemical methods to investigate the structure, dynamics, and folding mechanisms of proteins and to map their functional interactions. NMR is unique as a method for determining three-dimensional structures of proteins and protein complexes in solution and also providing novel information about the time-dependent structural fluctuations that are essential for protein function. Intrinsically disordered proteins and cellular signaling. Intrinsically disordered proteins are highly abundant in eukaryotes and play a central role in cellular regulatory processes and signaling pathways. We are using a multidisciplinary approach, including a broad range of biochemical and biophysical methods, NMR, and single molecule fluorescence (in collaboration with Ashok Deniz), to elucidate the structure of the general transcriptional coactivators CBP and p300 and characterize their functional interactions with key cellular and viral targets. We are implementing novel NMR methods, intein labeling technologies, and single molecule FRET methods to characterize the structure of disordered proteins and their complexes and to elucidate the mechanism by which disordered proteins fold upon binding to their targets. Mechanisms of nucleic acid recognition by zinc finger proteins. We are using NMR and X-ray crystallography (in collaboration with Ian Wilson's laboratory) to elucidate the structural basis by which the protein Muscleblind recognizes pathogenic RNA sequences, and by which the protein Kaiso binds both regulatory and methylated DNA motifs. Mechanisms of protein folding and misfolding. NMR is uniquely suited for studies of protein folding and misfolding pathways, providing detailed insights into the structure and dynamics of unfolded states and partially folded intermediates. 
We are applying NMR relaxation dispersion methods to elucidate the molecular mechanism by which the protein transthyretin spontaneously unfolds and aggregates, leading to amyloid disease. Protein dynamics and "invisible" excited states. We are applying NMR relaxation dispersion methods to characterize the dynamics of the enzyme dihydrofolate reductase and to determine the structure of weakly populated excited states that play a functional role in catalysis. These studies, which are also being applied to study the dynamics of a stress-activated protein kinase, are providing unprecedented insights into the intrinsic dynamics of enzymes and their role in catalysis. The dynamic energy landscape of dihydrofolate reductase catalysis. D.D. Boehr, D. McElheny, H.J. Dyson and P.E. Wright (2006), Science 313, 1638-1642. Mechanism of coupled folding and binding of an intrinsically disordered protein. K. Sugase, H.J. Dyson and P.E. Wright (2007), Nature 447, 1021-1027. Measurement of protein unfolding/refolding kinetics and structural characterization of hidden intermediates by NMR relaxation dispersion. D.W. Meinhold, P.E. Wright (2011), Proc. Natl Acad. Sci., 108, 9078-9083. Molecular basis for recognition of methylated and specific DNA sequences by the zinc finger protein Kaiso. B.A. Buck-Koehntop, R.L. Stanfield, D.C. Ekiert, M.A. Martinez-Yamout, H.J. Dyson, I.A. Wilson and P.E. Wright (2012), Proc. Natl Acad. Sci., 109, 15229-15234. Modulation of allostery by protein intrinsic disorder. A.C.M. Ferreon, J.C. Ferreon, P.E. Wright, A.A. Deniz (2013), Nature, 498, 390-394.
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-17 02:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Gallery', fields=[ ('galleryID', models.AutoField(primary_key=True, serialize=False)), ('gallery', models.CharField(max_length=255)), ('galleryDescription', models.CharField(max_length=255)), ('create_dt', models.DateField()), ('edit_dt', models.DateField()), ], ), migrations.CreateModel( name='GalleryImage', fields=[ ('galleryImageID', models.AutoField(primary_key=True, serialize=False)), ('isCover', models.BooleanField(default=False)), ('galleryID_fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='imageManager.Gallery')), ], ), migrations.CreateModel( name='Image', fields=[ ('imageID', models.AutoField(primary_key=True, serialize=False)), ('image', models.CharField(max_length=255)), ('imageDescription', models.CharField(max_length=255)), ('create_dt', models.DateField()), ('edit_dt', models.DateField()), ], ), migrations.AddField( model_name='galleryimage', name='imageID_fk', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='imageManager.Image'), ), ]
Got the domain name and my old email address back! The Chinch Bugs have a new CD! Song snippets from "Broke" are here! The release party is January 1, 2012 at the Cause on the corner of Lyndale and Lake in Minneapolis. Jeff Carpenter's birthday is December 12. You enjoyed Nigel Tufnel Day, 11/11/11? Jeff Carpenter's birthday will go to twelve. The big news for 12/12/11 is that Jeff Carpenter's birthday will be celebrated at Mainstreet Bar in Hopkins. Somewhat Fierce and Auto Body Experience are on the bill. Please RSVP by 12/5/2011: event page. Here is the Mainstreet Bar calendar. Chinch Bugs are planning a new Bert Records release on 1/1/2012. So, it is time to repackage a Bert Records web site. This page represents the humble beginnings phase of the new web site. A start.
# -*- coding: utf-8 -*-

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import jsonpatch

from anvil import utils


class Origin(dict):
    """A dict of origin settings that remembers where it came from.

    Attributes:
        filename: path of the YAML file the settings were loaded from.
        patched: whether a JSON patch was applied after loading.
    """

    def __init__(self, filename, patched=False):
        super(Origin, self).__init__()
        self.filename = filename
        self.patched = patched


def load(filename, patch_file=None):
    """Load an origins YAML file, optionally applying a JSON-patch file.

    :param filename: path to the origins YAML file
    :param patch_file: optional path to a YAML/JSON file containing an
        RFC 6902 patch document (a list of operations)
    :returns: an :class:`Origin` populated with the (possibly patched)
        settings
    """
    base = utils.load_yaml(filename)
    patched = False
    if patch_file:
        # BUGFIX: jsonpatch.JsonPatch() expects the parsed patch document
        # (a list of RFC 6902 operations), not a file path, so the patch
        # file must be loaded the same way the base file is.
        patch = jsonpatch.JsonPatch(utils.load_yaml(patch_file))
        patch.apply(base, in_place=True)
        patched = True
    origin = Origin(filename, patched=patched)
    origin.update(base)
    return origin
New in box - Solis Tek SE/DE Digital 1000W 240V STK 1241 Ballasts (Set of 2). Brand new products. Buyer must pick up items from the Narragansett Police Department. Items will not be shipped. This is a third auction of ballasts (set/2).
# -*- coding: utf-8 -*-
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:

from typing import Tuple, Any, Union, Optional

from crash.infra.callback import ObjfileEventCallback
from crash.infra.callback import Callback
from crash.exceptions import DelayedAttributeError

import gdb


class NamedCallback(ObjfileEventCallback):
    """
    A base class for Callbacks with names

    This cannot be used directly since it does not provide a method for
    :meth:`.ObjfileEventCallback.callback`.

    Args:
        name: The name of the symbol or type to be resolved.
        callback: A function to call with the result of the derived class's
            :meth:`.ObjfileEventCallback.check_ready` method.
        attrname (optional): A name safe for use as an attribute name.
            If unspecified, defaults to the same string as name.

    Attributes:
        name (:obj:`str`): The name of the symbol or type being resolved.
        attrname (:obj:`str`): The name of symbol or type being resolved
            translated for use as an attribute name.
    """
    def __init__(self, name: str, callback: Callback,
                 attrname: Optional[str] = None) -> None:
        super().__init__()

        self.name = name

        # Fall back to the raw name when no attribute-safe name is given.
        self.attrname = self.name
        if attrname is not None:
            self.attrname = attrname

        self._callback = callback

    # This is silly but it avoids pylint abstract-method warnings
    def check_ready(self) -> Any:
        """
        The method that derived classes implement for detecting when the
        conditions required to call the callback have been met.

        Returns:
            :obj:`object`: This method can return an arbitrary object.
                It will be passed untouched to :meth:`callback` if the
                result is anything other than :obj:`None` or :obj:`False`.
        """
        raise NotImplementedError("check_ready must be implemented by derived class.")

    def callback(self, result: Any) -> Union[None, bool]:
        """
        The callback for handling the successful result of
        :meth:`check_ready`.

        It indirectly calls the callback specified in the constructor.

        Args:
            result: The result returned from :meth:`check_ready`

        Returns:
            :obj:`None` or :obj:`bool`: If :obj:`None` or :obj:`True`, the
                callback succeeded and will be completed and removed.
                Otherwise, the callback will stay connected for future
                completion.
        """
        return self._callback(result)


class MinimalSymbolCallback(NamedCallback):
    """
    A callback that executes when the named minimal symbol is discovered
    in the objfile and returns the :obj:`gdb.MinSymbol`.

    The callback must accept a :obj:`gdb.MinSymbol` and return
    :obj:`bool` or :obj:`None`.

    Args:
        name: The name of the minimal symbol to discover
        callback: The callback to execute when the minimal symbol is
            discovered
        symbol_file (optional): Name of the symbol file to use
    """
    def __init__(self, name: str, callback: Callback,
                 symbol_file: Optional[str] = None) -> None:
        super().__init__(name, callback)

        self.symbol_file = symbol_file
        self.connect_callback()

    def check_ready(self) -> Optional[gdb.MinSymbol]:
        """
        Returns the result of looking up the minimal symbol when a new
        object file is loaded.

        Returns:
            :obj:`gdb.MinSymbol`: The requested minimal symbol
        """
        return gdb.lookup_minimal_symbol(self.name, self.symbol_file, None)

    def __str__(self) -> str:
        return ("<{}({}, {}, {})>"
                .format(self.__class__.__name__, self.name,
                        self.symbol_file, self.callback))


class SymbolCallback(NamedCallback):
    """
    A callback that executes when the named symbol is discovered in the
    objfile and returns the :obj:`gdb.Symbol`.

    The callback must accept a :obj:`gdb.Symbol` and return :obj:`bool`
    or :obj:`None`.

    Args:
        name: The name of the symbol to discover
        callback: The callback to execute when the symbol is discovered
        domain (optional): The domain to search for the symbol.  The value
            is assumed to be one of the value associated with
            :obj:`gdb.Symbol` constant, i.e. SYMBOL_*_DOMAIN.
    """
    def __init__(self, name: str, callback: Callback,
                 domain: int = gdb.SYMBOL_VAR_DOMAIN) -> None:
        super().__init__(name, callback)

        self.domain = domain
        self.connect_callback()

    def check_ready(self) -> Optional[gdb.Symbol]:
        """
        Returns the result of looking up the symbol when a new object
        file is loaded.

        Returns:
            :obj:`gdb.Symbol`: The requested symbol
        """
        # lookup_symbol returns (symbol, is_field_of_this); only the
        # symbol itself is of interest here.
        return gdb.lookup_symbol(self.name, None, self.domain)[0]

    def __str__(self) -> str:
        return ("<{}({}, {})>"
                .format(self.__class__.__name__, self.name, self.domain))


class SymvalCallback(SymbolCallback):
    """
    A callback that executes when the named symbol is discovered in the
    objfile and returns the :obj:`gdb.Value` associated with the
    :obj:`gdb.Symbol`.

    The callback must accept a :obj:`gdb.Value` and return :obj:`bool`
    or :obj:`None`.

    See :obj:`SymbolCallback` for arguments.
    """
    def check_ready(self) -> Optional[gdb.Value]: # type: ignore
        """
        After successfully looking up the :obj:`gdb.Symbol`, returns
        the :obj:`gdb.Value` associated with it.

        Returns:
            :obj:`gdb.Value`: The value associated with the requested symbol
        """
        sym = super().check_ready()
        if sym is not None:
            try:
                return sym.value()
            except gdb.MemoryError:
                # Value not readable yet; stay connected and retry later.
                pass
        return None


class TypeCallback(NamedCallback):
    """
    A callback that executes when the named type is discovered in the
    objfile and returns the :obj:`gdb.Type` associated with it.

    The callback must accept a :obj:`gdb.Type` and return :obj:`bool`
    or :obj:`None`.

    Args:
        name: The name of the type to discover
        callback: The callback to execute when the type is discovered
        block (optional): The :obj:`gdb.Block` to search for the symbol
    """
    def __init__(self, name: str, callback: Callback,
                 block: Optional[gdb.Block] = None) -> None:
        (name, attrname, self.pointer) = self.resolve_type(name)

        super().__init__(name, callback, attrname)

        self.block = block
        self.connect_callback()

    @staticmethod
    def resolve_type(name: str) -> Tuple[str, str, bool]:
        """
        This function takes a C type name and translates it into a
        3-tuple that contains the basic type name, the type name
        translated to a form suitable for an attribute name, and whether
        the type corresponds to a pointer.

        The basic type name has all leading and trailing whitespace
        stripped, and any ``*`` removed.

        The attribute type name takes that base, removes the leading
        ``struct`` for structure types, removes any leading or trailing
        whitespace, replaces internal spaces with underscores, and appends
        a ``_type`` or ``_p_type`` suffix, depending on whether the type
        is a pointer type.

        Some examples:

        - ``struct foo`` → ``foo_type``
        - ``struct foo *`` → ``foo_p_type``
        - ``unsigned long`` → ``unsigned_long_type``

        *Notes*:

        - Multiple levels of pointers are not handled properly.  In
          practice this means that ``struct foo *`` and ``struct foo **``
          can't be used simultaneously.  This is typically not a problem.
        - Unions are not handled as a special case as structs are.  A
          union type would use an attribute name of ``union_foo_type``.

        Returns:
            (:obj:`str`, :obj:`str`, :obj:`bool`): A 3-tuple consisting of
            the basic type name, the name formatted for use as an
            attribute name, and whether the type is a pointer type.
        """
        pointer = False
        name = name.strip()
        # endswith() rather than name[-1] keeps an empty string from
        # raising IndexError.
        if name.endswith('*'):
            pointer = True
            name = name[:-1].strip()

        attrname = name
        if name.startswith('struct '):
            attrname = name[7:].strip()

        if pointer:
            attrname += '_p_type'
        else:
            attrname += '_type'

        attrname = attrname.replace(' ', '_')

        return (name, attrname, pointer)

    def check_ready(self) -> Optional[gdb.Type]:
        """
        Returns the result of looking up the type when a new object
        file is loaded, or :obj:`None` if it is not yet available.
        """
        try:
            return gdb.lookup_type(self.name, self.block)
        except gdb.error:
            return None

    def __str__(self) -> str:
        return ("<{}({}, {})>"
                .format(self.__class__.__name__, self.name, self.block))


class DelayedValue:
    """
    A generic class for making class attributes available that describe
    to-be-loaded symbols, minimal symbols, and types.

    Args:
        name: The name of the value to be resolved.
        attrname (optional): The name to use as an attribute name;
            defaults to ``name``.
    """
    def __init__(self, name: str, attrname: Optional[str] = None) -> None:
        if name is None or not isinstance(name, str):
            raise ValueError("Name must be a valid string")
        self.name = name
        if attrname is None:
            self.attrname = name
        else:
            self.attrname = attrname
        self.value: Any = None

    def get(self) -> Any:
        """
        Return the resolved value.

        Raises:
            :obj:`DelayedAttributeError`: the value has not been
                resolved yet.
        """
        if self.value is None:
            raise DelayedAttributeError(self.name)
        return self.value

    def callback(self, value: Any) -> None:
        # First resolution wins; subsequent callbacks are ignored.
        if self.value is not None:
            return
        self.value = value


class DelayedMinimalSymbol(DelayedValue):
    """
    A DelayedValue that handles minimal symbols.

    Args:
        name: The name of the minimal symbol
    """
    def __init__(self, name: str) -> None:
        super().__init__(name)
        self.cb = MinimalSymbolCallback(name, self.callback)

    def __str__(self) -> str:
        return "{} attached with {}".format(self.__class__, str(self.cb))


class DelayedSymbol(DelayedValue):
    """
    A DelayedValue that handles symbols.

    Args:
        name: The name of the symbol
    """
    def __init__(self, name: str) -> None:
        super().__init__(name)
        self.cb = SymbolCallback(name, self.callback)

    def __str__(self) -> str:
        return "{} attached with {}".format(self.__class__, str(self.cb))


class DelayedType(DelayedValue):
    """
    A DelayedValue for types.

    Args:
        name: The name of the type.
    """
    def __init__(self, name: str) -> None:
        (name, attrname, self.pointer) = TypeCallback.resolve_type(name)
        super().__init__(name, attrname)
        self.cb = TypeCallback(name, self.callback)

    def __str__(self) -> str:
        # BUGFIX: report the attached TypeCallback (self.cb) like the other
        # Delayed* classes do; self.callback is just this bound method.
        return "{} attached with {}".format(self.__class__, str(self.cb))

    def callback(self, value: gdb.Type) -> None:
        # Pointer-ness was stripped from the name in __init__; restore it
        # on the resolved type before publishing the value.
        if self.pointer:
            value = value.pointer()
        self.value = value


class DelayedSymval(DelayedSymbol):
    """
    A :obj:`DelayedSymbol` that returns the :obj:`gdb.Value` associated
    with the symbol.

    Args:
        name: The name of the symbol.
    """
    def callback(self, value: gdb.Symbol) -> None:
        symval = value.value()
        # Functions resolve to their address so the value is callable/usable.
        if symval.type.code == gdb.TYPE_CODE_FUNC:
            symval = symval.address
        self.value = symval

    def __str__(self) -> str:
        return "{} attached with {}".format(self.__class__, str(self.cb))


class DelayedMinimalSymval(DelayedMinimalSymbol):
    """
    A DelayedMinimalSymbol that returns the address of the minimal
    symbol as an :obj:`int`.

    Args:
        name: The name of the minimal symbol.
    """
    def callback(self, value: gdb.MinSymbol) -> None:
        self.value = int(value.value().address)

    def __str__(self) -> str:
        return "{} attached with {}".format(self.__class__, str(self.cb))
Our team of agronomy specialists in North Carolina is passionate about helping customers with fertilization needs and improving their crop yields. Whether you are looking for a crop advisor for grain crops – wheat, sorghum, corn, and soybeans – or beef cattle cow/calf operation management recommendations, our North Carolina team has the education and experience to answer your questions and help you make the right decisions to maximize yields and profits. Agronomy Sales Manager - Prior to joining Southern States, Josh worked on row crop farms for more than 10 years. He now owns and operates a row crop and cattle farm. He is a member of the North Carolina Farm Bureau Young Farmers and Ranchers. Agronomy Operations Manager – Greg has been with Southern States for over 27 years. He enjoys learning about the ever-evolving ag industry and its new products and technologies. Territory Sales Manager – Kevin has worked for Southern States for over 30 years, specializing in crops, seed, fertilizer and chemicals. He grew up on a small family farm with cattle, hogs, row crops and tobacco. Kevin attended North Carolina State University and earned his bachelor’s degree in ag systems technology. Agronomy Sales Manager - Shannon holds a master’s degree in agricultural extension education and a bachelor’s degree in agricultural business management from North Carolina State University. He grew up on a beef cattle cow/calf operation and specializes in fertility and pest management. Prior to joining Southern States in 2011, Shannon was an Agriculture Extension Agent with the North Carolina Cooperative Extension. Agronomy Sales Manager – Russell grew up on a family farm in Mt. Olive and currently farms a small acreage of corn and soybeans. He has worked in the farm industry for over 23 years, having worked for Southern States for over 18 years and Nutrien Ag Solutions for 5 years.
Russell is very passionate about preserving and protecting farm land from development and believes it's critical that we protect the land for future production of food and fiber. Agronomy Sales Manager - Gerald has been with Southern States for over 13 years. He grew up on a tobacco, corn and soybean farm in North Carolina and specializes in fertilizer, seed and chemicals. He is passionate about helping his customers with fertilization needs and improving their crop yields. Agronomy Operations Manager – Brooks grew up on a 1,200-acre row crop farm that grew corn, wheat and soybeans. He farmed with his family for 12 years after college. Before joining Southern States in 2015, he worked in grain procurement and handling and milling operations. Brooks is passionate about helping customers establish and maintain sustainable farming practices. Agronomy Sales Manager – Kevin has been with Southern States for over 22 years and is a member of the North Carolina Farm Bureau. Prior to joining Southern States, he worked as an Agricultural Extension Agent for 12 years. Kevin grew up on a small beef cattle farm and graduated from North Carolina State University with a degree in agriculture education. He enjoys working with soil and tissue tests to develop fertility programs, and helping solve any of his customers’ problems. Agronomy Sales Manager - A member of the Southern States team for over 24 years, David owns and operates a 3,600 acre farm, where he grows cotton and soybeans. He is a Certified Crop Advisor (CCA) and holds a degree from Pembroke State University. Agronomy Sales Manager – Ben has been with Southern States for over 5 years and specializes in corn, wheat, soybeans, cotton and tobacco. He holds a degree in ag business management from North Carolina State University and is a Certified Crop Adviser (CCA). 
Agronomy Sales Manager - Rachel has been in agriculture her whole life, growing up, she was involved in her grandparent’s row crop and hog farm and later went on to North Carolina State University to graduate with a major in agricultural business management and minor in animal science & economics. Before joining Southern States, Rachel was the agronomist for Smithfield Hog Production division’s farm sustainability project, Smithfield Agronomics, and a territory sales manager for Alltech. She is a Certified Crop Advisor and specializes in grain crops – wheat, sorghum, corn, and soybeans, with a focus on disease and deficiency identification. Agronomy Operations Manager – Mark has over 29 years of experience at Southern States helping his customers increase profit at the farm level. He grew up on a 3,000-acre row crop farm with 200 dairy and 100 beef cattle. Mark is CCA certified and has received a number of awards and accolades throughout his career, including the Operating Excellence Award, Top Performing Location, and Allied Industry award. He holds a bachelor’s degree from Wake Forest University and is the president of the Cleveland County Cattlemen’s Association and a member of North Carolina Cattlemen’s Association. He is also a member of North Carolina Commercial Raspberry and Blackberry Grower's Association (NCCRBGA), Cleveland County 4H and FFA Advisory Committees, NC Farm Bureau, and NC Farm Credit. Agronomy Sales Manager - Jim has been with Southern States serving farmers in North Carolina for over 22 years. A two-time recipient of the Top Territory Sales Manager of the Year, Kiser specializes in seed trait technology, crop protection products and specialty crops. He has a degree in Agriculture Business Management from North Carolina State University and is a member of the American Society of Agronomy. Agronomy Sales Manager – Aubrey grew up on a livestock farm operation and specializes in fertility and pest management in row crops and pasture/hay. 
He holds a degree in business administration from Catawba College and was involved in FFA in high school, serving on the local high school FFA alumni council. As an adolescent, Aubrey showed livestock and horses on the national level. Agronomy Sales Manager – Michael grew up on a corn, peanut, soybean and wheat farm and has been with Southern States for over 4 years. Agronomy Sales Manager – Charles was born and raised on a 300-acre tobacco farm and specializes in fertilizer and seed. He earned his degree from Clemson University, majoring in agricultural mechanization and business, with a minor in plant environmental science and row crop production. Agronomy Sales Manager – Brian was raised on a 1,700-acre row crop operation that farmed cotton, peanuts, wheat and soybeans. He’s worked for Southern States for over 21 years, specializing in row crops and chemicals. His goal is to help farmers get higher yields out of their crops. Agronomy Sales Manager – A member of the Southern States team for over 8 years, Alan specializes in peanuts, corn, soybeans and tobacco. Before joining Southern States he spent 18 years with Helena and 2 years with Brunswick Farm Supply. Alan grew up on a 250-acre row crop farm where they grew tobacco, corn and soybeans. He holds a bachelor’s degree in Agronomy from NC State University. Precision Ag Coordinator – Weston has worked for Southern States for over 13 years. Agronomy Sales Manager – Brian grew up on a 750-acre operation where he and his family farmed hay, row crops and raised beef cow calves. He has over 14 years of experience with Southern States and is a member of North Carolina Farm Bureau.
""" Django settings for eCIP project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '7@w69y(17k10@!*_bddkq+^cw11f7r7#h_%x5ug1czi#$b%p27' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [u'nickjstevens.pythonanywhere.com'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'app', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'eCIP.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'eCIP.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 
'ENGINE': 'django.db.backends.mysql', 'NAME': 'nickjstevens$django_eCIP', 'USER': 'nickjstevens', 'PASSWORD':'django_eCIP_password', 'HOST': 'nickjstevens.mysql.pythonanywhere-services.com', 'PORT': '', } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' # default static files settings for PythonAnywhere. # see https://help.pythonanywhere.com/pages/DjangoStaticFiles for more info MEDIA_ROOT = u'/home/nickjstevens/django_apps/eCIP/app/media' MEDIA_URL = '/media/' STATIC_ROOT = u'/home/nickjstevens/django_apps/eCIP/app/static' STATIC_URL = '/static/' REST_FRAMEWORK = { # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' ] }
According to the National Highway Traffic Safety Administration, speeding is a contributing factor in 31 percent of motor accidents in the United States. Speeding fatalities average 1,000 Americans every month, and cost $40 billion each year in damages. In spite of statistics proving that overspeeding is a factor in one-third of all fatal crashes, motorists still continue to endanger their lives and the lives of others by overspeeding or driving too fast for bad road conditions. Among the three most dangerous driving behaviors – drunk driving, speeding, and not wearing seatbelts – speeding is the one that gets the least attention and advocacy. Speeding is one of the negative driving behaviors that is not taken seriously, whether by drivers, police officers, or the law. Everyone drives over the limit, and judges report that those ticketed for speeding believe their reason for speeding is justified. So, is this the reason why people drive at dangerously fast speeds? Psychologists believe that speeding is an accepted habit in a society that rewards people for doing things at a fast pace. For some, speeding is one way of taking control of what they think is an acceptable level of risk. A lot of people also tend to overestimate their skills in driving and underestimate the risks that speeding presents. Speeding is a habitual behavior – although everyone believes that speeding is a dangerous behavior, most still overspeed. Because of the dangers of overspeeding, there are many effective strategies that help reduce crashes caused by overspeeding. Such preventive measures are automated speed enforcement, highly-visible enforcement blitzes, road engineering, and many others. Speeding is not only a problem on freeways, but also on local streets, thus resulting in many accidents and injuries. Injuries due to speeding can lead to expensive lawsuits, and personal injury attorneys know how to fight for their client’s rights.
If you want to avoid lengthy and expensive lawsuits, there are ways that can make your driving safer: give yourself lots of time to get to your destination, make sure to drive based on the weather conditions, and use your common sense when driving. Wow. What a crazy/scary thing to think about. It does make sense though.
#Developed by: Nikos Kargas
# Gen2 RFID reader flowgraph. Receive chain: USRP source -> matched filter ->
# gate -> tag decoder -> reader; transmit chain: reader -> amplitude scaling ->
# float-to-complex -> USRP sink. When DEBUG is True, recorded sample files are
# used in place of the USRP hardware.

from gnuradio import gr
from gnuradio import uhd
from gnuradio import blocks
from gnuradio import filter
from gnuradio import analog
from gnuradio import digital
from gnuradio import qtgui
import rfid

# True: replay samples from files instead of driving the USRP hardware.
DEBUG = False


class reader_top_block(gr.top_block):

    # Configure usrp source
    def u_source(self):
        """Create and configure the USRP receive (RX) path."""
        self.source = uhd.usrp_source(
            device_addr=self.usrp_address_source,
            stream_args=uhd.stream_args(
                cpu_format="fc32",
                channels=range(1),
            ),
        )
        self.source.set_samp_rate(self.adc_rate)
        self.source.set_center_freq(self.freq, 0)
        self.source.set_gain(self.rx_gain, 0)
        self.source.set_antenna("RX2", 0)
        self.source.set_auto_dc_offset(False, 0)  # Uncomment this line for SBX daughterboard

    # Configure usrp sink
    def u_sink(self):
        """Create and configure the USRP transmit (TX) path."""
        self.sink = uhd.usrp_sink(
            device_addr=self.usrp_address_sink,
            stream_args=uhd.stream_args(
                cpu_format="fc32",
                channels=range(1),
            ),
        )
        self.sink.set_samp_rate(self.dac_rate)
        self.sink.set_center_freq(self.freq, 0)
        self.sink.set_gain(self.tx_gain, 0)
        self.sink.set_antenna("TX/RX", 0)

    def __init__(self):
        gr.top_block.__init__(self)

        #rt = gr.enable_realtime_scheduling()

        ######## Variables #########
        self.dac_rate = 1e6     # DAC rate
        self.adc_rate = 10e6    # ADC rate
        self.decim = 5          # Decimation (downsampling factor)
        self.ampl = 0.5         # Output signal amplitude (signal power vary for different RFX900 cards)
        self.freq = 910e6       # Modulation frequency (can be set between 902-920)
        self.rx_gain = 0        # RX Gain (gain at receiver)
        self.tx_gain = 25       # RFX900 no Tx gain option

        BLF = 100e3             # Backscatter link frequency

        self.usrp_address_source = "addr=192.168.10.2,recv_frame_size=256"
        self.usrp_address_sink = "addr=192.168.10.2,recv_frame_size=256"

        # Each FM0 symbol consists of ADC_RATE/BLF samples (10e6/100e3 = 100 samples);
        # 20 samples per symbol remain after matched filtering and decimation by 5.
        self.num_taps = [1] * (int)(self.adc_rate/BLF/2)  # matched to half symbol period

        ######## File sinks for debugging (1 for each block) #########
        self.file_sink_source = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/source", False)
        self.file_sink_matched_filter = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/matched_filter", False)
        self.file_sink_gate = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/gate", False)
        self.file_sink_decoder = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/decoder", False)
        self.file_sink_reader = blocks.file_sink(gr.sizeof_float*1, "../misc/data/reader", False)

        ######## Blocks #########
        self.matched_filter = filter.fir_filter_ccc(self.decim, self.num_taps);
        self.gate = rfid.gate(int(self.adc_rate/self.decim))
        self.tag_decoder = rfid.tag_decoder(int(self.adc_rate/self.decim))
        self.reader = rfid.reader(int(self.adc_rate/self.decim), int(self.dac_rate))
        self.amp = blocks.multiply_const_ff(self.ampl)
        self.to_complex = blocks.float_to_complex()

        if (DEBUG == False):  # Real Time Execution
            # USRP blocks
            self.u_source()
            self.u_sink()

            ######## Connections #########
            self.connect(self.source, self.matched_filter)
            self.connect(self.matched_filter, self.gate)
            self.connect(self.gate, self.tag_decoder)
            self.connect((self.tag_decoder, 0), self.reader)
            self.connect(self.reader, self.amp)
            self.connect(self.amp, self.to_complex)
            self.connect(self.to_complex, self.sink)

            #File sinks for logging (Remove comments to log data)
            self.connect(self.source, self.file_sink_source)
        else:  # Offline Data
            self.file_source = blocks.file_source(gr.sizeof_gr_complex*1, "../misc/data/file_source_test", False)  ## instead of uhd.usrp_source
            self.file_sink = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/file_sink", False)  ## instead of uhd.usrp_sink

            ######## Connections #########
            self.connect(self.file_source, self.matched_filter)
            self.connect(self.matched_filter, self.gate)
            self.connect(self.gate, self.tag_decoder)
            self.connect((self.tag_decoder, 0), self.reader)
            self.connect(self.reader, self.amp)
            self.connect(self.amp, self.to_complex)
            self.connect(self.to_complex, self.file_sink)

        #File sinks for logging
        self.connect(self.gate, self.file_sink_gate)
        # tag_decoder has a second output port that must always be connected.
        self.connect((self.tag_decoder, 1), self.file_sink_decoder)  # (Do not comment this line)
        #self.connect(self.file_sink_reader, self.file_sink_reader)
        #self.connect(self.matched_filter, self.file_sink_matched_filter)


if __name__ == '__main__':
    main_block = reader_top_block()
    main_block.start()

    # Run until the operator quits, then dump decoder statistics.
    while(1):
        inp = raw_input("'Q' to quit \n")
        if (inp == "q" or inp == "Q"):
            break

    main_block.reader.print_results()
    main_block.stop()
Your people will be my people, and your God will be my God. However, she asked for two months, to bewail her virginity and many do not understand the significance of this. After further pleading of Mordecai, she consents: In nearly all primitive languages red appears, and in general is the first color name to appear. He would invite her to come watch the stars, and they would lie on the roof of the old Headquarters building under a blanket together. I believe you are doing the loving thing in sharing all that you do. I know it must have been painful for you to relive all of this. As a background for the figures of saints it signifies chastity. Glad was her upraising, when, with maiden mirth And merry timbre], she came forth; But, alas, the death march! By tribal custom Ruth gleaned in the fields of Boaz, a kinsman of Naomi. She wanted to pay for love's duty full price. I hate that I could have lived in the same house as you for a year pof dating profile never had an inkling of what you were going through, ruth boaz dating. But I can pledge for our great Sisterhood The trustful knowledge of a holier way Where there is always a morning, fine and good. The words "mother's house" are found in Gen I'm thinking of people in organisations in the US and overseas that work with IBLP and people who are speaking at the regional conferences. See, midst the multitude the victim stands, Dauntless, serene, though terror palsies them! These were the rules for adults. We realize that we have become the workmanship of God himself! Or try this article, which alleges 4 different sexual molestations by Gothard: Gothard, and for the graciousness with which you have shared the suffering you experienced at his hand. The one puts God before country, the other puts country before life. 
During the famine Elimelech and his wife Naomi and their two sons, Mahlon and Chilion, emigrated from Bethlehem to Moab, and there they remained until the sons had taken unto themselves as wives two beautiful Moabitish girls named Ruth and Orpha. Whether we study our Star Point heroines historically or traditionally, the stories are inspiring and beautiful. This is an issue of eternity. Through its power and its activity, all objects are mutually attracted or repelled in accordance with their polarities. It is the sacred color of the Mohammadans, who carry the green flag and the background of whose prayer rugs is always green, let the design worked into the fabric be what list of gay dating websites may. Has anyone ever gotten a hold of him? Strong light increases the intensity of yellow while most colors are dimmed. The truth is Mr. Perfect submission, all is at rest, I, in my Saviour, dating someone off craigslist happy and blest. How do you know what he "thinks"? For the man whom the king delighteth to honor, let royal apparel be brought which the king useth to wear, and the horse that the king rideth upon, and on whose head a crown royal is set, and let the apparel and the horse be delivered to the hands of one of the king's most noble princes, that they may array the man therewith whom the king delighteth to honor, and cause him to ride on horseback before him: Your people will be my people and your God my God. Gothard, but we want to bring to light his false teaching that does not bring glory to God. In the tenth chapter of St. As a former secretary to Bill Gothard, I can honestly say that I relate with a great deal ruth boaz dating what you have shared. Jephthah had a history of difficulties with other chiefs because of his lowly birth, so that he resented it when he was asked to help defeat the Ammonites. Huckabee is contemplating another run for President. 
As the jet taxied away from the terminal that winter day inthe parents tearfully waving off their teenage daughter never imagined how many years would pass yuri dating yoga teacher she truly returned. She was embarking on a journey that would last nearly a decade, and when it was over no one would ever be able to turn back the arms of time and recapture what was lost along the way. I am that daughter, and I dedicate this story to parents everywhere who love their children as my parents loved me. This is a true story, but as I recount the strange happenings of it, I wonder if it will seem beyond belief to those who have for years given their loyalty, trust and love to the major characters who must be described herein. To all of you I plead, please read with open hearts and follow this story to the end. In doing so you may grieve, as I have grieved, or shudder as you walk with me through the shadows of my personal valley of death. The lessons were costly, at times even brutal, but they provide all the more reason for the telling. If only one person can be spared by learning from my experiences, the hurt of putting them on paper will be worthwhile. My parents expounded often on how Mr. At 16 they considered Mr. They were indeed correct. However, other things began to change. I faithfully sought to apply each of his concepts and struggled with feelings of guilt whenever I failed to do so. I began linking achievement with the approval of God and worked the harder to be all that I thought He would desire. I quickly arranged to take a leave of absence from my freshman year of college and boarded a plane for Chicago two days after Christmas, less than two weeks from the date of Mr. It seemed a strange time to leave my family, but Mr. Gothard had clearly stated I must come immediately. Our respect and reverence for this man was so deep we never thought to question either his motives or his judgment. 
Many opportunities to do so would arise in the future, but each one would be quickly pushed aside. We, like so many others, ruth boaz dating, were blinded by our desire to see a changed world and our love for the man who promised such a reward for his laborers. And labor we did, ruth boaz dating, which takes this account back to the years between and During this period of time a lifestyle was initiated by Mr. Gothard that would later end in tragedy for many on his staff. It is easy to report the havoc which transpired, but the difficult challenge is to reveal what caused it. This task is both difficult and painful for it ruth boaz dating exposing the actions of a man so many hold in great esteem. There is no other way, however, to tell the true story or to provide the facts which may help you guard yourself and your family from the devastation which has left its mark on many lives, including my own. The memories were too painful, and she never had the opportunity to finish writing it. My Ruth was much like the Ruth of the Bible. Her journey was long and difficult, filled with physical, emotional and spiritual pain. We were married shortly after she left the Institute. This is a story of betrayal, deceit, and evil control. Ruth was never able to finish writing her story; it was too difficult emotionally. In the end, she was fighting for her life, raising two young children, and living for each day and for what the Lord had in store for tomorrow. When she saw that Bill was using the same techniques on the ATI students that she had suffered under many years before, she fought even harder. Ruth went so far as to reestablish contact with Bill, hoping she could be an agent of change. But that never happened. The pain and suffering I read on Recovering Grace is what she wanted to stop. The Institute we lived under is different than yours. Smaller cities had video seminars where Bill would sometimes make a one-night appearance. 
Byover two million people had attended his seminars. The Institute owned two airplanes: They also had a larger Lear 55 on order. Money was never an object. Historically, this was during the era of Jim and Tammy Bakker, who were in their prime with the PTL network, who, along with Jimmy Swaggart were making their heart-breaking TV appeals to millions of older people living on fixed incomes while they were living in opulent luxury and sin. Bill took a drastically different approach: All of this was true; his favorite food was cheap and easy: But the other side of the story was that the Institute spent thousands of ministry dollars to keep his old, rusty Oldsmobile ruth boaz dating classic running. He would drive his old junker to the airport and then climb into his brand-new Learjet. If he wanted anything personally, he would casually mention it to his staff and they would buy it as a gift or expense it. And this false sense of humility had its rewards. For Christmas or his birthday, he would request expensive leather luggage or rare, expensive duck decoys to add to his collection. The staff felt emotionally manipulated into providing these gifts, even though they made almost nothing themselves. And this was just over her corresponding with a young man who lived two thousand miles away. Ruth and Bill finally reached a compromise: Bill would read all of the correspondence from this young man and help her write very non-personal letters in response, while discouraging any phone calls between the two. I was that young man and was unaware of this high level of internet dating etiquette first date till after we left the Institute. Sadly, this was par for the course among staff members. Image was everything to Bill. To the staff, Bill reinforced the message that God was blessing the ministry based on its growth and prosperity, which implied Bill was of God and could do no wrong. 
Staff members who questioned anything Bill taught were forced to leave and were labeled as failures. Questioning was rarely tolerated by Bill and his family. Fairly quickly she was promoted to his personal secretary. This was dating sites vancouver island 9 to 5 job; she was expected to be available whenever Bill wanted her, and the early mornings and late nights meant she had almost no personal life outside of the ministry. She traveled with Bill to be available to him in between seminars, ruth boaz dating. It is easy to see how an impressionable young girl would struggle to maintain objectivity when she is suddenly thrust into the jet-setting world of a popular Christian speaker who was being welcomed by thousands of Christians in cities across the country. It was an exciting life! Although he taught the highest standards of physical conduct for the sake of participer a un job dating appearances, Bill started asking Ruth to sit on his lap in his office. He would invite her to come watch the stars, and they would lie on the roof of the old Headquarters building under a blanket together. Bill would take her to dinner and have her sit next to him on the bench seat of his Olds Through the years, he has systematically fired staff for much less physical and emotional involvement. Ruth often said how much all the girls hated it, but felt powerless to stop it. He did so without consulting the parents, even though he teaches in the seminar that this is an evidence of a man with questionable motives. What dating franciscan apple dishes purpose could Bill have had in discussing marriage with more than one woman? If this was a means of creating loyalty to himself, it is not one we believe God can bless. Bill had interfered with every relationship she had attempted, yet he failed to move to a point of commitment. As a result of her breakdown she told me she felt like she was punished by being demoted. 
She was given a choice: The IBYC scandal of was horrific, and was an out and out case of sexual harassment and abuse. Predictably, many of them were seduced and violated. They were encouraged to hand over their minds and spirits in a way totally contrary to Scripture, and without the maturity of a close personal walk with God, they were easy prey for the tragedies which occurred. Ruth was one of the women who had been sexually preyed upon and willingly admitted it when asked. I am not sharing something private, but a part of her life story that she wanted others to learn from so as to not make the same mistakes. God guides us directly. Bill, in response to a letter from me, acknowledged that he should have never sent the women up north. Northwoods is where most of the sexual misconduct occurred. This letter was significant because it was the first time since that he had taken any responsibility for what occurred. His letter was in response to a page letter I had sent him earlier that year, calling for his repentance, ruth boaz dating. Regardless of his intent I did offer him forgiveness for that item, and for that one item only. After visiting Oak Brook, I did not have a peace about it. As Bill drove me back to the airport, I told him I lacked a peace regarding their offer. He stated that my peace was not necessary because God only gave peace through the chain of command, not to me personally. I knew that Christ promises the peace that passes all understanding, so deciding to go with that promise, I rejected the position. Several years later, I was ready for a change and I accepted their second offer, but I only lasted eighteen months and resigned when the scandal broke. It gave me the background to help her regain her spiritual and emotional freedom as she left the Institute. Even after leaving the Institute Ruth found it very difficult to make decisions. They had what was considered a rather small family for those days - only six children. 
The azure dome bends o'er the whole. Gothard but rather with God for allowing hurtful things to happen to you. The Lord blessed me with true sincere-hearted friends who weren't afraid to tell me the truth and some other very dear adult staff members who were "watchdogs. O, child of God, how peacefully He calms thy fears to rest, And draws thee upward tenderly Where dwell the ruth boaz dating and blest; And He who bendeth silently Above the gloom of night, Will take thee home where endless joy Shall fill thy soul with light. Ahasuerus was advised to assemble all the beautiful maidens at the palace and make a choice. The men may wear robes made of inexpensive material, or they need not robe. True at heart in adversity, he was also true in prosperity, and the vow he had made when he besought the strong arm of the Lord was not lost to him when he beheld his beloved daughter, the very core of his heart, his idolized child, rush out to greet him in his triumphant entry, and the vow he had uttered flashed across his mind. They were in ATI for over 20 years. The tragedy is that much of the learning came through the failures of yours and mine alike. He stated that my peace was not necessary because God only gave peace through the chain of command, not to me personally. There's surely somewhere a lowly place, In earth's harvest fields so wide - Where I may labor thro' life's short day For Jesus the crucified. It is easy to see how an impressionable young girl would struggle to maintain objectivity when she is suddenly thrust into the jet-setting world of a popular Christian speaker who was being welcomed by thousands of Christians in cities across the country, ruth boaz dating. Famines were not uncommon in the land of Canaan. It is unusual, however, that Naomi did not insist her sons return to Bethlehem to find wives, like Isaac and Jacob returned to their ancestral home in Mesopotamia to find wives Gen Chapter 24 and Chapter Bid her therefore that she help me? 
It was a law among the Jews that cherry blossoms dating poor had a right to glean in anyone's field. Friend of the Jews: Naomi walks away a few steps with Ruth; Naomi's friends look after her sorrowfully. Ruth came from a people who had a scandalous past and a turbulent history. The sacrifice God required is "a humble and contrite heart," and as Samuel admonished Saul, "To obey is better than to sacrifice. Paulinus of Nola writes: The Secret of Sukkot.
import time
# from mx import datetime
import datetime
from openerp.osv import fields, osv
from openerp.tools.translate import _
import sys


class inpatient_registration(osv.osv):
    """Hospitalization (inpatient admission) records with a bed reservation
    workflow: free -> confirmed -> hospitalized, plus cancellation."""

    # Method to check for availability and make the hospital bed reservation
    def registration_confirm(self, cr, uid, ids, context=None):
        """Confirm the registrations after verifying that every requested
        bed is free for the whole hospitalization period.

        Raises osv.except_osv when a bed already has a confirmed
        reservation overlapping the requested period.
        """
        # Validate every reservation BEFORE writing any state change, so a
        # failing record cannot leave earlier records already confirmed
        # (the previous implementation wrote 'confirmed' for all ids inside
        # the loop, before later reservations were checked).
        for reservation in self.browse(cr, uid, ids, context=context):
            bed_id = str(reservation.bed.id)
            cr.execute("select count (*) from medical_inpatient_registration where (hospitalization_date::timestamp,discharge_date::timestamp) overlaps ( timestamp %s , timestamp %s ) and state= %s and bed = cast(%s as integer)", (reservation.hospitalization_date, reservation.discharge_date, 'confirmed', bed_id))
            res = cr.fetchone()
            if res[0] > 0:
                raise osv.except_osv(_('Warning'), _('Bed has been already reserved in this period'))
        # All requested periods are free: confirm everything in one write.
        self.write(cr, uid, ids, {'state': 'confirmed'}, context=context)
        return True

    def patient_discharge(self, cr, uid, ids, context=None):
        """Discharge the patient, marking the registration free again."""
        self.write(cr, uid, ids, {'state': 'free'}, context=context)
        return True

    def registration_cancel(self, cr, uid, ids, context=None):
        """Cancel the registration."""
        self.write(cr, uid, ids, {'state': 'cancelled'}, context=context)
        return True

    def registration_admission(self, cr, uid, ids, context=None):
        """Mark the patient as admitted (hospitalized)."""
        self.write(cr, uid, ids, {'state': 'hospitalized'}, context=context)
        return True

    _name = "medical.inpatient.registration"
    _description = "Patient admission History"

    _columns = {
        'name': fields.char('Registration Code', size=128),
        'patient': fields.many2one('medical.patient', 'Patient'),
        'admission_type': fields.selection([
            ('routine', 'Routine'),
            ('maternity', 'Maternity'),
            ('elective', 'Elective'),
            ('urgent', 'Urgent'),
            ('emergency', 'Emergency')], 'Admission type'),
        'hospitalization_date': fields.datetime('Hospitalization date'),
        'discharge_date': fields.datetime('Discharge date'),
        'attending_physician': fields.many2one('medical.physician', 'Attending Physician'),
        'operating_physician': fields.many2one('medical.physician', 'Operating Physician'),
        'admission_reason': fields.many2one('medical.pathology', 'Reason for Admission', help="Reason for Admission"),
        'bed': fields.many2one('medical.hospital.bed', 'Hospital Bed'),
        'nursing_plan': fields.text('Nursing Plan'),
        'discharge_plan': fields.text('Discharge Plan'),
        'info': fields.text('Extra Info'),
        'state': fields.selection((
            ('free', 'Free'),
            ('cancelled', 'Cancelled'),
            ('confirmed', 'Confirmed'),
            ('hospitalized', 'Hospitalized')), 'Status'),
    }

    _defaults = {
        # Registration codes are drawn from the dedicated ir.sequence.
        'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'medical.inpatient.registration'),
        'state': lambda *a: 'free',
    }

    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The Registration code already exists'),
    ]

inpatient_registration()


class appointment(osv.osv):
    """Extend appointments with a link to the inpatient registration."""
    _name = "medical.appointment"
    _inherit = "medical.appointment"

    _columns = {
        'inpatient_registration_code': fields.many2one('medical.inpatient.registration', 'Inpatient Registration', help="Enter the patient hospitalization code"),
    }

appointment()


# Add the patient status to the partner
class patient_data(osv.osv):
    """Expose the hospitalization status on the patient record."""
    _name = "medical.patient"
    _inherit = "medical.patient"
    _description = "Patient related information"

    def _get_patient_status(self, cr, uid, ids, name, arg, context=None):
        """Functional-field getter: returns 'hospitalized' when the patient
        has a registration in the hospitalized state, else 'outpatient'."""

        def get_hospitalization_status(patient_dbid):
            cr.execute('select state from medical_inpatient_registration where patient=%s and state=\'hospitalized\'', (patient_dbid,))
            row = cr.fetchone()
            # No matching row means the patient is not currently admitted.
            # (Replaces a bare except that silently hid real DB errors.)
            return str(row[0]) if row else "outpatient"

        result = {}
        # Use the patient (DB) id for the lookup on the medical inpatient
        # registration table.
        for patient in self.browse(cr, uid, ids, context=context):
            if patient.id:
                result[patient.id] = get_hospitalization_status(patient.id)
        return result

    _columns = {
        'patient_status': fields.function(
            _get_patient_status,
            method=True,
            type='char',
            string='Hospitalization Status',
            help="Shows whether the patient is hospitalized"),
    }

patient_data()
LuvFree.com is a 100% free online dating and personal ads site. There are a lot of Port Alberni singles searching for romance, friendship, fun and more dates. Join our Port Alberni dating site, view free personal ads of single people and talk with them in chat rooms in real time. Seeking and finding love isn't hard with our Port Alberni personals. Hello. I am an easy-going person who likes reading, history and classic movies. I also enjoy learning new things. I live alone, except for my two cats and two feral cats who come by for meals. Feel free to say hi — we may have things in common. I am retired and looking for someone to spend my time with who likes to take walks and to go to the beach in the evenings, sometimes to watch the stars come out. Could this be the site? Honest, good-hearted man looking for an honest, good-hearted woman. I like hiking, biking, and movies. I work as a landscaper/mover for a company called hire a slave.
from hq.hquery.evaluation_error import HqueryEvaluationError
from hq.hquery.expression_context import peek_context, get_context_node
from hq.hquery.functions.core_number import number
from hq.hquery.object_type import string_value, is_sequence, object_type_name
from hq.hquery.sequences import make_node_set
from hq.soup_util import root_tag_from_any_tag, is_tag_node

exports = ('count', 'id', 'last', 'name', 'position')


def count(sequence):
    """Return the number of items in the given node set or sequence."""
    HqueryEvaluationError.must_be_node_set_or_sequence(sequence)
    return number(len(sequence))


def id(ids):
    """Return a node set of tags whose ``id`` attribute matches any of the
    given values (either a sequence of values or a single
    whitespace-separated string)."""
    if is_sequence(ids):
        wanted = set(string_value(member) for member in ids)
    else:
        wanted = set(string_value(ids).split())

    document_root = root_tag_from_any_tag(get_context_node())
    matches = [
        node
        for node in document_root.descendants
        if is_tag_node(node) and 'id' in node.attrs and node['id'] in wanted
    ]
    return make_node_set(matches)


def last():
    """Return the size of the current evaluation context."""
    return number(peek_context().size)


def name(*args):
    """Return the tag name of the argument (or of the context node when
    called without arguments); the empty string for non-tag nodes."""
    if args:
        target = args[0]
        if is_sequence(target):
            target = target[0]
    else:
        target = get_context_node()
    return target.name if is_tag_node(target) else ''


def position():
    """Return the position of the context node within its context."""
    return number(peek_context().position)
Conceptually a plasma spheromak is a closed plasma sheet formed by a closed spiral of plasma hose in the plane of the plasma sheet. The direction of the plasma hose axis within the plasma sheet conforms to the toroid surface curvature. The plasma hose current causes both toroidal and poloidal magnetic field components. A spheromak is cylindrically symmetric about the spheromak main axis and is mirror symmetric about the spheromak's equatorial plane. The plasma sheet has a net charge Qs that is uniformly distributed over the plasma hose length Lh. This distributed charge causes a spherically radial electric field outside the plasma sheet and a cylindrically radial electric field inside the plasma sheet. The center to center plasma hose spacing Dh is smaller in the spheromak core than at the spheromak periphery. Hence the charge per unit area on the plasma sheet is higher in the spheromak core than at the spheromak periphery. In the central core of the spheromak the electric field is zero and the purely poloidal magnetic field points along the spheromak's main axis of symmetry. In the region enclosed by the plasma sheet the magnetic field is purely toroidal and the electric field is cylindrically radial. In the region not enclosed by the plasma sheet the magnetic field is purely poloidal and the electric field is spherically radial. The charge circulates in the narrow low magnetic field region at the interface surface between the toroidal and poloidal magnetic fields. A spheromak can be mathematically modelled as a closed single layer plasma hose spiral in the shape of a toroid. This plasma sheet is formed from a long length Lh of plasma hose. The plasma hose axial direction gradually changes over the surface of the toroid. The behavior of plasma hose is discussed on the web page titled: PLASMA SHEET PROPERTIES. Photographs of experimental spheromaks within cylindrical metal enclosures show that the experimental spheromak cross section is not round. 
The following diagram shows the approximate cross sectional shape of an experimentally observed spheromak. Thus R takes only positive values whereas H takes both positive and negative values. A spheromak is a cylindrically symmetric closed charge sheet formed from spiraling charge enclosing a toroidal shaped volume. However, for a real spheromak in a vacuum chamber the toroid cross section is not round. The radius of curvature on the outside of the spheromak near the equatorial plane is greater than the radius of a coaxial sphere. Near the equatorial plane the real spheromak core radius is nearly constant. A real spheromak exhibits distinct small radius corners on the charge sheet at the position of the maximum spheromak axial length. A spheromak has important distinct geometrical dimensions. An experimental spheromak photographed in a laboratory differs from an ideal spheromak in free space in two important respects. Consequently the field energy density outside the spheromak on the equatorial plane is cylindrical rather than spherical. This change in external field energy density causes a reduction in the spheromak outside radius Rs and reduces the position stability of the spheromak outside wall. is not exactly true. This issue causes spheromak shape distortion, especially in the necks of the spheromak where the electric fields from opposite sides of the spheromak are partially cancelling and the consequently reduced energy density is supplemented by the poloidal magnetic field energy density. Lt = length of each purely toroidal turn. After an element of charge moving along the plasma hose has passed through the spheromak core hole Nt times and has circled around the main axis of spheromak symmetry Np times, it reaches the point in the plasma hose closed path where it originally started. There are Nt parallel plasma hoses that go through the equatorial plane in the central core of the spheromak and hence form the spheromak core walls. 
This equation establishes a relationship between the number of plasma ions Ni, the number of plasma free electrons Ne and the free electron velocity Ve. This equation is a condition for plasma sheet existence and hence is a condition for plasma spheromak existence. Note if the plasma hose charge per unit length is uniform then |Ve| is constant and hence the electron kinetic energy is constant along the plasma hose. Hence the plasma sheet is an equipotential surface. the spheromak magnetic field is almost entirely poloidal and the spheromak is known as a FRC (Field Reversed Configuration). The toroidal magnetic field can point either Clock Wise (CW) or Counter Clock Wise (CCW) around the spheromak's central poloidal magnetic field. Hence a spheromak has two possible distinct magnetic states of equal energy. the poloidal magnetic field components cancel to zero. Thus in numerical evaluation of plasma sheet properties Vi is generally negligibly small. Thus the effect of a positive radial electric field is to reduce the radius of curvature of ions orbiting the spheromak central axis. Thus the effect of a positive radial electric field is to increase the radius of curvature of electrons orbiting the spheromak central axis. and there is no radial electric field. However, when the electrons and ions are following the same path in opposite directions collisions between electrons and ions cause the electrons to lose a bit energy and the ions to gain a bit energy. This change in particle energy causes the electron and ion paths to separate sufficiently that the electron and ion streams no longer collide. However, an electric field forms that tends to increase the electron path radius of curvature and decrease the ion path radius of curvature, causing maintenance of plasma hose. Over the life of the spheromak, as the free electrons lose energy to the ions the radial electric field E increases to maintain the plasma hose. The spheromak ceases to exist. 
The value of B is this equation is at a magnetic field minimum location where it is impractical to directly measure the magnetic field. However, we may be able to approximately infer the value of B from the spheromak size. This low value suggests that for a plasma spheromak the magnetic field energy completely dominates the electric field energy. Experimental evidence suggests that the same is true in an atomic particle spheromak. Hence in a spheromak both electrons and ions have the same momentum magnitude of opposite sign so that the net momentum is zero, In a random plasma the energy rather than the momentum is equally distributed over the particles. and during spheromak decay the free electrons lose energy to the ions as well as to neutral particles. However, note that the ion doppler signal may be confused by the thermal component of ion motion, which will generate emission line broadening. Further confusion will occur if there is any uncertainty with respect to the ion mass. Initially use a spheromak formed from He-4 because, unlike hydrogen that can exist as either molecular or atomic hydrogen ions, there is no uncertainty about the ion mass. Assume a He-4 spheromak with a free electron kinetic energy of 25 eV. Assume that the atoms are singly ionized (He+ ions). = 40.05 X 10^-19 J. This velocity causes a small frequency shift, above or below Fe, but this frequency shift may easily be lost in the thermal broadening. If ions with this velocity are present they should be easy to detect. This frequency shift is at least comparable to the thermal line broadening. Thus the underlying assumption that a spheromak is composed of a plasma sheet can be experimentally demonstrated. At low free electron kinetic energies it may be difficult to separate the ion doppler frequency shift due to spheromak particle motion from the line broadening due to thermal ion motion. 
However, for compressed spheromaks with 400 eV free electrons the ion doppler frequency shift due to spheromak particle motion should be four fold larger and comparison of ion doppler and Thomson Scattering data should give clear results. Note that in principle this technique can also be used to determine the dominant ion mass in a compressed spheromak. The spheromak ion velocity Vi measured via the Ion Doppler technique is inversely proportional to the ion mass. In executing the above experimental measurements it is essential to remember that the Thomson scattering and ion doppler measurement techniques were originally developed for random plasmas, but the particle motion in a spheromak is not random. The motion path of the electrons and ions is a spiral within the plasma sheet and has a substantial axial component within the spheromak core. These issues will seriously affect the calibration of some instruments. Also both Thomson Scattering and Ion Doppler instrumentation sample the plasma density near the core of the spheromak where the spacial density of free electrons and ions is much higher than the average spacial density in the spheromak. Failure to properly take these issues into account can lead to a computed average free electron/ion density that is much higher than the actual average free electron/ion density. Moreover, this erroneous calculation may appear to be confirmed by data from an ion probe, which will also indicate too high due to sampling the outer surface of the spheromak. This web page last updated April 2, 2015.
import glob
import importlib
import os
import sys
import typing

import cauldron
from cauldron import environ
from cauldron.environ import Response
from cauldron.runner import source
from cauldron.session.projects import Project
from cauldron.session.projects import ProjectStep


def add_library_path(path: str) -> bool:
    """
    Adds the path to the Python system path if not already added and the
    path exists.

    :param path:
        The path to add to the system paths
    :return:
        Whether or not the path was added. Only returns False if the path
        was not added because it doesn't exist
    """
    if not path or not os.path.exists(path):
        return False

    if path not in sys.path:
        sys.path.append(path)

    return True


def remove_library_path(path: str) -> bool:
    """
    Removes the path from the Python system path if it is found in the
    system paths.

    :param path:
        The path to remove from the system paths
    :return:
        Whether or not the path was removed.
    """
    if path in sys.path:
        sys.path.remove(path)
        return True
    return False


def initialize(project: typing.Union[str, Project]):
    """
    Loads the given project into the global cauldron context and returns it.

    :param project:
        Either an existing Project instance, or the path to a project's
        source directory from which a Project will be created.
    :return:
        The Project instance that is now the globally loaded project.
    """
    if isinstance(project, str):
        project = Project(source_directory=project)

    # When opening a project, if there are any steps in the project, the
    # first step should be selected by default.
    has_selected_step = any(s.is_selected for s in project.steps)
    if not has_selected_step and project.steps:
        project.steps[0].is_selected = True

    cauldron.project.load(project)
    return project


def close():
    """
    Closes the currently open project, if any, removing its library
    directories from the Python system path and restoring the working
    directory to the configured (or home) directory.

    :return:
        True when an open project has been closed; False when there was
        no open project to close.
    """
    os.chdir(environ.configs.fetch('directory', os.path.expanduser('~')))

    project = cauldron.project.internal_project
    if not project:
        return False

    # Remove every path this project added to sys.path so modules from a
    # closed project cannot shadow those of the next one.
    for path in project.library_directories:
        remove_library_path(path)
    remove_library_path(project.source_directory)

    cauldron.project.unload()
    return True


def _reload_module(path: str, library_directory: str):
    """
    Reloads the module at the specified path within the package rooted at
    the given library_directory.

    :param path:
        Absolute path of the module file (or package ``__init__.py``).
    :param library_directory:
        Root directory of the library containing the module.
    :return:
        The reloaded module object, or None when the module has not been
        imported yet (nothing to reload).
    """
    # A package is identified by its directory rather than its __init__.py.
    path = os.path.dirname(path) if path.endswith('__init__.py') else path
    # Strip the library root prefix and the '.py' suffix to obtain the
    # dotted package path relative to the library directory.
    start_index = len(library_directory) + 1
    end_index = -3 if path.endswith('.py') else None
    package_path = path[start_index:end_index]

    module = sys.modules.get(package_path.replace(os.sep, '.'))
    return importlib.reload(module) if module is not None else None


def _reload_library(directory: str) -> list:
    """
    Carries out a reload action on the specified root library directory
    that is assumed to contain a python local package with potential
    module changes.

    :param directory:
        Root directory of the library package to reload.
    :return:
        A list of the reload results (module objects or None) for every
        python file found under the directory.
    """
    if not add_library_path(directory):
        # If the library wasn't added because it doesn't exist, remove it
        # in case the directory has recently been deleted and then return
        # an empty result
        remove_library_path(directory)
        return []

    glob_path = os.path.join(os.path.realpath(directory), '**', '*.py')

    # Force file paths to be sorted by hierarchy from deepest to
    # shallowest, which ensures that changes are reloaded by children
    # before any dependencies are encountered in parents.
    found_file_paths = sorted(
        glob.glob(glob_path, recursive=True),
        key=lambda p: "{}--{}".format(str(p.count(os.sep)).zfill(4), p),
        reverse=True,
    )

    # Iterate over imports multiple times in case there's a failed import
    # as the result of dependency changes between multiple files. However,
    # after 20 iterations give up and fail.
    outputs = []
    last_error = None
    for _ in range(20):
        for path in [*found_file_paths]:
            try:
                outputs.append(_reload_module(path, directory))
                # Remove the path if the reload operation succeeded.
                found_file_paths.remove(path)
            except Exception as error:
                # Ignore failures and hope they can be resolved in
                # another pass.
                last_error = error

        if not found_file_paths:
            # If there's nothing left to reload, return the reloaded
            # modules.
            return outputs

    # If 20 attempts to reload modules fail, it's time to error out.
    raise RuntimeError(
        "Failed to reload modified modules. "
        "This could be due to a circular import."
    ) from last_error


def reload_libraries(library_directories: list = None) -> list:
    """
    Reload the libraries stored in the project's local and shared library
    directories to ensure that any modifications since the previous
    load/reload have been refreshed.

    :param library_directories:
        Optional additional library directories to reload alongside the
        open project's own library directories.
    :return:
        A list of the module objects that were actually reloaded.
    """
    directories = library_directories or []
    project = cauldron.project.get_internal_project()
    if project:
        directories += project.library_directories

    if not directories:
        return []

    return [
        reloaded_module
        for directory in directories
        for reloaded_module in _reload_library(directory)
        if reloaded_module is not None
    ]


def section(
        response: Response,
        project: typing.Union[Project, None],
        starting: ProjectStep = None,
        limit: int = 1,
        force: bool = False,
        skips: typing.List[ProjectStep] = None
) -> list:
    """
    Runs a section of the project, i.e. at most ``limit`` steps beginning
    at the ``starting`` step (or the first step when omitted).

    :param response:
        Response object into which run results are recorded.
    :param project:
        The project to run; when None the globally loaded project is used.
    :param starting:
        The step at which running begins. Steps before it are skipped.
    :param limit:
        Maximum number of steps to run. Values below 1 are clamped to 1.
    :param force:
        When True, run steps even if they are not dirty.
    :param skips:
        Steps that should be skipped while running this section
    :return:
        The list of steps that were run.
    """
    limit = max(1, limit)

    if project is None:
        project = cauldron.project.get_internal_project()

    starting_index = 0
    if starting:
        starting_index = project.steps.index(starting)

    count = 0

    steps_run = []

    for ps in project.steps:
        if count >= limit:
            break

        if ps.index < starting_index:
            continue

        if skips and ps in skips:
            continue

        # A clean (not dirty) step may only be skipped at the start of the
        # section; once a step has run, subsequent steps always run.
        if not force and count == 0 and not ps.is_dirty():
            continue

        steps_run.append(ps)
        # Abort the section on the first failed step.
        if not source.run_step(response, project, ps, force=force):
            return steps_run

        count += 1

    return steps_run


def complete(
        response: Response,
        project: typing.Union[Project, None],
        starting: ProjectStep = None,
        force: bool = False,
        limit: int = -1
) -> list:
    """
    Runs the entire project, writes the results files, and returns the URL
    to the report file

    :param response:
        Response object into which run results are recorded.
    :param project:
        The project to run; when None the globally loaded project is used.
    :param starting:
        The step at which running begins. Steps before it are skipped.
    :param force:
        When True, run every step even if it is not dirty.
    :param limit:
        Maximum number of steps to run; values below 1 mean no limit.
    :return:
        Local URL to the report path
    """
    if project is None:
        project = cauldron.project.get_internal_project()

    starting_index = 0
    if starting:
        starting_index = project.steps.index(starting)

    count = 0

    steps_run = []

    for ps in project.steps:
        # A limit of less than 1 disables the limit entirely.
        if 0 < limit <= count:
            break

        if ps.index < starting_index:
            continue

        if not force and not ps.is_dirty():
            if limit < 1:
                environ.log(
                    '[{}]: Nothing to update'.format(ps.definition.name)
                )
            continue

        count += 1

        steps_run.append(ps)
        success = source.run_step(response, project, ps, force=True)
        # Stop on failure or when a step raised the project's halt flag.
        if not success or project.stop_condition.halt:
            return steps_run

    return steps_run
This is a wonderfully easy recipe to make! Dinner will be on the table in 30 minutes! In a flat dish, press bread crumbs into chicken on all sides. In a saute pan, saute garlic in half of olive oil for a minute. Add spinach, pine nuts, and grated Parmesan. Saute until spinach is limp. Cool and reserve. Oil one oven-proof casserole dish. Lay the chicken breasts in the casserole dish. On top of each chicken breast, spoon spinach mixture. Top with Provolone cheese. Garnish with chopped parsley. Bake in a preheated 400-degree oven for 20 minutes. Remove from casserole with spatula and serve on heated plates. Very Good! Definitely a keeper. I might try to dip the chicken in a little lemon juice before rolling in bread crumbs next time. Husband even ate the leftovers for work, and he NEVER eats leftovers.
#pylint: disable=C0111 from lettuce import world, step from lettuce.django import django_url from common import TEST_COURSE_NAME, TEST_SECTION_NAME, i_am_registered_for_the_course, section_location ############### ACTIONS #################### @step('when I view the video it has autoplay enabled') def does_autoplay(step): assert(world.css_find('.video')[0]['data-autoplay'] == 'True') @step('the course has a Video component') def view_video(step): coursename = TEST_COURSE_NAME.replace(' ', '_') i_am_registered_for_the_course(step, coursename) # Make sure we have a video add_video_to_course(coursename) chapter_name = TEST_SECTION_NAME.replace(" ", "_") section_name = chapter_name url = django_url('/courses/edx/Test_Course/Test_Course/courseware/%s/%s' % (chapter_name, section_name)) world.browser.visit(url) def add_video_to_course(course): template_name = 'i4x://edx/templates/video/default' world.ItemFactory.create(parent_location=section_location(course), template=template_name, display_name='Video')
Google Wallet Or PayPal 0 0 cakey 2 Off 0 0 big24 2 Off 10 When Paying With Credit Card, Google Wallet Or PayPal 0 0 flan Save 2 Off 10 0 0 don24 2 Off. Printing this coupons requires the SmartSource Printer Java applet. They can also receive up to 4 off of 20 or more on online pickup orders when they use an Eat 24 coupon. But in the event you dont have insurance and are just a cash buyer, you coul use the Loestrin 24 Fe coupon to conserve 24 on the normal retail price of this drug. Gonal-F Coupon, there are two coupons for : Coupon Value and Save: Save.50 on every 75. There are two coupons for : Coupon Value and Save: Instant Savings 15 How Does. Four Monistat Coupon online available. Soothing care Coupon, Save.00 on Any soothing care; Product from THE makers OF monistat. Ortho TRI cyclen LO Coupon. So that you can use Loestrin Coupon in order to save some useful cash when you decide to purchase this drug. Follow this Monistat Manufacturer coupon link to get this offer: Get Coupon! Take 2 Off Orders 10 or More, buy 1 Get 1 50 off Full Priced Styles 5 0 boss24 2 Off 10 Orders When Paying With Credit Card, PayPal, Or Google Wallet 4 0 butter, extra 2 Off 4 0 bleu, extra 2 Off. Other Related Coupons: Loestrin 24 Coupon, there are two coupons for : Coupon Value and Save: Pay no more than 25 with Loestrin. How to get/acquire, loestrin 24 Coupon free, loestrin 24 coupons will help you not to pay more than 24 for your prescription drug. Customers can receive 2 off of 10 or more when they use. You can also obtain the Loestrin 24 coupon (Loestrin 24 fe Patients Savings Card) by calling in addition to finding details in other website. Eat 24 is an online food ordering service in over 650 cities, with over 15,000 restaurants. Loestrin 24 Fe is far the most famous anti pregnancy medications in the marketplace which it is taken by plenty of ladies to stop unwanted pregnancy. 7-Day Treatment, Save.00 on Any monistat 7-Day Treatment. 
Deer 2 Off 10 20 1 gyro, save 2 Off 10 18 0, chickfila 20 Off Any Little Golden Book at Random House 17 0, bobevans 4 Off 20 Online Pickup Orders 14 1 7f43i, extra 2 Off On 10 Or More 16 2 yelp24. 2 Off 10 When Paying With Credit Card, Google Wallet Or PayPal 0 0 minute24 2 Off 10 With Credit Card, Google Wallet Or PayPal For Members 0 0 jello24 2 Off 10 With Credit Card, Google Wallet Or PayPal For Members 0 0 truth24. Synthroid Coupon, there are two coupons for : Coupon Value and Save: Save 10 On 90 Tablets How Does.
from com.zhyfoundry.spider import Configuration from com.zhyfoundry.spider.impl import BaseSpider from com.zhyfoundry.spider.impl.CRM import CRM from com.zhyfoundry.spider.impl.s2 import Fetcher2, Parser2, Tracker2 import time import traceback class Spider2(BaseSpider.BaseSpider): def __init__(self): super(Spider2, self).__init__() def crawl(self, trackingTimestamp, keyword = None): config = Configuration.Configuration.readFromFile(); countLimit = 65535 if config.maxFetchCount == -1 else config.maxFetchCount urlsToFetch = self.fetchURL(trackingTimestamp, countLimit) if len(urlsToFetch) == 0: print 'No URL to fetch.' return fetcher = Fetcher2.Fetcher2() parser = Parser2.Parser2() count = 0 tracker = Tracker2.Tracker2() for url in urlsToFetch: if count >= countLimit: print 'Fetch count limitation reached: ' + str(countLimit) break; count += 1; print 'URL to fetch: ' + str(url) html = fetcher.fetch(url.url, config) if parser.needLogin(html): print 'Need to Login' html = fetcher.login(self.username, self.password) if parser.needLogin(html): raise Exception("Login fail!") print 'Login success!' 
html = fetcher.fetch(url.url, config) if parser.isDetailPage(html): parseResult = parser.parse(html, url.url, config) if parseResult.content != None: try: CRM.saveEnterprise(parseResult.content); except: print traceback.format_exc() tracker.updateTrackTime(url.id) tracker.track(parseResult.newSeeds, url.id, self.id, None) elif keyword != None: print 'Search term: ' + keyword html = fetcher.search(keyword) tracker.updateTrackTime(url.id) page = 1 while (True): parseSearchResult = parser.parseSearchResult(html) tracker.track(parseSearchResult.newSeeds, url.id, self.id, None) if parseSearchResult.newSeedRightNow == None or count >= countLimit: print 'parseSearchResult.newSeedRightNow == None: ' + str(parseSearchResult.newSeedRightNow == None) print 'count >= countLimit: ' + str(count >= countLimit) break page += 1 print 'Will crawl page ' + str(page) + ': ' + parseSearchResult.newSeedRightNow['href'] print 'Sleep ' + str(config.interval) + ' second.' time.sleep(config.interval) html = fetcher.fetch(parseSearchResult.newSeedRightNow['href'], config) if html == None: retryTimes = 0 while (retryTimes < config.maxRetryTimes and html == None): retryTimes += 1 print 'Retry ' + str(retryTimes) html = fetcher.fetch(parseSearchResult.newSeedRightNow['href'], config) count += 1 print 'Sleep ' + str(config.interval) + ' second.' time.sleep(config.interval)
On this page, you will find photos from SigmaCamp 2015. Since there are a lot of photos, they are arranged by day. Thanks to all our photographers, and especially to Vicka Bershadsky, for these photos!
""" This is the base model for Gasista Felice. It includes common data on which all (or almost all) other applications rely on. """ from django.db import models from django.utils.translation import ugettext, ugettext_lazy as _ from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured, ValidationError from django.db.models import permalink from django_comments.models import Comment from django.contrib.contenttypes.models import ContentType from django.conf import settings from django.dispatch import receiver from django.db.models.signals import post_save, pre_save from workflows.models import Workflow, Transition, State #from history.models import HistoricalRecords from consts import GAS_REFERRER_SUPPLIER from flexi_auth.models import PermissionBase # mix-in class for permissions management from flexi_auth.models import ParamRole, Param from flexi_auth.exceptions import WrongPermissionCheck from flexi_auth.utils import get_parametric_roles from flexi_auth.models import PrincipalParamRoleRelation from simple_accounting.models import economic_subject, AccountingDescriptor, LedgerEntry, account_type from lib import ClassProperty, unordered_uniq from gf.base import const from gf.base.utils import get_resource_icon_path from gf.base.accounting import PersonAccountingProxy from workflows.utils import do_transition import os import logging import geocoder log = logging.getLogger(__name__) class Resource(object): """Base class for project fundamental objects. This is a basic mix-in class used to factor out data/behaviours common to the majority of model classes in the project's applications. 
Resource API is composed of: * Basic methods and properties: * basic type and resource string representation * caching operations * Relational properties: * how the resource relates to other resources """ # Attribute used to make a list of confidential lists confidential_fields = () # Attribute used to cache data volatile_fields = [] #----------------------------------------- # Basic properites #----------------------------------------- @ClassProperty @classmethod def resource_type(cls): """String representation of resource type""" return cls.__name__.lower() @property def urn(self): """Unique resource name""" return '%s/%s' % (self.resource_type, self.pk) @property def ancestors(self): """List of ancestors of a resource. This is te list of parents from root to the resource itself. It is used p.e. to display navigation breadcrumbs. You SHOULD NOT implement it in subclasses """ if self.parent: return self.parent.ancestors + [self.parent] else: return [] @property def parent(self): """Identifies resource which includes this resource. Stated that there can be only one parent for a resource, (no multiple parents allowed), setting this attribute makes the resource confident of who includes itself. This attribute is then used to make the list of `:ref:ancestors`. You MUST implement it in subclasses if they have parent. 
""" return None def do_transition(self, transition, user): return do_transition(self, transition, user) @property def allnotes(self): ctype = ContentType.objects.get_for_model(self.__class__) notes = Comment.objects.filter(object_pk=self.pk, content_type=ctype).order_by('-submit_date') return notes @permalink def get_absolute_url(self): return ('rest.views.resource_page', (), { 'resource_type' : self.resource_type, 'resource_id' : self.pk }) def get_absolute_url_page(self): return self.get_absolute_url().replace('/rest', '/rest/#rest') def as_dict(self): return { 'name': unicode(self), 'urn' : self.urn, } #-- Referrers API --# @property def referrers(self): """Returns User QuerySet bound to resource""" raise NotImplementedError("class: %s method: referrers" % self.__class__.__name__) @property def referrer(self): """Return User bound to resource""" raise NotImplementedError("class: %s method: referrer" % self.__class__.__name__) @property def referrers_people(self): """Returns Person related to referrers QuerySet""" return Person.objects.filter(user__in=self.referrers) @property def info_people(self): """Returns Person to contact for info QuerySet""" raise NotImplementedError("class: %s method: info_people" % self.__class__.__name__) #-- History API --# # Requires that an history manager exists for the resource # TODO: encapsulate it in HistoryResource class @property def created_on(self): """Returns datetime instance of when the instance has been created.""" # There could be the case that a deleted id is reused, so, do not use .get method self_as_of_creation = \ self._default_history.filter(id=self.pk, history_type="+")[0] return self_as_of_creation.history_date @property def created_by(self): """Returns user that created the resource.""" #COMMENT fero: disabled user in history! 
return User.objects.none() # There could be the case that a deleted id is reused, so, do not use .get method self_as_of_creation = \ self._default_history.filter(id=self.pk, history_type="+")[0] return self_as_of_creation.history_user @property def created_by_person(self): """Returns person bound to the user that created the resource.""" u = self.created_by if u is not None: return u.person return None @property def last_update_by(self): """Returns user that has made the last update to the resource.""" #COMMENT fero: disabled user in history! return User.objects.none() # There could be the case that a deleted id is reused, so, do not use .get method try: self_as_of_last_update = \ self._default_history.filter(id=self.pk, history_type="~")[0] except IndexError: # This object has never been update return None else: return self_as_of_last_update.history_user @property def last_update_by_person(self): """Returns person bound to the user that made the last update the resource.""" u = self.last_update_by if u is not None: return u.person return None @property def updaters(self): """Returns User QuerySet of who has updated the resource.""" self_updaters = unordered_uniq( self._default_history.filter(id=self.pk, history_type="~").values_list('history_user') ) return User.objects.filter(pk__in=map(lambda x: x[0].pk, self_updaters)) #------------------------------------ # Basic properties: cache management #------------------------------------ def save_checkdata_in_cache(self): key = Resource.cache_key(self.pk) data_to_cache = {} for n in self.volatile_fields: data_to_cache[n] = getattr(self, n) if not data_to_cache: return False try: pstore.savedata(key, data_to_cache) except Exception, e: raise return True def load_checkdata_from_cache(self): if not self.volatile_fields: return False key = Resource.cache_key(self.pk) data = pstore.getalldata(key, self.volatile_fields) for n in self.volatile_fields: if data.has_key(n): setattr(self, n, data[n]) return True @classmethod def 
cache_key(cls, resource_id): #TODO fero CHECK #Pay attention because it is connected to class return "%s/%s" % (cls.resource_type, resource_id) #--------------------------------------------- # Relational properties: # not all must be implemented by Resource subclasses # but just only that makes sense #--------------------------------------------- @property def des_list(self): """Return DES instances bound to the resource""" raise NotImplementedError("class: %s method: des_list" % self.__class__.__name__) @property def des(self): """Return the DES instance bound to the resource""" from des.models import Siteattr return Siteattr.get_site() raise NotImplementedError("class: %s method: des" % self.__class__.__name__) @property def gas_list(self): """Return GAS list bound to resource""" raise NotImplementedError("class: %s method: gas_list" % self.__class__.__name__) @property def gas(self): """Return GAS bound to resource""" raise NotImplementedError("class: %s method: gas" % self.__class__.__name__) def categories(self): """Return ProductCategory bound to resource""" raise NotImplementedError("class: %s method: categories" % self.__class__.__name__) def category(self): """Return ProductCategory bound to resource""" raise NotImplementedError("class: %s method: category" % self.__class__.__name__) @property def persons(self): """Return persons bound to resource""" raise NotImplementedError("class: %s method: persons" % self.__class__.__name__) @property def person(self): """Return person bound to resource""" raise NotImplementedError("class: %s method: person" % self.__class__.__name__) @property def gasmembers(self): """Return GAS members bound to resource""" raise NotImplementedError("class: %s method: gasmembers" % self.__class__.__name__) @property def gasmember(self): """Return GAS member bound to resource""" raise NotImplementedError("class: %s method: gasmember" % self.__class__.__name__) @property def pacts(self): """Return pacts bound to resource""" raise 
NotImplementedError("class: %s method: pacts" % self.__class__.__name__) @property def pact(self): """Return pact bound to resource""" raise NotImplementedError("class: %s method: pact" % self.__class__.__name__) @property def suppliers(self): """Return suppliers bound to resource""" raise NotImplementedError("class: %s method: suppliers" % self.__class__.__name__) @property def supplier(self): """Return supplier bound to resource""" raise NotImplementedError("class: %s method: supplier" % self.__class__.__name__) @property def orders(self): """Return orders bound to resource""" raise NotImplementedError("class: %s method: orders" % self.__class__.__name__) @property def order(self): """Return order bound to resource""" raise NotImplementedError("class: %s method: order" % self.__class__.__name__) @property def deliveries(self): """Return deliveries bound to resource""" raise NotImplementedError("class: %s method: deliveries" % self.__class__.__name__) @property def delivery(self): """Return delivery bound to resource""" raise NotImplementedError("class: %s method: delivery" % self.__class__.__name__) @property def withdrawals(self): """Return withdrawals bound to resource""" raise NotImplementedError("class: %s method: withdrawals" % self.__class__.__name__) @property def withdrawal(self): """Return withdrawal bound to resource""" raise NotImplementedError("class: %s method: withdrawal" % self.__class__.__name__) @property def products(self): """Return products bound to resource""" raise NotImplementedError("class: %s method: products" % self.__class__.__name__) @property def product(self): """Return product bound to resource""" raise NotImplementedError("class: %s method: product" % self.__class__.__name__) @property def stocks(self): """Return SupplierStock list bound to resource""" raise NotImplementedError("class: %s method: stocks" % self.__class__.__name__) @property def stock(self): """Return SupplierStock bound to resource""" raise 
NotImplementedError("class: %s method: stock" % self.__class__.__name__) @property def orderable_products(self): """Return GASSupplierOrderProduct querySet for orders bound to resource""" raise NotImplementedError("class: %s method: orderable_products" % self.__class__.__name__) @property def ordered_products(self): """Return GASMemberOrder querySet for orders bound to resource""" raise NotImplementedError("class: %s method: ordered_products" % self.__class__.__name__) @property def basket(self): """Return GASMemberOrder querySet for open orders bound to resource""" raise NotImplementedError("class: %s method: basket" % self.__class__.__name__) #-- Contacts --# @property def contacts(self): """Contact QuerySet bound to the resource. You SHOULD override it when needed """ return self.contact_set.all() @property def email_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.contacts.filter(flavour=const.EMAIL).values_list('value')))) @property def phone_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.contacts.filter(flavour=const.PHONE).values_list('value')))) @property def preferred_email_address(self): """The email address, where we should write if we would know more info on the resource. It is not necessarily bound to a person. NOTE that it could be even a list of addresses following syntax in RFC 5322 and RFC 5321, or simply http://en.wikipedia.org/wiki/Email_address#Syntax :) Usually you SHOULD NOT NEED TO OVERRIDE IT in subclasses """ if settings.EMAIL_DEBUG: return settings.EMAIL_DEBUG_ADDR else: return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_email_contacts.values_list('value')))) @property def preferred_email_contacts(self): """Email Contacts, where we should write if we would know more info on the resource. It is not necessarily bound to a person. 
Usually you SHOULD NOT NEED TO OVERRIDE IT in subclasses """ return self.contacts.filter(flavour=const.EMAIL, is_preferred=True) or \ self.contacts.filter(flavour=const.EMAIL) @property def preferred_phone_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_phone_contacts.values_list('value')))) @property def preferred_phone_contacts(self): return self.contacts.filter(flavour=const.PHONE, is_preferred=True) or \ self.contacts.filter(flavour=const.PHONE) # @property # def preferred_www_address(self): # return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_www_contacts.values_list('value')))) # @property # def preferred_www_contacts(self): # return self.contacts.filter(flavour=const.WWW, is_preferred=True) or \ # self.contacts.filter(flavour=const.WWW) @property def preferred_fax_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_fax_contacts.values_list('value')))) @property def preferred_fax_contacts(self): return self.contacts.filter(flavour=const.FAX, is_preferred=True) or \ self.contacts.filter(flavour=const.FAX) @property def icon(self): "Returns default icon for resource""" icon = models.ImageField(upload_to="fake") basedir = os.path.join(settings.STATIC_URL, "nui", "img", settings.THEME) icon.url = os.path.join(basedir, "%s%s.%s" % (self.resource_type, "128x128", "png")) return icon #TODO CHECK if these methods SHOULD be removed from Resource API # because they are tied only to a specific resource. Leave commented now. 
# If you need them in a specific resource, implement in it # @property # def gasstocks(self): # """Return GASSupplierStock list bound to resource""" # raise NotImplementedError # # @property # def gasstock(self): # """Return GASSupplierStock bound to resource""" # raise NotImplementedError # # @property # def units(self): # """Return unit measure list bound to resource""" # raise NotImplementedError # # @property # def unit(self): # """Return unit measure bound to resource""" # raise NotImplementedError #--------------------------# @property def economic_movements(self): """Return accounting LedgerEntry instances.""" raise NotImplementedError @property def balance(self): """Return an economic state bound to resource (DES, GASMember, GAS or Supplier through ) Accounting sold for this ressource """ acc_tot = self.person.accounting.system['/wallet'].balance return acc_tot #------------------------------------------------------------------------------ class PermissionResource(Resource, PermissionBase): """ Just a convenience for classes inheriting both from `Resource` and `PermissionBase` """ def _get_roles(self): """ Return a QuerySet containing all the parametric roles which have been assigned to this Resource. """ # Roles MUST BE a property because roles are bound to a User # with `add_principal()` and not directly to a GAS member # costruct the result set by joining partial QuerySets roles = [] ctype = ContentType.objects.get_for_model(self) params = Param.objects.filter(content_type=ctype, object_id=self.pk) # get all parametric roles assigned to the Resource; return ParamRole.objects.filter(param_set__in=params) roles = property(_get_roles) @economic_subject class Person(models.Model, PermissionResource): """ A Person is an anagraphic record of a human being. It can be a User or not. 
""" name = models.CharField(max_length=128,verbose_name=_('name')) surname = models.CharField(max_length=128,verbose_name=_('surname')) display_name = models.CharField(max_length=128, blank=True, verbose_name=_('display name')) # Leave here ssn, but do not display it ssn = models.CharField(max_length=128, unique=True, editable=False, blank=True, null=True, help_text=_('Write your social security number here'),verbose_name=_('Social Security Number')) contact_set = models.ManyToManyField('Contact', null=True, blank=True,verbose_name=_('contacts')) user = models.OneToOneField(User, null=True, blank=True, verbose_name=_('User'), help_text=_("bind to a user if you want to give this person an access to the platform") ) address = models.ForeignKey('Place', null=True, blank=True,verbose_name=_('main address')) avatar = models.ImageField(upload_to=get_resource_icon_path, null=True, blank=True, verbose_name=_('avatar')) website = models.URLField(blank=True, verbose_name=_("web site")) accounting = AccountingDescriptor(PersonAccountingProxy) # #history = HistoricalRecords() class Meta: verbose_name = _("person") verbose_name_plural = _("people") ordering = ('display_name',) db_table = 'base_person' def __unicode__(self): rv = self.display_name if not rv: # If display name is not provided --> save display name rv = u'%(name)s %(surname)s' % {'name' : self.name, 'surname': self.surname} self.display_name = rv self.save() # Removed city visualization following Orlando's and Dominique's agreements # WAS: if self.city: # WAS: rv += u" (%s)" % self.city return rv @property def report_name(self): return u"%(name)s %(surname)s" % {'name' : self.name, 'surname': self.surname} def clean(self): if not self.user and self.gasmembers.count(): raise ValidationError(_("A person without user cannot be a GAS member")) self.name = self.name.strip().lower().capitalize() self.surname = self.surname.strip().lower().capitalize() self.display_name = self.display_name.strip() if not self.ssn: 
self.ssn = None else: self.ssn = self.ssn.strip().upper() return super(Person, self).clean() @property def uid(self): """ A unique ID (an ASCII string) for ``Person`` model instances. """ return self.urn.replace('/','-') @property def parent(self): return self.des @property def icon(self): return self.avatar or super(Person, self).icon ## START Resource API # Note that all the following methods return a QuerySet @property def persons(self): return Person.objects.filter(pk=self.pk) @property def person(self): return self @property def gasmembers(self): #TODO UNITTEST """ GAS members associated to this person; to each of them corresponds a membership of this person in a GAS. """ return self.gasmember_set.all() @property def gas_list(self): #TODO UNITTEST """ All GAS this person belongs to (remember that a person may be a member of more than one GAS). """ from gf.gas.models import GAS gas_pks = set(member.gas.pk for member in self.gasmembers) return GAS.objects.filter(pk__in=gas_pks) @property def des_list(self): #TODO UNITTEST """ All DESs this person belongs to (either as a member of one or more GAS or as a referrer for one or more suppliers in the DES). 
""" from des.models import DES des_set = set([gas.des for gas in self.gas_list]) return DES.objects.filter(pk__in=[obj.pk for obj in des_set]) @property def des(self): from des.models import Siteattr return Siteattr.get_site() @property def pacts(self): """ A person is related to: pacts signed with a GAS he/she belongs to """ from gf.gas.models import GASSupplierSolidalPact # initialize the return QuerySet qs = GASSupplierSolidalPact.objects.none() #add the suppliers who have signed a pact with a GAS this person belongs to for gas in self.gas_list: qs = qs | gas.pacts return qs @property def suppliers(self): #TODO UNITTEST """ A person is related to: 1) suppliers for which he/she is a referrer 2) suppliers who have signed a pact with a GAS he/she belongs to """ from gf.supplier.models import Supplier # initialize the return QuerySet qs = Supplier.objects.none() #add the suppliers who have signed a pact with a GAS this person belongs to for gas in self.gas_list: qs = qs | gas.suppliers # add the suppliers for which this person is an agent referred_set = set([sr.supplier for sr in self.supplieragent_set.all()]) qs = qs | Supplier.objects.filter(pk__in=[obj.pk for obj in referred_set]) return qs @property def orders(self): #TODO UNITTEST """ A person is related to: 1) supplier orders opened by a GAS he/she belongs to 2) supplier orders for which he/she is a referrer 3) order to suppliers for which he/she is a referrer """ from gf.gas.models import GASSupplierOrder # initialize the return QuerySet qs = GASSupplierOrder.objects.none() #add the supplier orders opened by a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.orders return qs @property def deliveries(self): #TODO UNITTEST """ A person is related to: 1) delivery appointments for which this person is a referrer 2) delivery appointments associated with a GAS he/she belongs to """ from gf.gas.models import Delivery # initialize the return QuerySet qs = Delivery.objects.none() # add delivery 
appointments for which this person is a referrer for member in self.gasmembers: qs = qs | member.delivery_set.all() # add delivery appointments associated with a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.deliveries return qs @property def withdrawals(self): #TODO UNITTEST """ A person is related to: 1) withdrawal appointments for which this person is a referrer 2) withdrawal appointments associated with a GAS he/she belongs to """ from gf.gas.models import Withdrawal # initialize the return QuerySet qs = Withdrawal.objects.none() # add withdrawal appointments for which this person is a referrer for member in self.gasmembers: qs = qs | member.withdrawal_set.all() # add withdrawal appointments associated with a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.withdrawals return qs ## END Resource API @property def city(self): if self.address: return self.address.city else: return None def setup_accounting(self): """ Accounting hierarchy for Person. . ROOT (/) |----------- wallet [A] +----------- incomes [P,I] + | +--- other (private order, correction, deposit) +----------- expenses [P,E] + +--- other (correction, donation, ) """ self.subject.init_accounting_system() # create a generic asset-type account (a sort of "virtual wallet") system = self.accounting.system system.get_or_create_account( parent_path='/', name='wallet', kind=account_type.asset ) # Expenses and incomes of other kind... system.get_or_create_account( parent_path='/expenses', name='other', kind=account_type.expense ) system.get_or_create_account( parent_path='/incomes', name='other', kind=account_type.income ) #----------------- Authorization API ------------------------# # Table-level CREATE permission @classmethod def can_create(cls, user, context): # Who can create a new Person in a DES ? 
# * DES administrators allowed_users = User.objects.none() try: des = context['site'] except KeyError: return User.objects.none() #raise WrongPermissionCheck('CREATE', cls, context) else: allowed_users = des.gas_tech_referrers return user in allowed_users # Row-level EDIT permission def can_edit(self, user, context): # Who can edit a Person in a DES ? # * the person itself # * administrators of one of the DESs this person belongs to des_admins = [] for des in self.des_list: des_admins += des.admins allowed_users = list(des_admins) + [self.user] return user in allowed_users # Row-level DELETE permission def can_delete(self, user, context): # Who can delete a Person from the system ? allowed_users = [self.user] return user in allowed_users #-----------------------------------------------------# @property def username(self): if self.user: return self.user.username else: return ugettext("has not an account in the system") display_fields = ( name, surname, models.CharField(name="city", verbose_name=_("City")), models.CharField(name="username", verbose_name=_("Username")), #DO NOT SHOW now models.CharField(name="email_address", verbose_name=_("Email")), #DO NOT SHOW now models.CharField(name="phone_address", verbose_name=_("Phone")), address, ) def has_been_member(self, gas): """ Return ``True`` if this person is bound to the GAS ``gas`` (GASMember exist whether it is suspended or not), ``False`` otherwise. If ``gas`` is not a ``GAS`` model instance, raise ``TypeError``. """ from gf.gas.models import GAS, GASMember if not isinstance(gas, GAS): raise TypeError(_(u"GAS membership can only be tested against a GAS model instance")) return bool(GASMember.all_objects.filter(gas=gas, person=self).count()) def is_member(self, gas): """ Return ``True`` if this person is an active (not suspended) member of GAS ``gas``, ``False`` otherwise. If ``gas`` is not a ``GAS`` model instance, raise ``TypeError``. 
""" from gf.gas.models import GAS if not isinstance(gas, GAS): raise TypeError(_(u"GAS membership can only be tested against a GAS model instance")) return gas in [member.gas for member in self.gasmembers] @property def full_name(self): return self.name + self.surname def save(self, *args, **kw): if not self.display_name: self.display_name = u"%(name)s %(surname)s" % {'name' : self.name, 'surname': self.surname} super(Person, self).save(*args, **kw) class Contact(models.Model): """If is a contact, just a contact email or phone""" flavour = models.CharField(max_length=32, choices=const.CONTACT_CHOICES, default=const.EMAIL,verbose_name=_('flavour')) value = models.CharField(max_length=256,verbose_name=_('value')) is_preferred = models.BooleanField(default=False,verbose_name=_('preferred')) description = models.CharField(max_length=128, blank=True, default='',verbose_name=_('description')) ##history = HistoricalRecords() class Meta: verbose_name = _("contact") verbose_name_plural = _("contacts") db_table = 'base_contact' def __unicode__(self): return u"%(t)s: %(v)s" % {'t': self.flavour, 'v': self.value} def clean(self): self.flavour = self.flavour.strip() if self.flavour not in map(lambda x: x[0], const.CONTACT_CHOICES): raise ValidationError(_("Contact flavour MUST be one of %s" % map(lambda x: x[0], const.CONTACT_CHOICES))) self.value = self.value.strip() self.description = self.description.strip() return super(Contact, self).clean() class Place(models.Model, PermissionResource): """Places should be managed as separate entities for various reasons: * among the entities arising in the description of GAS' activities, there are several being places or involving places, so abstracting this information away seems a good thing; * in the context of multi-GAS (retina) orders, multiple delivery and/or withdrawal locations can be present. 
""" name = models.CharField(max_length=128, blank=True, help_text=_("You can avoid to specify a name if you specify an address"),verbose_name=_('name')) description = models.TextField(blank=True,verbose_name=_('description')) # QUESTION: add place type from CHOICE (HOME, WORK, HEADQUARTER, WITHDRAWAL...) # ANSWER: no place type here. It is just a point in the map address = models.CharField(max_length=128, blank=True,verbose_name=_('address')) #zipcode as a string: see http://stackoverflow.com/questions/747802/integer-vs-string-in-database zipcode = models.CharField(verbose_name=_("Zip code"), max_length=128, blank=True) city = models.CharField(max_length=128,verbose_name=_('city')) province = models.CharField(max_length=2, help_text=_("Insert the province code here (max 2 char)"),verbose_name=_('province')) #Geolocation: do not use GeoDjango PointField here. #We can make a separate geo application maybe in future lon = models.FloatField(null=True, blank=True,verbose_name=_('lon')) lat = models.FloatField(null=True, blank=True,verbose_name=_('lat')) ##history = HistoricalRecords() class Meta: verbose_name = _("place") verbose_name_plural = _("places") ordering = ('name', 'address', 'city') db_table = 'base_place' def __unicode__(self): rv = u"" if self.name: rv += self.name + u" - " if self.address: rv += self.address + u", " if self.zipcode: rv += u"%s " % self.zipcode rv += self.city.lower().capitalize() if self.province: rv += u" (%s)" % self.province.upper() return rv # fetch coords from open street map def update_coords(self): addressString = self.zipcode + ' ' + self.city + ' ' + self.province + ' ' + self.address location = geocoder.osm(addressString) if location.status == 'OK': self.lon = location.lng self.lat = location.lat def clean(self): self.name = self.name.strip().lower().capitalize() self.address = self.address.strip().lower().capitalize() #TODO: we should compute city and province starting from zipcode using local_flavor in forms self.city = 
self.city.lower().capitalize() self.province = self.province.upper() self.zipcode = self.zipcode.strip() if self.zipcode: if settings.VALIDATE_NUMERICAL_ZIPCODES: try: int(self.zipcode) except ValueError: raise ValidationError(_("Wrong ZIP CODE provided")) self.description = self.description.strip() return super(Place, self).clean() def save(self, *args, **kw): #TODO: Copy-on-write model # a) check if an already existent place with the same full address exist and in that case force update # b) if we are updating a Place --> detach it from other stuff pointing to it and clone super(Place, self).save(*args, **kw) #----------------- Authorization API ------------------------# # Table-level CREATE permission @classmethod def can_create(cls, user, context): # Who can create a new Place in a DES ? # Everyone belongs to the DES try: des = context['site'] except KeyError: raise WrongPermissionCheck('CREATE', cls, context) else: # It's ok because only one DES is supported return not user.is_anonymous() # otherwhise it should be # return user in User.objects.filter(person__in=des.persons) # Row-level EDIT permission def can_edit(self, user, context): # Who can edit details of an existing place in a DES ? # (note that places can be shared among GASs) # * DES administrators # * User that created the place # * User who has updated it. How he can do it? # If a User try to create a new place with the same parameters # of an already existent one, he updates the place allowed_users = self.des.admins | self.created_by | self.updaters return user in allowed_users # Row-level DELETE permission def can_delete(self, user, context): # Who can delete an existing place from a DES ? # (note that places can be shared among GASs) # * DES administrators # * User that created the place # * User who has updated it. How he can do it? 
see can_edit above allowed_users = self.des.admins | self.created_by | self.updaters return user in allowed_users #-----------------------------------------------------# display_fields = ( name, description, address, zipcode, city, province ) # Generic workflow management class DefaultTransition(models.Model, PermissionResource): workflow = models.ForeignKey(Workflow, related_name="default_transition_set",verbose_name=_('workflow')) state = models.ForeignKey(State,verbose_name=_('state')) transition = models.ForeignKey(Transition,verbose_name=_('transition')) class Meta: verbose_name = _("default transition") verbose_name_plural = _("default transitions") db_table = 'base_defaulttransition' class WorkflowDefinition(object): """ This class encapsulates all the data and logic needed to create and setup a Workflow (as in the `django-workflows` app), including creation of States and Transitions, assignment of Transitions to States and specification of the initial state and the default Transition for each State. To setup a new Workflow, just specify the needed data in the declarative format described below, then call the `register_workflow` method. ## TODO: workflow declaration's specs go here. """ def __init__(self, workflow_name, state_list, transition_list, state_transition_map, initial_state, default_transitions): # stash the workflow specs for later use self.workflow_name = workflow_name self.state_list = state_list self.transition_list = transition_list self.state_transition_map = state_transition_map self.initial_state_name = initial_state self.default_transitions = default_transitions def register_workflow(self): # check workflow specifications for internal consistency; # return an informative error message to the user if the check fails try: self.check_workflow_specs() except ImproperlyConfigured, e: raise ImproperlyConfigured(_("Workflow specifications are not consistent.\n %s") % e) try: # Check for already existent workflow. 
Operation `register_workflow` is idempotent... Workflow.objects.get(name=self.workflow_name) except Workflow.DoesNotExist: # Initialize workflow self.workflow = Workflow.objects.create(name=self.workflow_name) ## create States objects self.states = {} # dictionary containing State objects for our Workflow for (key, name) in self.state_list: self.states[key] = State.objects.create(name=name, workflow=self.workflow) ## create Transition objects self.transitions = {} # dictionary containing Transition objects for the current Workflow for (key, transition_name, destination_name) in self.transition_list: dest_state = self.states[destination_name] self.transitions[key] = Transition.objects.create(name=transition_name, workflow=self.workflow, destination=dest_state) ## associate Transitions to States for (state_name, transition_name) in self.state_transition_map: log.debug("Workflow %(w)s, adding state=%(s)s transition=%(t)s" % { 'w' : self.workflow_name, 's' : state_name, 't' : transition_name, }) state = self.states[state_name] transition = self.transitions[transition_name] state.transitions.add(transition) ## set the initial State for the Workflow state = self.states[self.initial_state_name] self.workflow.initial_state = state self.workflow.save() ## define default Transitions for States in a Workflow, ## so we can suggest to end-users what the next "logical" State could be for (state_name, transition_name) in self.default_transitions: state = self.states[state_name] transition = self.transitions[transition_name] self.workflow.default_transition_set.add(DefaultTransition(state=state, transition=transition)) def check_workflow_specs(self): """Check the provided workflow specifications for internal consistency. Return True if the specs are fine, False otherwise. 
""" state_names = [key for (key, name) in self.state_list] transition_names = [key for (key, transition_name, destination_name) in self.transition_list] ## States have to be unique # TODO ## Transitions have to be unique # TODO ## a Transition must point to an existing State for (key, transition_name, destination_name) in self.transition_list: if destination_name not in state_names: raise ImproperlyConfigured("Transition %s points to the non-existent State %s" % (key, destination_name)) ## a Transition must be assigned to an existing State for (state_name, transition_name) in self.state_transition_map: if state_name not in state_names: raise ImproperlyConfigured("Transition %s can't be assigned to the non-existent State %s" % (transition_name, state_name)) ## initial State must exists if self.initial_state_name not in state_names: raise ImproperlyConfigured("Workflow %s: initial state %s must be included in state names %s" % (self.workflow_name, self.initial_state_name, state_names)) ## a default Transition for a State must exists and had to be previously assigned to that State for (state_name, transition_name) in self.default_transitions: if state_name not in state_names: raise ImproperlyConfigured("A default Transition can't be defined for the non-existent State %s" % state_name) elif transition_name not in transition_names: raise ImproperlyConfigured("The default Transition for the State %s can't be set to a non-existent Transitions %s" % (state_name, transition_name)) elif (state_name, transition_name) not in self.state_transition_map: raise ImproperlyConfigured("The default Transition for the State %s must be one of its valid Transitions" % state_name) #------------------------------------------------------------------------------- #This is an HACK used just because we need these users use parts of the web admin interface from consts import GAS_MEMBER , GAS_REFERRER_TECH, SUPPLIER_REFERRER from django.contrib.auth.models import Group, Permission # groups for 
users GROUP_TECHS = "techs" GROUP_SUPPLIERS = "suppliers" GROUP_REFERRER_SUPPLIERS = "gas_referrer_suppliers" GROUP_USERS = "users" GROUP_MEMBERS = "gasmembers" def init_perms_for_groups(): from gf.base.models import Person, Place, Contact from gf.gas.models import GAS, GASConfig, GASMember from gf.supplier.models import ( SupplierConfig, SupplierProductCategory, ProductCategory, SupplierStock, Product, Supplier ) from django.contrib.auth.models import User from django.contrib.auth import get_permission_codename g_techs = Group.objects.get(name=GROUP_TECHS) g_suppliers = Group.objects.get(name=GROUP_SUPPLIERS) g_referrers_suppliers = Group.objects.get(name=GROUP_REFERRER_SUPPLIERS) g_gasmembers = Group.objects.get(name=GROUP_MEMBERS) techs_perms_d = { Person : ('add', 'change', 'delete'), Place : ('add', 'change', 'delete'), Contact : ('add', 'change', 'delete'), GAS : ('change',), GASConfig : ('change',), SupplierConfig : ('change',), GASMember : ('add', 'change', 'delete'), SupplierProductCategory : ('add', 'change', 'delete'), ProductCategory : ('add', 'change', 'delete'), SupplierStock : ('add', 'change', 'delete'), Product : ('add', 'change', 'delete'), Supplier : ('add', 'change'), User : ('add', 'change',), # add User is important for Add GASMember Form! Leave it here now. 
TODO } supplier_perms_d = { Person : ('add', 'change'), Place : ('add', 'change'), Contact : ('add', 'change'), SupplierConfig : ('change',), SupplierProductCategory : ('add', 'change', 'delete'), SupplierStock : ('add', 'change', 'delete'), Product : ('add', 'change', 'delete'), Supplier : ('change',), } gas_referrer_supplier_perms_d = supplier_perms_d.copy() gas_referrer_supplier_perms_d.update({ Supplier : ('add', 'change'), }) gm_perms_d = { Person : ('change',), Place : ('add', 'change',), Contact : ('add', 'change',), } group_perms_d_tuples = ( (g_techs , techs_perms_d), (g_suppliers , supplier_perms_d), (g_referrers_suppliers , gas_referrer_supplier_perms_d), (g_gasmembers , gm_perms_d), ) for gr, perms_d in group_perms_d_tuples: for klass, actions in perms_d.items(): ctype = ContentType.objects.get_for_model(klass) for action in actions: codename = get_permission_codename(action, klass._meta) log.debug("Adding perm %s to group %s" % (codename, gr)) p = Permission.objects.get( content_type=ctype, codename=codename ) gr.permissions.add(p) def setup_data_handler(sender, instance, created, **kwargs): """ Ovverride temporarly for associating some groups to users This will be in use until some part of the interface use admin-interface. After this can be removed """ if created: # Check that groups exist. Create them the first time g_techs, created = Group.objects.get_or_create(name=GROUP_TECHS) g_suppliers, created = Group.objects.get_or_create(name=GROUP_SUPPLIERS) g_referrers_suppliers, created = Group.objects.get_or_create(name=GROUP_REFERRER_SUPPLIERS) g_gasmembers, created = Group.objects.get_or_create(name=GROUP_MEMBERS) if created: # Create all groups needed for this hack # Check only last... 
init_perms_for_groups() role_group_map = { GAS_MEMBER : g_gasmembers, GAS_REFERRER_SUPPLIER : g_referrers_suppliers, SUPPLIER_REFERRER : g_suppliers, GAS_REFERRER_TECH : g_techs, } # Set "is_staff" to access the admin inteface instance.user.is_staff = True instance.user.save() role_name = instance.role.role.name group = role_group_map.get(role_name) if group: try: instance.user.groups.add(group) except KeyError: log.debug("%s create cannot add %s's group %s(%s)" % (role_name, group, instance, instance.pk) ) # END hack #------------------------------------------------------------------------------- def validate(sender, instance, **kwargs): try: # `instance` is the model instance that has just been created instance.clean() except AttributeError: # sender model doesn't specify any sanitize operations, so just ignore the signal pass def setup_data(sender, instance, created, **kwargs): """ Setup proper data after a model instance is saved to the DB for the first time. This function just calls the `setup_data()` instance method of the sender model class (if defined); actual role-creation/setup logic is encapsulated there. """ if created: # Automatic data-setup should happen only at instance-creation time try: # `instance` is the model instance that has just been created instance.setup_data() except AttributeError: # sender model doesn't specify any data-related setup operations, so just ignore the signal pass # add `validate` function as a listener to the `pre_save` signal pre_save.connect(validate) # add `setup_data` function as a listener to the `post_save` signal post_save.connect(setup_data) post_save.connect(setup_data_handler, sender=PrincipalParamRoleRelation)
To filter salaries for Senior Consultant in the Chennai, India area, sign in or register. How much does a Senior Consultant make in Chennai, India? The average salary for a Senior Consultant is ₹1,287,357 in Chennai, India. Salary estimates are based on 275 salaries submitted anonymously to Glassdoor by Senior Consultant employees in Chennai, India.
import numpy as np


#TODO: still refactoring problem with cov_x
#copied from sandbox.tsa.arima.py
def mcarma22(niter=10, nsample=1000, ar=None, ma=None, sig=0.5):
    '''Run a Monte Carlo experiment for an ARMA(2,2) DGP.

    Parameters
    ----------
    niter : int
        number of Monte Carlo replications
    nsample : int
        length of each simulated series (a 1000-observation burn-in is
        generated and discarded)
    ar, ma : list of float or None
        AR and MA lag polynomials (leading 1.0); hard-coded defaults are
        used when None
    sig : float
        standard deviation of the innovations

    Returns
    -------
    true : ndarray
        true parameter vector, np.r_[ar[1:], ma[1:]]
    results : ndarray, shape (niter, 4)
        estimated parameters per replication
    results_bse : ndarray, shape (niter, 4)
        estimated standard errors (NaN rows when cov_x is unavailable)
    '''
    # Deferred import: keeps this module importable without statsmodels.
    from statsmodels.tsa.arima_process import arma_generate_sample
    from statsmodels.tsa.arma_mle import Arma

    if ar is None:
        ar = [1.0, -0.55, -0.1]
    if ma is None:
        ma = [1.0, 0.3, 0.2]
    results = []
    results_bse = []
    for _ in range(niter):
        # simulate with a 1000-observation burn-in, then demean
        y2 = arma_generate_sample(ar, ma, nsample + 1000, sig)[-nsample:]
        y2 -= y2.mean()
        arest2 = Arma(y2)
        rhohat2a, cov_x2a, infodict, mesg, ier = arest2.fit((2, 2))
        results.append(rhohat2a)
        err2a = arest2.geterrors(rhohat2a)
        sige2a = np.sqrt(np.dot(err2a, err2a) / nsample)
        # FIX: idiomatic `is not None` (was `not cov_x2a is None`)
        if cov_x2a is not None:
            results_bse.append(sige2a * np.sqrt(np.diag(cov_x2a)))
        else:
            results_bse.append(np.nan + np.zeros_like(rhohat2a))
    return np.r_[ar[1:], ma[1:]], np.array(results), np.array(results_bse)


def mc_summary(res, rt=None):
    '''Print summary statistics (nan fraction, RMSE, mean/median bias,
    median absolute error, positive-error fraction) of Monte Carlo
    estimates `res` against true values `rt` (defaults to zeros).'''
    if rt is None:
        rt = np.zeros(res.shape[1])
    nanrows = np.isnan(res).any(1)
    print('fractions of iterations with nans', nanrows.mean())
    res = res[~nanrows]
    print('RMSE')
    print(np.sqrt(((res - rt)**2).mean(0)))
    print('mean bias')
    print((res - rt).mean(0))
    print('median bias')
    print(np.median((res - rt), 0))
    print('median bias percent')
    print(np.median((res - rt) / rt * 100, 0))
    print('median absolute error')
    print(np.median(np.abs(res - rt), 0))
    print('positive error fraction')
    print((res > rt).mean(0))


if __name__ == '__main__':

    #short version
    # true, est, bse = mcarma22(niter=50)
    # print(true)
    # print(est.mean(0))

    '''
    niter 50, sample size=1000, 2 runs
    [-0.55 -0.1   0.3   0.2 ]
    [-0.542401   -0.09904305  0.30840599  0.2052473 ]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.54681176 -0.09742921  0.2996297   0.20624258]

    niter=50, sample size=200, 3 runs
    [-0.55 -0.1   0.3   0.2 ]
    [-0.64669489 -0.01134491  0.19972259  0.20634019]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.53141595 -0.10653234  0.32297968  0.20505973]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.50244588 -0.125455    0.33867488  0.19498214]

    niter=50, sample size=100, 5 runs  --> ar1 too low, ma1 too high
    [-0.55 -0.1   0.3   0.2 ]
    [-0.35715008 -0.23392766  0.48771794  0.21901059]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.3554852  -0.21581914  0.51744748  0.24759245]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.3737861  -0.24665911  0.48031939  0.17274438]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.30015385 -0.27705506  0.56168199  0.21995759]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.35879991 -0.22999604  0.4761953   0.19670835]

    new version, with burnin 1000 in DGP and demean
    [-0.55 -0.1   0.3   0.2 ]
    [-0.56770228 -0.00076025  0.25621825  0.24492449]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.27598305 -0.2312364   0.57599134  0.23582417]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.38059051 -0.17413628  0.45147109  0.20046776]
    [-0.55 -0.1   0.3   0.2 ]
    [-0.47789765 -0.08650743  0.3554441   0.24196087]
    '''

    ar = [1.0, -0.55, -0.1]
    ma = [1.0, 0.3, 0.2]
    nsample = 200

    run_mc = True  # set to False to skip the Monte Carlo runs
    if run_mc:
        for sig in [0.1, 0.5, 1.]:
            import time
            t0 = time.time()
            # FIX: forward ar, ma and nsample to mcarma22 -- previously the
            # defaults (nsample=1000) were simulated while 'nsample = 200'
            # was printed below, misreporting the experiment.
            rt, res_rho, res_bse = mcarma22(niter=100, nsample=nsample,
                                            ar=ar, ma=ma, sig=sig)
            print('\nResults for Monte Carlo')
            print('true')
            print(rt)
            print('nsample =', nsample, 'sigma = ', sig)
            print('elapsed time for Monte Carlo', time.time() - t0)
            # 20 seconds for ARMA(2,2), 1000 iterations with 1000 observations
            print('\nMC of rho versus true')
            mc_summary(res_rho, rt)
            print('\nMC of bse versus zero')  # this implies inf in percent
            mc_summary(res_bse)
            print('\nMC of bse versus std')
            mc_summary(res_bse, res_rho.std(0))
so what are they like to peddle ? Topic: so what are they like to peddle ? Re: so what are they like to peddle ? has anyone ever stuck some gears on one , cant be to hard can it ? Looks more like you need RAC / AA for a C5, might be worth checking if you are covered. When I went out one day I ended up after about a 3 miles with a puncture. Luckily I made it to a car wash that I use on a regular basis and the owner brought me home so that I could collect the car to go and get the C5 left at car wash. Whilst I was getting my car 5 people had tried to make offers to buy the C5. Not for sale after 30hours of work and costs of parts. Also changed the original Sinclair tyres to Schwalbe city jects (thanks Karl) and touch wood no more punctures. AS you like peddling , can i have your motor ? i always carry the C5 tool kit & jack plus puncture kit. Best of all i tell my wife where im going and always carry a mobile, just in case i need a lift lol.. They arnt the best to push home lol.. Pedalling a c5 with two batteries wasnt fun at all.. Like Umpa, always carry adequate spares if you can. c5 toolbox, pump and puncture repair kit are a must. Carry spare Inner tubes if you can. Took delivery of my 1st C5 from Karl today & he was a diamond geezer loaning me a battery and charger to experiment with, whilst I find my own driving style and average out the predicted range I want to achieve, nice touch hey? Thanks Karl. I currently have a 2 speed basic Brompton bike, which tends to jam in one gear, so thought the C5 with battery on board would be harder to pedal & true enough it was, but only up hill on the flat I found it really easy and lot more comfortable than my Brompton's poor pedal setting. Being fairly fit & cycling 6 miles everyday, albeit not at super high speeds the C5 really brought a smile to my face & all those that saw it. 
I recently had a young lad bring round a C5 for repair who had been pedalling his C5 about, and his dad had trouble keeping up with him on his mountain bike!!!! - I have a C5 'Archie' that has a 3-speed conversion, but to be honest it does not make a massive difference — or maybe it's my age!!!!
from abc import ABCMeta, abstractmethod


class Cajero(metaclass=ABCMeta):
    """Handler interface for the chain-of-responsibility cash dispenser.

    Each link dispenses bills of one denomination and forwards any
    remainder to its successor.
    """

    @abstractmethod
    def next_succesor(self, succesor):
        """Attach the next handler in the chain."""

    @abstractmethod
    def handle(self, cantidad):
        """Dispense bills for ``cantidad`` or delegate to the successor."""


class _CajeroDenominacion(Cajero):
    """Shared handler implementation for a fixed bill denomination.

    The original code duplicated identical logic in Cajero50/20/10;
    subclasses now only set :attr:`denominacion`.
    """

    # Bill size handled by this link; overridden by each concrete subclass.
    denominacion = 0

    def __init__(self):
        self._succesor = None

    def next_succesor(self, succesor):
        self._succesor = succesor

    def handle(self, cantidad):
        if cantidad >= self.denominacion:
            # divmod yields bill count and remainder in one step.
            num, sobrante = divmod(cantidad, self.denominacion)
            print(f"entregando {num} billetes de {self.denominacion}")
            if sobrante != 0:
                self._succesor.handle(sobrante)
        else:
            # Amount too small for this denomination: pass it along unchanged.
            self._succesor.handle(cantidad)


class Cajero50(_CajeroDenominacion):
    """Dispenses 50-unit bills (first link)."""
    denominacion = 50


class Cajero20(_CajeroDenominacion):
    """Dispenses 20-unit bills."""
    denominacion = 20


class Cajero10(_CajeroDenominacion):
    """Dispenses 10-unit bills (last link of the chain)."""
    denominacion = 10


class CajeroATMChain:
    """Builds the dispenser chain: 50 -> 20 -> 10."""

    def __init__(self):
        self.chain1 = Cajero50()
        self.chain2 = Cajero20()
        self.chain3 = Cajero10()
        self.chain1.next_succesor(self.chain2)
        self.chain2.next_succesor(self.chain3)


if __name__ == '__main__':
    ATM = CajeroATMChain()
    cantidad = int(input("que cantidad desea sacar?: "))
    # Only positive multiples of 10 can be dispensed by this chain.
    if cantidad < 10 or cantidad % 10 != 0:
        print("saldo insuficiente")
        exit()
    ATM.chain1.handle(cantidad)
    print(f"la cantidad de {cantidad} fue entregada...")
    print("que tenga un buen dia...")
Inquiring about yoga is a great first step towards better health. We are so glad you decided to begin your health journey with us. We offer a variety of ways to get started from our Free Beginner class to our 3 week Beginner Yoga Series. All of our beginner classes are taught by RYT 500, YACEP certified instructors. We look forward to working with you. Improves strength, balance and circulation. Increases flexibility, energy and concentration. Heightens awareness of posture and breathing. Relieves joint stress and pain. Reduces stress and relieves tension. In short yoga makes you feel better. Practicing the postures, breathing exercises and meditation makes you healthier in body, mind, and spirit. This is a one hour class that introduces students to our studio, some fundamental postures and the teacher helps each student determine public classes to attend or recommends the Beginner Workshop Series. The Beginner Yoga series is for those wanting an effective and tailored start to yoga. Specifically designed for beginners, this series provides a safe environment where everyone is new to yoga. Over the course of this series you are taken through the foundational postures one at a time, receiving alignment cues and immediate feedback to help you. Breathe work and relaxation is also covered. Your questions during class are encouraged and serve to help you and others gain a deeper understanding. Yoga mats and props are provided. Students that complete the Beginner Yoga Series receive $100 off their First Month of Unlimited Yoga! This package includes a 75 minute private yoga lesson and your first month of unlimited Yoga. Your teacher will help with any modifications or questions specific to your practice and also make recommendations of classes for you to attend for your first month of Yoga. Contact us to get your beginner private lesson scheduled! Ready to try our studio and experience a variety of classes? 
Gentle Yoga, Candlelight Yoga, LifeCore 1 and All Levels Classes are great options for beginner students.
# -*- coding: utf-8 -*-
#
# This file is a plugin for EventGhost.
# Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.

ur"""<rst>
Plugin for the CyberLink Universal Remote Control
"""

import eg

eg.RegisterPlugin(
    name = "CyberLink Universal Remote Control",
    author = "Bitmonster",
    version = "1.0.1",
    kind = "remote",
    guid = "{097D33BE-FD65-43D2-852B-5DA8A3FBC489}",
    description = __doc__,
    hardwareId = "USB\\VID_0766&PID_0204",
)

# Full 8-byte HID reports from the keypad interface (registered below with a
# report size of 8); each exact report maps to one event name.
KEY_CODES_1 = {
    (0, 0, 30, 0, 0, 0, 0, 0): "Num1",
    (0, 0, 31, 0, 0, 0, 0, 0): "Num2",
    (0, 0, 32, 0, 0, 0, 0, 0): "Num3",
    (0, 0, 33, 0, 0, 0, 0, 0): "Num4",
    (0, 0, 34, 0, 0, 0, 0, 0): "Num5",
    (0, 0, 35, 0, 0, 0, 0, 0): "Num6",
    (0, 0, 36, 0, 0, 0, 0, 0): "Num7",
    (0, 0, 37, 0, 0, 0, 0, 0): "Num8",
    (0, 0, 38, 0, 0, 0, 0, 0): "Num9",
    (0, 0, 39, 0, 0, 0, 0, 0): "Num0",
    (0, 0, 76, 0, 0, 0, 0, 0): "Clear",
    (0, 0, 40, 0, 0, 0, 0, 0): "Ok",
    (0, 0, 79, 0, 0, 0, 0, 0): "Right",
    (0, 0, 80, 0, 0, 0, 0, 0): "Left",
    (0, 0, 81, 0, 0, 0, 0, 0): "Down",
    (0, 0, 82, 0, 0, 0, 0, 0): "Up",
}

# Full 4-byte reports from the buttons interface (registered with report
# size 4), matched exactly.
KEY_CODES_2 = {
    (3, 0, 0, 2): "Info",
    (3, 0, 0, 4): "Rewind",
    (3, 0, 0, 8): "Forward",
    (3, 0, 0, 64): "Play",
    (3, 0, 0, 128): "Pause",
}

# Button reports matched on their first three bytes only (see Callback).
KEY_CODES_3 = {
    (3, 0, 1): "ChannelUp",
    (3, 0, 2): "ChannelDown",
    (3, 0, 4): "Back",
    (3, 0, 16): "Stop",
    (3, 1, 0): "NextTrack",
    (3, 2, 0): "PreviousTrack",
    (3, 4, 0): "Radio",
    (3, 16, 0): "Mute",
    (3, 32, 0): "VolumeUp",
    (3, 64, 0): "VolumeDown",
    (3, 128, 0): "Record",
    (4, 0, 1): "Angel",
    (4, 0, 2): "Language",
    (4, 0, 4): "DvdMenu",
    (4, 0, 8): "Subtitle",
    (4, 0, 16): "SAP",
    (4, 0, 32): "Teletext",
    (4, 0, 64): "LastChannel",
    (4, 1, 0): "Home",
    (4, 2, 0): "TV",
    (4, 8, 0): "Green",
    (4, 16, 0): "Yellow",
    (4, 32, 0): "Blue",
    (4, 128, 0): "Red",
}

# Button reports matched on their first two bytes only.
KEY_CODES_4 = {
    (2, 2): "Power",
}


class CyberlinkUniversalRemote(eg.PluginBase):
    """Receives raw HID reports from both USB interfaces of the remote and
    translates them into (enduring) EventGhost events."""

    def __start__(self):
        """Register both USB interfaces and start listening for reports."""
        # NOTE(review): self.buffer and self.expectedLength appear unused in
        # this plugin -- possibly leftovers from a shared template; confirm.
        self.buffer = []
        self.expectedLength = 0
        self.winUsb = eg.WinUsb(self)
        # Interface MI_00: keypad / navigation keys, 8-byte reports.
        self.winUsb.Device(self.Callback, 8).AddHardwareId(
            "CyberLink Universal Remote Control (Keypad)",
            "USB\\VID_0766&PID_0204&MI_00"
        )
        # Interface MI_01: media / colour buttons, 4-byte reports.
        self.winUsb.Device(self.Callback, 4).AddHardwareId(
            "CyberLink Universal Remote Control (Buttons)",
            "USB\\VID_0766&PID_0204&MI_01"
        )
        self.winUsb.Start()
        # Last report that triggered an event; used to suppress repeated
        # identical reports and to recognise the release report.
        self.last_data = []

    def __stop__(self):
        """Stop listening for USB reports."""
        self.winUsb.Stop()

    def Callback(self, data):
        """Translate one raw HID report into an EventGhost event.

        A report identical to the previous one is ignored.  The tables are
        tried from most specific (full report) to least specific (2-byte
        prefix).  A same-length report that matches no table is treated as
        the key-release report -- presumably; confirm against hardware --
        and ends the current enduring event.
        """
        if self.last_data != data:
            # print data
            if data in KEY_CODES_1:
                self.TriggerEnduringEvent(KEY_CODES_1[data])
                self.last_data = data
            elif data in KEY_CODES_2:
                self.TriggerEnduringEvent(KEY_CODES_2[data])
                self.last_data = data
            elif data[:3] in KEY_CODES_3:
                self.TriggerEnduringEvent(KEY_CODES_3[data[:3]])
                self.last_data = data
            elif data[:2] in KEY_CODES_4:
                self.TriggerEnduringEvent(KEY_CODES_4[data[:2]])
                self.last_data = data
            elif len(data) == len(self.last_data):
                # Same interface as the last press but no key code matched:
                # end the enduring event and forget the last report.
                self.EndLastEvent()
                self.last_data = []
                # print "EndLastEvent"
This page states the "Terms and Conditions" under which you may use the The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck web site. If you do not accept the Terms and Conditions stated here, do not use this web site and service. By using this web site, you are indicating your acceptance to be bound by the terms of these Terms and Conditions. The terms "You" and "User" as used herein refer to all individuals and/or entities accessing this website for any reason you must immediately notify us of any changes to your name, address or e-mail address previously supplied to us, by sending us e-mail at the above address, calling us at the above telephone number or writing to us at the above address. 1. Use of material appearing on The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck website for the purposes of this agreement, "material" means material including, without limitation, text, graphics and sound material, published on the The Cliff at Cupecoy , Heritage Real Estate, Robert Gluck website, whether copyright of Heritage Real Estate or a third party. The Material may contain inaccuracies or typographical errors. The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck makes no representations about the accuracy, reliability, completeness, or timeliness of the website or the Material. The use of the website and the Material is at your own risk. Changes are periodically made to the website and may be made at any time. You may download and print extracts from the material and make copies of these for your own personal use only. You are not allowed to download or print the material, or extracts from it, in a systematic or regular manner so as to create a database in electronic or paper form comprising all or part of the material appearing on the The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck website. You shall not copy or adapt the code used to create the website pages. It is copyright protected. 
Attempting to decipher, de-compile, disassemble or reverse engineer any of the software comprising or in any way making up a part of the website is prohibited. You must not reproduce any part of the The Cliff at Cupecoy Heritage Real Estate, Robert Gluck website, or the material or transmit it to or store it in any other website or disseminate any part of the material in any other form, unless we have indicated that you may do so in writing. You may apply for permission to do so by sending us e-mail at the above address, calling us at the above telephone number, writing to us at the above address. In no event shall the company, its suppliers or any third parties mentioned on the website be liable for any damages whatsoever (including, without limitation, incidental and consequential damages, lost profits or damages resulting from lost data or business interruption), as a result of your doing, or not doing, anything as a result of viewing, reading or listening to the material or any part of it or as a result of your inability to use the website and the material, whether based on warranty, contract, tort, or any other legal theory. You can access other sites via links from the The Cliff at Cupecoy Heritage Real Estate, Robert Gluck website. These sites are not under our control and we are not responsible in any way for any of their contents. We give no warranties of any kind concerning the The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck website or the material. In particular, we do not warrant that the The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck website or any of its contents is virus free. You must take your own precautions in this respect as we accept no responsibility for any infection by virus or other contamination or by anything, which has destructive properties. Although we will do our best to provide constant, uninterrupted access to the The Cliff at Cupecoy, Heritage Real Estate, Robert Gluck website, we do not guarantee this. 
We accept no responsibility or liability for any interruption or delay. This agreement is governed by the law of the Netherlands Antilles and the parties agree to submit to the exclusive jurisdiction of the courts of the Netherlands Antilles.
"""Admin panel for tags.""" import logging from typing import Callable, List import sqlalchemy as sa import sqlalchemy.orm from flask import current_app, flash, redirect, render_template, request from sqlalchemy.sql import functions as func from abilian.core.entities import Entity from abilian.core.extensions import db from abilian.core.models.tag import Tag, entity_tag_tbl from abilian.i18n import _, _l, _n from abilian.services import get_service from abilian.services.indexing.service import index_update from abilian.web import url_for from abilian.web.admin import AdminPanel from abilian.web.views import ObjectEdit from abilian.web.views.base import View from .forms import TagForm logger = logging.getLogger(__name__) _OBJ_COUNT = func.count(entity_tag_tbl.c.entity_id).label("obj_count") def get_entities_for_reindex(tags): """Collect entities for theses tags.""" if isinstance(tags, Tag): tags = (tags,) session = db.session() indexing = get_service("indexing") tbl = Entity.__table__ tag_ids = [t.id for t in tags] query = ( sa.sql.select([tbl.c.entity_type, tbl.c.id]) .select_from(tbl.join(entity_tag_tbl, entity_tag_tbl.c.entity_id == tbl.c.id)) .where(entity_tag_tbl.c.tag_id.in_(tag_ids)) ) entities = set() with session.no_autoflush: for entity_type, entity_id in session.execute(query): if entity_type not in indexing.adapted: logger.debug("%r is not indexed, skipping", entity_type) item = ("changed", entity_type, entity_id, ()) entities.add(item) return entities def schedule_entities_reindex(entities): """ :param entities: as returned by :func:`get_entities_for_reindex` """ entities = [(e[0], e[1], e[2], dict(e[3])) for e in entities] return index_update.apply_async(kwargs={"index": "default", "items": entities}) class NSView(View): """View a Namespace.""" def __init__(self, view_endpoint, *args, **kwargs): super().__init__(*args, **kwargs) self.__selected_tags = None self.view_endpoint = view_endpoint def prepare_args(self, args, kwargs): self.ns = 
kwargs.get("ns") self.form_errors = {} return args, kwargs def get(self, ns): tags = ( Tag.query.filter(Tag.ns == ns) .outerjoin(entity_tag_tbl, entity_tag_tbl.c.tag_id == Tag.id) .add_column(_OBJ_COUNT) .group_by(Tag) .order_by(sa.sql.func.lower(Tag.label)) ) # get a list of rows instead of (Tag, count) tuples tags = list(tags.session.execute(tags)) return render_template( "admin/tags_ns.html", ns=ns, tags=tags, errors=self.form_errors, merge_to=request.form.get("merge_to", default="__None__", type=int), selected_tags={t.id for t in self._get_selected_tags()}, ) def redirect_to_view(self): return redirect(url_for(".tags_ns", ns=self.ns)) def post(self, ns): data = request.form action = data.get("__action") if action == "delete": return self.do_delete() elif action == "merge": return self.do_merge() else: flash(_("Unknown action")) self.get(self.ns) def _get_selected_tags(self) -> List[Tag]: if self.__selected_tags is None: tag_ids = request.form.getlist("selected", type=int) if not tag_ids: self.__selected_tags = [] else: self.__selected_tags = Tag.query.filter( Tag.ns == self.ns, Tag.id.in_(tag_ids) ).all() return self.__selected_tags def do_delete(self): data = request.form confirm = data.get("confirm_delete", False, type=bool) if not confirm: flash(_("Please fix the error(s) below"), "error") self.form_errors["confirm_delete"] = _( "Must be checked to ensure you " "intent to delete these tags" ) return self.get(self.ns) session = db.session() tags = self._get_selected_tags() if not tags: flash(_("No action performed: no tags selected"), "warning") return self.redirect_to_view() count = len(tags) entities_to_reindex = get_entities_for_reindex(tags) success_message = _n( "%(tag)s deleted", "%(num)d tags deleted:\n%(tags)s", count, tag=tags[0].label, tags=", ".join(t.label for t in tags), ) for tag in tags: session.delete(tag) session.commit() flash(success_message) schedule_entities_reindex(entities_to_reindex) return self.redirect_to_view() def do_merge(self): 
target_id = request.form.get("merge_to", type=int) if not target_id: flash(_("You must select a target tag to merge to"), "error") return self.get(self.ns) target = Tag.query.filter(Tag.ns == self.ns, Tag.id == target_id).scalar() if not target: flash(_("Target tag not found, no action performed"), "error") return self.get(self.ns) merge_from = set(self._get_selected_tags()) if target in merge_from: merge_from.remove(target) if not merge_from: flash(_("No tag selected for merging"), "warning") return self.get(self.ns) session = db.session() merge_from_ids = [t.id for t in merge_from] tbl = entity_tag_tbl entities_to_reindex = get_entities_for_reindex(merge_from) already_tagged = sa.sql.select([tbl.c.entity_id]).where( tbl.c.tag_id == target.id ) del_dup = tbl.delete().where( sa.sql.and_( tbl.c.tag_id.in_(merge_from_ids), tbl.c.entity_id.in_(already_tagged) ) ) session.execute(del_dup) update = ( tbl.update() .where(tbl.c.tag_id.in_(merge_from_ids)) .values(tag_id=target.id) ) session.execute(update) for merged in merge_from: session.delete(merged) session.commit() schedule_entities_reindex(entities_to_reindex) return self.redirect_to_view() class BaseTagView: """Mixin for tag views.""" Model = Tag Form = TagForm def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.extension = current_app.extensions["tags"] def prepare_args(self, args, kwargs): self.ns = kwargs.pop("ns") return super().prepare_args(args, kwargs) def view_url(self): return url_for(".tags_ns", ns=self.ns) index_url = view_url class TagEdit(BaseTagView, ObjectEdit): _message_success = _l("Tag edited") has_changes = False _entities_to_reindex: List[Entity] = [] def after_populate_obj(self): session = sa.orm.object_session(self.obj) self.has_changes = self.obj in (session.dirty | session.deleted) if self.has_changes: # since the tag may be in pending-delete, we must collect them # before flush self._entities_to_reindex = get_entities_for_reindex(self.obj) def commit_success(self): 
if not (self.has_changes and self._entities_to_reindex): return schedule_entities_reindex(self._entities_to_reindex) class TagPanel(AdminPanel): """Tags administration.""" id = "tags" label = _l("Tags") icon = "tags" def get(self): obj_count = ( sa.sql.select( [Tag.ns, func.count(entity_tag_tbl.c.entity_id).label("obj_count")] ) .select_from(Tag.__table__.join(entity_tag_tbl)) .group_by(Tag.ns) .alias() ) ns_query = ( sa.sql.select( [Tag.ns, func.count(Tag.id).label("tag_count"), obj_count.c.obj_count], from_obj=[Tag.__table__.outerjoin(obj_count, Tag.ns == obj_count.c.ns)], ) .group_by(Tag.ns, obj_count.c.obj_count) .order_by(Tag.ns) ) session = db.session() namespaces = session.execute(ns_query) return render_template("admin/tags.html", namespaces=namespaces) def install_additional_rules(self, add_url_rule: Callable) -> None: panel_endpoint = f".{self.id}" ns_base = "/<string:ns>/" add_url_rule( ns_base, endpoint="ns", view_func=NSView.as_view("ns", view_endpoint=panel_endpoint), ) tag_base = f"{ns_base}<int:object_id>/" add_url_rule( tag_base, endpoint="tag_edit", view_func=TagEdit.as_view("tag_edit", view_endpoint=panel_endpoint), ) add_url_rule( f"{tag_base}delete", endpoint="tag_delete", view_func=TagEdit.as_view("tag_delete", view_endpoint=panel_endpoint), )
The Custom Fingerprint Silver Keepsake Urn is a .925 sterling silver custom made fingerprint jewelry keepsake. The medallion used to show the fingerprint is made in a custom mold. All keepsakes hold cremated remains. Please allow 6 weeks for production for this item.
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration: adds the nullable 'filesize' and
    # 'media_length' columns to the 'oppia_media' table.

    def forwards(self, orm):
        # Adding field 'Media.filesize'
        # Nullable with no default, so existing rows need no backfill.
        db.add_column(u'oppia_media', 'filesize',
                      self.gf('django.db.models.fields.BigIntegerField')(default=None, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Media.media_length'
        db.add_column(u'oppia_media', 'media_length',
                      self.gf('django.db.models.fields.IntegerField')(default=None, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Media.filesize'
        db.delete_column(u'oppia_media', 'filesize')

        # Deleting field 'Media.media_length'
        db.delete_column(u'oppia_media', 'media_length')

    # Frozen ORM snapshot auto-generated by South so the migration runs
    # against the schema as it existed at this point in history.
    # Do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'oppia.activity': {
            'Meta': {'object_name': 'Activity'},
            'baseline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'section': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Section']"}),
            'title': ('django.db.models.fields.TextField', [], {}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
        },
        u'oppia.activityschedule': {
            'Meta': {'object_name': 'ActivitySchedule'},
            'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'schedule': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Schedule']"}),
            'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
        },
        u'oppia.award': {
            'Meta': {'object_name': 'Award'},
            'award_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Badge']"}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'oppia.awardcourse': {
            'Meta': {'object_name': 'AwardCourse'},
            'award': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Award']"}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'course_version': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'oppia.badge': {
            'Meta': {'object_name': 'Badge'},
            'allow_multiple_awards': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'default_icon': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.TextField', [], {}),
            'points': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
            'ref': ('django.db.models.fields.CharField', [], {'max_length': '20'})
        },
        u'oppia.cohort': {
            'Meta': {'object_name': 'Cohort'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'schedule': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['oppia.Schedule']", 'null': 'True', 'blank': 'True'}),
            'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
        },
        u'oppia.course': {
            'Meta': {'object_name': 'Course'},
            'badge_icon': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'blank': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'lastupdated_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'shortname': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'title': ('django.db.models.fields.TextField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'version': ('django.db.models.fields.BigIntegerField', [], {})
        },
        u'oppia.coursedownload': {
            'Meta': {'object_name': 'CourseDownload'},
            'agent': ('django.db.models.fields.TextField', [], {'default': 'None', 'blank': 'True'}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'course_version': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
            'download_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'oppia.coursetag': {
            'Meta': {'object_name': 'CourseTag'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Tag']"})
        },
        u'oppia.media': {
            'Meta': {'object_name': 'Media'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'download_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'filesize': ('django.db.models.fields.BigIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'media_length': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
        },
        u'oppia.message': {
            'Meta': {'object_name': 'Message'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'icon': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '255'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'publish_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
        },
        u'oppia.participant': {
            'Meta': {'object_name': 'Participant'},
            'cohort': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Cohort']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'oppia.points': {
            'Meta': {'object_name': 'Points'},
            'cohort': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Cohort']", 'null': 'True'}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']", 'null': 'True'}),
            'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'points': ('django.db.models.fields.IntegerField', [], {}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'oppia.schedule': {
            'Meta': {'object_name': 'Schedule'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lastupdated_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'title': ('django.db.models.fields.TextField', [], {})
        },
        u'oppia.section': {
            'Meta': {'object_name': 'Section'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'title': ('django.db.models.fields.TextField', [], {})
        },
        u'oppia.tag': {
            'Meta': {'object_name': 'Tag'},
            'courses': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['oppia.Course']", 'through': u"orm['oppia.CourseTag']", 'symmetrical': 'False'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'icon': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.TextField', [], {}),
            'order_priority': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'oppia.tracker': {
            'Meta': {'object_name': 'Tracker'},
            'activity_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'agent': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['oppia.Course']", 'null': 'True', 'blank': 'True'}),
            'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
            'section_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'submitted_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'time_taken': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'tracker_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'uuid': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
        },
        u'oppia.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'about': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'job_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'organisation': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
        }
    }

    complete_apps = ['oppia']
Amazon founder and the world’s richest man Jeff Bezos is making moves in order to get his space firm Blue Origin into the production phase to meet his 2020 deadline for the launch of its anticipated heavy rocket, the New Glenn. Blue Origin is currently in what can be described as a recruitment frenzy: it has been hiring hundreds of engineers over the last three years, and still plans to bolster its headcount to 3,000 employees over the next two to three years, according to Reuters. Blue Origin’s efforts to strengthen its workforce can be attributed to its commitment to the successful launch of the New Glenn, a heavy-launch vehicle that Bezos claims will be able to haul satellites and, eventually, people into orbit. The New Glenn’s first-stage booster will be reusable in order to reduce costs and increase the efficiency and frequency of launches, much like Elon Musk’s Falcon Heavy. Blue Origin’s engineers are in the final stages of design and will soon begin producing the vital components that will then go through rigorous testing, according to Reuters, which cites people familiar with the project who requested anonymity since they are not authorized to speak publicly.
# -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2013 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##

import decimal
import logging

import gtk
from storm.expr import LeftJoin

from stoqlib.api import api
from stoqlib.database.runtime import get_default_store
from stoqlib.database.viewable import Viewable
from stoqlib.domain.product import Product, ProductManufacturer
from stoqlib.domain.sellable import Sellable
from stoqlib.domain.workorder import WorkOrder
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.editors.personeditor import ClientEditor
from stoqlib.gui.editors.producteditor import ProductEditor
from stoqlib.gui.editors.workordereditor import WorkOrderEditor
from stoqlib.gui.events import (StartApplicationEvent, StopApplicationEvent,
                                EditorCreateEvent, RunDialogEvent,
                                PrintReportEvent, SearchDialogSetupSearchEvent,
                                ApplicationSetupSearchEvent)
from stoqlib.gui.search.searchcolumns import SearchColumn
from stoqlib.gui.search.searchextension import SearchExtension
from stoqlib.gui.utils.keybindings import add_bindings, get_accels
from stoqlib.gui.utils.printing import print_report
from stoqlib.gui.wizards.personwizard import PersonRoleWizard
from stoqlib.gui.wizards.workorderquotewizard import WorkOrderQuoteWizard
from stoqlib.lib.message import warning
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.reporting.sale import SaleOrderReport
from stoq.gui.services import ServicesApp

from .medicssearch import OpticalMedicSearch, MedicSalesSearch
from .opticaleditor import MedicEditor, OpticalWorkOrderEditor
from .opticalhistory import OpticalPatientDetails
from .opticalreport import OpticalWorkOrderReceiptReport
from .opticalslave import ProductOpticSlave, WorkOrderOpticalSlave
from .opticalwizard import OpticalSaleQuoteWizard, MedicRoleWizard
from .opticaldomain import OpticalProduct

_ = stoqlib_gettext
log = logging.getLogger(__name__)


class ProductSearchExtention(SearchExtension):
    """Search extension exposing the optical product attributes.

    Joins :class:`OpticalProduct` into product searches so that the
    frame/glass-lens/contact-lens columns below become available as
    (initially hidden) search columns.
    """

    spec_attributes = dict(
        gf_glass_type=OpticalProduct.gf_glass_type,
        gf_size=OpticalProduct.gf_size,
        gf_lens_type=OpticalProduct.gf_lens_type,
        gf_color=OpticalProduct.gf_color,
        gl_photosensitive=OpticalProduct.gl_photosensitive,
        gl_anti_glare=OpticalProduct.gl_anti_glare,
        gl_refraction_index=OpticalProduct.gl_refraction_index,
        gl_classification=OpticalProduct.gl_classification,
        gl_addition=OpticalProduct.gl_addition,
        gl_diameter=OpticalProduct.gl_diameter,
        gl_height=OpticalProduct.gl_height,
        gl_availability=OpticalProduct.gl_availability,
        cl_degree=OpticalProduct.cl_degree,
        cl_classification=OpticalProduct.cl_classification,
        cl_lens_type=OpticalProduct.cl_lens_type,
        cl_discard=OpticalProduct.cl_discard,
        cl_addition=OpticalProduct.cl_addition,
        cl_cylindrical=OpticalProduct.cl_cylindrical,
        cl_axis=OpticalProduct.cl_axis,
        cl_color=OpticalProduct.cl_color,
        cl_curvature=OpticalProduct.cl_curvature,
    )
    spec_joins = [
        LeftJoin(OpticalProduct, OpticalProduct.product_id == Product.id)
    ]

    def get_columns(self):
        """Return one hidden SearchColumn per optical attribute.

        Column titles are built as "<group> - <attribute title>".
        """
        # Attributes grouped by the kind of optical product they describe.
        # Each entry is (attribute name, title, data type, visible).
        info_cols = {
            _('Frame'): [
                ('gf_glass_type', _('Glass Type'), str, False),
                ('gf_size', _('Size'), str, False),
                ('gf_lens_type', _('Lens Type'), str, False),
                ('gf_color', _('Color'), str, False),
            ],
            _('Glass Lenses'): [
                ('gl_photosensitive', _('Photosensitive'), str, False),
                ('gl_anti_glare', _('Anti Glare'), str, False),
                ('gl_refraction_index', _('Refraction Index'),
                 decimal.Decimal, False),
                ('gl_classification', _('Classification'), str, False),
                ('gl_addition', _('Addition'), str, False),
                ('gl_diameter', _('Diameter'), str, False),
                ('gl_height', _('Height'), str, False),
                ('gl_availability', _('Availability'), str, False),
            ],
            _('Contact Lenses'): [
                ('cl_degree', _('Degree'), decimal.Decimal, False),
                ('cl_classification', _('Classification'), str, False),
                ('cl_lens_type', _('Lens Type'), str, False),
                ('cl_discard', _('Discard'), str, False),
                ('cl_addition', _('Addition'), str, False),
                ('cl_cylindrical', _('Cylindrical'), decimal.Decimal, False),
                ('cl_axis', _('Axis'), decimal.Decimal, False),
                ('cl_color', _('Color'), str, False),
                ('cl_curvature', _('Curvature'), str, False),
            ],
        }

        columns = []
        for group_label, specs in info_cols.iteritems():
            for attr, title, data_type, visible in specs:
                columns.append(
                    SearchColumn(attr,
                                 title='%s - %s' % (group_label, title),
                                 data_type=data_type, visible=visible))
        return columns


class ServicesSearchExtention(SearchExtension):
    """Search extension adding the product manufacturer to service searches."""

    spec_attributes = dict(
        manufacturer_name=ProductManufacturer.name
    )
    spec_joins = [
        LeftJoin(Product, Product.sellable_id == Sellable.id),
        LeftJoin(ProductManufacturer,
                 Product.manufacturer_id == ProductManufacturer.id)
    ]

    def get_columns(self):
        """Return the (hidden) manufacturer search column."""
        return [
            SearchColumn('manufacturer_name', title=_('Manufacturer'),
                         data_type=str, visible=False)
        ]


class OpticalUI(object):
    """Hooks the optical plugin into the Stoq application UI.

    Connects to application-level events to add menus to the sales and
    services apps, extend editors/searches with optical data and reroute
    some dialogs to their optical counterparts.
    """

    def __init__(self):
        # Each window has its own unique uimanager, so the extra ui is
        # stored per (appname, uimanager) pair.
        self._app_ui = dict()
        self.default_store = get_default_store()
        StartApplicationEvent.connect(self._on_StartApplicationEvent)
        StopApplicationEvent.connect(self._on_StopApplicationEvent)
        EditorCreateEvent.connect(self._on_EditorCreateEvent)
        RunDialogEvent.connect(self._on_RunDialogEvent)
        PrintReportEvent.connect(self._on_PrintReportEvent)
        SearchDialogSetupSearchEvent.connect(
            self._on_SearchDialogSetupSearchEvent)
        ApplicationSetupSearchEvent.connect(
            self._on_ApplicationSetupSearchEvent)
        add_bindings([
            ('plugin.optical.pre_sale', ''),
            ('plugin.optical.search_medics', ''),
        ])

    #
    # Private
    #

    def _add_sale_menus(self, sale_app):
        # Add the "Optical" menu with the pre-sale and medic actions to
        # the sales app menubar.
        uimanager = sale_app.uimanager
        ui_string = """
          <ui>
            <menubar name="menubar">
              <placeholder name="ExtraMenubarPH">
                <menu action="OpticalMenu">
                  <menuitem action="OpticalPreSale"/>
                  <menuitem action="OpticalMedicSaleItems"/>
                  <menuitem action="OpticalMedicSearch"/>
                </menu>
              </placeholder>
            </menubar>
          </ui>
        """
        group = get_accels('plugin.optical')
        ag = gtk.ActionGroup('OpticalSalesActions')
        ag.add_actions([
            ('OpticalMenu', None, _(u'Optical')),
            ('OpticalPreSale', None, _(u'Sale with work order...'),
             group.get('pre_sale'), None,
             self._on_OpticalPreSale__activate),
            ('OpticalMedicSearch', None, _(u'Medics...'),
             group.get('search_medics'), None,
             self._on_MedicsSearch__activate),
            ('OpticalMedicSaleItems', None, _(u'Medics sold items...'),
             None, None,
             self._on_MedicSaleItems__activate),
        ])
        uimanager.insert_action_group(ag, 0)
        self._app_ui[('sales', uimanager)] = uimanager.add_ui_from_string(
            ui_string)

    def _add_services_menus(self, services_app):
        # Add "Edit optical details..." to the services app order menu and
        # to the selection context menu.
        uimanager = services_app.uimanager
        ui_string = """
          <ui>
            <menubar name="menubar">
              <placeholder name="AppMenubarPH">
                <menu action="OrderMenu">
                  <separator/>
                  <menuitem action="OpticalDetails"/>
                </menu>
              </placeholder>
            </menubar>
            <popup name="ServicesSelection">
              <placeholder name="ServicesSelectionPH">
                <separator/>
                <menuitem action="OpticalDetails"/>
              </placeholder>
            </popup>
          </ui>
        """
        ag = gtk.ActionGroup('OpticalServicesActions')
        ag.add_actions([
            ('OpticalDetails', None, _(u'Edit optical details...'),
             None, None,
             self._on_OpticalDetails__activate),
        ])
        uimanager.insert_action_group(ag, 0)
        self._app_ui[('services', uimanager)] = uimanager.add_ui_from_string(
            ui_string)

        services_app.search.connect(
            'result-selection-changed',
            self._on_ServicesApp__result_selection_changed, uimanager)

    def _remove_app_ui(self, appname, uimanager):
        # Remove the extra ui that _add_*_menus registered for this window,
        # if any.
        ui = self._app_ui.pop((appname, uimanager), None)
        if ui is not None:
            uimanager.remove_ui(ui)

    def _fix_work_order_editor(self, editor, model, store):
        slave = WorkOrderOpticalSlave(store, model, show_finish_date=False,
                                      visual_mode=editor.visual_mode)
        editor.add_extra_tab('Ótico', slave)

        def _print_report(button):
            print_report(OpticalWorkOrderReceiptReport, [model])

        # Also add an print button
        if editor.edit_mode:
            print_button = editor.add_button(_('Print'), gtk.STOCK_PRINT)
            print_button.connect('clicked', _print_report)

    def _add_product_slave(self, editor, model, store):
        editor.add_extra_tab(ProductOpticSlave.title,
                             ProductOpticSlave(store, model))

    def _create_pre_sale(self):
        if self._current_app.check_open_inventory():
            warning(_("You cannot create a pre-sale with an open inventory."))
            return

        with api.new_store() as store:
            run_dialog(OpticalSaleQuoteWizard, self._current_app, store)

        if store.committed:
            self._current_app.refresh()

    def _add_patient_history_button(self, editor, model):
        button = editor.add_button(_(u'Patient History'))
        button.connect('clicked', self._on_patient_history_clicked,
                       editor, model)

        # Save the button on the editor, so the tests can click on it
        editor.patient_history_button = button

    #
    # Events
    #

    def _on_StartApplicationEvent(self, appname, app):
        self._current_app = app
        if appname == 'sales':
            self._add_sale_menus(app)
        elif appname == 'services':
            self._add_services_menus(app)

    def _on_StopApplicationEvent(self, appname, app):
        self._remove_app_ui(appname, app.uimanager)

    def _on_EditorCreateEvent(self, editor, model, store, *args):
        # Use type() instead of isinstance so tab does not appear on
        # subclasses (unless thats the desired effect)
        editor_type = type(editor)
        if editor_type is ProductEditor:
            self._add_product_slave(editor, model, store)
        elif editor_type is WorkOrderEditor:
            self._fix_work_order_editor(editor, model, store)
        elif editor_type is ClientEditor:
            self._add_patient_history_button(editor, model)

    def _on_RunDialogEvent(self, dialog, parent, *args, **kwargs):
        # Every sale with work order should use OpticalSaleQuoteWizard
        # instead of WorkOrderQuoteWizard when this plugin is enabled
        if dialog is WorkOrderQuoteWizard:
            return OpticalSaleQuoteWizard
        elif dialog is PersonRoleWizard and MedicEditor in args:
            return MedicRoleWizard

    def _on_SearchDialogSetupSearchEvent(self, dialog):
        if not issubclass(dialog.search_spec, Viewable):
            return
        viewable = dialog.search_spec
        # Only product-like searches get the optical columns.
        if (viewable.has_column(Sellable.description) and
                viewable.has_join_with(Product)):
            dialog.add_extension(ProductSearchExtention())

    def _on_ApplicationSetupSearchEvent(self, app):
        if isinstance(app, ServicesApp):
            extention = ServicesSearchExtention()
            extention.attach(app)

    #
    # Callbacks
    #

    def _on_PrintReportEvent(self, report_class, *args, **kwargs):
        # Replace the sale order report by the optical work order receipt
        # when the sale has work orders attached.
        if issubclass(report_class, SaleOrderReport):
            sale = args[0]
            store = sale.store
            workorders = list(WorkOrder.find_by_sale(store, sale))
            if len(workorders):
                print_report(OpticalWorkOrderReceiptReport, workorders)
                return True

        return False

    def _on_patient_history_clicked(self, widget, editor, client):
        run_dialog(OpticalPatientDetails, editor, client.store, client)

    def _on_OpticalPreSale__activate(self, action):
        self._create_pre_sale()

    def _on_MedicsSearch__activate(self, action):
        with api.new_store() as store:
            run_dialog(OpticalMedicSearch, None, store, hide_footer=True)

    def _on_MedicSaleItems__activate(self, action):
        # Read-only search: open a store just for the dialog and roll it
        # back afterwards.
        store = api.new_store()
        run_dialog(MedicSalesSearch, None, store, hide_footer=True)
        store.rollback()

    def _on_OpticalDetails__activate(self, action):
        wo_view = self._current_app.search.get_selected_item()
        with api.new_store() as store:
            work_order = store.fetch(wo_view.work_order)
            run_dialog(OpticalWorkOrderEditor, None, store, work_order)

    def _on_ServicesApp__result_selection_changed(self, search, uimanager):
        # The optical details action only makes sense with a selected row.
        optical_details = uimanager.get_action(
            '/menubar/AppMenubarPH/OrderMenu/OpticalDetails')
        optical_details.set_sensitive(bool(search.get_selected_item()))
Welcome to lucidEDU! Here you'll find educational information on a variety of data-storage topics. It is our hope that lucidEDU will become your go-to resource for enhancing your knowledge base and skills. RAID-Z is not actually a kind of RAID, but a higher-level software solution that implements an integrated redundancy scheme similar to RAID 5, using ZFS. RAID-Z avoids the RAID 5 “write hole” by using copy-on-write: rather than overwriting data in place, it writes the new data to a new location and then updates the pointer so that it references the new data instead of the old.
""" The outputs.py module represents some form of all outputs from the Automater program to include all variation of output files. Any addition to the Automater that brings any other output requirement should be programmed in this module. Class(es): SiteDetailOutput -- Wrapper class around all functions that pass #NOMOREPRINTS print output from Automater, to include standard output and file system output. Function(s): No global exportable functions are defined. Exception(s): No exceptions exported. """ import csv import socket import re from datetime import datetime from operator import attrgetter class SiteDetailOutput(object): """ SiteDetailOutput provides the capability to output information to the screen, a text file, a comma-seperated value file, or a file formatted with html markup (readable by web browsers). Public Method(s): createOutputInfo Instance variable(s): _listofsites - list storing the list of site results stored. """ def __init__(self,sitelist): """ Class constructor. Stores the incoming list of sites in the _listofsites list. Argument(s): sitelist -- list containing site result information to be printed. Return value(s): Nothing is returned from this Method. """ self._listofsites = [] self._listofsites = sitelist @property def ListOfSites(self): """ Checks instance variable _listofsites for content. Returns _listofsites if it has content or None if it does not. Argument(s): No arguments are required. Return value(s): _listofsites -- list containing list of site results if variable contains data. None -- if _listofsites is empty or not assigned. Restriction(s): This Method is tagged as a Property. """ if self._listofsites is None or len(self._listofsites) == 0: return None return self._listofsites def createOutputInfo(self,parser): """ Checks parser information calls correct pass #NOMOREPRINTS print methods based on parser requirements. Returns nothing. 
Argument(s): parser -- Parser object storing program input parameters used when program was run. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ self.PrintToScreen() if parser.hasCEFOutFile(): self.PrintToCEFFile(parser.CEFOutFile) if parser.hasTextOutFile(): self.PrintToTextFile(parser.TextOutFile) if parser.hasHTMLOutFile(): self.PrintToHTMLFile(parser.HTMLOutFile) if parser.hasCSVOutSet(): self.PrintToCSVFile(parser.CSVOutFile) def PrintToScreen(self): """ Formats site information correctly and prints it to the user's standard output. Returns nothing. Argument(s): No arguments are required. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: pass #NOMOREPRINTS print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop)==0: pass #NOMOREPRINTS print "No results in the " + site.FriendlyName[index] + " category" else: if siteimpprop[index] is None or len(siteimpprop[index])==0: pass #NOMOREPRINTS print "No results found for: " + site.ReportStringForResult[index] else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult[index] + " " + str(siteimpprop) laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for 
siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult[index] + " " + str(siteresult) laststring = "" + site.ReportStringForResult[index] + " " + str(siteresult) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: pass #NOMOREPRINTS print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop)==0: pass #NOMOREPRINTS print "No results found in the " + site.FriendlyName else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult + " " + str(siteimpprop) laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult + " " + str(siteresult) laststring = "" + site.ReportStringForResult + " " + str(siteresult) else: pass def PrintToCEFFile(self,cefoutfile): """ Formats site information correctly and prints it to an output file in CEF format. CEF format specification from http://mita-tac.wikispaces.com/file/view/CEF+White+Paper+071709.pdf "Jan 18 11:07:53 host message" where message: "CEF:Version|Device Vendor|Device Product|Device Version|Signature ID|Name|Severity|Extension" Returns nothing. Argument(s): cefoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) curr_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') hostname = socket.gethostname() prefix = ' '.join([curr_date,hostname]) cef_version = "CEF:Version1.1" cef_deviceVendor = "TekDefense" cef_deviceProduct = "Automater" cef_deviceVersion = "2.1" cef_SignatureID = "0" cef_Severity = "2" cef_Extension = " " cef_fields = [cef_version,cef_deviceVendor,cef_deviceProduct,cef_deviceVersion, \ cef_SignatureID, cef_Severity, cef_Extension] pattern = "^\[\+\]\s+" target = "" pass #NOMOREPRINTS print '\n[+] Generating CEF output: ' + cefoutfile f = open(cefoutfile, "wb") csv.register_dialect('escaped',delimiter='|',escapechar='\\',doublequote=False,quoting=csv.QUOTE_NONE) cefRW = csv.writer(f,'escaped') #cefRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ 
["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult[index])+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+str(res)])+"] " + \ re.sub(pattern,"",site.ReportStringForResult[index])+ str(siteresult)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + str(res) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult)+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res else: laststring = "" for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult if "" + tgt + typ + source + str(res) != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+str(res)])+"] " + \ re.sub(pattern,"",site.ReportStringForResult)+ str(siteimpprop)] + \ [cef_Severity] 
+ [tgt]) laststring = "" + tgt + typ + source + str(res) f.flush() f.close() pass #NOMOREPRINTS print "" + cefoutfile + " Generated" def PrintToTextFile(self,textoutfile): """ Formats site information correctly and prints it to an output file in text format. Returns nothing. Argument(s): textoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print "\n[+] Generating text output: " + textoutfile f = open(textoutfile, "w") if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: f.write("\n____________________ Results found for: " + site.Target + " ____________________") target = site.Target if siteimpprop is None or len(siteimpprop)==0: f.write("\nNo results in the " + site.FriendlyName[index] + " category") else: if siteimpprop[index] is None or len(siteimpprop[index])==0: f.write("\nNo results found for: " + site.ReportStringForResult[index]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: f.write("\n" + site.ReportStringForResult[index] + " " + str(siteimpprop)) laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: f.write("\n" + site.ReportStringForResult[index] + " " + str(siteresult)) laststring = "" + site.ReportStringForResult[index] + " " + 
str(siteresult) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: f.write("\n____________________ Results found for: " + site.Target + " ____________________") target = site.Target if siteimpprop is None or len(siteimpprop)==0: f.write("\nNo results found in the " + site.FriendlyName) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: f.write("\n" + site.ReportStringForResult + " " + str(siteimpprop)) laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: f.write("\n" + site.ReportStringForResult + " " + str(siteresult)) laststring = "" + site.ReportStringForResult + " " + str(siteresult) f.flush() f.close() pass #NOMOREPRINTS print "" + textoutfile + " Generated" def PrintToCSVFile(self,csvoutfile): """ Formats site information correctly and prints it to an output file with comma-seperators. Returns nothing. Argument(s): csvoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print '\n[+] Generating CSV output: ' + csvoutfile f = open(csvoutfile, "wb") csvRW = csv.writer(f, quoting=csv.QUOTE_ALL) csvRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = 
site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res else: laststring = "" for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) f.flush() f.close() pass #NOMOREPRINTS print "" + csvoutfile + " Generated" def PrintToHTMLFile(self,htmloutfile): """ Formats site information correctly and prints it to an output file using HTML markup. Returns nothing. Argument(s): htmloutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print '\n[+] Generating HTML output: ' + htmloutfile f = open(htmloutfile, "w") f.write(self.getHTMLOpening()) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: #if it's just a string we don't want it to output like a list if 
isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) f.write(self.getHTMLClosing()) f.flush() f.close() pass #NOMOREPRINTS print "" + htmloutfile + " Generated" def getHTMLOpening(self): """ Creates HTML markup to provide correct formatting for initial HTML file requirements. Returns string that contains opening HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. 
""" return '''<style type="text/css"> #table-3 { border: 1px solid #DFDFDF; background-color: #F9F9F9; width: 100%; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; font-family: Arial,"Bitstream Vera Sans",Helvetica,Verdana,sans-serif; color: #333; } #table-3 td, #table-3 th { border-top-color: white; border-bottom: 1px solid #DFDFDF; color: #555; } #table-3 th { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 14px; } #table-3 td { font-size: 12px; padding: 4px 7px 2px; vertical-align: top; }res h1 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: Center; line-height: 1.3em; font-size: 40px; } h2 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 16px; } h4 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 10px; } </style> <html> <body> <title> Automater Results </title> <h1> Automater Results </h1> <table id="table-3"> <tr> <th>Target</th> <th>Type</th> <th>Source</th> <th>Result</th> </tr> ''' def getHTMLClosing(self): """ Creates HTML markup to provide correct formatting for closing HTML file requirements. Returns string that contains closing HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. 
""" return ''' </table> <br> <br> <p>Created using Automater.py by @TekDefense <a href="http://www.tekdefense.com">http://www.tekdefense.com</a>; <a href="https://github.com/1aN0rmus/TekDefense">https://github.com/1aN0rmus/TekDefense</a></p> </body> </html> '''
The 10 Best Small Cities To Live In The U.S. Kenesaw, Nebraska is about as big as it sounds. It is a quick burst of texture along the railroad line to Hastings with houses, sheds and the remainder of a grain elevator reaching out of the Southern Nebraska prairie on either side of Smith Avenue. Who wouldn't want to live in Never Never Land, where forever never ends and all you need is faith, trust and pixie dust? Where a boy with buff quads in tights fights pirates and tells you to leave the world behind and bid your cares goodbye? It's beginning to look a lot like Christmas... especially at your next-door neighbor's neon nativity scene, which is totally pulling focus from your classy white twinkle lights. But these 11 towns in America are taking Christmas to a whole new level. Writer, blogger, and college student from Illinois. Transphobia is not what we should be teaching at our schools. Instead, we should be preaching and practicing tolerance and acceptance. Remember that transgender students are not lying, they are not dangerous, they are not tools for your bigotry. As any small-town aficionado will tell you, there are plenty of advantages to living away from the big city, and you don't have to completely abandon your urban lifestyle. 'Tis the season for driving around and gawking at leaves. 5 Incredible Midwest Towns You Aren't Visiting -- But Should! Living in the heart of America's Heartland lends itself to wonderful exploratory road trips. From Great Plains to arid Badlands, Great Lakes to Ozark Mountains, the Midwest is far more than flat land filled with corn, beans, and hogs. I know, I know -- everyone's squawking about how it's an extension of Williamsburg, or giving you their recommendation for the most rustic home goods store in Hudson. But if you peel just off the main drag, you'll find the places that make the area special and worth a little exploring. Sure, we love big, bustling cities. 
But there's something to be said for small towns where life moves slower, traditions are everywhere and the people stop to say hello. Cities are great for so many reasons (the convenience, the culture, the 127 different kinds of Thai delivery). But there's also something to be said for small towns (quaint Main Streets, kooky annual festivals and charm out the wazoo). From your high school football team to your high school sweetheart, small-town loyalty runs deep. When the going gets tough, no one gets going, and neither will she. During my visit to Rome, I found myself with two extra days that were pretty much empty. Rather than spend them in the city, I decided to visit some nearby villages, partially for the beautiful views they offered but mostly to get a true feel of how modern Italians live their everyday lives. From land to shining sea, these are some of the most charming, most scenic and most interesting towns to visit with your whole family. The inability to acknowledge or see this by so many people I know is...well, funny. I laugh and let go. Not in sarcasm, but in confusion and self-preservation to accept people are all different and if their truth is black and white, who am I to say how THEY should see things? Writer, consultant and speaker on creating great communities. In real life, this is Albert Lea, Minnesota, a town of 18,000 working to prove that healthy lifestyles like walking and good nutrition are not just big city things.
from tkinter import *
from random import *

root = Tk()
canvas = Canvas(root)
canvas.pack()


def create_scores_text():
    """Create the on-canvas score counter (stored in the global scores_text)."""
    global scores_text
    scores_text = canvas.create_text(60, 12, text="Scores: " + str(scores),
                                     font="Sans 18")


def change_scores_text():
    """Refresh the score counter from the current global ``scores`` value."""
    canvas.itemconfigure(scores_text, text="Scores: " + str(scores))


def generate_random_ball_coord():
    """Pick a random position keeping the whole ball inside the canvas."""
    x = randint(r, screen_width - r)
    y = randint(r, screen_height - r)
    return x, y


def generate_random_ball_velocity():
    """Pick a random velocity, each component in [-10, 10]."""
    vx = randint(-10, +10)
    vy = randint(-10, +10)
    return vx, vy


def create_ball():
    """Create the green ball at a random position with a random velocity."""
    global x, y, vx, vy, ball
    x, y = generate_random_ball_coord()
    vx, vy = generate_random_ball_velocity()
    ball = canvas.create_oval(x - r, y - r, x + r, y + r, fill="green")


def move_ball():
    """Advance the ball one step, bouncing it off the canvas edges."""
    global x, y, vx, vy
    new_x, new_y = x - vx, y - vy
    if new_x < r or new_x > screen_width - r:
        new_x = x  # rolling back coordinate!
        vx = -vx
    if new_y < r or new_y > screen_height - r:
        new_y = y  # rolling back coordinate!
        vy = -vy
    canvas.move(ball, new_x - x, new_y - y)
    x, y = new_x, new_y


def flick_ball():
    """Teleport the ball to a fresh random position and velocity."""
    global x, y, vx, vy
    new_x, new_y = generate_random_ball_coord()
    vx, vy = generate_random_ball_velocity()
    canvas.move(ball, new_x - x, new_y - y)
    x, y = new_x, new_y


def time_event():
    """Timer tick: move the ball, then re-arm the 100 ms timer."""
    move_ball()
    canvas.after(100, time_event)


def mouse_click(event):
    """Count a hit and relocate the ball when the click lands inside it."""
    global scores
    if (event.x - x) ** 2 + (event.y - y) ** 2 <= r ** 2:
        scores += 1
        change_scores_text()
        flick_ball()


scores = 0
x = y = 0  # not needed
r = 50
screen_width = int(canvas["width"])
screen_height = int(canvas["height"])
create_ball()
create_scores_text()
canvas.bind('<Button-1>', mouse_click)
time_event()  # start the periodic timer loop
root.mainloop()
While most gemstones appear to be unbreakable, they are still likely to be damaged by improper wear and care. Stones like Amethyst, Emerald and Tanzanite can be easily scratched, chipped or shattered by hard knocks and excessive changes in temperature. Emeralds can have an oily appearance, and care should be taken when cleaning not to remove this surface treatment. Most stones can be cleaned using warm water, gentle detergents and a soft toothbrush. Ensure that the stone is rinsed well; it can then be dried with a soft cloth. Avoid harsh chemicals with gemstones as they can alter the colour of some and affect the durability of others. When storing gemstone jewellery it is recommended to keep it either in individual soft fabric pouches or in a jewellery box with dividers so the stones are protected from contact with each other. Said to be the ‘sailors’ lucky stone’, and to be of help with arthritis, eye inflammation, sore throat and varicose veins. A rare and precious stone, prized for its exquisite apple-green colour. This was very popular in Ancient Egyptian times and is rumoured to have been Cleopatra's favourite stone. However, it disappeared and was rediscovered in the 1900s. Believed to increase the strength of the wearer and provide invisibility in desperate times.
import sqlalchemy
from sqlalchemy import Table, MetaData, Column, Integer, Text, ForeignKey
from dbconnection import dbconnection

# BUG FIX: all tables must share a single MetaData instance. A
# ForeignKey('authors.id') is resolved by table name through the owning
# MetaData collection, so giving each Table its own MetaData() (as the
# original did) makes the references unresolvable (NoReferencedTableError)
# as soon as the foreign keys are used.
metadata = MetaData()

authors = Table('authors', metadata,
                Column('id', Integer, primary_key=True),
                Column('name', Text))

albums = Table('albums', metadata,
               Column('id', Integer, primary_key=True),
               Column('authorid', Integer, ForeignKey('authors.id')),
               Column('name', Text))

songs = Table('songs', metadata,
              Column('id', Integer, primary_key=True),
              Column('authorid', Integer, ForeignKey('authors.id')),
              Column('albumid', Integer, ForeignKey('albums.id')),
              Column('name', Text))


def _apply_window(s, limit, offset):
    """Apply LIMIT/OFFSET to a select; -1 means 'no limit' / 'no offset'."""
    if limit != -1:
        s = s.limit(limit)
    if offset != -1:
        s = s.offset(offset)
    return s


def getAnagraphicTableResults(table, query, limit, offset):
    """Returns a dictionary representing the results of a query on a table
    composed of two columns: id (Integer Primary Key) and Name (Text)"""
    s = sqlalchemy.sql.select([table.c.id, table.c.name])
    if query is not None and query.strip() != '':
        # LIKE value is passed as a bind parameter by SQLAlchemy
        s = s.where(table.c.name.like('%' + query + '%'))
    s = _apply_window(s, limit, offset)
    return [{'id': row[0], 'name': row[1]} for row in dbconnection.execute(s)]


def getBasicSearch(query, limit, offset):
    """Returns a combined search of authors, albums and songs matching the query"""
    return {
        'authors': getAuthors(query, limit, offset),
        'albums': getAlbums(query, None, limit, offset),
        'songs': getSongs(query, None, None, limit, offset),
    }


def getAuthors(query, limit, offset):
    """Returns a dictionary of authors array"""
    return getAnagraphicTableResults(authors, query, limit, offset)


def getAlbums(query, authorid, limit, offset):
    """Returns a dictionary of albums array (each with its author embedded)"""
    s = sqlalchemy.sql.select(
        [albums.c.id, albums.c.name, authors.c.id, authors.c.name]).where(
        albums.c.authorid == authors.c.id)
    if query is not None and query.strip() != '':
        s = s.where(albums.c.name.like('%' + query + '%'))
    if authorid is not None and authorid.strip() != '':
        try:
            s = s.where(authors.c.id == int(authorid))
        except ValueError:
            # non-numeric author id: ignore the filter, as before
            pass
    s = _apply_window(s, limit, offset)
    results = []
    for row in dbconnection.execute(s):
        results.append({"id": row[0], "name": row[1],
                        "author": {"id": row[2], "name": row[3]}})
    return results


def getSongs(query, authorid, albumid, limit, offset):
    """Returns a dictionary of songs array (author and album embedded)"""
    s = sqlalchemy.sql.select(
        [songs.c.id, songs.c.name, authors.c.id, authors.c.name,
         albums.c.id, albums.c.name]).where(
        songs.c.authorid == authors.c.id).where(
        songs.c.albumid == albums.c.id)
    if query is not None and query.strip() != '':
        s = s.where(songs.c.name.like('%' + query + '%'))
    if authorid is not None and authorid.strip() != '':
        try:
            s = s.where(authors.c.id == int(authorid))
        except ValueError:
            pass
    if albumid is not None and albumid.strip() != '':
        try:
            s = s.where(albums.c.id == int(albumid))
        except ValueError:
            pass
    s = _apply_window(s, limit, offset)
    results = []
    for row in dbconnection.execute(s):
        results.append({"id": row[0], "name": row[1],
                        "author": {"id": row[2], "name": row[3]},
                        "album": {"id": row[4], "name": row[5]}})
    return results
Autism Anglia offers bespoke training packages delivered by specialised autism professionals at your place of work. Previous clients include the police, schools, care homes, the service industry and local authority. Our training is bespoke, meaning that we work with you to develop a session that is relevant to your field of work and provides your staff with the knowledge they require to deliver the best practice. If you have particular budget constraints, we will be happy to discuss this with you and see if we can meet your training requirements. If you are interested in Autism Anglia delivering a training session in your place of work, please contact us or call 01206 577678.
""" Django settings for harare project. Generated by 'django-admin startproject' using Django 1.10.3. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os import dj_database_url # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'xnk36o$h1m!)p0y!b(63myjcw_69be&1k@e91(jdftia3^h1h*' # SECURITY WARNING: don't run with debug turned on in production! # The following checks to see if running on Heroku and then disables debugging. 
# http://stackoverflow.com/questions/9383450/how-can-i-detect-herokus-environment ON_HEROKU = False if 'DATABASE_URL' in os.environ: ON_HEROKU = True DEBUG = True if ON_HEROKU: DEBUG = False ALLOWED_HOSTS = ['*'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', # third_party_apps 'crispy_forms', 'bootstrap3', 'markitup', 'autoslug', # my_apps 'pyladies_harare', 'talks', 'profiles', 'accounts', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'whitenoise.middleware.WhiteNoiseMiddleware', ] ROOT_URLCONF = 'harare.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] CRISPY_TEMPLATE_PACK = 'bootstrap3' WSGI_APPLICATION = 'harare.wsgi.application' MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': True}) # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Update database configuration with $DATABASE_URL db_from_env = dj_database_url.config(conn_max_age=500) DATABASES['default'].update(db_from_env) # AUTH_USER_MODEL = 'auth.User' # Password validation # 
https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Africa/Harare' USE_I18N = True USE_L10N = True USE_TZ = True # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ # PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles') STATIC_URL = '/static/' # Extra places for collectstatic to find static files. STATICFILES_DIRS = ( os.path.join(PROJECT_ROOT, 'static'), )
As the new operator of Utah's largest oil field, the Greater Aneth Oil Field (Aneth), Elk celebrated its first year of operatorship with zero employee injuries resulting in days away, restricted activities or transferred duties. This safety performance milestone was better than that of the former operator of the field for the preceding 12-month period and was achieved with 19,974 fewer man-hours worked. Elk now has a staff and consultant headcount approaching 115, a large majority of which are field operations based. We believe this improved safety performance has been achieved because we have been able to review, refresh and add operating business values in health, safety and environment from existing and new employees and contractors over the last 12 months as we transform into an active, best practice oil field operator. The addition of Aneth staff and contractors to the existing Elk team necessitated our broadening of sustainability strategy and future sustainability reporting scope as we continue to transform. This ensures our expanded employee and contractor base has a transparent and usable EHS management system. This adaptive approach also encapsulates other areas of good corporate governance as management structures are enhanced to deliver more robust systems that in turn are focused on providing continued shareholder returns. Elk's COO continues to steer the sustainability governance structure, assisted by our US-based EHS Coordinator and two field-based EHS Specialists along with a Governmental/Corporate Affairs Officer. Corporate and field oversight ensures sustainability themes are implemented into and monitored across our everyday business practices in the areas we operate in. As Elk further develops in this area, we will better measure, refine and embed sustainability goals and targets.
Health and safety of our staff and contractors is paramount and taking over operatorship of Aneth on 1 January 2018 allowed us to undertake ground-up revisions of our Corporate Safety Manual, Process Safety Management Manual, Operator Qualification Manual and Contractor EHS Manual. This process has already started to deliver in that our EHS measurement for 2018 was better than the former operator's preceding 12 months. Wellbeing of our staff and contractors: As the new Aneth operator Elk undertook early in 2018 the "8760" Safety Campaign Development - Safety should be a priority 24 hours a day, 365 days per year. Elk is further building its capability in actively managing diversity to develop and improve the talent available to drive performance and capability outcomes. Most of Elk's Aneth field operations and office employees across all disciplines belong to the Navajo Nation. Elk provides college scholarships and paid internships. Landowner interaction: The Greater Aneth Oil Field (Aneth) lies solely within the Navajo Nation Reservation and Navajo Nation Oil and Gas (NNOG) is Elk's 37% partner in further developing the field. NNOG generates significant income for the Navajo Nation from its operating partnership with Elk. On 207 square kilometres (80 square miles) that make up Aneth there are some 125 homesteads and Elk field operations managers ensure a two-way dialogue with homesteaders making certain they are fully aware of impending works in the vicinity of their properties to minimise disturbance. In late 2018 Elk was a major sponsor of the 107th Annual Northern Navajo Nation Fair in Shiprock, New Mexico. Elk sponsorship was allocated for emergency services and public safety needs for the duration of the fair. Elk supported the two agencies with needed lodging and meals. The fair attracts a lot of local and interstate visitors therefore requiring a paramedic presence and increased traffic control. 
A particularly harsh winter with rare heavy snow falls across our operating acreage led to multiple requests for help from elderly lease residents for assistance in snow removal on roads into home sites. Many of the elders depend on passable roads to access and obtain daily medical care. Elk also maintains strong links with academia across the State and hosted graduating engineering students from the University of Utah for a tour of the Aneth Unit. Operating footprint: As well as adhering to all Navajo Nation, State and Federal protocols as a diligent operator of Aneth, we all appreciate the natural beauty of the environment we work in. On World Clean Up Day 2018 sponsored by Elk a large number of employees, family, friends and community members volunteered and came together to clean up almost 32 kilometres (20 miles) of nature strip adjoining the public highway through the Aneth area. Regulatory compliance and reporting: Elk has established relationships and reporting requirements with the requisite traditional owners, State and Federal authorities, which includes the Bureau of Indian Affairs, the Bureau of Land Management, the US Environmental Protection Agency - Water and Air Permits, the Navajo Nation Water Board, the Utah Division of Oil, Gas and Mining, Pipeline and Hazardous Materials Safety Administration and the Federal Motor Carrier Safety Administration. For the benefit of all Elk stakeholders during the coming year we will adhere to and benchmark our expanded sustainability strategy and reporting against global oil industry guidelines for social and environmental issues as set down by IPIECA (International Petroleum Industry Environmental Conservation Association). Elk will also be utilising components of the GRI (Global Reporting Initiative) guidelines to enhance our sustainability performance and disclosure for the benefit of all stakeholders.
import zope.interface
import zope.event

import superorganism.gui.interfaces
import urwid


class CharKeyPressed(object):
    """Event payload for a printable-character key press on *screen*."""
    zope.interface.implements(superorganism.gui.interfaces.ICharKeyPressEvent)

    def __init__(self, screen, key):
        self.screen = screen
        self.key = key


class FunctionKeyPressed(CharKeyPressed):
    """Event payload for a non-printable (function/navigation) key press."""
    zope.interface.implements(superorganism.gui.interfaces.IFunctionKeyPressEvent)


class Dispatcher(object):
    """Polls the screen for input and republishes keys as zope events."""
    zope.interface.implements(superorganism.gui.interfaces.IKeyDispatcher)

    def __init__(self, screen):
        self.screen = screen

    def dispatch_key_events(self):
        """Fetch pending keystrokes and notify a key event for each one."""
        # BUG FIX: the original called screen.get_input() twice -- once into
        # an unused ``keys`` variable and again in the for statement -- which
        # silently discarded the first batch of keystrokes. Fetch once and
        # iterate that batch.
        keys = self.screen.get_input()
        for key in keys:
            if self.is_valid_char(key):
                zope.event.notify(
                    CharKeyPressed(self.screen, key))
            else:
                zope.event.notify(
                    FunctionKeyPressed(self.screen, key))

    def is_valid_char(self, key):
        """True for wide (CJK) glyphs or any single printable char (>= space)."""
        return urwid.util.is_wide_char(key, 0) or (len(key) == 1 and ord(key) >= 32)
This 4 bedroom, two story brick home in Trent Woods & Bangert School Dist. is a must-see! The kitchen has been updated with granite countertops, stainless steel appliances and boasts a center island. The home has oak floors throughout. Two bedrooms upstairs open up to the spacious balcony in the front. This home has a large studio or office space with abundant natural light and has its own separate entrance. Workshop or She Shed is right off the patio with a built-in firepit, all on a fenced-in large lot. New natural gas hot water heater.
# Django settings for SOLE project.

import os

DEBUG = True
TEMPLATE_DEBUG = DEBUG

settings_dir = os.path.dirname(__file__)
PROJECT_ROOT = os.path.abspath(os.path.dirname(settings_dir))

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'private/development.db'),
    }
}

# Email settings
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = ''

MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'public/media/')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'public/static/')
STATIC_URL = '/static/'

# When running on OpenShift the data/static paths live outside the repo.
if 'OPENSHIFT_REPO_DIR' in os.environ:
    PROJECT_ROOT = os.path.join(os.environ.get('OPENSHIFT_REPO_DIR'),
                                'wsgi', 'solesite')
    DATA_DIR = os.path.join(os.environ['OPENSHIFT_DATA_DIR'])
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(DATA_DIR, 'development.db'),
        }
    }
    MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
    STATIC_ROOT = os.path.join(os.environ.get('OPENSHIFT_REPO_DIR'),
                               'wsgi', 'static')

# Internationalization
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'

ugettext = lambda s: s

LANGUAGES = (
    ('--', ugettext('select here')),
    ('nl', ugettext('Dutch')),
    ('fr', ugettext('French')),
    ('pl', ugettext('Polish')),
    # BUG FIX: label was misspelled 'Portugese' in the original.
    ('pt', ugettext('Portuguese')),
    ('pt-br', ugettext('Brazilian Portuguese')),
    ('es', ugettext('Spanish')),
    ('el', ugettext('Greek')),
    ('en', ugettext('English')),
    ('jp', ugettext('Japanese')),
)

STATICFILES_DIRS = (
    os.path.join(PROJECT_ROOT, 'solesite/static/'),
)

LOCALE_PATHS = (
    os.path.join(PROJECT_ROOT, 'locale'),
)

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): the '&amp;' below looks like an HTML-escaped '&' introduced by
# a copy/paste -- confirm against the deployed key before changing it.
SECRET_KEY = '_g-js)o8z#8=9pr1&amp;05h^1_#)91sbo-)g^(*=-+epxmt4kc9m#'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'userena.middleware.UserenaLocaleMiddleware',
)

# Add the Guardian and userena authentication backends
AUTHENTICATION_BACKENDS = (
    'userena.backends.UserenaAuthenticationBackend',
    'guardian.backends.ObjectPermissionBackend',
    'django.contrib.auth.backends.ModelBackend',
)

# Settings used by SOLE
LOGIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'

AUTH_PROFILE_MODULE = 'profiles.Profile'
USERENA_DISABLE_PROFILE_LIST = False
USERENA_MUGSHOT_SIZE = 140

ROOT_URLCONF = 'solesite.urls'

WSGI_APPLICATION = 'solesite.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(PROJECT_ROOT, 'solesite/templates/'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'guardian',
    'south',
    'userena',
    'userena.contrib.umessages',
    'profiles',
    'easy_thumbnails',
    'jobs',
)

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}

# Needed for Django guardian
ANONYMOUS_USER_ID = -1

# Test runner
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
TCW Group and the other defendants in a lawsuit brought by Sara Tirschwell, a former TCW distressed debt fund manager who is seeking $30 million for allegedly being fired for lodging a sexual harassment complaint, are seeking to have personal information kept secret, according to claims made in a letter to the trial judge. In the letter to New York Supreme Court Justice Robert D. Kalish, Ms. Tirschwell's attorney Lita Beth Wright says that the defendants — particularly Jess Ravich, who Ms. Tirschwell alleges sexually harassed her at TCW — are asking to go beyond the model confidentiality agreement created by the New York City Bar Association without adequately defining what "personal information" would include. Mr. Ravich is a group managing director and head of alternative products at TCW. Ms. Wright also raises other issues concerning the discovery process, including "critical deficiencies" in the documents provided to plaintiff as part of a document exchange. Ms. Tirschwell provided 7,400 documents, while Mr. Ravich gave 26 documents. Ms. Wright's letter disputes Mr. Ravich's claim that he deleted certain texts and emails and said TCW improperly redacted certain documents. Ms. Wright is asking the court to schedule a pre-trial conference before depositions in the case begin on May 31. TCW spokesman Doug Morris declined to comment.
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import MultipleLocator, FormatStrFormatter, ScalarFormatter, LogLocator
from matplotlib.backends.backend_pdf import PdfPages

plt.rcParams['axes.linewidth'] = 1.5
pp = PdfPages('ggchem.pdf')


def read_ggchem(filename):
    """Read one GGchem output file.

    Returns a tuple ``(dat, keyword, Tg, lntot)`` where
      dat     -- full data table (one row per temperature point),
      keyword -- column names parsed from the file header,
      Tg      -- gas temperature [K] (column 0),
      lntot   -- log10 of the total particle density, summed over columns
                 3 .. 3+NELEM+NMOLE (electrons, all atoms, ions and cations).
    """
    with open(filename) as data:
        data.readline()                      # skip title line
        dimens = np.array(data.readline().split())
        nelem = int(dimens[0])
        nmole = int(dimens[1])
        ndust = int(dimens[2])               # read for completeness; unused here
        header = data.readline()
    dat = np.loadtxt(filename, skiprows=3)
    keyword = np.array(header.split())
    Tg = dat[:, 0]                           # T [K]
    ntot = 0.0 * Tg
    for i in range(3, 4 + nelem + nmole):    # electrons, all atoms, ions and cations
        ntot = ntot + 10**dat[:, i]
    return dat, keyword, Tg, np.log10(ntot)


# The three model runs differ only in the input file: read them with one
# helper instead of three copy-pasted parsing sections.
dat1, keyword1, Tg1, lntot1 = read_ggchem('Static_fast.dat')
dat2, keyword2, Tg2, lntot2 = read_ggchem('Static_gas.dat')
dat3, keyword3, Tg3, lntot3 = read_ggchem('Static_cond.dat')

bar = 1.E+6                                  # 1 bar in dyn/cm2
Tmin = 500                                   # plot range [K]
Tmax = 3000
sep = 100                                    # minor tick spacing on the T axis
col = ['darkgoldenrod', 'darkgray', 'darkgreen', 'darkmagenta', 'red',
       'darkorange', 'darkorchid', 'aqua', 'cadetblue']
# Secondary palette, currently unused but kept for additional species.
col2 = ['aquamarine', 'beige', 'darkolivegreen', 'bisque', 'burlywood',
        'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'crimson',
        'darkcyan', 'darkkhaki']

#================== some important molecules ====================
fig, ax = plt.subplots()
mols = ['CO', 'CO2', 'CH4', 'N2', 'NH3', 'HCN', 'C2H2', 'C2H4', 'H2O']
mols = np.array(mols)
count = 0
for mol in mols:
    # Solid: fast run, dashed: gas run, dotted: cond run -- same color per molecule.
    i = np.where(mol == keyword1)[0][0]
    yy = dat1[:, i] - lntot1                 # log10 nmol/ntot
    plt.plot(Tg1, yy, c=col[count], lw=3, label=mol)
    i = np.where(mol == keyword2)[0][0]
    yy = dat2[:, i] - lntot2                 # log10 nmol/ntot
    plt.plot(Tg2, yy, c=col[count], lw=2, ls='--')
    i = np.where(mol == keyword3)[0][0]
    yy = dat3[:, i] - lntot3                 # log10 nmol/ntot
    plt.plot(Tg3, yy, c=col[count], lw=2, ls=':')
    count = count + 1
plt.xlabel(r'$T\ \mathrm{[K]}$', fontsize=20)
plt.ylabel(r'$\mathrm{log}_{10}\ n_\mathrm{mol}/n_\mathrm{tot}$', fontsize=20)
plt.xlim(Tmin, Tmax)
plt.ylim(-15, -2)
plt.tick_params(axis='both', labelsize=14)
plt.tick_params('both', length=6, width=1.5, which='major')
plt.tick_params('both', length=3, width=1, which='minor')
minorLocator = MultipleLocator(sep)
ax.xaxis.set_minor_locator(minorLocator)
minorLocator = MultipleLocator(1)
ax.yaxis.set_minor_locator(minorLocator)
plt.legend(loc='lower right', fontsize=11, fancybox=True)
plt.tight_layout()
plt.savefig(pp, format='pdf')
plt.clf()

pp.close()
# print(...) works in both Python 2 and 3; the old 'print x' form is Py2-only.
print('... written output to ggchem.pdf.')
Get a discounted fee on The Master Class Series if you have taken or if you are enrolled in The Humber College Puppetry Intensive!! Theatre Hera is a Toronto based theatre collective established with the purpose of producing fresh, original works by local artists. We are currently seeking TWO MALE actors to join our cast. MANDELSTAM has won awards in the Toronto Fringe Festival’s New Play Writing Contest in 2007 and 2008. Naked Goddess Productions is looking for a versatile male actor (age range 40-50) for its production of Ninety, a compelling two-hander by Australian playwright Joanna Murray Smith at part of the 2015 Toronto Fringe Festival, July 1-12, 2015. Associated Designers of Canada (ADC) is pleased to announce the selected participants in Canada’s entry to the competition of Countries and Regions at the 13th edition of the Prague Quadrennial of Performance Design and Space (PQ2015). The mission of The Burlington Performing Arts Centre is to provide the people of Burlington with a broad range of excellent performance opportunities that will both inspire and delight. Mandelshtam relates the story of Stalin’s persecution of Osip Mandelsthtam, one of the foremost Russian poets of the twentieth century.
# Copyright (C) 2016 Intel Corporation
#
# Released under the MIT license (see COPYING.MIT)
#
# Functions to get metadata from the testing host used
# for analytics of test results.

import os
from collections import OrderedDict
from collections.abc import MutableMapping
from xml.dom.minidom import parseString
from xml.etree.ElementTree import Element, tostring

from oeqa.utils.commands import runCmd, get_bb_vars


def get_os_release():
    """Get info from /etc/os-release as a dict, or None if the file is absent."""
    data = OrderedDict()
    os_release_file = '/etc/os-release'
    if not os.path.exists(os_release_file):
        return None
    with open(os_release_file) as fobj:
        for line in fobj:
            # Skip blank lines and comments: os-release allows both, and
            # split('=', 1) would raise ValueError on them.
            if '=' not in line:
                continue
            key, value = line.split('=', 1)
            data[key.strip().lower()] = value.strip().strip('"')
    return data


def metadata_from_bb():
    """ Returns test's metadata as OrderedDict.

        Data will be gathered using bitbake -e thanks to get_bb_vars.
    """
    metadata_config_vars = ('MACHINE', 'BB_NUMBER_THREADS', 'PARALLEL_MAKE')

    info_dict = OrderedDict()
    hostname = runCmd('hostname')
    info_dict['hostname'] = hostname.output
    data_dict = get_bb_vars()

    # Distro information
    info_dict['distro'] = {'id': data_dict['DISTRO'],
                           'version_id': data_dict['DISTRO_VERSION'],
                           'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])}

    # Host distro information
    os_release = get_os_release()
    if os_release:
        info_dict['host_distro'] = OrderedDict()
        for key in ('id', 'version_id', 'pretty_name'):
            if key in os_release:
                info_dict['host_distro'][key] = os_release[key]

    info_dict['layers'] = get_layers(data_dict['BBLAYERS'])
    # NOTE(review): 'bb' is not imported here -- presumably it is provided by
    # the bitbake execution environment; confirm before running standalone.
    info_dict['bitbake'] = git_rev_info(os.path.dirname(bb.__file__))

    info_dict['config'] = OrderedDict()
    for var in sorted(metadata_config_vars):
        info_dict['config'][var] = data_dict[var]
    return info_dict


def metadata_from_data_store(d):
    """ Returns test's metadata as OrderedDict.

        Data will be collected from the provided data store.
    """
    # TODO: Getting metadata from the data store would
    # be useful when running within bitbake.
    pass


def git_rev_info(path):
    """Get git revision information as a dict (empty dict if not a repo)."""
    from git import Repo, InvalidGitRepositoryError, NoSuchPathError

    info = OrderedDict()
    try:
        repo = Repo(path, search_parent_directories=True)
    except (InvalidGitRepositoryError, NoSuchPathError):
        return info
    info['commit'] = repo.head.commit.hexsha
    info['commit_count'] = repo.head.commit.count()
    try:
        info['branch'] = repo.active_branch.name
    except TypeError:
        # Detached HEAD has no active branch.
        info['branch'] = '(nobranch)'
    return info


def get_layers(layers):
    """Returns layer information in dict format, keyed by layer directory name."""
    layer_dict = OrderedDict()
    for layer in layers.split():
        layer_name = os.path.basename(layer)
        layer_dict[layer_name] = git_rev_info(layer)
    return layer_dict


def write_metadata_file(file_path, metadata):
    """ Writes metadata to a XML file in directory. """
    xml = dict_to_XML('metadata', metadata)
    xml_doc = parseString(tostring(xml).decode('UTF-8'))
    with open(file_path, 'w') as f:
        f.write(xml_doc.toprettyxml())


def dict_to_XML(tag, dictionary, **kwargs):
    """ Return XML element converting dicts recursively. """
    elem = Element(tag, **kwargs)
    for key, val in dictionary.items():
        if tag == 'layers':
            # Layers are emitted as <layer name="..."> children.
            child = (dict_to_XML('layer', val, name=key))
        elif isinstance(val, MutableMapping):
            child = (dict_to_XML(key, val))
        else:
            if tag == 'config':
                # Config entries become <variable name="...">value</variable>.
                child = Element('variable', name=key)
            else:
                child = Element(key)
            child.text = str(val)
        elem.append(child)
    return elem
The old adage, “Education is the key to success”, may be a cliché to many of us but definitely not to the scholars of Bantay Bata 163 and Singapore Airlines. Bantay Bata 163, in partnership with Singapore Airlines (SIA), welcomed the new school year 2016-2017 together with the 128 scholars supported by the partnership. Most of the scholars came from the provinces. Only 33 are from Manila. Last July 23, Bantay Bata, SIA and the scholars gathered together in an Opening Assembly to encourage the children before the new school year started. Aside from the financial assistance, the scholars, with big smiles on their faces, also received school supplies and certificates of recognition. More than anything else, the support and guidance that Bantay Bata 163 and SIA give to the scholars continue to inspire them to pursue their studies to achieve their dreams. “Nakakapag aral po ako ng maayos dahil sa binibigay po sa amin ng Singapore Airlines. Kayo po ang makatutulong sa akin sa pagtatapos ko po ng pag aaral. Maraming Salamat po sa Singapore Airlines at Bantay Bata 163” (I was able to continue my studies because of Singapore Airlines. I know that SIA will help me to finish my studies. Thank you very much to Singapore Airlines and to Bantay Bata 163), shared Kevin, one of the scholars. “Thank you po Singapore Airlines, kasi po isa po kayo sa inspirasyon ko para mag aral nang mabuti at para po mag sipag sa pag aaral.” (Thank you to Singapore Airlines, you are one of my inspirations to do well on my studies.) Gel, Grade 5, stated. Each of the scholars believes that Bantay Bata 163 and SIA’s help is a bridge for them to reach their dreams and to free them from the clutches of poverty. General Manager of SIA, Carol Ong did not disappoint the children as she promised to continue the good charity and partnership with Bantay Bata 163. For Ong, it is a high privilege to be a part of the Bantay Bata advocacy in providing education to less fortunate children.
As Ong ended her speech, she left a short but meaningful message for the scholars: “The sky is the limit, always reach for the highest”.
from unittest import TestCase

from httpretty import activate, register_uri, GET
from schematics.validate import validate

from expressly import Api
from expressly.api_responses import BannerResponse
from expressly.tests import dummy_api_key, api_dev_url, dummy_campaign_uuid


class BannerTest(TestCase):
    """Tests Api.get_banner() against a stubbed HTTP banner endpoint."""

    def setUp(self):
        # Api pointed at the dev host; the final False presumably disables
        # SSL -- confirm against Api.__init__.
        self.api = Api(dummy_api_key, api_dev_url, False)
        self.dummy_email = '[email protected]'

    @activate  # httpretty intercepts all HTTP traffic during this test
    def test_request(self):
        # Stub the v2 banner endpoint with a canned JSON payload.
        register_uri(
            GET,
            'http://%s/api/v2/banner/%s?email=%s' % (api_dev_url, dummy_campaign_uuid, self.dummy_email),
            body=bytearray("""
            {
              "bannerImageUrl": "https://buyexpressly.com/assets/banner/awesome-banner.jpg",
              "migrationLink": "https://www.myblog.com/expressly/api/3aff1880-b0f5-45bd-8f33-247f55981f2c"
            }""", 'utf-8'),
            status=200,
            content_type='application/json'
        )
        response = self.api.get_banner(dummy_campaign_uuid, self.dummy_email)
        self.assertEqual(response.status, 200)
        # The JSON body should have been deserialized into a BannerResponse
        # model that passes schematics validation.
        self.assertIsInstance(response.data, BannerResponse)
        self.assertTrue(validate(BannerResponse, response.data))
Are you a developer looking to improve your workflow? Have you always wanted to learn source control? Are you looking to make your development workflow safer? Well, you have come to the right course. Git is the industry-standard software for source control. With Git, you can save snapshots of your work as you go along in a timeline. Git allows you to create backups that take minimum space, making your work space safer. If you mess up, ruin your code, or damage your files, Git will be there to help. We start from the complete beginning, talking about basic theory to familiarise new users with the environments we will be working in. Then, we introduce the Terminal, to make sure to alleviate any misconceptions about the Terminal, and to make sure you are comfortable with the environment before we move onto Git & GitHub. Remember, the main goal of the course is to teach Git & GitHub as smoothly as possible. Then, we move on to Git & GitHub. We break it up into step-by-step pieces, so that everything makes sense, and you fully understand each part before moving onto the next skill. This is the only free course that takes Git, GitHub, version control, and basic Terminal knowledge, and bundles it together in one course for users of all levels. Whether you are a novice with no knowledge of the terminal, Git, or source control, or you are an established developer looking to integrate Git & GitHub into your work, this course has everything for you. So, whether you are trying to learn Git for your work, or looking to learn for fun, this course will turn you into a Git & GitHub Master. Anyone looking to learn how to use Git & GitHub, both independently and together. Anyone looking to learn source / version control.
# -*- coding: utf-8 -*-
#
# mididings
#
# Copyright (C) 2008-2014 Dominic Sacré <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#

import sys

# Python 2/3 compatibility: expose the toolkit under the Python 2 name.
if sys.version_info < (3,):
    import Tkinter
else:
    import tkinter as Tkinter


class AutoScrollbar(Tkinter.Scrollbar):
    """Scrollbar that can hide itself when the whole content is visible.

    set_show_hide() registers two callbacks; set() -- called by the scrolled
    widget -- invokes hide when everything fits and show otherwise.
    """

    def set_show_hide(self, show, hide):
        # Callbacks invoked from set(), typically pack/grid and *_forget.
        self._show = show
        self._hide = hide

    def set(self, lo, hi):
        # lo/hi are the visible fraction of the scrolled widget, as strings.
        if float(lo) <= 0.0 and float(hi) >= 1.0:
            self._hide()
        else:
            self._show()
        Tkinter.Scrollbar.set(self, lo, hi)


class LiveThemedFactory(object):
    """Factory producing Tk widgets pre-configured with the 'live' colors.

    Each method applies the theme colors first and the caller's options
    second, so explicit options override the theme.
    """

    def __init__(self, color, color_highlight, color_background):
        self.color = color
        self.color_highlight = color_highlight
        self.color_background = color_background

    def Tk(self, **options):
        w = Tkinter.Tk()
        w.config(background=self.color_background)
        w.config(**options)
        return w

    def Frame(self, master, **options):
        w = Tkinter.Frame(master)
        w.config(background=self.color)
        w.config(**options)
        return w

    def AutoScrollbar(self, master, **options):
        w = AutoScrollbar(master)
        w.config(
            background=self.color,
            activebackground=self.color_highlight,
            troughcolor=self.color_background,
            borderwidth=1,
            relief='flat',
            width=16,
        )
        w.config(**options)
        return w

    def Listbox(self, master, **options):
        w = Tkinter.Listbox(master)
        w.config(
            background=self.color_background,
            foreground=self.color,
            selectbackground=self.color_background,
            selectforeground=self.color_highlight,
            selectborderwidth=0,
            borderwidth=0,
        )
        w.config(**options)
        return w

    def Button(self, master, **options):
        w = Tkinter.Button(master)
        w.config(
            background=self.color_background,
            foreground=self.color,
            activebackground=self.color_background,
            activeforeground=self.color_highlight,
            borderwidth=0,
            highlightthickness=0,
            relief='flat',
        )
        w.config(**options)
        return w

    def Canvas(self, master, **options):
        w = Tkinter.Canvas(master)
        w.config(background=self.color_background)
        w.config(**options)
        return w


class UnthemedFactory(object):
    """Factory with the same interface as LiveThemedFactory, but applying
    no theme: widgets are created with only the caller's options."""

    def Tk(self, **options):
        w = Tkinter.Tk()
        w.config(**options)
        return w

    def Frame(self, master, **options):
        return Tkinter.Frame(master, **options)

    def AutoScrollbar(self, master, **options):
        return AutoScrollbar(master, **options)

    def Listbox(self, master, **options):
        return Tkinter.Listbox(master, **options)

    def Button(self, master, **options):
        return Tkinter.Button(master, **options)

    def Canvas(self, master, **options):
        return Tkinter.Canvas(master, **options)
We believe Lab Technicians and science Teachers are subjected to enough chemical exposure during their career as it is. Students, too, deserve to learn in an environment that limits their chemical exposure to those experiments that require the use of chemicals in a controlled and safe manner. The specimens supplied by Dissection Connection are 100% chemical preservative free. No formalin, no glycol, no nothing. The exception is the small intestines, which are packaged in a solution of sterile 0.9% saline to prevent dehydration of the specimen. The piglets are just too difficult to pin to a board in the traditional way for dissection. Heather from Southern Biological showed me this elastic band method between sessions at a conference one day.
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa.  If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa api module for exposing domain object TaskRun via an API.

This package adds GET, POST, PUT and DELETE methods for:
    * task_runs

"""
from flask import request
from flask.ext.login import current_user
from pybossa.model.task_run import TaskRun
from werkzeug.exceptions import Forbidden, BadRequest

from api_base import APIBase
from pybossa.util import get_user_id_or_ip
from pybossa.core import task_repo, sentinel


class TaskRunAPI(APIBase):

    """Class API for domain object TaskRun."""

    # Domain class this endpoint exposes (APIBase convention).
    __class__ = TaskRun
    # Server-managed attributes; clients may not include them in payloads.
    reserved_keys = set(['id', 'created', 'finish_time'])

    def _update_object(self, taskrun):
        """Update task_run object with user id or ip.

        Validates the referenced task/project pair and that the caller
        actually requested the task, raising Forbidden otherwise.
        """
        # validate the task and project for that taskrun are ok
        task = task_repo.get_task(taskrun.task_id)
        if task is None:  # pragma: no cover
            raise Forbidden('Invalid task_id')
        if (task.project_id != taskrun.project_id):
            raise Forbidden('Invalid project_id')
        # _check_task_requested_by_user is a module-level helper defined
        # below the class; Python resolves it at call time.
        if _check_task_requested_by_user(taskrun, sentinel.master) is False:
            raise Forbidden('You must request a task first!')

        # Add the user info so it cannot post again the same taskrun
        if current_user.is_anonymous():
            taskrun.user_ip = request.remote_addr
        else:
            taskrun.user_id = current_user.id

    def _forbidden_attributes(self, data):
        # Reject payloads that try to set any server-managed key.
        for key in data.keys():
            if key in self.reserved_keys:
                raise BadRequest("Reserved keys in payload")


def _check_task_requested_by_user(taskrun, redis_conn):
    """Return True if this user (or IP) previously requested the task.

    The request marker is stored in redis by the task-request endpoint;
    for authenticated users it is consumed (deleted) here so the same
    request cannot be reused.
    """
    user_id_ip = get_user_id_or_ip()
    usr = user_id_ip['user_id'] or user_id_ip['user_ip']
    key = 'pybossa:task_requested:user:%s:task:%s' % (usr, taskrun.task_id)
    task_requested = bool(redis_conn.get(key))
    # Anonymous users' keys are left to expire on their own.
    if user_id_ip['user_id'] is not None:
        redis_conn.delete(key)
    return task_requested
Held every Tuesday evening in the historic literary epicenter of San Francisco, Tuesdays at North Beach is a free poetry series celebrating internationally acclaimed poets and showcasing local talent. Guests have included Jonathan Richman, David Meltzer, Diane di Prima, California Poet Laureate Al Young and freshly-discovered poets from Friends' sister program, Poets 11. The series is curated by Friends' Poet-in-Residence, Jack Hirschman. Tonight's speakers: Carol Denney & Patti Trimble.
# -*- encoding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import api, fields, models, registry, tools


class DecimalPrecision(models.Model):
    """Extends decimal.precision with a separate display precision."""

    _inherit = 'decimal.precision'

    # Number of digits shown in the UI (storage precision is unchanged).
    display_digits = fields.Integer('Display Digits', required=True, default=2)

    # Old-style ormcache: results keyed per 'application'; skiparg=3 skips
    # (self, cr, uid) when building the cache key.
    @tools.ormcache(skiparg=3)
    def display_precision_get(self, cr, uid, application):
        """Return display digits for *application*, defaulting to 2."""
        cr.execute('select display_digits from decimal_precision where name=%s', (application,))
        res = cr.fetchone()
        return res[0] if res else 2

    # create/write/unlink must flush the ormcache so stale display digits
    # are never served after a change.
    @api.model
    @api.returns('self', lambda value: value.id)
    def create(self, vals):
        record = super(DecimalPrecision, self).create(vals)
        self.display_precision_get.clear_cache(self)
        return record

    @api.multi
    def write(self, vals):
        result = super(DecimalPrecision, self).write(vals)
        self.display_precision_get.clear_cache(self)
        return result

    @api.multi
    def unlink(self):
        result = super(DecimalPrecision, self).unlink()
        self.display_precision_get.clear_cache(self)
        return result

    @staticmethod
    def get_display_precision(cr, uid, application):
        """Return a (16, digits) field-precision tuple for *application*.

        16 is the total-digits part of the precision pair; only the
        decimal-digits part is configurable here.
        """
        res = 2
        dp_obj = registry(cr.dbname)['decimal.precision']
        # Guard for registries where this model extension is not loaded.
        if hasattr(dp_obj, 'display_precision_get'):
            res = dp_obj.display_precision_get(cr, uid, application)
        return 16, res
3g antenna for mobile phone,10 Antennas Scrambler for sale,The newest tablet by Microsoft is generally getting good reviews. Meanwhile, a new report says Nintendo moved more game consoles than Sony and Microsoft in June (well, in terms of total units). See how cell phone jammers work.the microsoft office mobile apps are optimized for touch and smaller screens on windows 10 phones and tablets,we have the best security apps that puts the power in your hands,. Brighthand compares five top free apps and declares a winner.all radio transmitters are illegal,. Lookout is now the only all-in-one app that protects your device.download walmart mobile app and browse cellphones.. There are many free phone tracker apps available online for download on their official sites.free shipping on purchases over $35 and save 5% every day with your target redcard,The original cell phone jammer specialists since 1999,find great deals on ebay for cell phones antenna and cell phone antenna booster,Our Top Pick Will Surprise You!,. For their placement that can range anywhere from $800 to $2.Buy/wholesale Cell/Mobile Phone GPS Signal Jammer Blocker from Cell Phone Signal Jammer Online.performed between the Sprint and Verizon LTE networks with two iPhone 6.. Labobbon emf blocker for cell phone / laptop/ tablet/ kindle/ router/ wifi | protect you and your family from radiation,you can stop the annoying phone calls,. There's a good chance it can be resuscitated - …,browse and install your favorite android apps and games on your android phone …,3w high power 3g cell phone blocker - 20 metersmain features,so people will not by disturbed by the noise by using a 4g cell phone jammer,find great deals on ebay for lg 3 cell phone,free pc audio recorder latest version,cellphone jammers are illegal,. 
These are external links and will open in a new window middlesbrough is the best place in the uk to get mobile 4g coverage while bournemouth is the worst,iPhone 8 and Samsung Galaxy S9 – when you compare mobile phone offers with MoneySuperMarket,Call your carrier and ask nicely—if your contract has expired,frequently asked questions (faqs) enforcement bureau,products like jerdon 5x led rectangular wall mount mirror in chrome.find great deals on ebay for 3 phase electric meter in electrical meters.up to 20 meters working radius,. Search antenna booster for cell phone.know where your money is going,wireless tenants utilize cell towers to deploy various technologies to a subscriber base.. 433mhz or 868mhz wireless alarm system,This is what I got you can look it over and see if make sense or their most likely a better way to ask.we’ve updated our 5ghz wifi article to help address some of the points our readers have brought up,.
import mimetypes
import socket
from urllib.parse import quote

import gunicorn.app.base
from whitenoise import WhiteNoise

from .request import Request
from .utils import make_active_helper


def _get_local_ip():
    """Best-effort discovery of this machine's LAN IP address."""
    ip = socket.gethostbyname(socket.gethostname())
    if not ip.startswith("127."):
        return ip
    # Hostname resolved to loopback: ask the routing table instead by
    # "connecting" a UDP socket (no packet is actually sent).
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # doesn't even have to be reachable
        sock.connect(("8.8.8.8", 1))
        ip = sock.getsockname()[0]
    except Exception:
        ip = "127.0.0.1"
    finally:
        sock.close()
    return ip


DISPLAY = """
 ┌─────────────────────────────────────────────────┐
 │ Clay is running                                 │
 │                                                 │
 │ - Your machine:  {local}│
 │ - Your network:  {network}│
 │                                                 │
 │ Press `ctrl+c` to quit.                         │
 └─────────────────────────────────────────────────┘
"""


def _display_running_message(host, port):  # pragma: no cover
    # Pad URLs to 29 chars so the box borders line up.
    local = "{:<29}".format(f"http://{host}:{port}")
    network = "{:<29}".format(f"http://{_get_local_ip()}:{port}")
    print(DISPLAY.format(local=local, network=network))


def on_starting(server):
    """Gunicorn hook"""
    _display_running_message(*server.address[0])


class GunicornMiddleware(gunicorn.app.base.BaseApplication):
    """Embeds gunicorn so the app can be served programmatically."""

    def __init__(self, app, **options):
        self.app = app
        self.options = options
        super().__init__()

    def load_config(self):
        # Only forward options gunicorn actually recognizes.
        config = {key: value for key, value in self.options.items()
                  if key in self.cfg.settings and value is not None}
        for key, value in config.items():
            self.cfg.set(key.lower(), value)

    def load(self):
        return self.app


class WSGIApp:
    """Minimal WSGI application that renders files through a Clay instance."""

    def __init__(self, clay):
        self.clay = clay

    def __call__(self, environ, start_response):
        return self.wsgi(environ, start_response)

    def wsgi(self, environ, start_response):
        request = Request(environ)
        body, status, headers = self.call(request)
        if hasattr(body, "encode"):
            body = body.encode("utf8")
        # Content-Length is computed here, on the *encoded* body; call()
        # and not_found() must therefore not set it themselves.
        headers.append(("Content-Length", str(len(body))))
        start_response(status, headers)
        return [body]

    def call(self, request):
        """Resolve the request path to a rendered file.

        Falls back to ``<path>/index.html``, then to :meth:`not_found`.
        HEAD requests get an empty body.
        """
        path = request.path
        if not self.clay.file_exists(path):
            path += "/index.html"
            if not self.clay.file_exists(path):
                return self.not_found(request)

        active = make_active_helper(request)
        if request.method == "HEAD":
            body = ""
        else:
            body = self.clay.render_file(path, request=request, active=active)
        mime = mimetypes.guess_type(path)[0] or "text/plain"
        response_headers = [("Content-Type", mime)]
        return body, "200 OK", response_headers

    def not_found(self, request):
        """Render the project's custom 404 page, or a plain-text fallback."""
        mime = "text/plain"
        body = f"File {request.path} not found."
        active = make_active_helper(request)
        for path in ["not-found.html", "_notfound.html", "404.html"]:
            if self.clay.file_exists(path):
                mime = "text/html"
                body = self.clay.render_file(path, request=request, active=active)
                break
        # No Content-Length here: wsgi() adds it after encoding; setting it
        # here too produced a duplicate header (and a wrong value for
        # non-ASCII bodies, since it counted characters, not bytes).
        response_headers = [("Content-Type", mime)]
        return body, "404 Not Found", response_headers

    def redirect_to(self, path):
        return "", "302 Found", [("Location", quote(path.encode("utf8")))]

    def run(self, host, port):  # pragma: no cover
        server = GunicornMiddleware(
            self,
            bind=f"{host}:{port}",
            worker_class="eventlet",
            accesslog="-",
            access_log_format="%(h)s %(m)s %(U)s -> HTTP %(s)s",
            on_starting=on_starting
        )
        server.run()


def make_app(clay):
    """Wrap a Clay instance in the WSGI app plus WhiteNoise static serving."""
    app = WSGIApp(clay)
    app.wsgi = WhiteNoise(
        app.wsgi,
        root=clay.static_path,
        prefix="static/",
        index_file=True,
        autorefresh=True,
    )
    return app
- Establish/refine and manage all procurement and logistics activities, including possible management of vehicles and drivers. - Supervise the administrative needs of the staff including flight bookings, obtaining legal documentation, visa renewal, medical evaluation, medical coverage, etc. - Manage the Company’s registration in Iraq, Iraqi Kurdistan, Germany, Austria and Switzerland, and in any other country as deemed necessary. - Identify, establish and manage accommodation for international staff, ensuring effective security. - Ensure that contractual commitments (e.g. for the rental of vehicles and/or premises) are expressed in forms appropriate for the circumstances. - Ensure that all necessary legal requirements are met, e.g. obtain vehicle registration, work permits, import licenses, and such, by liaising with government agencies. - According to budget availability and following Company’s objectives, participate in relevant sections of projects linked with administrative issues (capacity building, animation of workshops or training sessions for NGO partners) etc. - Supervision of cash books and bank books management. - Co-ordinate and maintain a cash forecasting system to ensure that adequate funds are available to meet the working requirements of the programs. - Consolidation of various locations’ accountancies on SAGA software. - Preparation of audits (general audit, donors’ audits). - Prepare the consolidated budget for all grants. - A lot of traveling to our International Facilities in Austria, Saudi Arabia, Ecuador, Madagascar, Brazil, Kuwait, the Netherlands, Poland, China, Lebanon, Kenya, etc. is involved in this position, which is why OPEC must acquire Diplomatic Passports for all selected employees. - Excellent written and spoken English is essential; French, Kurdish or Arabic language skills desirable.
"""Directory-watching logic. Contains :class:`~gitobox.watch.DirectoryWatcher`, the class that monitors the directory for changes. Uses `pyinotify`, so it's only available on Linux. """ from __future__ import unicode_literals import logging from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler from gitobox.timer import ResettableTimer class DirectoryWatcher(FileSystemEventHandler): ALL_CHANGED = None def __init__(self, folder, callback, lock, timeout): self._callback = callback self.observer = Observer() self._folder = folder self._changes = set() self.observer.schedule(self, str(folder), recursive=True) self._timer = ResettableTimer(timeout, self._timer_expired, lock=lock) def assume_all_changed(self): self._changes.add(DirectoryWatcher.ALL_CHANGED) self._timer.start() def run(self): self.observer.start() def _timer_expired(self): changes = self._changes self._changes = set() logging.info("Directory stable, syncing...") if DirectoryWatcher.ALL_CHANGED in changes: self._callback() else: self._callback(changes) def on_moved(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Moved %s: from %s to %s", what, event.src_path, event.dest_path) self._changes.add(event.src_path) self._changes.add(event.dest_path) self._timer.start() def on_created(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Created %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start() def on_deleted(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Deleted %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start() def on_modified(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Modified %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start()
Where do we get the stuff? Width (to furthest point): 23.5" Depth (to furthest point): 26.75" Height (to tallest point): 34.75" Arm Width Thickness: Dimensions may change without immediate updates. If dimensions must be precise, please consult directly with us for any recent changes/updates. Dimensions are typically accurate within 1/2". Dimensions deemed to be accurate, but slight variations can exist in each product. Item is unassembled - some assembly required. Customer Reviews: 5 of 5 Total Reviews: 7 Write a review. Good quality. One or two slight defects only. Could use step-by-step assembly instructions, but it was not too hard to figure out. Like having extra screws. Chairs are as expected. Priced well. Would order. again. Having once worked at Smith & Hawken and having sold a lot of high end teak furniture, I can tell you that this teak, though not the highest grade, is excellent quality. The main difference is that there is some inconsistency in the wood color, but frankly, for the price, I can live with this, and I think over time as the teak ages, it will all even out anyway. I wanted something for the front of the house to kind of warm it up and make use of a stone patio I had built. The furniture looks amazing and I've caught my myself parking my tush on the bench more than a few times. Who knew that it would be comfortable too - win-win!! Got the Sumbawa bench and arm chair and a side table. The bench had some minor shipping damage and customer service was amazingly fast and accommodating. Assembly was quick and painless. Just love the way it looks. After shopping our local outdoor furniture store and finding out their teaks chairs started at $399 each, I sought out a plan B. That's when I happened across Teak Closeouts. I found their website to be helpful and informative. It was a hard choice, but we decided the Sumbawa arm chair fit best on our new outdoor kitchen/fireplace area. Took a shot and ordered 4 chairs and a side table. 
To my delight, we received our shipment two days later. The chairs were pre-drilled and very easy to put together. We had to make one quick call to customer service and got a very helpful person on-the-spot. That's when we found out that we were the problem, not the chairs. Overall, I am extremely pleased with the quality and looks of the chairs and with the whole experience with Teak Closeouts. We have already recommended them to several friends and plan to order again in the future. I wanted a teak horse, but my husband put a halt to that! I ordered one of these to have a sturdy chair to use for sit-down showers as I recovered from total knee replacement. It is great and only slightly more expensive than those flimsy plastic and aluminum shower seats. Very stable and the seat is high enough that I can easily stand from the chair, even with my new knee. Now I have ordered a second chair to keep in my living room because it is so comfortable and the right height. Once I am recovered we plan to move both of these chairs to the patio for outdoor use, but I'm tempted to keep them in the house for everyday use in the TV room. Copyright © 2008-2017 Teak Closeouts, LLC. All Rights Reserved.
'''
    ios-s3-dist.py

    Python 3 Script to assist in distributing ad-hoc and enterprise iOS builds.
    Uploads build to S3 and creates a manifest.plist file to install it with.
    Required information should be profiled in a 'config.json' file.

    Usage:
    ios-dist.py {filename or path to build}
'''

def main(build_filename):
    """Upload an .ipa build plus a generated manifest.plist to S3.

    ``build_filename`` is the path to the .ipa.  S3 credentials and the
    bucket name are read from 'config.json' in the working directory.
    Prints the resulting build/manifest URLs and the itms-services install
    link on success; prints an error and cleans up on failure.
    """
    import tinys3, json, os, plistlib, shutil, zipfile

    with open('config.json') as config_file:
        cnfg = json.load(config_file)

    # ---- Get information from build's Info.plist
    with zipfile.ZipFile(build_filename) as zfile:
        for name in zfile.namelist():
            if name.endswith('Info.plist'):
                zfile.extract(name, 'temp')
                shutil.move('temp/'+name, 'temp/Info.plist')
                shutil.rmtree('temp/Payload')

    with open('temp/Info.plist', 'rb') as info_file:
        info = plistlib.load(info_file)
    bundle_name = info['CFBundleName']
    bundle_identifier = info['CFBundleIdentifier']
    bundle_version = info['CFBundleVersion']

    # ---- Determine which build # this is by the number of existing builds
    conn = tinys3.Connection(cnfg['s3_access_key'], cnfg['s3_secret_key'])
    uploaded_builds = conn.list('b/'+bundle_name+'-'+bundle_version,
                                cnfg['bucket_name'])
    b_num = 1
    for x in uploaded_builds:
        b_num += 1
    build_number = 'b{0}'.format(b_num)

    # ---- Generate filenames from extracted information
    # Ex: 'AppName-2.0-b5.ipa'
    bd_filename = bundle_name+'-'+bundle_version+'-'+build_number+'.ipa'
    # Ex: 'manifest-2.0-b5.plist'
    mn_filename = 'manifest-'+bundle_version+'-'+build_number+'.plist'

    # ---- Create manifest.plist file from template
    #      {0} - URL to .ipa
    #      {1} - Bundle identifier
    #      {2} - Bundle version
    #      {3} - Bundle name
    with open('manifest-template', 'r') as template_file:
        manifest_data = template_file.read().format(
            'https://s3.amazonaws.com/'+cnfg['bucket_name']+'/b/'+bd_filename,
            bundle_identifier,
            bundle_version,
            bundle_name)

    with open('temp/manifest.plist', 'w') as manifest_file:
        manifest_file.write(manifest_data)

    # ---- Upload build and manifest to S3
    print('\nUploading build...')
    with open(build_filename, 'rb') as build_file:
        r = conn.upload(bd_filename, build_file, cnfg['bucket_name']+'/b')
    if r.status_code != 200:
        # BUG FIX: placeholder was '{0)', which makes .format() raise
        # ValueError instead of printing the status code.
        print('Error: Build upload unsuccessful (Status code {0})'
              .format(r.status_code))
        shutil.rmtree('temp')
        return

    print('Uploading manifest...')
    with open('temp/manifest.plist', 'rb') as manifest_file:
        r = conn.upload(mn_filename, manifest_file, cnfg['bucket_name']+'/m')
    if r.status_code != 200:
        # Same '{0)' placeholder bug fixed here.
        print('Error: Manifest upload unsuccessful (Status code {0})'
              .format(r.status_code))
        # Try to clean up the now-orphaned build upload
        conn.delete(bd_filename, cnfg['bucket_name']+'/b')
        shutil.rmtree('temp')
        return

    # ---- Clean up and finish
    shutil.rmtree('temp')
    print('\nUpload successful! ({0})\n'.format(bd_filename))
    aws = 'https://s3.amazonaws.com/'
    b_url = aws+cnfg['bucket_name']+'/b/'+bd_filename
    m_url = aws+cnfg['bucket_name']+'/m/'+mn_filename
    print('-'*32)
    print('Build : {0}'.format(b_url))
    print('Manifest : {0}'.format(m_url))
    itms = '\nitms-services://?action=download-manifest&url={0}'
    print(itms.format(m_url))
    print('-'*32+'\n')


if __name__ == '__main__':
    import sys
    if len(sys.argv) == 2:
        build_path = sys.argv[1]
        main(build_path)
    else:
        print('\nUsage:\n\tios-dist.py {filename or path to build}\n')
Nelson's Truck & Tire provides Tire Purchase & Installation services to Pleasanton, TX, Jourdanton, TX, Tilden, TX, and other surrounding areas. Why Should You Have Mobile Tire Purchase & Installation Services Performed by Nelson's Truck & Tire? We proudly service the Tire Purchase & Installation needs of customers in Pleasanton, TX, Jourdanton, TX, Tilden, TX, and other surrounding areas.
'''
    objtoolz.metas.memoized
    ``````````````````````
    Metaclass that allows memoization of instances
'''
from toolz import memoize
from ..compat import wraps

__all__ = ('Memoized',)


def _default_cache_key(args, kwargs):
    """Build a cache key from both positional and keyword arguments.

    By default, toolz.memoize will only cache positional args if no cache
    key is passed and it can't determine if there's keyword arguments.
    However, this will cause memoize to cache *both* if a cache key func
    isn't provided.
    """
    # ``or None`` normalizes empty args/kwargs so that f() and f(*(), **{})
    # hash to the same key; frozenset makes kwargs order-insensitive.
    return (args or None, frozenset(kwargs.items()) or None)


class Memoized(type):
    """Metaclass for memoizing object instantiation.

    In Python 3 a cache type and cacheing key can be specified at class
    creation like this:

    .. code-block:: python

        class MyClass(metaclass=Memoized, cache=OrderedDict())

    However, in Python 2, they must be specified after the fact
    """
    def __new__(mcls, name, bases, attrs, **kwargs):
        # Swallow the extra metaclass keywords (key/cache) here; type.__new__
        # would reject them. __init__ below consumes them by name.
        return super(Memoized, mcls).__new__(mcls, name, bases, attrs)

    def __init__(cls, name, bases, attrs, key=_default_cache_key, cache=None):
        # NOTE: a shared default cache dict would be a cross-class leak, so a
        # fresh mapping is created per class when none is supplied.
        if cache is None:
            cache = {}

        cls._cache = cache
        # wrap in staticmethod for PY2 support
        # otherwise it's created as an UnboundMethod
        cls._cache_key = staticmethod(key)
        return super(Memoized, cls).__init__(name, bases, attrs)

    def __call__(cls, *args, **kwargs):
        """Memoize actual object instantiation from the created class
        """
        # The memoized closure delegates to type.__call__ so that __new__ and
        # __init__ run only on a cache miss; hits return the cached instance.
        @wraps(cls)
        @memoize(cache=cls._cache, key=cls._cache_key)
        def rememberer(*a, **k):
            return super(Memoized, cls).__call__(*a, **k)

        return rememberer(*args, **kwargs)
On Wednesday 11th November children from our School Council, House Captains and our Head Boy and Head Girl attended a service in Central Park to mark Remembrance Day. Bishop Alan Williams led us in prayer in what was a very moving service. Nicole read beautifully and Helen and Pedro laid a wreath on behalf of all at St Michael's. As always the children behaved impeccably and were a credit to us.
# coding=utf-8
#
#  custom_function_example.py - Custom function example file for importing into Mycodo
#
#  Copyright (C) 2015-2020 Kyle T. Gabriel <[email protected]>
#
#  This file is part of Mycodo
#
#  Mycodo is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  Mycodo is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with Mycodo. If not, see <http://www.gnu.org/licenses/>.
#
#  Contact at kylegabriel.com
#
import datetime
import time

from flask_babel import lazy_gettext

from mycodo.databases.models import CustomController
from mycodo.functions.base_function import AbstractFunction
from mycodo.utils.constraints_pass import constraints_pass_positive_value
from mycodo.utils.database import db_retrieve_table_daemon

# Framework-consumed metadata: Mycodo reads these exact keys to register the
# Function and render its settings form. Do not rename keys.
FUNCTION_INFORMATION = {
    'function_name_unique': 'example_function_loop_with_status',
    'function_name': 'Example: Simple Loop with Status',

    'message': 'This is an example function that will increment a stored variable once every 60 seconds. '
               'A status call will be made to the function from the web UI and the return string along '
               'with the current time will be displayed for the user every Status Period. The Status '
               'Widget will also display this status.',

    'options_disabled': [
        'measurements_select',
        'measurements_configure'
    ],

    # These options will appear in the settings of the Function,
    # which the user can use to set different values and options for the Function.
    # These settings can only be changed when the Function is inactive.
    'custom_options': [
        {
            'id': 'period',
            'type': 'float',
            'default_value': 60,
            'required': True,
            'constraints_pass': constraints_pass_positive_value,
            'name': lazy_gettext('Period (seconds)'),
            'phrase': lazy_gettext('The duration (seconds) between measurements or actions')
        },
        {
            'id': 'start_offset',
            'type': 'integer',
            'default_value': 10,
            'required': True,
            'name': 'Start Offset',
            'phrase': 'The duration (seconds) to wait before the first operation'
        },
        {
            'id': 'period_status',
            'type': 'integer',
            'default_value': 60,
            'required': True,
            'name': 'Status Period (seconds)',
            'phrase': 'The duration (seconds) to update the Function status on the UI'
        }
    ]
}


class CustomModule(AbstractFunction):
    """
    Class to operate custom controller.

    Increments a counter once per configured period; the daemon calls
    loop() repeatedly and function_status() when the UI requests status.
    """
    def __init__(self, function, testing=False):
        super(CustomModule, self).__init__(function, testing=testing, name=__name__)

        # Absolute time (epoch seconds) of the next scheduled loop action.
        self.timer_loop = None
        self.loop_counter = 0

        #
        # Initialize what you defined in custom_options, above
        #

        self.period = None
        self.start_offset = None
        self.period_status = None

        #
        # Set custom options
        #
        custom_function = db_retrieve_table_daemon(
            CustomController, unique_id=self.unique_id)
        self.setup_custom_options(
            FUNCTION_INFORMATION['custom_options'], custom_function)

        if not testing:
            self.initialize_variables()

    def initialize_variables(self):
        # import controller-specific modules here
        # You may import something you defined in dependencies_module

        # Schedule the first action start_offset seconds from now.
        self.timer_loop = time.time() + self.start_offset

    def loop(self):
        # Not yet due: bail out until the scheduled time has passed.
        if self.timer_loop > time.time():
            return

        # Advance in whole periods so missed cycles don't pile up drift.
        while self.timer_loop < time.time():
            self.timer_loop += self.period

        self.logger.info(
            "This text will appear in the Daemon Log as an INFO line")

        self.logger.debug(
            "This text will appear in the Daemon Log as an DEBUG line and "
            "will only appear if Log Level: Debug is enabled")

        self.loop_counter += 1

        self.logger.info("Loop counter: {}".format(self.loop_counter))

    def function_status(self):
        # Returned dict is rendered by the web UI / Status Widget.
        return_dict = {
            'string_status': "This info is being returned from the Function Module."
                             "\nCurrent time: {}"
                             "\nLoop count: {}".format(
                                 datetime.datetime.now(),
                                 self.loop_counter),
            'error': []
        }
        return return_dict

    def button_one(self, args_dict):
        self.logger.error("Button One Pressed!: {}".format(int(args_dict['button_one_value'])))
        return "Here return message will be seen in the web UI. " \
               "This only works when 'wait_for_return' is set True."

    def button_two(self, args_dict):
        self.logger.error("Button Two Pressed!: {}".format(int(args_dict['button_two_value'])))
        return "This message will never be seen in the web UI because this process is threaded"
Tulemas on põnevalt loominguline ja sportlik noortevahetus Leedus! MTÜ Edela Eesti Arenduskeskus is promoting a call for participants for a youth exchange called "Catching Winds" which will take place on the 4th until 13th September 2014 in Kintai, Lithuania. The key theme of project Catching Way(s) of the Winds: Art and Sport – balance for healthier body and soul. Today the most actual questions and interest of young peoples all around the Europe are: active and healthy lifestyle, ways of expressing yourself, valuables activities and busyness during freetime, habits of healthy lifestyle. During youth mobility program, 28 participants from Estonia, Poland, Latvia and Lithuania will try unique methods for expressing and developing creativity, learn ways to stay physically active and explore healthier lifestyle. The main idea is to invite people of different characters and to create attractive and extraordinary conditions for personal and interpersonal learning in multicultural environment. We want to involve young people who uses there athletic side to express their personality, young people who are more fond of art and know how to use their creativity for handcrafts, also, youth who more or less know how to combine and harmonize art with sport, and most importantly, we want to invite young people who faces time, finance, geographical or psychological challenges and there for has less opportunities to engage into art or sport. This youth exchange will bring together 28 participants from 4th different countries. The youth exchange will be held in English. The Erasmus + Programme will cover 100% of the accommodation, food and activities and will reimburse 100% of the travel costs for the participants (up to 80 euros). There will be a participation fee of 10 Eur. per participant.
from ctypes import *

import sys
import os

import bacon
from bacon.core import lib
from bacon import native
from bacon import graphics

class Window(object):
    '''Properties of the game window.

    The window is constructed automatically when :func:`run` is called.  The
    :data:`window` singleton provides access to the members of this class
    both before and after ``run`` is called.

    For example, to set up some common window properties for a game::

        bacon.window.title = 'Destiny of Swords'
        bacon.window.width = 800
        bacon.window.height = 600

    All properties can be modified at runtime, for example to toggle in and
    out of fullscreen.
    '''
    def __init__(self):
        # Cached mirrors of native window state; kept in sync by the
        # property setters below, which forward changes to the native lib.
        self._width = -1
        self._height = -1
        self._resizable = False
        self._fullscreen = False
        self._target = None

        # Current scale/bias to apply from window space to target space
        self._target_offset_x = 0.0
        self._target_offset_y = 0.0
        self._target_scale = 0.0

        # When the native library is mocked (e.g. doc builds), skip the
        # ctypes queries and leave the placeholder values above.
        if not native._mock_native:
            width = c_int()
            height = c_int()
            lib.GetWindowSize(byref(width), byref(height))
            self._width = width.value
            self._height = height.value

            content_scale = c_float()
            lib.GetWindowContentScale(byref(content_scale))
            self._content_scale = content_scale.value

            # Default the title to the running script's filename.
            self.title = os.path.basename(sys.argv[0])

    def _get_width(self):
        return self._width
    def _set_width(self, width):
        lib.SetWindowSize(width, self._height)
        self._width = width
    width = property(_get_width, _set_width, doc='''Get or set the width of the drawable part of the window, in pixels.''')

    def _get_height(self):
        return self._height
    def _set_height(self, height):
        lib.SetWindowSize(self._width, height)
        self._height = height
    height = property(_get_height, _set_height, doc='''Get or set the height of the drawable part of the window, in pixels.''')

    def _get_title(self):
        return self._title
    def _set_title(self, title):
        # Native API expects UTF-8 bytes.
        lib.SetWindowTitle(title.encode('utf-8'))
        self._title = title
    title = property(_get_title, _set_title, doc='''Get or set the title of the window (a string)''')

    def _is_resizable(self):
        return self._resizable
    def _set_resizable(self, resizable):
        lib.SetWindowResizable(resizable)
        self._resizable = resizable
    resizable = property(_is_resizable, _set_resizable, doc='''If ``True`` the window can be resized and maximized by the user.  See :func:`Game.on_resize`.''')

    def _is_fullscreen(self):
        return self._fullscreen
    def _set_fullscreen(self, fullscreen):
        lib.SetWindowFullscreen(fullscreen)
        self._fullscreen = fullscreen
    fullscreen = property(_is_fullscreen, _set_fullscreen, doc='''Set to ``True`` to make the game fullscreen, ``False`` to play in a window.''')

    def _get_target(self):
        return self._target
    def _set_target(self, target):
        # Purely Python-side state; consumed by _begin_frame/_end_frame
        # (defined later in this module) for letterboxed rendering.
        self._target = target
    target = property(_get_target, _set_target, doc='''Optional image to use as the default render target.  If set, all rendering will be to this image, which will appear scaled and letterboxed if necessary in the center of the window.

        :attr:`width`, :attr:`height` and :attr:`content_scale` will return the dimensions of this target instead of the window dimensions.

        :type: :class:`Image`''')

    def _get_content_scale(self):
        return self._content_scale
    def _set_content_scale(self, content_scale):
        lib.SetWindowContentScale(content_scale)
        self._content_scale = content_scale
    content_scale = property(_get_content_scale, _set_content_scale, doc='''The scaling factor applied to the window.  On Windows this is always 1.0.  On OS X with a retina display attached, ``content_scale`` will default to 2.0.

        Fonts and offscreen render targets are created at this content scale by default, to match the pixel density.

        You can explicitly set ``content_scale`` to 1.0, disabling the high-resolution framebuffer.  You should do so before loading any assets.

        :type: float
        ''')

#: The singleton :class:`Window` instance.
# Module-level singleton; created at import time (queries the native lib
# unless it is mocked).
window = Window()

def _window_resize_event_handler(width, height):
    # Keep the cached dimensions in sync, then notify the running game.
    window._width = width
    window._height = height
    bacon._current_game.on_resize(width, height)

# Render target captured at the start of the current frame, so that changing
# window.target mid-frame does not unbalance push/pop below.
_window_frame_target = None

def _begin_frame():
    global _window_frame_target
    _window_frame_target = window._target
    if _window_frame_target:
        graphics.push_target(_window_frame_target)

        # Fit the target into the window preserving aspect ratio
        # (letterbox/pillarbox), recording the scale and offsets used so
        # _end_frame can draw it back and input code can map coordinates.
        target_aspect = _window_frame_target._width / float(_window_frame_target._height)
        window_aspect = window._width / float(window._height)
        if target_aspect > window_aspect:
            width = window._width
            height = width / target_aspect
        else:
            height = window._height
            width = height * target_aspect
        window._target_scale = width / float(_window_frame_target._width)
        window._target_offset_x = int(window._width / 2 - width / 2)
        window._target_offset_y = int(window._height / 2 - height / 2)
    else:
        # No offscreen target: identity mapping from window to target space.
        window._target_scale = 1.0
        window._target_offset_x = window._target_offset_y = 0.0

def _end_frame():
    global _window_frame_target
    if _window_frame_target:
        # Stop rendering into the offscreen target, clear the real window,
        # then blit the target scaled into the letterboxed rectangle
        # computed in _begin_frame.
        graphics.pop_target()
        graphics.clear(0, 0, 0, 1)
        graphics.set_color(1, 1, 1, 1)
        x = window._target_offset_x
        y = window._target_offset_y
        width = _window_frame_target._width * window._target_scale
        height = _window_frame_target._height * window._target_scale
        graphics.draw_image(_window_frame_target, x, y, x + width, y + height)
        _window_frame_target = None
Well, as you know, yesterday we met our first goal of $3,500 for 70 children to get off the mats on the ground into bunk beds with mattresses. The campaign was so amazing! Thanks again to all who participated! As I was thinking of how we could finish this campaign strong, I thought about pillows. The most logical piece of bedding after the bed is giving a pillow! Last year, through a Mother’s Group in Kansas City, we were able to get pillows and pillow cases for the 200 children at the time. Not only were they pillow cases but these cases had the children’s name on them. These women spent hours upon hours puffy painting and writing each child’s name on the pillow case. It was an amazing gift of love for our children. So, 200 of our current 400 children have bunk beds, mattresses, and pillows. We were able to give them their pillows at camp May 2012 and it was such a special time. We have 200 more children who have bunk beds and mattresses but they don’t have pillows yet. The Mother’s Group in Kansas has thought about doing pillow cases again for the new 200 children. Wouldn’t it be great if they made the pillow cases for the children, with their names on them and you and I can give the gift of the pillow? That is such great team work! Each pillow is $6. We need 200 pillows. The total would $1,200. Do you think we can do it in one week? We only have 7 days until my birthday! If 34 people gave $35 we would have it covered. I think I still have 34 friends out there interested in giving these kids a pillow! Our new goal: $1,200 in 7 days to give 200 children personalized pillows and pillow cases! We can do it! To donate click the link below.
#
#  MythBox for XBMC - http://mythbox.googlecode.com
#  Copyright (C) 2011 [email protected]
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
#
# NOTE(review): this module is Python 2 only (iteritems, `raise Exception, msg`).
import logging
import xbmcgui
import mythbox.msg as m

from mythbox.bus import Event
from mythbox.mythtv.db import inject_db
from mythbox.mythtv.conn import inject_conn
from mythbox.mythtv.domain import StatusException, Job
from mythbox.mythtv.enums import JobType, JobStatus
from mythbox.ui.player import MountedPlayer, StreamingPlayer, NoOpCommercialSkipper, TrackingCommercialSkipper
from mythbox.ui.schedules import ScheduleDialog
from mythbox.ui.toolkit import Action, BaseWindow, window_busy
from mythbox.util import safe_str, catchall, catchall_ui, run_async, coalesce, to_kwargs
from mythbox.ui import toolkit

log = logging.getLogger('mythbox.ui')


class RecordingDetailsWindow(BaseWindow):
    """XBMC window showing details of one recording: play, delete, re-record,
    job-queue management, schedule editing, and paging through the recording
    list via the supplied programIterator."""

    def __init__(self, *args, **kwargs):
        BaseWindow.__init__(self, *args, **kwargs)
        # Pull the injected dependencies out of kwargs onto self, then flatten
        # the named caches out of cachesByName for convenience.
        [setattr(self,k,v) for k,v in kwargs.iteritems() if k in ('settings', 'translator', 'platform', 'fanArt', 'cachesByName', 'programIterator', 'bus',)]
        [setattr(self,k,v) for k,v in self.cachesByName.iteritems() if k in ('mythChannelIconCache', 'mythThumbnailCache', 'domainCache')]
        self.t = self.translator.get
        self.program = self.programIterator.current()
        self.isDeleted = False
        self.initialized = False
        self.streaming = self.settings.getBoolean('streaming_enabled')
        self.channels = None

    @catchall_ui
    def onInit(self):
        # onInit may fire more than once; guard so controls/dispatch are
        # wired only the first time.
        if not self.initialized:
            self.initialized = True
            self.win = xbmcgui.Window(xbmcgui.getCurrentWindowId())

            # Buttons
            self.playButton = self.getControl(250)
            self.playSkipButton = self.getControl(251)
            self.deleteButton = self.getControl(252)
            self.rerecordButton = self.getControl(253)
            self.firstInQueueButton = self.getControl(254)
            self.refreshButton = self.getControl(255)
            self.editScheduleButton = self.getControl(256)
            self.advancedButton = self.getControl(257)

            # Maps control id -> handler; consumed by onClick.
            self.dispatcher = {
                self.playButton.getId()        : self.play,
                self.playSkipButton.getId()    : self.playWithCommSkip,
                self.deleteButton.getId()      : self.delete,
                self.rerecordButton.getId()    : self.rerecord,
                self.firstInQueueButton.getId(): self.moveToFrontOfJobQueue,
                self.refreshButton.getId()     : self.refresh,
                self.editScheduleButton.getId(): self.editSchedule,
                301:self.doCommFlag,
                302:self.doTranscode,
                303:self.doUserJob1,
                304:self.doUserJob2,
                305:self.doUserJob3,
                306:self.doUserJob4,
                307:self.doRefreshFanart
            }
            self.render()

    def doRefreshFanart(self):
        # Drop cached fan art for this program and broadcast the refresh.
        self.fanArt.clear(self.program)
        self.refresh()
        self.bus.publish({'id' : Event.FANART_REFRESHED, 'program' : self.program})
        toolkit.showPopup('Fan Art', 'Refreshed Fan Art for %s' % self.program.title(), 5000)

    def doCommFlag(self):
        self.queueJob(JobType.COMMFLAG)

    def doTranscode(self):
        self.queueJob(JobType.TRANSCODE)

    # NOTE(review): `JobType.USERJOB & JobType.USERJOB1` masks the user-job
    # bit out of the aggregate USERJOB flag — presumably equivalent to
    # passing USERJOBn directly; confirm against JobType's bit values.
    def doUserJob1(self):
        self.queueJob(JobType.USERJOB & JobType.USERJOB1)

    def doUserJob2(self):
        self.queueJob(JobType.USERJOB & JobType.USERJOB2)

    def doUserJob3(self):
        self.queueJob(JobType.USERJOB & JobType.USERJOB3)

    def doUserJob4(self):
        self.queueJob(JobType.USERJOB & JobType.USERJOB4)

    @inject_db
    def queueJob(self, jobType):
        """Add a backend job of jobType for this recording and report its
        queue position in a popup."""
        job = Job.fromProgram(self.program, jobType)
        self.db().addJob(job)
        numJobs = len(self.db().getJobs(jobStatus=JobStatus.QUEUED))
        toolkit.showPopup('Job Queue', 'Queued as job %d of %d ' % (numJobs,numJobs), 5000)

    @inject_db
    def autoexpire(self):
        # Toggle the recording's auto-expire flag.
        self.db().setRecordedAutoexpire(
            self.program.getChannelId(),
            self.program.starttime(),
            not self.program.isAutoExpire())
        self.refresh()

    def delete(self):
        """Delete this recording (optionally confirming first); the actual
        backend call runs asynchronously while the window closes."""
        yes = True
        if self.settings.isConfirmOnDelete():
            yes = xbmcgui.Dialog().yesno(self.t(m.CONFIRMATION), self.t(m.ASK_DELETE_RECORDING))

        @run_async
        @catchall
        @inject_conn
        def deleteAsync(self):
            self.conn().deleteRecording(self.program)

        if yes:
            deleteAsync(self)
            self.isDeleted = True
            self.close()

    def rerecord(self):
        """Delete and re-record this recording (optionally confirming first);
        backend call runs asynchronously while the window closes."""
        yes = True
        if self.settings.isConfirmOnDelete():
            yes = xbmcgui.Dialog().yesno(self.t(m.CONFIRMATION), self.t(m.ASK_RERECORD_RECORDING))

        @run_async
        @catchall
        @inject_conn
        def rerecordAsync(self):
            self.conn().rerecordRecording(self.program)

        if yes:
            rerecordAsync(self)
            self.isDeleted = True
            self.close()

    @inject_db
    def moveToFrontOfJobQueue(self):
        # Only a queued commercial-flagging job for this program can be bumped.
        jobs = self.db().getJobs(program=self.program, jobStatus=JobStatus.QUEUED, jobType=JobType.COMMFLAG)
        if len(jobs) == 1:
            job = jobs[0]
            job.moveToFrontOfQueue()
            self.refresh()
        else:
            xbmcgui.Dialog().ok(self.t(m.ERROR), self.t(m.JOB_NOT_FOUND))

    @inject_conn
    def canStream(self):
        # TODO: Merge with duplicate method in RecordingDetailsWindow
        if not self.conn().protocol.supportsStreaming(self.platform):
            xbmcgui.Dialog().ok(self.t(m.ERROR),
                'Streaming from a MythTV %s backend to XBMC' % self.conn().protocol.mythVersion(),
                '%s is broken. Try playing again after deselecting' % self.platform.xbmcVersion(),
                'MythBox > Settings > MythTV > Enable Streaming')
            return False
        return True

    @catchall_ui
    def play(self):
        """Play the recording without commercial skipping, streamed or from
        the mounted filesystem depending on settings."""
        log.debug("Playing %s .." % safe_str(self.program.title()))
        deps = to_kwargs(self, ['program', 'mythThumbnailCache', 'translator', 'settings', 'platform'])

        if self.streaming:
            if not self.canStream():
                return
            # Play via myth://
            p = StreamingPlayer(**deps)
            p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator))
        else:
            # Play via local fs
            p = MountedPlayer(**deps)
            p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator))
            del p

    def playWithCommSkip(self):
        """Play the recording; when playing from the local filesystem,
        commercials are skipped via TrackingCommercialSkipper (streaming
        playback cannot skip)."""
        log.debug("Playing with skip %s .." % safe_str(self.program.title()))
        deps = to_kwargs(self, ['program', 'mythThumbnailCache', 'translator', 'settings', 'platform'])

        if self.streaming:
            if not self.canStream():
                return
            # Play via myth://
            p = StreamingPlayer(**deps)
            p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator))
        else:
            # Play via local fs
            p = MountedPlayer(**deps)
            p.playRecording(TrackingCommercialSkipper(p, self.program, self.translator))
            del p

    @inject_db
    def editSchedule(self):
        """Open the schedule editor for this recording's schedule, if any."""
        if self.program.getScheduleId() is None:
            xbmcgui.Dialog().ok(self.t(m.INFO), self.t(m.ERR_NO_RECORDING_SCHEDULE))
            return

        schedules = self.db().getRecordingSchedules(scheduleId=self.program.getScheduleId())
        if len(schedules) == 0:
            xbmcgui.Dialog().ok(self.t(m.INFO), self.t(m.ERR_SCHEDULE_NOT_FOUND) % self.program.getScheduleId())
            return

        editScheduleDialog = ScheduleDialog(
            'mythbox_schedule_dialog.xml',
            self.platform.getScriptDir(),
            forceFallback=True,
            schedule=schedules[0],
            **to_kwargs(self, ['translator', 'platform', 'settings', 'mythChannelIconCache']))
        editScheduleDialog.doModal()

        if editScheduleDialog.shouldRefresh:
            self.render()

    def nextRecording(self):
        self.program = self.programIterator.next()
        self.render()

    def previousRecording(self):
        self.program = self.programIterator.previous()
        self.render()

    def isAdvancedBladeActive(self):
        # True when focus is on one of the "advanced" blade buttons.
        buttonIds = [self.firstInQueueButton.getId(),300,301,302,303,304,305,306]
        return self.getFocusId() in buttonIds

    @catchall_ui
    def onAction(self, action):
        id = action.getId()
        if id in (Action.PREVIOUS_MENU, Action.PARENT_DIR):
            # Back out of the advanced blade first; close otherwise.
            if self.isAdvancedBladeActive():
                self.setFocus(self.advancedButton)
            else:
                self.close()
        elif id == Action.PAGE_UP:
            self.previousRecording()
        elif id == Action.PAGE_DOWN:
            self.nextRecording()
        else:
            log.debug('unhandled action = %s  id = %s' % (action, action.getId()))

    def onFocus(self, controlId):
        pass

    @catchall_ui
    @window_busy
    def onClick(self, controlId):
        #log.debug('onClick %s ' % controlId)
        source = self.getControl(controlId)
        try:
            self.dispatcher[source.getId()]()
            return True
        except KeyError:
            # Control has no registered handler.
            return False

    @inject_conn
    def refresh(self):
        """Re-fetch this recording from the backend and re-render."""
        refreshedProgram = self.conn().getRecording(self.program.getChannelId(), self.program.recstarttime())
        if refreshedProgram:
            self.program = refreshedProgram
            self.render()
        else:
            # Python 2 raise syntax.
            raise Exception, self.t(m.RECORDING_NOT_FOUND) % self.program.title()

    @window_busy
    def render(self):
        self.renderDetail()
        self.renderChannel()
        self.renderThumbnail()
        self.renderUserJobs()
        self.renderCommBreaks()   # async
        self.renderSeasonAndEpisode(self.program)  # async

    def renderDetail(self):
        # Push all textual program details into window properties for the skin.
        s = self.program
        self.setWindowProperty('title', s.fullTitle())
        self.setWindowProperty('airDate', s.formattedAirDateTime())
        self.setWindowProperty('originalAirDate', s.formattedOriginalAirDate())
        self.setWindowProperty('channel', s.formattedChannel())
        self.setWindowProperty('description', s.formattedDescription())
        self.setWindowProperty('category', s.category())
        self.setWindowProperty('episode', '...')
        self.setWindowProperty('fileSize', s.formattedFileSize())
        self.setWindowProperty('autoExpire', (('No', 'Yes')[s.isAutoExpire()]))
        self.setWindowProperty('commBreaks', '...')
        self.setWindowProperty('recordingNofM', self.t(m.RECORDING_N_OF_M) % (str(self.programIterator.index() + 1), str(self.programIterator.size())))

    @catchall
    @inject_db
    def renderChannel(self):
        # Lazily build a channelId -> channel lookup, then set the icon.
        if not self.channels:
            self.channels = {}
            for c in self.domainCache.getChannels():
                self.channels[c.getChannelId()] = c

        if self.program.getChannelId() in self.channels:
            icon = self.mythChannelIconCache.get(self.channels[self.program.getChannelId()])
            if icon:
                self.setWindowProperty('channelIcon', icon)

    def renderThumbnail(self):
        thumbFile = self.mythThumbnailCache.get(self.program)
        self.setWindowProperty('thumbnailShadow', 'mb-DialogBack.png')
        if thumbFile:
            self.setWindowProperty('thumbnail', thumbFile)
        else:
            self.setWindowProperty('thumbnail', 'mythbox-logo.png')
            log.error('Recording thumbnail preview image not found: %s' % safe_str(self.program.title()))

    @run_async
    @catchall
    @inject_db
    @coalesce
    def renderCommBreaks(self):
        """Asynchronously determine commercial-break status: either a break
        count, or progress of the flagging job in the queue."""
        self.playSkipButton.setEnabled(self.program.hasCommercials())
        self.firstInQueueButton.setEnabled(False)
        commBreaks = '-'
        if self.program.isCommFlagged():
            if self.program.hasCommercials():
                # Only move focus to Skip button if user hasn't changed the initial focus
                if self.getFocusId() == self.playButton.getId():
                    self.setFocus(self.playSkipButton)
                commBreaks = "%d" % len(self.program.getCommercials())
            else:
                commBreaks = self.t(m.NONE)
        else:
            jobs = self.db().getJobs(program=self.program, jobType=JobType.COMMFLAG)
            if len(jobs) == 1:
                job = jobs[0]
                if job.jobStatus == JobStatus.QUEUED:
                    position, numJobs = job.getPositionInQueue()
                    commBreaks = self.t(m.QUEUED_N_OF_M) % (position, numJobs)
                    if position != 1:
                        self.firstInQueueButton.setEnabled(True)
                elif job.jobStatus == JobStatus.RUNNING:
                    try:
                        commBreaks = self.t(m.N_AT_M_FPS) % ('%d%%' % job.getPercentComplete(), '%2.0f' % job.getCommFlagRate())
                    except StatusException:
                        commBreaks = job.comment
                else:
                    commBreaks = job.formattedJobStatus()

        if log.isEnabledFor(logging.DEBUG):
            commBreaks += ' (%s)' % self.program.getFPS()

        self.setWindowProperty('commBreaks', commBreaks)

    @run_async
    @catchall
    @coalesce
    def renderSeasonAndEpisode(self, boundProgram):
        """Asynchronously look up season/episode; re-runs itself if the user
        paged to a different recording while the lookup was in flight."""
        season, episode = None, None
        try:
            season, episode = self.fanArt.getSeasonAndEpisode(boundProgram)
        finally:
            if boundProgram == self.program:
                self.setWindowProperty('episode', ['-', '%sx%s' % (season, episode)][bool(season) and bool(episode)])
            else:
                log.debug('Program changed since spawning...recursing...')
                self.renderSeasonAndEpisode(self.program)

    @inject_db
    def renderUserJobs(self):
        # Hide user-job buttons that have no command configured on the backend;
        # label the rest with the backend-configured description.
        jobs = {
            'UserJob1': {'control':303, 'descColumn':'UserJobDesc1'},
            'UserJob2': {'control':304, 'descColumn':'UserJobDesc2'},
            'UserJob3': {'control':305, 'descColumn':'UserJobDesc3'},
            'UserJob4': {'control':306, 'descColumn':'UserJobDesc4'}
        }

        for jobName in jobs.keys():
            jobCommand = self.db().getMythSetting(jobName)
            jobButton = self.getControl(jobs[jobName]['control'])
            if jobCommand is None or len(jobCommand) == 0:
                jobButton.setVisible(False)
            else:
                jobButton.setLabel(self.db().getMythSetting(jobs[jobName]['descColumn']))
♦ Cold calling & Scheduling Appointments. ♦ Interacting with clients through verbal as well as written communication modes. ♦ Gathering client requirement and coordinating with developer team. ♦ Visit client for presentation and taking regular follow ups. ♦ Maintaining relationships with existing customers. communication modes. ♦ Reporting to the manager or head of the department.
import django
from itertools import chain

from .django_db_models_expressions import CTERef
from django.db.models import sql


class QuerySet:
    """Mixin adding common-table-expression (WITH ...) support to a Django queryset."""

    def attach(self, *querysets):
        """Attach each of *querysets* as a CTE of this query.

        Returns the combined query object with all CTEs registered.
        """
        clone = sql.WithQuery(self.query)
        for qs in querysets:
            clone.query.add_with(qs.query)
        return clone

    def as_insert(self, **kwargs):
        # INSERT ... SELECT is not supported yet; the sketch below is kept
        # (unreachable) as a reference for a future implementation.
        raise NotImplementedError("Not implemented yet")
        """
        clone = self._clone()
        clone.query = self.query.clone(sql.InsertSelectQuery)
        self._for_write = True
        clone.query.add_update_values(kwargs)
        if fields:
            fields = [self.model._meta.get_field(f) for f in fields]
        clone.query.insert_values(fields, objs, raw=raw)
        return clone
        """

    def as_update(self, **kwargs):
        """Convert this queryset into an UPDATE ... RETURNING query.

        *kwargs* become the SET values, exactly as in ``QuerySet.update()``.
        (Fix: removed leftover debug ``print()`` calls.)
        """
        clone = self._clone()
        clone.query = self.query.clone(sql.UpdateReturningQuery)
        self._for_write = True
        clone.query.add_update_values(kwargs)
        # Clear any annotations so that they won't be present in subqueries.
        clone.query._annotations = None
        return clone

    def with_literals(self, qs):
        # Placeholder -- intentionally does nothing yet.
        pass

    def ref(self, field):
        """Return a reference to *field* of this query usable from an attached query.

        NOTE: neither the query nor the field name are validated here.
        """
        # These are not validated
        return CTERef(with_query=self.query, field_name=field)


class LiteralQuerySet(django.db.models.QuerySet):
    """
    CTEs can be connected to a query to enable WITH style queries
    """

    def __init__(self, model=None, query=None, values=None, enum_field=None,
                 *args, **kwargs):
        """Create a queryset backed by literal VALUES rather than a table.

        *values* are appended immediately; *enum_field* optionally names a
        column that enumerates the rows.
        """
        query = query or sql.LiteralQuery(model)
        super().__init__(model=model, query=query, *args, **kwargs)
        if values:
            self.append(values)
        if enum_field:
            self.enum_field(enum_field)

    def enum_field(self, field_name):
        """Set the enumeration column name; returns self for chaining."""
        self.query.enum_field = field_name
        return self

    def clear(self):
        """Drop all literal values; returns self for chaining."""
        self.query.clear_values()
        return self

    def append(self, values):
        """Append *values* to the literal rows; returns self for chaining."""
        self.query.literal_values(values)
        return self

    # The operations below make no sense for a literal VALUES list and are
    # explicitly disabled.
    def defer(self, *fields):
        raise NotImplementedError("LiteralQuerySet does not implement defer()")

    def delete(self):
        raise TypeError("Queries with literal values can't be deleted")

    def order_by(self, *field_names):
        raise NotImplementedError("LiteralQuerySet does not implement order_by()")

    def distinct(self, *field_names):
        raise NotImplementedError("LiteralQuerySet does not implement distinct()")

    def extra(self, *args, **kwargs):
        raise NotImplementedError("LiteralQuerySet does not implement extra()")

    def reverse(self):
        raise NotImplementedError("LiteralQuerySet does not implement reverse()")
So…when I’m up @ MSU, and I go to Google, there is a link for Google Scholar on the main page…this doesn’t show up when I am off campus…does Google "know" I’m searching from an academic IP? Has anyone else noticed this? One Response to spies like us? Try accessing Google from a mobile phone… it takes you to their mobile site and translates all websites to what they consider a “phone-friendly” format.
'''
Created on Nov 5, 2014

@author: jesse

Bridges Android-phone accelerometer input (received over UDP) to crazyflie
flight commands pushed onto a command queue.
'''
import Queue
import SocketServer

from BaseCommand import BaseCommand
from FlyCommand import FlyCommand
from TerminateCommand import TerminateCommand
from getch import getch
from multiprocessing import Process
import multiprocessing
from threading import Thread
import time
import math


class Android(object):
    '''
    Allows for live keyboard input to calibrate or fly the crazyflie.
    '''
    # NOTE(review): these are class-level attributes, shared between all
    # instances; `stopThread` and `control` in particular would be shared if
    # more than one Android object were ever created.
    crazyFlie = None
    parameters = None
    data = None
    control = {}
    stopThread = False

    def __init__(self, crazyFlie, parameters, data, queue):
        '''
        Creates the new object.
        '''
        # NOTE(review): the `parameters` and `data` arguments are accepted but
        # never stored -- only the class-level defaults remain; confirm intent.
        self.crazyFlie = crazyFlie
        self.queue = queue

    def start(self):
        # Run the polling loop on a background thread.
        t = Thread(target = self.run)
        t.start()

    def run(self):
        # Spin up the UDP accelerometer server on its own thread, then poll
        # the latest handler data every 100 ms and translate it to commands.
        acceloServer = AcceloServer()
        t = Thread(target = acceloServer.run)
        t.start()
        prevCommandTime = time.time()  # NOTE(review): assigned but never used.
        noDataCount = 0
        while not self.stopThread:
            data = AcceloHandler.data
            if data is not None:
                command = FlyCommand()
                # "dir" phone drives pitch/roll, "thr" phone drives thrust/yaw.
                if "dir" in data:
                    dirData = data["dir"]
                    command.pitch = dirData[0]
                    command.roll = dirData[1]
                if "thr" in data:
                    thrData = data["thr"]
                    command.thrust = thrData[0]
                    command.yaw = thrData[1]
                print "Command pitch: " + str(command.pitch) + ", roll: " + str(command.roll)
                self.queue.addCommand(command)
            elif noDataCount < 1000:
                noDataCount += 1
            else:
                # No data after ~100 s of polling: land and shut everything down.
                print "Receiving no data, stopping."
                self.queue.addCommand(TerminateCommand())
                self.stopThread = True
                acceloServer.stop()
            time.sleep(0.1)


class AcceloServer():
    # UDP server that feeds accelerometer datagrams to AcceloHandler.
    HOST = ""
    PORT = 5555
    server = None

    def __init__(self):
        self.server = SocketServer.UDPServer((self.HOST, self.PORT), AcceloHandler)

    def run(self):
        # Blocks until stop() is called from another thread.
        self.server.serve_forever()

    def stop(self):
        self.server.shutdown()


class AcceloHandler(SocketServer.BaseRequestHandler):
    # Class-level shared state: latest parsed readings per role, and which
    # client IP owns which role. The first phone to send data controls
    # direction; the second (different IP) controls thrust/yaw.
    data = {}
    ips = {}

    def handle(self):
        addr = self.client_address[0]
        data = self.request[0].strip()
        if "dir" not in AcceloHandler.ips:
            print "{} controls direction.".format(addr)
            AcceloHandler.ips["dir"] = addr
            AcceloHandler.data["dir"] = {}
        elif "thr" not in AcceloHandler.ips and addr != AcceloHandler.ips["dir"]:
            print "{} controls thrust and yaw.".format(addr)
            AcceloHandler.ips["thr"] = addr
            AcceloHandler.data["thr"] = {}
        # Datagram is comma-separated; fields 2-4 are the accelerometer axes.
        # NOTE(review): assumes the sensor-event wire format of the Android
        # sender app -- confirm against that app.
        splitData = data.split(",")
        dirs = self.acceloToValue(float(splitData[2]), float(splitData[3]), float(splitData[4]))
        if addr == AcceloHandler.ips["dir"]:
            AcceloHandler.data["dir"] = dirs
        elif addr == AcceloHandler.ips["thr"]:
            # FIXME Very hacky, but see if it works.
            AcceloHandler.data["thr"] = (dirs[0]*-1000, 10*dirs[1])

    def acceloToValue(self, ax, ay, az):
        # Convert raw accelerations to a pair of tilt values. The 90/pi
        # factor is half of the usual radians->degrees conversion, i.e. the
        # result is scaled tilt, not degrees -- presumably intentional tuning.
        if az != 0:
            sDir = math.atan(-ax/az) * (90 / math.pi)
            fDir = math.atan(-ay/az) * (90 / math.pi)
        else:
            # Avoid division by zero when the phone is held edge-on.
            fDir = 0
            sDir = 0
        return (fDir, sDir)
A universal and prehistoric symbol, Our Sun Earring represents the cycles of rebirth, life and death. There is a sun inside each one of us, a mirror of what we see in the sky. By watching the movement of the sun, we are watching our souls move towards spiritual enlightenment.
""" Implementation of the class `KoumoutsakosLeonard1995` that contains the instantaneous drag coefficients of an impulsively-started 2D cylinder for Reynolds numbers 40, 550, and 3000. The drag coefficient data files are located in the folder `resources/results` of the snake package. _References:_ * Koumoutsakos, P., & Leonard, A. (1995). High-resolution simulations of the flow around an impulsively started cylinder using vortex methods. Journal of Fluid Mechanics, 296, 1-38. """ import os import numpy from snake.force import Force class KoumoutsakosLeonard1995(object): """ Container to store results from Koumoutsakos and Leonard (1995). """ def __init__(self, file_path=None, Re=None): """ Initializes. Parameters ---------- file_path: string, optional Path of the file containing the instantaneous drag coefficients; default: None. Re: float, optional Reynolds number; default: None. """ self.description = 'Koumoutsakos and Leonard (1995)' self.cd = None if file_path or Re: self.read_drag(file_path=file_path, Re=Re) def read_drag(self, file_path=None, Re=None): """ Reads the instantaneous drag coefficients from file. Parameters ---------- file_path: string, optional Path of the file containing the instantaneous drag coefficients; default: None. Re: float, optional Reynolds number; default: None. """ if not (file_path or Re): print('[error] please provide path of file of Reynolds number') return print('[info] reading drag coefficients ...'), if not file_path: file_name = ('koumoutsakos_leonard_1995_' 'cylinder_dragCoefficientRe{}.dat'.format(Re)) file_path = os.path.join(os.environ['SNAKE'], 'resources', 'results', file_name) with open(file_path, 'r') as infile: times, drag = numpy.loadtxt(infile, dtype=float, comments='#', unpack=True) self.cd = Force(0.5 * times, drag) print('done')
I've been thinking about journalist sources. I know some of mine are among my Facebook and LinkedIn friends. Could that become a problem in the future? What if I write about Apple and Steve Jobs thinks it has been leaked and sacks a friend who works at Apple, because he can see my social network even though that's not my source? What about a two-degree separation? A friend of a friend... could that still cause a problem? Should I keep my network private?
import itertools
import re
import codecs
from datetime import datetime, date
from .account import Account, AccountFactory
from .value import Value
from .ledger import Ledger
from .transaction import Transaction, UndefinedTransaction, UnbalancedTransaction
from .directive import Directive, UnsupportedDirective
from .entry import Entry
from .util import PledgerException, itersplit

# Supported date layouts for headers and fuzzy date parsing.
date_formats = {
    "default": "%Y/%m/%d",
    "year": "%Y",
    "month": "%b"}


class MalformedHeader(PledgerException):
    # Raised when a transaction header line cannot be parsed.
    pass


class Parser(object):
    """Parses a ledger file into transactions, entries and directives."""

    def __init__(self):
        self.precision = 2
        self.repo = AccountFactory()

    def parse_account(self, name):
        """Resolve an account name through the shared account factory."""
        return self.repo.parse(name)

    def parse_value(self, str):
        """Parse a monetary value string into a Value."""
        return Value.parse(str)

    def parse_ledger(self, filename, str=None):
        """Parse a whole ledger file (or the given text) into a Ledger.

        Transactions are separated by blank lines. Any PledgerException is
        annotated with the filename before being re-raised.
        """
        if str is None:
            str = codecs.open(filename, "r", "utf-8").read()

        # Predicate: a (line_number, text) pair with empty text ends a group.
        def f(number_line):
            return number_line[1] == ""
        lines = zip(itertools.count(1), str.split("\n"))
        try:
            transactions = [self.parse_transaction(
                group) for group in itersplit(f, lines)]
        except PledgerException as e:
            e.filename = filename
            raise e
        # parse_transaction returns None for blanks/rules -- filter them out.
        return Ledger(filename, [t for t in transactions if t], self)

    def parse_entry(self, str):
        """Parse one posting line into an Entry (account, optional amount, tags).

        Returns None when the line has no recognizable account.
        """
        tags = self.parse_tags(str) or {}
        # Strip the inline comment before splitting into fields.
        str = re.sub(";.*$", "", str)
        elements = [e for e in re.split(r" +", str) if e]
        if len(elements) >= 1:
            account = self.parse_account(elements[0])
            amount = None
            if len(elements) >= 2:
                amount = self.parse_value(elements[1])
            if account:
                return Entry(account, amount, tags)

    def parse_transaction(self, lines):
        """Parse one group of (line_number, text) pairs into a Transaction.

        Also handles directives (returned as-is) and skips rule lines
        (starting with '='). Exceptions are annotated with the offending
        line number. Returns None for empty groups and rules.
        """
        if hasattr(lines, "split"):
            # Convenience: allow a raw multi-line string as input.
            lines = list(zip(itertools.count(1), iter(lines.split("\n"))))
        tags = {}

        # discard initial comments
        while lines and re.match(r'\s*;', lines[0][1]):
            lines = lines[1:]

        if len(lines) == 0:
            return None

        n, header = lines[0]
        lines = lines[1:]

        # skip rules
        if len(header) == 0 or header[0] == "=":
            return None

        directive = self.parse_directive(header)
        if directive:
            return directive

        # parse transaction tags
        if lines:
            n, line = lines[0]
            tags = self.parse_tags(line, begin=True)
            if tags:
                lines = lines[1:]

        try:
            date, label, cleared = self.parse_header(header)
            date = self.parse_date(date)
            if date is None:
                raise MalformedHeader()
            entries = [self.parse_entry(line) for n, line in lines]
            line_numbers = [n for n, line in lines]
            transaction = Transaction.balanced(entries, date, label)
            if tags:
                transaction.tags = tags
            if cleared:
                # A '*' marker in the header means the transaction is cleared.
                transaction.tags["cleared"] = True
            return transaction
        except UnbalancedTransaction as e:
            e.line_number = n
            raise e
        except UndefinedTransaction as e:
            # e.index identifies which entry was undefined.
            e.line_number = line_numbers[e.index]
            raise e
        except MalformedHeader as e:
            e.line_number = n
            raise e

    def parse_date(self, str, format="default"):
        """Parse a date string with the named format; None on failure."""
        try:
            return datetime.strptime(str, date_formats[format]).date()
        except ValueError:
            pass

    def parse_month(self, str):
        """Parse a month name into the first of that month in the current year."""
        base = self.parse_date(str, "month")
        if base:
            return date(date.today().year, base.month, 1)

    def parse_year(self, str):
        """Parse a year into January 1st of that year."""
        base = self.parse_date(str, "year")
        if base:
            return date(base.year, 1, 1)

    def parse_fuzzy_date(self, str):
        """Try full date, then month, then year; None if nothing matches."""
        for parser in [self.parse_date, self.parse_month, self.parse_year]:
            result = parser(str)
            if result:
                return result
        return None

    def parse_header(self, str):
        """Split a header line into (date, label, cleared-marker).

        Raises MalformedHeader when the line does not match.
        """
        m = re.match(r'^(\S+)\s+(\*\s+)?(.*)$', str)
        if m:
            return m.group(1), m.group(3), m.group(2)
        else:
            raise MalformedHeader()

    def parse_tags(self, str, begin=False):
        """Parse a ';'-comment into a dict of tags.

        With begin=True the comment must start the line (transaction-level
        tags); otherwise it may appear anywhere (entry-level tags).
        Returns None when the line carries no comment.
        """
        pattern = r'\s*;\s*(.*)$'
        if begin:
            m = re.match(pattern, str)
        else:
            m = re.search(pattern, str)
        if m:
            tagstring = m.group(1)
            tag_dict = []  # list of (key, value) pairs, dict-ified below
            while True:
                result = self.parse_tag(tagstring)
                if result is None:
                    break
                tag, index = result
                tag_dict.append(tag)
                # Advance past the consumed tag text.
                tagstring = tagstring[index:]
            return dict(tag_dict)

    def parse_tag(self, str):
        """Parse one leading tag; returns ((key, value), end_index) or None.

        Supported forms: key:"value", key:'value', key:value and [date].
        """
        m = re.match(r':?(\S+):"([^"]*)"\s*', str)
        if m:
            return ((m.group(1), m.group(2)), m.end())
        m = re.match(r":?(\S+):'([^']*)'\s*", str)
        if m:
            return ((m.group(1), m.group(2)), m.end())
        m = re.match(r':?(\S+):(\S*)\s*', str)
        if m:
            return ((m.group(1), m.group(2)), m.end())
        m = re.match(r'\[(\S+)\]\s*', str)
        if m:
            # NOTE(review): parse_date swallows ValueError and returns None,
            # so this try/except looks unreachable and a bad date yields
            # ("date", None) -- confirm intent.
            try:
                return (("date", self.parse_date(m.group(1))), m.end())
            except ValueError:
                pass

    def parse_directive(self, str):
        """Instantiate a '!name arg...' directive; None if not a directive.

        Raises UnsupportedDirective for unknown directive names.
        """
        if str[0] == '!':
            args = str[1:].split(' ')
            name = args[0]
            args = args[1:]
            directive_class = Directive.directives.get(name)
            if directive_class:
                return directive_class(*args)
            else:
                raise UnsupportedDirective(name)
Zack Davisson is an award winning translator, writer, and folklorist. He is the author of Yurei: The Japanese Ghost from Chin Music Press, and contributor to Wayward from Image comics. He contributed to exhibitions at the Wereldmuseum Rotterdam and Henry Art Museum, has been featured on NPR and in The New York Times, and has written articles for Weird Tales Magazine, Japanzine, Metropolis, Kansai Time-Out, The Comics Journal, and Electric Literature. As a translator, Davisson was nominated for the 2014 Japanese-US Friendship Commission Translation Prize for his translation of the Eisner Award winning Showa: A History of Japan. For Drawn and Quarterly, Davisson translates and curates the famous folklore comic Kitaro. For Dark Horse, he translates Satoshi Kon's work, including Opus, Seraphim: 266613336 Wings, and The Art of Satoshi Kon, and for Kodansha he translates Leiji Matsumoto’s acclaimed Queen Emeraldas. He was a researcher and on-screen talent for National Geographic's TV special Okinawa: The Lost Ghosts of Japan and maintains the popular Japanese folklore website Hyakumonogatari.com. He resides in Seattle, Washington, with his wife, Miyuki, dog Mochi, cats Bagheera and Sheer Khan, and several ghosts.
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from oslo.config import cfg

from neutron.common import exceptions as n_exc
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants


LOG = logging.getLogger(__name__)

# Config option: each entry describes one advanced-service provider in the
# form <service_type>:<name>:<driver>[:default].
serviceprovider_opts = [
    cfg.MultiStrOpt('service_provider', default=[],
                    help=_('Defines providers for advanced services '
                           'using the format: '
                           '<service_type>:<name>:<driver>[:default]'))
]

cfg.CONF.register_opts(serviceprovider_opts, 'service_providers')


#global scope function that should be used in service APIs
def normalize_provider_name(name):
    # Provider names are matched case-insensitively; store them lowercased.
    return name.lower()


def parse_service_provider_opt():
    """Parse service definition opts and returns result."""
    def validate_name(name):
        # Name column in the DB is 255 chars wide.
        if len(name) > 255:
            raise n_exc.Invalid(
                _("Provider name is limited by 255 characters: %s") % name)

    svc_providers_opt = cfg.CONF.service_providers.service_provider
    res = []
    for prov_def in svc_providers_opt:
        split = prov_def.split(':')
        try:
            svc_type, name, driver = split[:3]
        except ValueError:
            # Fewer than three ':'-separated parts.
            raise n_exc.Invalid(_("Invalid service provider format"))
        validate_name(name)
        name = normalize_provider_name(name)
        default = False
        # Optional fourth part may only be the literal 'default'.
        if len(split) == 4 and split[3]:
            if split[3] == 'default':
                default = True
            else:
                msg = (_("Invalid provider format. "
                         "Last part should be 'default' or empty: %s") %
                       prov_def)
                LOG.error(msg)
                raise n_exc.Invalid(msg)
        if svc_type not in constants.ALLOWED_SERVICES:
            msg = (_("Service type '%(svc_type)s' is not allowed, "
                     "allowed types: %(allowed)s") %
                   {'svc_type': svc_type, 'allowed': constants.ALLOWED_SERVICES})
            LOG.error(msg)
            raise n_exc.Invalid(msg)
        res.append({'service_type': svc_type,
                    'name': name,
                    'driver': driver,
                    'default': default})
    return res


class ServiceProviderNotFound(n_exc.InvalidInput):
    message = _("Service provider '%(provider)s' could not be found "
                "for service type %(service_type)s")


class DefaultServiceProviderNotFound(n_exc.InvalidInput):
    message = _("Service type %(service_type)s does not have a default "
                "service provider")


class ServiceProviderAlreadyAssociated(n_exc.Conflict):
    message = _("Resource '%(resource_id)s' is already associated with "
                "provider '%(provider)s' for service type '%(service_type)s'")


class ProviderConfiguration(object):
    """In-memory registry of providers keyed by (service_type, name)."""

    def __init__(self, prov_data):
        self.providers = {}
        for prov in prov_data:
            self.add_provider(prov)

    def _ensure_driver_unique(self, driver):
        # A driver may only be registered once across all providers.
        # NOTE(review): LOG.exception outside an except block logs a
        # meaningless/absent traceback -- LOG.error seems intended (same for
        # the other occurrences below).
        for k, v in self.providers.items():
            if v['driver'] == driver:
                msg = (_("Driver %s is not unique across providers") %
                       driver)
                LOG.exception(msg)
                raise n_exc.Invalid(msg)

    def _ensure_default_unique(self, type, default):
        # At most one default provider per service type.
        if not default:
            return
        for k, v in self.providers.items():
            if k[0] == type and v['default']:
                msg = _("Multiple default providers "
                        "for service %s") % type
                LOG.exception(msg)
                raise n_exc.Invalid(msg)

    def add_provider(self, provider):
        """Register one parsed provider dict, enforcing uniqueness rules."""
        self._ensure_driver_unique(provider['driver'])
        self._ensure_default_unique(provider['service_type'],
                                    provider['default'])
        provider_type = (provider['service_type'], provider['name'])
        if provider_type in self.providers:
            msg = (_("Multiple providers specified for service "
                     "%s") % provider['service_type'])
            LOG.exception(msg)
            raise n_exc.Invalid(msg)
        self.providers[provider_type] = {'driver': provider['driver'],
                                         'default': provider['default']}

    def _check_entry(self, k, v, filters):
        # small helper to deal with query filters
        if not filters:
            return True
        # k is the (service_type, name) key; v holds driver/default.
        for index, key in enumerate(['service_type', 'name']):
            if key in filters:
                if k[index] not in filters[key]:
                    return False
        for key in ['driver', 'default']:
            if key in filters:
                if v[key] not in filters[key]:
                    return False
        return True

    def _fields(self, resource, fields):
        # Project the resource dict to the requested fields (if any).
        if fields:
            return dict(((key, item) for key, item in resource.items()
                         if key in fields))
        return resource

    def get_service_providers(self, filters=None, fields=None):
        """Return provider dicts matching *filters*, projected to *fields*."""
        return [self._fields({'service_type': k[0],
                              'name': k[1],
                              'driver': v['driver'],
                              'default': v['default']},
                             fields)
                for k, v in self.providers.items()
                if self._check_entry(k, v, filters)]
Not long ago, vacation rental rate structures were simple and straightforward. Rates were set in advance with the property owners prior to when the “annual vacation planner” went to print. There were no flash sales or last minute discount email blasts. Capacity restrictions were minimal, limited to “Saturday-to-Saturday” or “four-night minimums.” Guests could easily grasp the idea that it costs more to visit the beach on July 4 or to go skiing on February 14. Distribution channels were easier to manage as most reservations came in by phone calls or via direct website bookings, and pricing was opaque as it took time to shop competitors for an “apples to apples” comparison. In recent years, VR managers became more adept at watching demand trends, while advances in property management systems simplified the adjusting of rates according to demand. Marketing executives realized that discounts and promotions could increase sales during periods of lower than expected demand; they also began using stay pattern restrictions to optimize yield when demand was strong. At the same time, pricing became transparent as listing services and online travel agencies made it easy for consumers to compare rates. As a result, today’s agents are required to convey complex rate structures to the most educated callers our industry has ever had! Therefore, reservations sales agents need the skills to respond to the many challenging questions and objections they receive daily. Here’s another perspective: The “normal” rates are the high-season/top tier rates. It’s not that we raise rates when it’s busy, it’s that we lower them when it’s slow! Example: “July is a fantastic time here at Beachtown USA so our prevailing rates apply. However, we always offer lower rates whenever it’s slower. Are your dates flexible?” Revenue managers love agents who shift demand to lower-rated dates because that’s when occupancy is needed. 
Most companies seem to find that the higher-rated properties tend to rent first. This might include ocean front condos, first row beach homes, or condos closest to the lake or ski lift. It might also include homes with the most contemporary décor and furnishings. Without training, when your agents do a search and find these options are already booked chances are they are saying “Oh, all we have left at that time is…” Instead, they should position remaining options as still being a good value by saying “Fortunately, at that time we still have…” If there are glaring deficiencies about the décor or location they should point those out so as to manage expectations, but they should then continue on to mention the positive attributes of the remaining options. To maximize revenue all properties must be booked!
class Restaurant():
    """Describes a place where you can get something to eat and drink."""

    def __init__(self, restaurant_name, cuisine_type):
        """Store name and cuisine; start with zero customers served."""
        self.name = restaurant_name
        self.cuisine = cuisine_type
        self.number_served = 0

    def describe_restaurant(self):
        """Print a short summary of the restaurant."""
        print('name: ' + self.name)
        print('cuisine: ' + self.cuisine)

    def open_restaurant(self):
        """Announce that the restaurant is open."""
        print('we are open!')

    def set_number_served(self, number_served):
        """Set the total number of customers served."""
        self.number_served = number_served

    def increment_number_served(self, number_served):
        """Add *number_served* customers to the running total."""
        self.number_served += number_served


class IceCreamStand(Restaurant):
    """A small stand where you can get some cold ice cream."""

    def __init__(self, restaurant_name, cuisine_type, flavors=None):
        """Initialize the stand.

        *flavors* defaults to a fresh list of the three standard flavors.
        (Fix: the previous list-literal default was a mutable default
        argument, shared between every instance that did not pass one.)
        """
        super().__init__(restaurant_name, cuisine_type)
        if flavors is None:
            flavors = ['vanilla', 'chocolate', 'strawberry']
        self.flavors = flavors

    def display_flavors(self):
        """Print the available flavors, one per line."""
        text = 'I have: \n'
        for flavor in self.flavors:
            text += '\t- ' + flavor + '\n'
        print(text)


stand = IceCreamStand("Sam's Ice Bar", 'Snacks')
stand.describe_restaurant()
stand.display_flavors()
NEW YORK (AP) — The Hoff has gotten hitched for the third time. David Hasselhoff's publicist confirmed Tuesday that the actor has married model Hayley Roberts. The small ceremony between the 66-year-old "Baywatch" star and 38-year-old Roberts took place in Italy with close friends and family. The couple first met seven years ago when Hasselhoff was a judge on "Britain's Got Talent." He was filming auditions at a hotel and Roberts approached him for an autograph. He said he'd only give it to her if she gave him her phone number. The pair got engaged in 2016. The "Knight Rider" actor has two daughters from his marriage to actress Pamela Bach.
# -*- coding: utf-8 -*- # # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import json import mock from StringIO import StringIO from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import Client from rest_framework.test import APITestCase from rest_framework import status from pdc.apps.bindings import models as binding_models from pdc.apps.common.test_utils import create_user, TestCaseWithChangeSetMixin from pdc.apps.release.models import Release, ProductVersion from pdc.apps.component.models import (ReleaseComponent, BugzillaComponent) import pdc.apps.release.models as release_models import pdc.apps.common.models as common_models from . import models class ComposeModelTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.compose = models.Compose.objects.get(id=1) def test_get_rpms_existing(self): self.assertEqual(unicode(self.compose.get_rpms('bash')), '[<RPM: bash-0:1.2.3-4.b1.x86_64.rpm>]') def test_get_rpms_nonexisting(self): self.assertEqual(list(self.compose.get_rpms('foo')), []) def test_get_arch_testing_status(self): self.assertDictEqual(self.compose.get_arch_testing_status(), {'Server': {'x86_64': 'untested'}, 'Server2': {'x86_64': 'untested'}}) class VersionFinderTestCase(APITestCase): # TODO: This test case could be removed after removing endpoint 'compose/package' fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/product.json", 
"pdc/apps/release/fixtures/tests/product_version.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def setUp(self): self.url = reverse('findcomposewitholderpackage-list') def test_bad_args_missing_rpm_name(self): response = self.client.get(self.url, {'compose': 'compose-1'}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn('rpm_name', response.data.get('detail')) def test_bad_args_missing_release_and_compose(self): response = self.client.get(self.url, {'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn('release', response.data.get('detail')) self.assertIn('compose', response.data.get('detail')) def test_missing_previous_compose(self): response = self.client.get(self.url, {'compose': 'compose-1', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_previous_compose_has_same_version(self): response = self.client.get(self.url, {'compose': 'compose-2', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_previous_compose_has_older_rpm(self): response = self.client.get(self.url, {'compose': 'compose-3', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('compose'), "compose-2") self.assertEqual(response.data.get('packages'), ["bash-0:1.2.3-4.b1.x86_64.rpm"]) def test_same_version_different_arch(self): """There is a previous compose with same version of package, but with different RPM.arch.""" models.ComposeRPM.objects.filter(pk=1).update(rpm=3) response = self.client.get(self.url, {'compose': 'compose-2', 'rpm_name': 
'bash'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_get_for_release(self): response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_release_with_latest(self): response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0', 'latest': 'True'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_release_to_dict(self): response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0', 'to_dict': True}) self.assertEqual(response.status_code, status.HTTP_200_OK) expected = [ {'compose': u'compose-1', 'packages': [ {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src', 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1, 'linked_composes': [u'compose-1', u'compose-2'], 'linked_releases': []}]}, {'compose': u'compose-2', 'packages': [ {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src', 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1, 'linked_composes': [u'compose-1', u'compose-2'], 'linked_releases': []}]}, {'compose': u'compose-3', 'packages': [ {'name': u'bash', 'version': u'5.6.7', 'epoch': 0, 'release': u'8', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': None, 'filename': 'bash-5.6.7-8.x86_64.rpm', 'id': 2, 'linked_composes': [u'compose-3'], 'linked_releases': []}]} ] self.assertEqual(response.data, expected) def 
test_get_for_product_version(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() response = self.client.get(self.url, {'rpm_name': 'bash', 'product_version': 'product-1'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_product_version_with_latest(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() response = self.client.get(self.url, {'rpm_name': 'bash', 'product_version': 'product-1', 'latest': 'True'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_included_compose_type(self): response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0', 'included_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}]) def test_get_for_excluded_compose_type(self): response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0', 'excluded_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) class FindComposeByReleaseRPMTestCase(APITestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", 
"pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def test_get_for_release(self): url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_release_with_latest(self): url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'}) response = self.client.get(url, {'latest': 'True'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_release_to_dict(self): url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'}) response = self.client.get(url, {'to_dict': True}) self.assertEqual(response.status_code, status.HTTP_200_OK) expected = [ {'compose': u'compose-1', 'packages': [ {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src', 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1, 'linked_composes': ['compose-1', 'compose-2'], 'linked_releases': []}]}, {'compose': u'compose-2', 'packages': [ {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': 
u'bash-0:1.2.3-4.b1.src', 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1, 'linked_composes': ['compose-1', 'compose-2'], 'linked_releases': []}]}, {'compose': u'compose-3', 'packages': [ {'name': u'bash', 'version': u'5.6.7', 'epoch': 0, 'release': u'8', 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': None, 'filename': 'bash-5.6.7-8.x86_64.rpm', 'id': 2, 'linked_composes': ['compose-3'], 'linked_releases': []}]} ] self.assertEqual(response.data, expected) def test_get_for_excluded_compose_type(self): url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'}) response = self.client.get(url, {'excluded_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_included_compose_type(self): url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'}) response = self.client.get(url, {'included_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}]) class FindOlderComposeByComposeRPMTestCase(APITestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def test_missing_previous_compose(self): url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-1', 'rpm_name': 'bash'}) response = 
class FindOlderComposeByComposeRPMTestCase(APITestCase):
    """Tests for the 'findoldercomposebycr-list' endpoint: given a compose
    and an RPM name, find the nearest older compose that shipped an older
    version of that RPM (404 when no such compose exists)."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
        "pdc/apps/compose/fixtures/tests/more_composes.json",
    ]

    def test_missing_previous_compose(self):
        # compose-1 is the oldest compose, so there is nothing before it.
        url = reverse('findoldercomposebycr-list',
                      kwargs={'compose_id': 'compose-1', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_previous_compose_has_same_version(self):
        # compose-1 has the same bash version as compose-2 -> not "older".
        url = reverse('findoldercomposebycr-list',
                      kwargs={'compose_id': 'compose-2', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_previous_compose_has_older_rpm(self):
        url = reverse('findoldercomposebycr-list',
                      kwargs={'compose_id': 'compose-3', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('compose'), "compose-2")
        self.assertEqual(response.data.get('packages'), ["bash-0:1.2.3-4.b1.x86_64.rpm"])

    def test_previous_compose_has_older_rpm_with_to_dict(self):
        # to_dict=True returns full RPM dicts instead of NEVRA strings.
        url = reverse('findoldercomposebycr-list',
                      kwargs={'compose_id': 'compose-3', 'rpm_name': 'bash'})
        response = self.client.get(url, {'to_dict': True})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('compose'), "compose-2")
        packages = response.data.get('packages')
        self.assertEqual(len(packages), 1)
        # Pop the volatile/ordering-sensitive keys before the dict comparison.
        self.assertItemsEqual(packages[0].pop('linked_composes'),
                              ['compose-1', 'compose-2'])
        self.assertEqual(packages[0].pop('linked_releases'), [])
        packages[0].pop('id')
        self.assertDictEqual(
            dict(packages[0]),
            {'name': 'bash', 'version': '1.2.3', 'epoch': 0, 'release': '4.b1',
             'arch': 'x86_64', 'srpm_name': 'bash',
             'srpm_nevra': 'bash-0:1.2.3-4.b1.src',
             'filename': 'bash-1.2.3-4.b1.x86_64.rpm'})

    def test_same_version_different_arch(self):
        """There is a previous compose with same version of package, but with
        different RPM.arch."""
        models.ComposeRPM.objects.filter(pk=1).update(rpm=3)
        url = reverse('findoldercomposebycr-list',
                      kwargs={'compose_id': 'compose-2', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
"pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/product.json", "pdc/apps/release/fixtures/tests/product_version.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def setUp(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() self.url = reverse('findcomposesbypvr-list', kwargs={'rpm_name': 'bash', 'product_version': 'product-1'}) def test_get_for_product_version(self): response = self.client.get(self.url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_product_version_with_latest(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() response = self.client.get(self.url, {'latest': 'True'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_included_compose_type(self): response = self.client.get(self.url, {'included_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': 
class ComposeAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Tests for the compose detail/list endpoints and their query filters
    (compose_id, rpm_* attributes, acceptance_testing)."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def test_get_existing(self):
        response = self.client.get(reverse('compose-detail', args=["compose-1"]))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['sigkeys'], ['ABCDEF'])
        # The template URL contains a literal {{package}} placeholder.
        self.assertEqual(response.data['rpm_mapping_template'],
                         'http://testserver/rest_api/v1/composes/compose-1/rpm-mapping/{{package}}/')

    def test_compose_with_unsigned_package(self):
        # An unsigned package shows up as a None entry among sigkeys.
        crpm = models.ComposeRPM.objects.all()[0]
        crpm.sigkey = None
        crpm.save()
        response = self.client.get(reverse('compose-detail', args=["compose-1"]))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertItemsEqual(response.data['sigkeys'], ['ABCDEF', None])

    def test_get_nonexisting(self):
        response = self.client.get(reverse('compose-detail', args=["does-not-exist"]))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_list(self):
        response = self.client.get(reverse('compose-list'), {})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_composeid(self):
        response = self.client.get(reverse('compose-list'), {"compose_id": "compose-1"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_composeid_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"compose_id": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmname(self):
        response = self.client.get(reverse('compose-list'), {"rpm_name": "bash"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmname_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"rpm_name": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_srpmname(self):
        response = self.client.get(reverse('compose-list'), {"srpm_name": "bash"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_srpmname_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"srpm_name": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmversion(self):
        response = self.client.get(reverse('compose-list'), {"rpm_version": "1.2.3"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmversion_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"rpm_version": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmrelease(self):
        response = self.client.get(reverse('compose-list'), {"rpm_release": "4.b1"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmrelease_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"rpm_release": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmarch(self):
        response = self.client.get(reverse('compose-list'), {"rpm_arch": "x86_64"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmarch_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"rpm_arch": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmnvr(self):
        response = self.client.get(reverse('compose-list'), {"rpm_nvr": "bash-1.2.3-4.b1"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmnvr_nonexisting(self):
        response = self.client.get(reverse('compose-list'), {"rpm_nvr": "does-not-exist"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmnvr_invalid(self):
        # A string that cannot be parsed as NVR is a client error.
        response = self.client.get(reverse('compose-list'), {"rpm_nvr": "invalid"})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_query_compose_rpmnvra(self):
        response = self.client.get(reverse('compose-list'),
                                   {"rpm_nvra": "bash-1.2.3-4.b1.x86_64"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_rpmnvra_nonexisting(self):
        response = self.client.get(reverse('compose-list'),
                                   {"rpm_nvra": "does-not-exist.arch"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_query_compose_rpmnvra_invalid(self):
        # A string that cannot be parsed as NVRA is a client error.
        response = self.client.get(reverse('compose-list'), {"rpm_nvra": "invalid"})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_query_compose_acceptance_testing(self):
        response = self.client.get(reverse('compose-list'),
                                   {"acceptance_testing": "untested"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_query_compose_acceptance_testing_nonexisting(self):
        response = self.client.get(reverse('compose-list'),
                                   {"acceptance_testing": "broken"})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)
class ComposeApiOrderingTestCase(APITestCase):
    """Composes must be listed in a stable, ordered sequence both in the
    compose list endpoint and inside a release's compose_set."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/product.json",
        "pdc/apps/release/fixtures/tests/product_version.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
        "pdc/apps/compose/fixtures/tests/more_composes.json",
    ]

    def test_compose_list_is_ordered(self):
        response = self.client.get(reverse('compose-list'))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            [x['compose_id'] for x in response.data.get('results', [])],
            ['compose-1', 'compose-2', 'compose-3']
        )

    def test_compose_in_release_are_ordered(self):
        response = self.client.get(reverse('release-detail', args=['release-1.0']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('compose_set', []),
                         ['compose-1', 'compose-2', 'compose-3'])
class ComposeUpdateTestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Tests for PATCH/PUT semantics on composes: acceptance testing state,
    linked releases, per-tree RTT testing status, and bulk updates."""
    fixtures = [
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/more_releases.json",
    ]

    def test_can_not_perform_full_update(self):
        # Full PUT on a single compose is not allowed, only PATCH.
        response = self.client.put(reverse('compose-detail', args=['compose-1']), {})
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_can_update_acceptance_testing_state(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'acceptance_testing': 'passed'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('acceptance_testing'), 'passed')
        self.assertNumChanges([1])

    def test_can_not_update_compose_label(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'compose_label': 'i am a label'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_update_linked_releases(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': ['release-1.0-updates']},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('linked_releases'),
                         ['release-1.0-updates'])
        self.assertNumChanges([1])

    def test_update_both_linked_release_and_acceptance(self):
        # Touching two fields in one request records two changes.
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': ['release-1.0-updates'],
                                      'acceptance_testing': 'passed'},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('linked_releases'),
                         ['release-1.0-updates'])
        self.assertEqual(response.data.get('acceptance_testing'), 'passed')
        self.assertNumChanges([2])

    def test_update_acceptance_preserves_links(self):
        # A later PATCH of an unrelated field must not drop linked releases.
        self.client.patch(reverse('compose-detail', args=['compose-1']),
                          {'linked_releases': ['release-1.0-updates']},
                          format='json')
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'acceptance_testing': 'passed'},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('linked_releases'),
                         ['release-1.0-updates'])
        self.assertNumChanges([1, 1])

    def test_update_can_not_link_to_same_release(self):
        # A compose may not be linked to its own release.
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': ['release-1.0']},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('non_field_errors', response.data)

    def test_update_can_not_link_to_same_release_twice(self):
        # Duplicate entries in the request are silently deduplicated.
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': ['release-1.0-updates',
                                                          'release-1.0-updates']},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('linked_releases'), ['release-1.0-updates'])

    def test_partial_update_empty(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_patch_linked_releases_not_a_list(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': 'release-1.0-updates'},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'linked_releases': ['Expected a list.']})
        self.assertNumChanges([])

    def test_patch_linked_releases_null(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': None},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'linked_releases': ['This field may not be null.']})
        self.assertNumChanges([])

    def test_patch_linked_releases_list_with_null(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'linked_releases': [None]},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data,
                         {'linked_releases': ['Expected a string instead of <None>.']})
        self.assertNumChanges([])

    def test_bulk_update_put(self):
        # Bulk PUT on the list endpoint is rejected; only bulk PATCH works.
        response = self.client.put(reverse('compose-list'),
                                   {'compose-1': {'linked_releases': []}},
                                   format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
        self.assertNumChanges([])

    def test_bulk_update_patch(self):
        response = self.client.patch(reverse('compose-list'),
                                     {'compose-1': {'linked_releases': ['release-1.0-updates']}},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertNumChanges([1])
        self.assertEqual(response.data.keys(), ['compose-1'])
        self.assertEqual(response.data['compose-1'].get('linked_releases'),
                         ['release-1.0-updates'])

    def test_partial_update_extra_field(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'foo': 'bar'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_update_testing_status_on_arch(self):
        data = {'Server': {'x86_64': 'passed'}, 'Server2': {'x86_64': 'untested'}}
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'rtt_tested_architectures': data},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('rtt_tested_architectures', {}), data)
        # The status must actually be persisted on the variant-arch record.
        vararch = models.VariantArch.objects.get(arch__name='x86_64',
                                                 variant__variant_uid='Server',
                                                 variant__compose__compose_id='compose-1')
        self.assertEqual(vararch.rtt_testing_status.name, 'passed')
        self.assertNumChanges([1])

    def test_update_testing_status_on_non_existing_tree(self):
        inputs = [
            ({'Foo': {'x86_64': 'passed'}}, 'Foo.x86_64 not in compose compose-1.'),
            ({'Server': {'foo': 'passed'}}, 'Server.foo not in compose compose-1.'),
        ]
        for data, err in inputs:
            response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                         {'rtt_tested_architectures': data},
                                         format='json')
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
            self.assertEqual(response.data.get('rtt_tested_architectures', ''), err)
            self.assertNumChanges([])

    def test_update_testing_status_to_non_existing_status(self):
        data = {'Server': {'x86_64': 'awesome'}}
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'rtt_tested_architectures': data},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('rtt_tested_architectures', ''),
                         '"awesome" is not a known testing status for Server.x86_64.')

    def test_update_testing_status_with_malformed_data(self):
        inputs = [
            ({'Server': 'passed'}, 'Server: "passed" is not a dict'),
            ('passed', 'rtt_tested_architectures: "passed" is not a dict'),
        ]
        for data, err in inputs:
            response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                         {'rtt_tested_architectures': data},
                                         format='json')
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
            self.assertEqual(response.data.get('detail', []), [err])
            self.assertNumChanges([])
class OverridesRPMAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Tests for the RPM override endpoints: querying, creating, deleting
    and bulk-clearing overrides (with do_not_delete/force semantics)."""
    fixtures = [
        'pdc/apps/release/fixtures/tests/release.json',
        'pdc/apps/compose/fixtures/tests/compose_overriderpm.json',
    ]

    def setUp(self):
        self.release = release_models.Release.objects.get(release_id='release-1.0')
        # Serialized form of the override shipped by the fixture.
        self.override_rpm = {'id': 1, 'release': 'release-1.0', 'variant': 'Server',
                             'arch': 'x86_64', 'srpm_name': 'bash', 'rpm_name': 'bash-doc',
                             'rpm_arch': 'x86_64', 'include': False, 'comment': '',
                             'do_not_delete': False}
        # Serialized form of a protected override (do_not_delete=True);
        # its 'id' is filled in by tests that create it.
        self.do_not_delete_orpm = {'release': 'release-1.0', 'variant': 'Server',
                                   'arch': 'x86_64', 'srpm_name': 'bash',
                                   'rpm_name': 'bash-doc', 'rpm_arch': 'src',
                                   'include': True, 'comment': '', 'do_not_delete': True}

    def test_query_existing(self):
        response = self.client.get(reverse('overridesrpm-list'), {'release': 'release-1.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0], self.override_rpm)

    def test_query_nonexisting(self):
        response = self.client.get(reverse('overridesrpm-list'), {'release': 'release-1.1'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_delete_existing(self):
        response = self.client.delete(reverse('overridesrpm-detail', args=[1]))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertNumChanges([1])

    def test_delete_non_existing(self):
        # FIX: the list route takes no URL argument, so reversing
        # 'overridesrpm-list' with args=[42] raised NoReverseMatch instead of
        # exercising the 404 path; use the detail route like test_delete_existing.
        response = self.client.delete(reverse('overridesrpm-detail', args=[42]))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(models.OverrideRPM.objects.count(), 1)
        self.assertNumChanges([])

    def test_create_duplicit(self):
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(models.OverrideRPM.objects.count(), 1)

    def test_create_correct(self):
        self.override_rpm["rpm_name"] = "bash-debuginfo"
        del self.override_rpm["id"]
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(models.OverrideRPM.objects.count(), 2)

    def test_create_extra_field(self):
        self.override_rpm["rpm_name"] = "bash-debuginfo"
        self.override_rpm["foo"] = "bar"
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear(self):
        # DELETE on the list endpoint with a release clears its overrides.
        response = self.client.delete(reverse('overridesrpm-list'),
                                      {'release': 'release-1.0'})
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertItemsEqual(response.data, [self.override_rpm])

    def test_clear_preserve_do_not_delete(self):
        # Overrides flagged do_not_delete survive a plain clear.
        models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                          arch="x86_64", rpm_name="bash-doc",
                                          rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'),
                                      {'release': 'release-1.0'})
        self.assertEqual(models.OverrideRPM.objects.count(), 1)
        self.assertItemsEqual(response.data, [self.override_rpm])

    def test_delete_with_extra_param(self):
        models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                          arch="x86_64", rpm_name="bash-doc",
                                          rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'),
                                      {'release': 'release-1.0', 'variant': "Server",
                                       'arch': 'x86_64', 'rpm_name': 'bash-doc',
                                       'rpm_arch': 'src', 'srpm_name': 'bash'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear_with_extra_param(self):
        models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                          arch="x86_64", rpm_name="bash-doc",
                                          rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'),
                                      {'release': 'release-1.0', 'srpm_name': 'bash'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear_force(self):
        # force=True removes even do_not_delete overrides.
        override = models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                                     arch="x86_64", rpm_name="bash-doc",
                                                     rpm_arch="src", include=True,
                                                     do_not_delete=True, srpm_name="bash")
        self.do_not_delete_orpm['id'] = override.pk
        response = self.client.delete(reverse('overridesrpm-list'),
                                      {'release': 'release-1.0', 'force': True})
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertItemsEqual(response.data, [self.override_rpm, self.do_not_delete_orpm])

    def test_delete_two_by_id(self):
        override = models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                                     arch="x86_64", rpm_name="bash-doc",
                                                     rpm_arch="src", include=True,
                                                     do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'),
                                      [1, override.pk], format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertNumChanges([2])
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
"pdc/apps/common/fixtures/test/sigkey.json", ] def setUp(self): with open('pdc/apps/release/fixtures/tests/composeinfo.json', 'r') as f: self.compose_info = json.loads(f.read()) with open('pdc/apps/compose/fixtures/tests/rpm-manifest.json', 'r') as f: self.manifest = json.loads(f.read()) # Caching ids makes it faster, but the cache needs to be cleared for each test. models.Path.CACHE = {} common_models.SigKey.CACHE = {} def test_import_and_retrieve_manifest(self): response = self.client.post(reverse('releaseimportcomposeinfo-list'), self.compose_info, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) response = self.client.post(reverse('composerpm-list'), {'rpm_manifest': self.manifest, 'release_id': 'tp-1.0', 'composeinfo': self.compose_info}, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertNumChanges([11, 5]) self.assertEqual(models.ComposeRPM.objects.count(), 6) response = self.client.get(reverse('composerpm-detail', args=['TP-1.0-20150310.0'])) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertDictEqual(dict(response.data), self.manifest) class ComposeImageAPITestCase(TestCaseWithChangeSetMixin, APITestCase): def setUp(self): with open('pdc/apps/release/fixtures/tests/composeinfo.json', 'r') as f: self.compose_info = json.loads(f.read()) with open('pdc/apps/compose/fixtures/tests/image-manifest.json', 'r') as f: self.manifest = json.loads(f.read()) self.client.post(reverse('releaseimportcomposeinfo-list'), self.compose_info, format='json') # Caching ids makes it faster, but the cache needs to be cleared for each test. 
class ComposeImageAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Tests for importing an image manifest for a compose (both through the
    deprecated and the current endpoint) and retrieving it back."""

    def setUp(self):
        with open('pdc/apps/release/fixtures/tests/composeinfo.json', 'r') as f:
            self.compose_info = json.loads(f.read())
        with open('pdc/apps/compose/fixtures/tests/image-manifest.json', 'r') as f:
            self.manifest = json.loads(f.read())
        self.client.post(reverse('releaseimportcomposeinfo-list'),
                         self.compose_info, format='json')
        # Caching ids makes it faster, but the cache needs to be cleared for each test.
        models.Path.CACHE = {}

    def test_import_images_by_deprecated_api(self):
        # TODO: remove this test after next release
        response = self.client.post(reverse('composeimportimages-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertNumChanges([11, 5])
        self.assertEqual(models.ComposeImage.objects.count(), 4)
        response = self.client.get(reverse('image-list'),
                                   {'compose': 'TP-1.0-20150310.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('count'), 4)

    def test_import_images(self):
        response = self.client.post(reverse('composeimage-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertNumChanges([11, 5])
        self.assertEqual(models.ComposeImage.objects.count(), 4)
        response = self.client.get(reverse('image-list'),
                                   {'compose': 'TP-1.0-20150310.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('count'), 4)

    def test_import_and_retrieve_images(self):
        # The retrieved manifest must match what was imported.
        response = self.client.post(reverse('composeimage-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        response = self.client.get(reverse('composeimage-detail',
                                           args=['TP-1.0-20150310.0']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertDictEqual(dict(response.data), self.manifest)
"pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.release = release_models.Release.objects.latest('id') self.compose = models.Compose.objects.get(compose_id='compose-1') self.url = reverse('composerpmmapping-detail', args=[self.compose.compose_id, 'bash']) def test_get_rpm_mapping(self): response = self.client.get(self.url, {}, format='json') expected_data = { 'Server': { 'x86_64': { 'bash': ['x86_64'], } } } self.assertEqual(response.data, expected_data) def test_get_rpm_mapping_for_nonexisting_compose(self): url = reverse('composerpmmapping-detail', args=['foo-bar', 'bash']) response = self.client.get(url, {}, format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_rpm_mapping_includes_overrides(self): models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='src', include=True, release=self.release) response = self.client.get(self.url, {}, format='json') expected_data = { 'Server': { 'x86_64': { 'bash': ['src', 'x86_64'], } } } self.assertEqual(response.data, expected_data) def test_rpm_mapping_can_exclude_overrides(self): models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='src', include=True, release=self.release) self.url += '?disable_overrides=1' response = self.client.get(self.url, {}, format='json') expected_data = { 'Server': { 'x86_64': { 'bash': ['x86_64'], 'bash-doc': ['x86_64'], } } } self.assertEqual(response.data, expected_data) def test_does_not_return_empty_container(self): models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='x86_64', include=False, release=self.release) response = self.client.get(self.url, {}, format='json') 
self.assertEqual(response.data, {}) def test_partial_update(self): self.client.force_authenticate(create_user("user", perms=[])) self.client.patch(self.url, [{"action": "create", "srpm_name": "bash", "rpm_name": "bash-magic", "rpm_arch": "src", "variant": "Client", "arch": "x86_64", "do_not_delete": False, "comment": "", "include": True}], format='json') orpm = models.OverrideRPM.objects.get(srpm_name="bash", rpm_name="bash-magic", rpm_arch="src", variant="Client", arch="x86_64", include=True, do_not_delete=False, comment="") self.assertIsNotNone(orpm) def test_update(self): self.client.force_authenticate(create_user("user", perms=[])) new_mapping = {'Server': {'x86_64': {'bash': ['x86_64', 'i386']}}} response = self.client.put(self.url, new_mapping, format='json') self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [{'action': 'create', 'srpm_name': 'bash', 'rpm_name': 'bash', 'rpm_arch': 'i386', 'variant': 'Server', 'arch': 'x86_64', 'include': True, 'release_id': 'release-1.0'}]) self.assertEqual(0, models.OverrideRPM.objects.filter(rpm_arch='i386').count()) def test_update_with_perform(self): self.client.force_authenticate(create_user("user", perms=[])) new_mapping = {'Server': {'x86_64': {'bash': ['x86_64', 'i386']}}} response = self.client.put(self.url + '?perform=1', new_mapping, format='json') self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [{'action': 'create', 'srpm_name': 'bash', 'rpm_name': 'bash', 'rpm_arch': 'i386', 'variant': 'Server', 'arch': 'x86_64', 'include': True, 'release_id': 'release-1.0'}]) self.assertEqual(1, models.OverrideRPM.objects.filter(rpm_arch='i386').count()) class FilterBugzillaProductsAndComponentsTestCase(APITestCase): fixtures = [ "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/component/fixtures/tests/release_component.json", 
        "pdc/apps/component/fixtures/tests/upstream.json",
        "pdc/apps/component/fixtures/tests/global_component.json"
    ]

    def setUp(self):
        # Construct a new release and release component
        self.release = Release.objects.create(
            release_id='release-2.0',
            short='release',
            version='2.0',
            name='Awesome Release',
            release_type_id=1,
        )
        # Build a small bugzilla-component tree:
        # kernel -> filesystems -> ext4, and kernel -> python -> bin.
        self.bugzilla_component = BugzillaComponent.objects.create(name='kernel')
        filesystems = BugzillaComponent.objects.create(name='filesystems',
                                                       parent_component=self.bugzilla_component)
        BugzillaComponent.objects.create(name='ext4', parent_component=filesystems)
        pyth = BugzillaComponent.objects.create(name='python',
                                                parent_component=self.bugzilla_component)
        BugzillaComponent.objects.create(name='bin', parent_component=pyth)
        ReleaseComponent.objects.create(
            release=self.release,
            global_component_id=1,
            name='kernel',
            bugzilla_component=self.bugzilla_component
        )

    def test_filter_bugzilla_products_components_with_rpm_nvr(self):
        """A valid NVR that matches fixture RPMs returns 200."""
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_filter_with_invalid_nvr(self):
        """An unparsable NVR yields 404."""
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=xxx', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_filter_with_nvr_without_rpms(self):
        """A well-formed NVR with no matching RPM rows yields 404."""
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=GConf2-3.2.6-8.el71', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_filter_without_nvr(self):
        """The nvr query parameter is mandatory."""
        url = reverse('bugzilla-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @mock.patch('pdc.apps.compose.models.Compose.objects.filter')
    def test_filter_without_srpm_component_name_mapping(self, mock_filter):
        """Without an SRPM name mapping, lookup falls back to component name."""
        release_component, _ = ReleaseComponent.objects.get_or_create(
            global_component_id=1,
            release=self.release,
            bugzilla_component=self.bugzilla_component,
            name='bash')
        # Fake a compose queryset whose single entry points at our release.
        mock_filter.return_value = mock.Mock()
        mock_filter.return_value.distinct.return_value = [mock.Mock()]
        mock_filter.return_value.distinct.return_value[0].release = self.release.release_id
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('kernel', response.content)

    @mock.patch('pdc.apps.compose.models.Compose.objects.filter')
    def test_filter_with_srpm_component_name_mapping(self, mock_filter):
        """An explicit srpm_name -> release component mapping is honoured."""
        release_component, _ = ReleaseComponent.objects.get_or_create(
            global_component_id=1,
            release=self.release,
            name='kernel')
        binding_models.ReleaseComponentSRPMNameMapping.objects.create(
            srpm_name='bash',
            release_component=release_component)
        mock_filter.return_value = mock.Mock()
        mock_filter.return_value.distinct.return_value = [mock.Mock()]
        mock_filter.return_value.distinct.return_value[0].release = self.release.release_id
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('kernel', response.content)


class RPMMappingTestCase(TestCase):
    """Unit tests for ComposeRPMMapping.compute_changes diffing."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def setUp(self):
        self.compose = models.Compose.objects.get(compose_id='compose-1')
        # Baseline mapping for 'bash' against which diffs are computed.
        self.mapping, _ = self.compose.get_rpm_mapping('bash')

    def test_compute_diff_add_new(self):
        """Adding a new rpm_arch produces one include-create action."""
        new_mapping = models.ComposeRPMMapping(data={'Server': {'x86_64': {'bash': ['src', 'x86_64']}}})
        changes = self.mapping.compute_changes(new_mapping)
        self.assertEqual(len(changes), 1)
        self.assertEqual(changes[0], {'action': 'create', 'variant': 'Server', 'arch': 'x86_64',
                                      'include': True, 'release_id': 'release-1.0',
                                      'rpm_name': 'bash', 'srpm_name': 'bash',
                                      'rpm_arch': 'src'})

    def test_compute_diff_add_excluded(self):
        """Re-adding an RPM excluded by a fixture override deletes that override."""
        new_mapping = models.ComposeRPMMapping(data={'Server': {'x86_64': {'bash': ['x86_64'],
                                                                           'bash-doc': ['x86_64']}}})
        changes = self.mapping.compute_changes(new_mapping)
        self.assertEqual(len(changes), 1)
        self.assertEqual(changes[0], {'action': 'delete', 'variant': 'Server', 'arch': 'x86_64',
                                      'include': False, 'release_id': 'release-1.0',
                                      'rpm_name': 'bash-doc', 'srpm_name': 'bash',
                                      'rpm_arch': 'x86_64'})

    def test_compute_diff_remove_existing(self):
        """Removing everything produces one exclude-create action."""
        new_mapping = models.ComposeRPMMapping(data={})
        changes = self.mapping.compute_changes(new_mapping)
        self.assertEqual(len(changes), 1)
        self.assertEqual(changes[0], {'action': 'create', 'variant': 'Server', 'arch': 'x86_64',
                                      'include': False, 'release_id': 'release-1.0',
                                      'rpm_name': 'bash', 'srpm_name': 'bash',
                                      'rpm_arch': 'x86_64'})


class OverrideManagementTestCase(TestCase):
    """Tests for the /override/manage/<release>/ form view (stage 1: diffing)."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def setUp(self):
        # Baseline POST data mirroring the management page's formsets:
        # 'checks-*' = existing RPMs (checkbox per RPM), 'news-*' = new
        # overrides, 'vararch-*' / 'for_new_vararch-*' = new variant-arch
        # pairs and their overrides. Submitting this unchanged means
        # "no changes requested".
        self.initial_form_data = {
            'checks-0-included': 'on',
            'checks-0-variant': 'Server',
            'checks-0-arch': 'x86_64',
            'checks-0-rpm_name': 'bash',
            'checks-0-rpm_arch': 'x86_64',

            'checks-1-variant': 'Server',
            'checks-1-arch': 'x86_64',
            'checks-1-rpm_name': 'bash-doc',
            'checks-1-rpm_arch': 'x86_64',

            # Formset management-form boilerplate.
            'checks-MAX_NUM_FORMS': '1000',
            'checks-INITIAL_FORMS': 2,
            'checks-TOTAL_FORMS': 2,

            'news-MAX_NUM_FORMS': '1000',
            'news-INITIAL_FORMS': 1,
            'news-TOTAL_FORMS': 0,

            'vararch-MAX_NUM_FORMS': '1000',
            'vararch-INITIAL_FORMS': 1,
            'vararch-TOTAL_FORMS': 0,

            'for_new_vararch-MAX_NUM_FORMS': '1000',
            'for_new_vararch-INITIAL_FORMS': 0,
            'for_new_vararch-TOTAL_FORMS': 0,
        }

    def test_can_access_management_form(self):
        client = Client()
        response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.status_code, 200)
        # There is one package in fixtures
        self.assertEqual(len(response.context['forms']), 1)

    def test_submit_no_changes(self):
        """Submitting the unmodified form compresses to an empty change list."""
        client = Client()
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 0)

    def test_submit_disable(self):
        """Unchecking an included RPM yields an exclude-create action."""
        client = Client()
        del self.initial_form_data['checks-0-included']
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 1)
        self.assertEqual({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash',
                          'rpm_arch': 'x86_64', 'include': False, 'action': 'create',
                          'srpm_name': 'bash', 'release_id': 'release-1.0'}, data[0])

    def test_submit_enable(self):
        """Checking an excluded RPM deletes the existing exclude override."""
        client = Client()
        self.initial_form_data['checks-1-included'] = 'on'
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 1)
        self.assertEqual({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-doc',
                          'rpm_arch': 'x86_64', 'include': False, 'action':
                          'delete', 'srpm_name': 'bash', 'release_id': 'release-1.0',
                          'comment': '', 'do_not_delete': False}, data[0])

    def test_submit_new_override(self):
        """A filled-in 'news' row becomes an include-create action."""
        client = Client()
        self.initial_form_data.update({
            'news-0-variant': 'Server',
            'news-0-arch': 'x86_64',
            'news-0-rpm_name': 'bash-completion',
            'news-0-rpm_arch': 'x86_64',
            'news-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 1)
        self.assertEqual({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                          'variant': 'Server', 'arch': 'x86_64',
                          'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64',
                          'include': True}, data[0])

    def test_submit_new_override_on_new_variant(self):
        """A 'for_new_vararch' row referencing a new variant.arch is accepted."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': 'x86_64',
            # Index into the vararch formset identifying the new pair.
            'for_new_vararch-0-new_variant': 0,
            'for_new_vararch-0-rpm_name': 'bash-completion',
            'for_new_vararch-0-rpm_arch': 'x86_64',
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 1)
        self.assertEqual({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                          'variant': 'Server-optional', 'arch': 'x86_64',
                          'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64',
                          'include': True}, data[0])

    def test_submit_more_different_changes(self):
        """Disable + new override + new-variant override combine into 3 actions."""
        client = Client()
        del self.initial_form_data['checks-0-included']
        self.initial_form_data.update({
            'news-0-variant': 'Server',
            'news-0-arch': 'x86_64',
            'news-0-rpm_name': 'bash-completion',
            'news-0-rpm_arch': 'x86_64',
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': 'x86_64',
            'for_new_vararch-0-new_variant': 0,
            'for_new_vararch-0-rpm_name': 'bash-completion',
            'for_new_vararch-0-rpm_arch': 'x86_64',
            'news-TOTAL_FORMS': 1,
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 3)
        self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                       'variant': 'Server', 'arch': 'x86_64',
                       'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64',
                       'include': True}, data)
        self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                       'variant': 'Server-optional', 'arch': 'x86_64',
                       'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64',
                       'include': True}, data)
        self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                       'variant': 'Server', 'arch': 'x86_64',
                       'rpm_name': 'bash', 'rpm_arch': 'x86_64',
                       'include': False}, data)

    def test_submit_more_same_changes(self):
        """Two filled-in 'news' rows produce two create actions."""
        client = Client()
        self.initial_form_data.update({
            'news-0-variant': 'Server',
            'news-0-arch': 'x86_64',
            'news-0-rpm_name': 'bash-completion',
            'news-0-rpm_arch': 'x86_64',
            'news-1-variant': 'Server',
            'news-1-arch': 'x86_64',
            'news-1-rpm_name': 'bash-magic',
            'news-1-rpm_arch': 'src',
            'news-TOTAL_FORMS': 2,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 2)
        self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash',
                       'variant': 'Server',
                       'arch': 'x86_64', 'rpm_name': 'bash-magic', 'rpm_arch': 'src',
                       'include': True}, data)

    def test_submit_enable_and_disable(self):
        """Enabling one RPM and disabling another yields a delete and a create."""
        client = Client()
        del self.initial_form_data['checks-0-included']
        self.initial_form_data['checks-1-included'] = 'on'
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('compressed', response.context)
        data = json.loads(response.context['compressed'])
        self.assertEqual(len(data), 2)
        self.assertIn({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-doc',
                       'rpm_arch': 'x86_64', 'include': False, 'action': 'delete',
                       'srpm_name': 'bash', 'release_id': 'release-1.0', 'comment': '',
                       'do_not_delete': False}, data)
        self.assertIn({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash',
                       'rpm_arch': 'x86_64', 'include': False, 'action': 'create',
                       'srpm_name': 'bash', 'release_id': 'release-1.0'}, data)

    def test_submit_incorrect_new_override_missing_rpm_arch(self):
        """A 'news' row with empty rpm_arch is rejected with a form error."""
        client = Client()
        self.initial_form_data.update({
            'news-0-variant': 'Server',
            'news-0-arch': 'x86_64',
            'news-0-rpm_name': 'bash-completion',
            'news-0-rpm_arch': '',
            'news-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'override_forms', 0, None,
                                'Both RPM name and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_missing_rpm_name(self):
        """A 'news' row with empty rpm_name is rejected with a form error."""
        client = Client()
        self.initial_form_data.update({
            'news-0-variant': 'Server',
            'news-0-arch': 'x86_64',
            'news-0-rpm_name': '',
            'news-0-rpm_arch': 'src',
            'news-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'override_forms', 0, None,
                                'Both RPM name and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_for_new_variant_missing_rpm_arch(self):
        """A new-variant override row with empty rpm_arch is rejected."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': 'x86_64',
            'for_new_vararch-0-rpm_name': 'bash-completion',
            'for_new_vararch-0-rpm_arch': '',
            'for_new_vararch-0-new_variant': 0,
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'override_v_forms', 0, None,
                                'Both RPM name and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_for_new_variant_missing_rpm_name(self):
        """A new-variant override row with empty rpm_name is rejected."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': 'x86_64',
            'for_new_vararch-0-rpm_name': '',
            'for_new_vararch-0-rpm_arch': 'src',
            'for_new_vararch-0-new_variant': 0,
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'override_v_forms', 0, None,
                                'Both RPM name and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_for_new_variant_missing_variant_name(self):
        """A new variant.arch pair with empty variant name is rejected."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': '',
            'vararch-0-arch': 'x86_64',
            'for_new_vararch-0-rpm_name': 'bash-magic',
            'for_new_vararch-0-rpm_arch': 'src',
            'for_new_vararch-0-new_variant': 0,
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'variant_forms', 0, None,
                                'Both variant and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_for_new_variant_missing_variant_arch(self):
        """A new variant.arch pair with empty arch is rejected."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': '',
            'for_new_vararch-0-rpm_name': 'bash-magic',
            'for_new_vararch-0-rpm_arch': 'src',
            'for_new_vararch-0-new_variant': 0,
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'variant_forms', 0, None,
                                'Both variant and arch must be filled in.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_incorrect_new_override_for_new_variant_and_old_variant(self):
        """A row may not reference both an existing and a new variant.arch."""
        client = Client()
        self.initial_form_data.update({
            'vararch-0-variant': 'Server-optional',
            'vararch-0-arch': 'x86_64',
            'for_new_vararch-0-rpm_name': 'bash-magic',
            'for_new_vararch-0-rpm_arch': 'src',
            'for_new_vararch-0-new_variant': 0,
            'for_new_vararch-0-variant': 'Server',
            'for_new_vararch-0-arch': 'i686',
            'vararch-TOTAL_FORMS': 1,
            'for_new_vararch-TOTAL_FORMS': 1,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormsetError(response, 'override_v_forms', 0, None,
                                'Can not reference both old and new variant.arch.')
        self.assertContains(response, 'There are errors in the form.')

    def test_submit_preview_no_change(self):
        """Submitting without changes shows the 'No changes' preview page."""
        client = Client()
        response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'No changes')


class OverridePreviewTestCase(TestCase):
    """Tests for the preview/confirm stage of the override-management view."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def setUp(self):
        # Stage-1 form data (same shape as OverrideManagementTestCase).
        self.form_data = {
            'checks-0-included': 'on',
            'checks-0-variant': 'Server',
            'checks-0-arch': 'x86_64',
            'checks-0-rpm_name': 'bash',
            'checks-0-rpm_arch': 'x86_64',

            'checks-1-variant': 'Server',
            'checks-1-arch': 'x86_64',
            'checks-1-rpm_name': 'bash-doc',
            'checks-1-rpm_arch': 'x86_64',

            'checks-MAX_NUM_FORMS': '1000',
            'checks-INITIAL_FORMS': 2,
            'checks-TOTAL_FORMS': 2,

            'news-MAX_NUM_FORMS': '1000',
            'news-INITIAL_FORMS': 1,
            'news-TOTAL_FORMS': 0,

            'vararch-MAX_NUM_FORMS': '1000',
            'vararch-INITIAL_FORMS': 1,
            'vararch-TOTAL_FORMS': 0,

            'for_new_vararch-MAX_NUM_FORMS': '1000',
            'for_new_vararch-INITIAL_FORMS': 0,
            'for_new_vararch-TOTAL_FORMS': 0,
        }
        # Stage-2 (preview confirmation) form data; per-action fields are
        # filled in by _populate_preview_form from the stage-1 response.
        self.preview_form_data = {
            'preview_submit': True,
            'form-TOTAL_FORMS': 0,
            'form-INITIAL_FORMS': 0,
            'form-MAX_NUM_FORMS': 1000,
        }

    def _populate_preview_form(self, response):
        """Parse response and prepare form data for preview submission."""
        def set_val(dict, key, val):
            # NOTE(review): the unconditional assignment below overwrites the
            # 'on' value set for True booleans, making the isinstance branch
            # a no-op; upstream versions of this helper use an `else:` here.
            # Verify the intended nesting against the project history.
            if isinstance(val, bool):
                if val:
                    dict[key] = 'on'
            dict[key] = val

        # One stage-2 form per compressed change action.
        for (i, action) in enumerate(json.loads(response.context['compressed'])):
            for k in action:
                set_val(self.preview_form_data, 'form-%d-%s' % (i, k), action[k])
            self.preview_form_data['form-TOTAL_FORMS'] += 1
        # The view echoes the original compressed change list back.
        self.preview_form_data['initial_data'] = response.context['compressed']

    def test_submit_with_comment_and_missing_do_not_delete(self):
        """A comment without do_not_delete checked is a validation error."""
        client = Client()
        del self.form_data['checks-0-included']
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self._populate_preview_form(response)
        self.preview_form_data['form-0-comment'] = 'do not delete me'
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'There are errors in the form.')
        self.assertFormsetError(response, 'forms', 0, None,
                                'Comment needs do_not_delete checked.')

    def test_submit_ok_no_comment(self):
        """Confirming the preview persists the override and redirects."""
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        del self.form_data['checks-0-included']
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self._populate_preview_form(response)
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 2)
        orpm = models.OverrideRPM.objects.latest('id')
        self.assertEqual(orpm.include, False)
        self.assertEqual(orpm.do_not_delete, False)
        self.assertEqual(orpm.comment, '')

    def test_submit_ok_with_comment(self):
        """Comment plus do_not_delete are stored on the created override."""
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        del self.form_data['checks-0-included']
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self._populate_preview_form(response)
        self.preview_form_data.update({
            'form-0-do_not_delete': 'on',
            'form-0-comment': 'do not delete me',
        })
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 2)
        orpm = models.OverrideRPM.objects.latest('id')
        self.assertEqual(orpm.include, False)
        self.assertEqual(orpm.do_not_delete, True)
        self.assertEqual(orpm.comment, 'do not delete me')

    def test_submit_ok_should_delete(self):
        """Confirming a delete action removes the fixture override."""
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        self.form_data['checks-1-included'] = 'on'
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.form_data)
        self._populate_preview_form(response)
        del self.preview_form_data['form-0-do_not_delete']
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 0)

    def test_submit_ok_should_set_do_not_delete(self):
        """Confirming a delete with do_not_delete keeps the row flagged instead."""
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        self.form_data['checks-1-included'] = 'on'
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self._populate_preview_form(response)
        self.preview_form_data.update({
            'form-0-comment': 'comment',
            'form-0-do_not_delete': 'on',
        })
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 1)
        orpm = models.OverrideRPM.objects.latest('id')
        self.assertEqual(orpm.do_not_delete, True)
        self.assertEqual(orpm.comment, 'comment')
        self.assertEqual(orpm.include, True)

    def test_submit_ok_should_remove_do_not_delete_and_delete(self):
        """Unchecking do_not_delete on a protected override deletes it."""
        orpm = models.OverrideRPM.objects.latest('id')
        orpm.do_not_delete = True
        orpm.save()

        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        self.form_data['checks-1-included'] = 'on'
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self._populate_preview_form(response)
        del self.preview_form_data['form-0-do_not_delete']
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 0)

    def test_submit_ok_disable_override_without_compose_rpm__should_delete(self):
        """Disabling an include-override with no backing ComposeRPM deletes it."""
        orpm = models.OverrideRPM.objects.latest('id')
        orpm.rpm_name = 'bash-magic'
        orpm.include = True
        orpm.save()

        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        self.form_data.update({
            'checks-1-included': 'on',
            'checks-2-variant': 'Server',
            'checks-2-arch': 'x86_64',
            'checks-2-rpm_name': 'bash-magic',
            'checks-2-rpm_arch': 'x86_64',
            'checks-TOTAL_FORMS': 3,
        })
        response = client.post('/override/manage/release-1.0/?package=bash', self.form_data)
        self.assertEqual(len(response.context['forms']), 1)
        self._populate_preview_form(response)
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(models.OverrideRPM.objects.count(), 0)


class OverridePreviewBulkTestCase(TestCase):
    """Preview-stage tests applying several override changes in one submit."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm_more.json",
    ]

    def setUp(self):
        # Five 'checks' rows: 0/1/4 unchecked (to be excluded), 2/3 checked.
        self.initial_form_data = {
            'checks-0-variant': 'Server',
            'checks-0-arch': 'x86_64',
            'checks-0-rpm_name': 'bash',
            'checks-0-rpm_arch': 'x86_64',

            'checks-1-variant': 'Server',
            'checks-1-arch': 'x86_64',
            'checks-1-rpm_name': 'bash-completion',
            'checks-1-rpm_arch': 'x86_64',

            'checks-2-included': 'on',
            'checks-2-variant': 'Server',
            'checks-2-arch': 'x86_64',
            'checks-2-rpm_name': 'bash-debuginfo',
            'checks-2-rpm_arch': 'x86_64',

            'checks-3-included': 'on',
            'checks-3-variant': 'Server',
            'checks-3-arch': 'x86_64',
            'checks-3-rpm_name': 'bash-doc',
            'checks-3-rpm_arch': 'x86_64',

            'checks-4-variant': 'Server',
            'checks-4-arch': 'x86_64',
            'checks-4-rpm_name': 'bash-magic',
            'checks-4-rpm_arch': 'x86_64',

            'checks-MAX_NUM_FORMS': '1000',
            'checks-INITIAL_FORMS': 5,
            'checks-TOTAL_FORMS': 5,

            'news-MAX_NUM_FORMS': '1000',
            'news-INITIAL_FORMS': 1,
            'news-TOTAL_FORMS': 0,

            'vararch-MAX_NUM_FORMS': '1000',
            'vararch-INITIAL_FORMS': 1,
            'vararch-TOTAL_FORMS': 0,

            'for_new_vararch-MAX_NUM_FORMS': '1000',
            'for_new_vararch-INITIAL_FORMS': 0,
            'for_new_vararch-TOTAL_FORMS': 0,
        }
        self.preview_form_data = {
            'preview_submit': True,
            'form-INITIAL_FORMS': 0,
            'form-MAX_NUM_FORMS': 1000,
        }

    def test_more_changes_at_the_same_time(self):
        """Five preview actions in one confirm: 2 creates survive, 3 deletes apply."""
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.initial_form_data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context['forms']), 5)
        self.preview_form_data.update({
            'initial_data': response.context['compressed'],
            'form-TOTAL_FORMS': 5,
            'form-0-action': 'create',
            'form-0-variant': 'Server',
            'form-0-arch': 'x86_64',
            'form-0-rpm_name': 'bash',
            'form-0-rpm_arch': 'x86_64',
            'form-0-include': 'False',
            'form-1-action': 'create',
            'form-1-variant': 'Server',
            'form-1-arch': 'x86_64',
            # NOTE(review): 'bash-competion' looks like a typo for
            # 'bash-completion' — the expected export below uses the latter.
            # Left byte-identical here; confirm which name the view matches on.
            'form-1-rpm_name': 'bash-competion',
            'form-1-rpm_arch': 'x86_64',
            'form-1-include': 'False',
            'form-2-action': 'delete',
            'form-2-variant': 'Server',
            'form-2-arch': 'x86_64',
            'form-2-rpm_name': 'bash-debuginfo',
            'form-2-rpm_arch': 'x86_64',
            'form-2-include': 'False',
            'form-3-action': 'delete',
            'form-3-variant': 'Server',
            'form-3-arch': 'x86_64',
            'form-3-rpm_name': 'bash-doc',
            'form-3-rpm_arch': 'x86_64',
            'form-3-include': 'False',
            'form-4-action': 'delete',
            'form-4-variant': 'Server',
            'form-4-arch': 'x86_64',
            'form-4-rpm_name': 'bash-magic',
            'form-4-rpm_arch': 'x86_64',
            'form-4-include': 'False',
        })
        response = client.post('/override/manage/release-1.0/?package=bash',
                               self.preview_form_data)
        self.assertEqual(response.status_code, 302)
        # assertItemsEqual is the Python 2 spelling; Python 3 renamed it to
        # assertCountEqual.
        self.assertItemsEqual(
            [o.export() for o in models.OverrideRPM.objects.all()],
            [{"release_id": 'release-1.0', "variant": 'Server',
              "arch": 'x86_64', "srpm_name": 'bash', "rpm_name": 'bash',
              "rpm_arch": 'x86_64', "include": False, "comment": '',
              "do_not_delete": False},
             {"release_id": 'release-1.0', "variant": 'Server',
              "arch": 'x86_64', "srpm_name": 'bash', "rpm_name": 'bash-completion',
              "rpm_arch": 'x86_64', "include": False, "comment": '',
              "do_not_delete": False}]
        )


class UselessOverrideTestCase(TestCase):
    """Tests for automatic clean-up of overrides that no longer change anything."""
    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def setUp(self):
        self.release = release_models.Release.objects.latest('id')

    def test_delete_unused_include_override(self):
        """An include-override duplicating compose content is dropped with a NOTICE."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash',
                                                 rpm_arch='x86_64', include=True)
        client = Client()
        # The view reports the clean-up on stdout; capture it for assertion.
        with mock.patch('sys.stdout', new_callable=StringIO) as out:
            response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [])
        self.assertIn('NOTICE', out.getvalue())
        self.assertIn(str(orpm), out.getvalue())
        self.assertEqual(models.OverrideRPM.objects.count(), 1)

    def test_delete_unused_exclude_override(self):
        """An exclude-override for an RPM absent from the compose is dropped."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash-missing',
                                                 rpm_arch='x86_64', include=False)
        client = Client()
        with mock.patch('sys.stdout', new_callable=StringIO) as out:
            response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [])
        self.assertIn('NOTICE', out.getvalue())
        self.assertIn(str(orpm), out.getvalue())
        self.assertEqual(models.OverrideRPM.objects.count(), 1)

    def test_delete_unused_exclude_override_on_new_variant_arch(self):
        """An exclude-override on an arch the compose never had is dropped."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash',
                                                 rpm_arch='rpm_arch', include=False)
        client = Client()
        with mock.patch('sys.stdout', new_callable=StringIO) as out:
            response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [])
        self.assertIn('NOTICE', out.getvalue())
        self.assertIn(str(orpm), out.getvalue())
        self.assertEqual(models.OverrideRPM.objects.count(), 1)

    def test_do_not_delete_unused_include_override(self):
        """do_not_delete protects a useless include-override from clean-up."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash',
                                                 rpm_arch='x86_64', include=True,
                                                 do_not_delete=True)
        client = Client()
        response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        # Instead of being deleted it is listed as a useless override.
        self.assertEqual(response.context['useless_overrides'], [orpm])
        self.assertEqual(models.OverrideRPM.objects.count(), 2)

    def test_do_not_delete_unused_exclude_override(self):
        """do_not_delete protects a useless exclude-override from clean-up."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash-missing',
                                                 rpm_arch='x86_64', include=False,
                                                 do_not_delete=True)
        client = Client()
        response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [orpm])
        self.assertEqual(models.OverrideRPM.objects.count(), 2)

    def test_do_not_delete_unused_exclude_override_on_new_variant_arch(self):
        """do_not_delete protects an exclude-override on an unknown arch."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash',
                                                 rpm_arch='rpm_arch', include=False,
                                                 do_not_delete=True)
        client = Client()
        response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [orpm])
        self.assertEqual(models.OverrideRPM.objects.count(), 2)

    def test_update_unused_override_when_creating_conflict(self):
        """Creating a conflicting change updates the protected override in place."""
        orpm = models.OverrideRPM.objects.create(release=self.release,
                                                 variant='Server', arch='x86_64',
                                                 srpm_name='bash', rpm_name='bash',
                                                 rpm_arch='x86_64', include=True,
                                                 do_not_delete=True)
        client = Client()
        create_user("user", perms=["pdc.overrides"])
        client.login(username="user", password="user")
        response = client.get('/override/manage/release-1.0/', {'package': 'bash'})
        self.assertEqual(response.context['useless_overrides'], [orpm])
        form_data = {
            'checks-0-variant': 'Server',
            'checks-0-arch': 'x86_64',
            'checks-0-rpm_name': 'bash',
            'checks-0-rpm_arch': 'x86_64',

            'checks-MAX_NUM_FORMS': '1000',
            'checks-INITIAL_FORMS': 1,
            'checks-TOTAL_FORMS': 1,

            'news-MAX_NUM_FORMS': '1000',
            'news-INITIAL_FORMS': 1,
            'news-TOTAL_FORMS': 0,

            'vararch-MAX_NUM_FORMS': '1000',
            'vararch-INITIAL_FORMS': 1,
            'vararch-TOTAL_FORMS': 0,

            'for_new_vararch-MAX_NUM_FORMS': '1000',
            'for_new_vararch-INITIAL_FORMS': 0,
            'for_new_vararch-TOTAL_FORMS': 0,
        }
        response = client.post('/override/manage/release-1.0/?package=bash', form_data)
        # Stage 1 warns that a protected override will be modified.
        self.assertContains(response, 'warning')
        self.assertContains(response, 'Will modify override with do_not_delete set.')
        preview_data = {
            'preview_submit': True,
            'form-INITIAL_FORMS': 0,
            'form-MAX_NUM_FORMS': 1000,
            'form-TOTAL_FORMS': 1,
            'initial_data': response.context['compressed'],
            'form-0-action': 'create',
            'form-0-variant': 'Server',
            'form-0-arch': 'x86_64',
            'form-0-rpm_name': 'bash',
            'form-0-rpm_arch': 'x86_64',
            'form-0-include': 'False',
        }
        response = client.post('/override/manage/release-1.0/?package=bash', preview_data)
        self.assertEqual(response.status_code, 302)
        # The existing protected row was flipped to exclude, not duplicated.
        orpm = models.OverrideRPM.objects.latest('id')
        self.assertFalse(orpm.include)
Groomed eyebrows frame your face and make all the difference in the world. The worst thing you can do for your appearance is wax or pluck your eyebrows too thin. The key to beautiful eyebrows is filling them in. This is especially true for those of you with blonde eyebrows. Take all of your favorite celebrities, for example: they all have amazing eyebrows because they use a pencil or liner to make them big and beautiful. Go to a professional to shape your eyebrows, then have a makeup artist match a pencil or shadow to your brows. The bigger the better. Avoid rainbow-shaped brows. Audrey Hepburn made brows famous. Have a makeup artist teach you the ropes the first time. Don't be scared to darken your brows if you have blonde hair. Give styling your brows a try — I promise you won't regret it! I love my dark eyebrows! I have blonde hair and have always had dark brows :) Filling them in makes a world of difference for sure!