repo_name (string, len 5..92) | path (string, len 4..232) | copies (string, 19 classes) | size (string, len 4..7) | content (string, len 721..1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000..9,223,102,107B) | line_mean (float64, 6.51..99.9) | line_max (int64, 15..997) | alpha_frac (float64, 0.25..0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
ajgrah2000/pytari2600 | pytari2600/cpu_gen/core.py | 1 | 34536 |
from . import addressing
from . import instructions
from . import pc_state
class OpDecoder(object):
def __init__(self, pc_state, memory, instruction_lookup):
self.pc_state = pc_state
self.memory = memory
self.instruction_lookup = instruction_lookup
def execute(self):
""" On first execution, replace the 'execute' call with more
direct/custom/generated function. """
op_code = self.memory.read(self.pc_state.PC)
instruction = self.instruction_lookup[op_code].clone()
# Calls 'execute' first, to allow it to 'replace itself'
instruction.execute()
self.execute = instruction.execute
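# A minimal sketch of the same self-replacing dispatch idea used by
# OpDecoder.execute (hypothetical names, not part of this module):
#
#   class Lazy(object):
#       def compute(self):
#           result = expensive_setup()       # runs once
#           self.compute = lambda: result    # instance attribute shadows method
#           return result
#
# After the first call, attribute lookup finds the bound instance attribute
# before the class method, so later calls skip the setup entirely.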
class Core(object):
"""
CPU Core - Contains op code mappings.
"""
def __init__(self, clocks, memory, pc_state):
self.clocks = clocks
self.memory = memory
self.pc_state = pc_state
# Different addressing modes
self.aIZX = addressing.AddressIZX(self.pc_state, self.memory)
self.aIZY = addressing.AddressIZY(self.pc_state, self.memory)
self.aIMM = addressing.AddressIMM(self.pc_state, self.memory)
self.aZP = addressing.AddressZP (self.pc_state, self.memory)
self.aZPX = addressing.AddressZPX(self.pc_state, self.memory)
self.aZPY = addressing.AddressZPY(self.pc_state, self.memory)
self.aAbs = addressing.AddressAbs(self.pc_state, self.memory)
self.aAbx = addressing.AddressAbx(self.pc_state, self.memory)
self.aAby = addressing.AddressAby(self.pc_state, self.memory)
self.aInd = addressing.AddressIndirect(self.pc_state, self.memory)
self.aAcc = addressing.AddressAccumulator(self.pc_state, self.memory)
# Different instruction types
self.r = instructions.Reading(self.pc_state, self.memory)
self.nullR = instructions.NullReading(self.pc_state, self.memory)
self.aR = instructions.AccumulatorReading(self.pc_state, self.memory)
self.w = instructions.Writing(self.pc_state, self.memory)
self.regW = instructions.RegWriting(self.pc_state, self.memory)
self.nullW = instructions.NullWriting(self.pc_state, self.memory)
self.aW = instructions.AccumulatorWriting(self.pc_state, self.memory)
self.instruction_exe = instructions.InstructionExec(self.pc_state)
self.instruction_lookup = [False] * 256
self.PROGRAM_ENTRY_ADDR = 0xFFFC
self.pc_state.P.value = 0
self.pc_state.PC = 0x1000
# Generate instances of the op decoder
self.op_decoder = [OpDecoder(pc_state, memory, self.instruction_lookup) for x in range(0x10000)]
def get_save_state(self):
state = {}
state['pc_state'] = self.pc_state.get_save_state()
return state
def set_save_state(self, state):
self.pc_state.set_save_state(state['pc_state'])
def reset(self):
# 6502 Reset vector location.
self.pc_state.PC = self.memory.read16(self.PROGRAM_ENTRY_ADDR)
def initialise(self):
# Populate the instruction map with op code handlers.
self.populate_instruction_map()
def step(self):
self.op_decoder[self.memory.cartridge.get_absolute_address(self.pc_state.PC)].execute()
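# Note: op_decoder is indexed by the cartridge's absolute address (0x10000
# entries, built in __init__ above), so every code location gets its own
# OpDecoder; combined with the self-replacing 'execute', this caches a
# specialised instruction per address after its first execution.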
def populate_instruction_map(self):
dummy = pc_state.PC_Register()
# Single byte instructions (including ASL, ROL and LSR in accumulator modes)
self.instruction_lookup[0xEA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x0A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x4A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.LSR_exec)
self.instruction_lookup[0xE8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.X, self.instruction_exe.INC_exec)
self.instruction_lookup[0xC8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.Y, self.instruction_exe.INC_exec)
self.instruction_lookup[0xCA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.X, self.instruction_exe.DEC_exec)
self.instruction_lookup[0x88] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.Y, self.instruction_exe.DEC_exec)
self.instruction_lookup[0x18] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLC_exec)
self.instruction_lookup[0xD8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLD_exec)
self.instruction_lookup[0x58] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLI_exec)
self.instruction_lookup[0xB8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLV_exec)
self.instruction_lookup[0x38] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SEC_exec)
self.instruction_lookup[0x78] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SEI_exec)
self.instruction_lookup[0xF8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SED_exec)
# Break instruction, software 'interrupt'
self.instruction_lookup[0x00] = instructions.BreakInstruction(self.clocks, self.pc_state, self.memory, None)
# Register Transfers
self.instruction_lookup[0x9A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.S, self.instruction_exe.TNoStatus_exec)
self.instruction_lookup[0xBA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.S, self.pc_state.X, self.instruction_exe.TNoStatus_exec)
self.instruction_lookup[0x8A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.A, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0xAA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.X, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0xA8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.Y, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0x98] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.A, self.instruction_exe.TStatus_exec)
# ADC
self.instruction_lookup[0x61] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x69] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x65] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x75] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x71] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x6D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x7D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x79] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.ADC_exec)
# ASL
self.instruction_lookup[0x06] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x16] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x0E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x1E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ASL_exec)
# AND
self.instruction_lookup[0x21] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x29] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x25] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x35] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x31] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x2D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x3D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x39] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.AND_exec)
# BIT
self.instruction_lookup[0x24] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.BIT_exec)
self.instruction_lookup[0x2C] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.BIT_exec)
# CMP
self.instruction_lookup[0xC1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xC9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xC5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xCD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xDD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.CMP_exec)
# CPX
self.instruction_lookup[0xE0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CPX_exec)
self.instruction_lookup[0xE4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CPX_exec)
self.instruction_lookup[0xEC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CPX_exec)
# CPY
self.instruction_lookup[0xC0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CPY_exec)
self.instruction_lookup[0xC4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CPY_exec)
self.instruction_lookup[0xCC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CPY_exec)
# DEC
self.instruction_lookup[0xC6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xD6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xCE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xDE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.DEC_exec)
# EOR
self.instruction_lookup[0x41] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x49] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x45] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x55] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x51] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x4D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x5D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x59] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.EOR_exec)
# INC
self.instruction_lookup[0xE6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xF6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xEE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xFE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.INC_exec)
# LDA
self.instruction_lookup[0xA1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xA9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xA5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xAD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xBD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LDA_exec)
# LDX
self.instruction_lookup[0xA2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xA6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xB6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xAE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xBE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LDX_exec)
# LDY
self.instruction_lookup[0xA0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xA4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xB4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xAC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xBC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.LDY_exec)
# LSR
self.instruction_lookup[0x46] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x56] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x4E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x5E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.LSR_exec)
# OR
self.instruction_lookup[0x01] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x09] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x05] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x15] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x11] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x0D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x1D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x19] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.OR_exec)
# ROL
self.instruction_lookup[0x26] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x36] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x2E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x3E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x2A] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAcc, self.aR, self.aW, self.instruction_exe.ROL_exec)
# ROR
self.instruction_lookup[0x66] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x76] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x6E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x7E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x6A] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAcc, self.aR, self.aW, self.instruction_exe.ROR_exec)
# SBC
self.instruction_lookup[0xE1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xE9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xE5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xED] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xFD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.SBC_exec)
# STA
self.instruction_lookup[0x81] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x85] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x95] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x91] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
self.instruction_lookup[0x8D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x9D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
self.instruction_lookup[0x99] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
# SAX
self.instruction_lookup[0x83] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x87] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x8F] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x97] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.nullR, self.regW, self.instruction_exe.SAX_exec)
# STX
self.instruction_lookup[0x86] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STX_exec)
self.instruction_lookup[0x96] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.nullR, self.regW, self.instruction_exe.STX_exec)
self.instruction_lookup[0x8E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STX_exec)
# STY
self.instruction_lookup[0x84] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STY_exec)
self.instruction_lookup[0x94] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.STY_exec)
self.instruction_lookup[0x8C] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STY_exec)
# DCP
self.instruction_lookup[0xC3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xC7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xD7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xD3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xCF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xDF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xDB] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.w, self.instruction_exe.DCP_exec)
# JSR
self.instruction_lookup[0x20] = instructions.JumpSubRoutineInstruction(self.clocks, self.pc_state, self.memory, None)
# Branch instructions
# BPL case 0x10: if (self.pc_state.P.status.N == 0)
self.instruction_lookup[0x10] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x80, 0x00, None)
# BMI case 0x30: if (self.pc_state.P.status.N == 1)
self.instruction_lookup[0x30] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x80, 0x80, None)
# BVC case 0x50: if (self.pc_state.P.status.V == 0)
self.instruction_lookup[0x50] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x40, 0x00, None)
# BVS case 0x70: if (self.pc_state.P.status.V == 1)
self.instruction_lookup[0x70] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x40, 0x40, None)
# BCC case 0x90: if (self.pc_state.P.status.C == 0)
self.instruction_lookup[0x90] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x01, 0x00, None)
# BCS case 0xB0: if (self.pc_state.P.status.C == 1)
self.instruction_lookup[0xB0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x01, 0x01, None)
# BNE case 0xD0: if (self.pc_state.P.status.Z == 0)
self.instruction_lookup[0xD0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x02, 0x00, None)
# BEQ case 0xF0: if (self.pc_state.P.status.Z == 1)
self.instruction_lookup[0xF0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x02, 0x02, None)
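# Each BranchInstruction above takes a status-register (mask, value) pair;
# a plausible reading (an assumption -- BranchInstruction's internals are
# not shown in this file) is that the branch is taken when:
#
#     (self.pc_state.P.value & mask) == value
#
# e.g. BMI (0x30) passes mask 0x80, value 0x80 (N set), while BNE (0xD0)
# passes mask 0x02, value 0x00 (Z clear).
# RTI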
self.instruction_lookup[0x40] = instructions.ReturnFromInterrupt(self.clocks, self.pc_state, self.memory, None)
# RTS
self.instruction_lookup[0x60] = instructions.ReturnFromSubRoutineInstruction(self.clocks, self.pc_state, self.memory, None)
# JMP, absolute (effectively immediate)
self.instruction_lookup[0x4C] = instructions.JumpInstruction(self.clocks, self.pc_state, self.aAbs, None)
# JMP, indirect (effectively absolute)
self.instruction_lookup[0x6C] = instructions.JumpInstruction(self.clocks, self.pc_state, self.aInd, None)
# PHP
self.instruction_lookup[0x08] = instructions.PHPInstruction(self.clocks, self.pc_state, self.memory, None)
# PLP
self.instruction_lookup[0x28] = instructions.PLPInstruction(self.clocks, self.pc_state, self.memory, None)
# PHA
self.instruction_lookup[0x48] = instructions.PHAInstruction(self.clocks, self.pc_state, self.memory, None)
# PLA
self.instruction_lookup[0x68] = instructions.PLAInstruction(self.clocks, self.pc_state, self.memory, None)
# Illegal instructions
# SLO
self.instruction_lookup[0x07] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.SLO_exec)
# Undocumented instructions
self.instruction_lookup[0x04] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x14] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x34] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x44] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x54] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x64] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x74] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x80] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x82] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x89] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xC2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xD4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xE2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xF4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
# LAX
self.instruction_lookup[0xA7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xB7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xAF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xBF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xA3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xB3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.LAX_exec)
# ASR
self.instruction_lookup[0x4B] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.ASR_exec)
# SBX
self.instruction_lookup[0xCB] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.SBX_exec)
| mit | -1,423,468,789,650,102,000 | 94.933333 | 197 | 0.734132 | false |
jschleic/waymarked-trails-site | db/configs.py | 1 | 8253 |
# This file is part of the Waymarked Trails Map Project
# Copyright (C) 2015 Sarah Hoffmann
#
# This is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
""" Default configurations.
"""
class RouteDBConfig(object):
schema = None
srid = 3857
country_table = 'country_osm_grid'
change_table = 'changed_objects'
segment_table = 'segments'
hierarchy_table = 'hierarchy'
route_table = 'routes'
style_table = 'defstyle'
relation_subset = None
class SlopeDBConfig(RouteDBConfig):
joinedway_table = 'joined_slopeways'
way_subset = None
class RouteTableConfig(object):
table_name = 'routes'
network_map = {}
tag_filter = None
symbols = None
class PisteTableConfig(object):
route_table_name = 'routes'
way_table_name = 'slopeways'
style_table_name = 'defstyle'
symbols = None
difficulty_map = {'novice' : 1,
'easy' : 2,
'intermediate' : 3,
'advanced' : 4,
'expert' : 5,
'extreme' : 6,
'freeride' : 10,
# unknown value: 0
}
piste_type = {'downhill' : 1,
'nordic' : 2,
'skitour' : 3,
'sled' : 4,
'hike' : 5,
'sleigh' : 6,
# unknown value : 0
}
class RouteStyleTableConfig(object):
table_name = 'defstyle'
@staticmethod
def segment_info(self, relinfo):
classvalues = [ 0x40000000, 0x400000, 0x4000, 0x40]
level = min(relinfo['level'] / 10, 3)
cl = classvalues[int(level)]
self.classification |= cl
if relinfo['symbol'] is not None:
self.add_shield(relinfo['symbol'], cl >= 0x4000)
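# The classvalues above are bit flags packed into 'classification', one
# group of bits per importance level, with min(level / 10, 3) picking the
# group (presumably the international/national/regional/local route split
# used in OSM data -- an assumption, the level semantics are not defined in
# this file). add_shield's second argument then appears to flag shields
# belonging to the two most important groups (cl >= 0x4000).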
class GuidePostConfig:
table_name = 'guideposts'
node_subset = "tags @> 'tourism=>information, information=>guidepost'::hstore"
subtype = None
require_subtype = False
class NetworkNodeConfig:
table_name = 'networknodes'
node_tag = 'ref'
class ShieldConfiguration(object):
symbol_outdir = None
symbol_dir = 'maps/symbols'
image_size = (15, 15)
wide_image_size = (22, 15)
image_border_width = 2.5
text_border_width = 2.5
text_bgcolor = (1, 1, 1) # white
text_color = (0, 0, 0) # black
text_font = "DejaVu-Sans Condensed Bold 7.5"
level_colors = ((0.7, 0.01, 0.01),
(0.08, 0.18, 0.92),
(0.99, 0.64, 0.02),
(0.55, 0.0, 0.86))
swiss_mobile_font = 'DejaVu-Sans Oblique Bold 10'
swiss_mobile_bgcolor = (0.48, 0.66, 0.0)
swiss_mobile_color = (1, 1, 1)
swiss_mobile_networks = ('rwn', 'nwn', 'lwn')
swiss_mobile_operators = ('swiss mobility',
'wanderland schweiz',
'schweiz mobil',
'skatingland schweiz',
'veloland schweiz',
'schweizmobil',
'stiftung schweizmobil'
)
jel_path = "jel"
jel_types = ("3","4","atl","atlv","bfk","bor","b","but","c","eml","f3","f4",
"fatl","fatlv","fbor","fb","fc","feml","ffut","fii","fivv",
"fkor","flo","fl","fm","fmtb","fnw","fpec","f","f+","fq","ftfl",
"ftmp","ft","fut","fx","ii","ivv","k3","k4","karsztb","katl",
"katlv","kbor","kb","kc","keml","kfut","kii","kivv","kkor",
"klo","kl","km","kmtb","knw","kor","kpec","k","k+","kq","ktfl",
"ktmp","kt","kx","l3","l4","latl","latlv","lbor","lb","lc",
"leml","lfut","lii","livv","lkor","llo","ll","lm","lmtb","lnw",
"lo","lpec","l","l+","lq","ls","ltfl","ltmp","lt","lx","mberc",
"m","mtb","nw","p3","p4","palma","palp","patl","patlv","pbor",
"pb","pc","pec","peml","pfut","pii","pivv","pkor","plo","pl",
"pmet","pm","pmtb","+","pnw","ppec","p","p+","pq","ptfl","ptmp",
"pt","px","q","rc","s3","s4","salp","satl","satlv","sbarack",
"sbor","sb","sc","seml","sfut","sgy","sii","sivv","skor","slo",
"sl","sm","smtb","smz","snw","spec","s","s+","sq","ste","stfl",
"stj","stm","stmp","st","sx","sz","tfl","tmp","tny","t","x",
"z3","z4","zatl","zatlv","zbic","zbor","zb","zc","zeml","zfut",
"zii","zivv","zkor","zlo","zl","zm","zmtb","znw","zpec","z",
"z+","zq","ztfl","ztmp","zt","zut","zx","zszolo")
cai_border_width = 5
kct_path = 'kct'
kct_colors = {'red' : (1, 0, 0),
'blue' : (0.04, 0.34, 0.64),
'green' : (0, 0.51, 0.31),
'yellow' : (1.0, 0.81, 0)}
kct_types = ('major', 'local', 'interesting_object', 'learning',
'peak', 'ruin', 'spring', 'horse')
osmc_path = 'osmc'
osmc_colors = { 'black' : (0, 0, 0),
'blue' : (0.03, 0.20, 1),
'brown' : (0.59, 0.32, 0.11),
'gray' : (0.5, 0.5, 0.5),
'green' : (0.34, 0.68, 0),
'orange' : (1, 0.64, 0.02),
'purple' : (0.70, 0.06, 0.74),
'red' : (0.88, 0.15, 0.05),
'white' : (1, 1, 1),
'yellow' : (0.91, 0.88, 0.16)
}
shield_path = 'shields'
shield_names = {}
slope_colors = ((0, 0, 0),
(0.0, 0.439, 0.16),
(0.082, 0.18, 0.925),
(0.698, 0.012, 0.012),
(0, 0, 0),
(0, 0, 0),
(0, 0, 0),
(1.0, 0.639, 0.016))
# used in backgrounds
color_names = {
'black' : (0., 0., 0.),
'gray' : (.5, .5, .5),
'grey' : (.5, .5, .5),
'maroon' : (.5, 0., 0.),
'olive' : (.5, .5, 0.),
'green' : (0., .5, 0.),
'teal' : (0., .5, .5),
'navy' : (0., 0., .5),
'purple' : (.5, 0., .5),
'white' : (1., 1., 1.),
'silver' : (.75, .75, .75),
'red' : (1., 0., 0.),
'yellow' : (1., 1., 0.),
'lime' : (0., 1., 0.),
'aqua' : (0., 1., 1.),
'blue' : (0., 0., 1.),
'fuchsia' : (1., 0., 1.) }
# used for underlining text
colorbox_names = {
'aqua' : [(0., 1., 1.), (.5, .5, .5)],
'black' : [(0., 0., 0.), (1., 1., 1.)],
'blue' : [(0., 0., 1.), (1., 1., 1.)],
'brown' : [(0.76, 0.63, 0.08), (.3, .3, .3)],
'green' : [(0., 1., 0.), (.5, .5, .5)],
'gray' : [(.5, .5, .5), (1., 1., 1.)],
'grey' : [(.6, .6, .6), (.6, .6, .6)],
'maroon' : [(.5, 0., 0.), (1., 1., 1.)],
'orange' : [(1., .65, 0.), (1., 1., 1.)],
'pink' : [(1., 0., 1.), (1., 1., 1.)],
'purple' : [(.5, 0., .5), (1., 1., 1.)],
'red' : [(1., 0., 0.), (1., 1., 1.)],
'violet' : [(.55, .22, .79), (1., 1., 1.)],
'white' : [(1., 1., 1.), (0., 0., 0.)],
'yellow' : [(1., 1., 0.), (.51, .48, .23)],
}
| gpl-3.0 | 4,714,663,236,911,449,000 | 36.513636 | 82 | 0.41706 | false |
GNOME/gom | examples/gom.py | 1 | 2292 |
#!/usr/bin/python3
from gi.types import GObjectMeta
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gom
# Need a metaclass until we get something like _gclass_init_
# https://bugzilla.gnome.org/show_bug.cgi?id=701843
class ItemResourceMeta(GObjectMeta):
def __init__(cls, name, bases, dct):
super(ItemResourceMeta, cls).__init__(name, bases, dct)
cls.set_table("items")
cls.set_primary_key("id")
cls.set_notnull("name")
class ItemResource(Gom.Resource, metaclass=ItemResourceMeta):
id = GObject.Property(type=int)
name = GObject.Property(type=str)
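# The metaclass calls set_table/set_primary_key/set_notnull at class-creation
# time because Gom needs the mapping before any instance exists; with a
# class-initialisation hook such as the _gclass_init_ referenced above, these
# calls could presumably move into the class body itself (speculative -- no
# such hook exists at the time of writing, see the linked bug).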
if __name__ == '__main__':
# Connect to the database
adapter = Gom.Adapter()
adapter.open_sync(":memory:")
# Create the table
repository = Gom.Repository(adapter=adapter)
repository.automatic_migrate_sync(1, [ItemResource])
# Insert an item
item = ItemResource(repository=repository, name="item1")
item.save_sync()
# Fetch the item back
item = repository.find_one_sync(ItemResource, None)
assert item.id == 1
assert item.name == 'item1'
# Insert a new item
item = ItemResource(repository=repository, name="item2")
item.save_sync()
# Fetch them all with a None filter, ordered by name
names = ['item2', 'item1']
sorting = Gom.Sorting(ItemResource, "name", Gom.SortingMode.DESCENDING)
group = repository.find_sorted_sync(ItemResource, None, sorting)
count = len(group)
assert count == 2
group.fetch_sync(0, count)
for i, item in enumerate(group):
assert item.name == names[i]
# Fetch only one of them with a filter, asynchronously
loop = GLib.MainLoop()
def fetch_cb(group, result, user_data):
group.fetch_finish(result)
item = group[0]
assert item.name == "item2"
# Close the database
adapter.close_sync()
loop.quit()
def find_cb(repository, result, user_data):
group = repository.find_finish(result)
count = len(group)
assert count == 1
group.fetch_async(0, count, fetch_cb, None)
filter = Gom.Filter.new_eq(ItemResource, "name", "item2")
group = repository.find_async(ItemResource, filter, find_cb, None)
loop.run()
| lgpl-2.1 | -4,386,300,202,051,084,300 | 26.614458 | 75 | 0.654014 | false |
magopian/test_sugardough | test_sugardough/settings/base.py | 1 | 3612 |
"""
Django settings for test_sugardough project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import dj_database_url
from decouple import Csv, config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', cast=bool)
TEMPLATE_DEBUG = config('DEBUG', default=DEBUG, cast=bool)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
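# The settings above are read from the environment (or a .env file) via
# python-decouple; an illustrative .env (placeholder values, not part of
# this repository):
#
# SECRET_KEY=change-me
# DEBUG=False
# ALLOWED_HOSTS=localhost,example.com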
# Application definition
INSTALLED_APPS = [
# Project specific apps
'test_sugardough.base',
# Third party apps
'django_nose',
# Django apps
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
for app in config('EXTRA_APPS', default='', cast=Csv()):
INSTALLED_APPS.append(app)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'csp.middleware.CSPMiddleware',
)
ROOT_URLCONF = 'test_sugardough.urls'
WSGI_APPLICATION = 'test_sugardough.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': config(
'DATABASE_URL',
cast=dj_database_url.parse
)
}
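# DATABASE_URL is parsed by dj-database-url and follows its URL scheme,
# e.g. (illustrative placeholder, not a real credential):
#
# DATABASE_URL=postgres://user:password@localhost:5432/test_sugardough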
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = config('LANGUAGE_CODE', default='en-us')
TIME_ZONE = config('TIME_ZONE', default='UTC')
USE_I18N = config('USE_I18N', default=True, cast=bool)
USE_L10N = config('USE_L10N', default=True, cast=bool)
USE_TZ = config('USE_TZ', default=True, cast=bool)
STATIC_ROOT = config('STATIC_ROOT', default=os.path.join(BASE_DIR, 'static'))
STATIC_URL = config('STATIC_URL', '/static/')
MEDIA_ROOT = config('MEDIA_ROOT', default=os.path.join(BASE_DIR, 'media'))
MEDIA_URL = config('MEDIA_URL', '/media/')
TEMPLATE_LOADERS = (
'jingo.Loader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
JINGO_EXCLUDE_APPS = [
'admin',
]
# Django-CSP
CSP_DEFAULT_SRC = (
"'self'",
)
CSP_FONT_SRC = (
"'self'",
'http://*.mozilla.net',
'https://*.mozilla.net'
)
CSP_IMG_SRC = (
"'self'",
'http://*.mozilla.net',
'https://*.mozilla.net',
)
CSP_SCRIPT_SRC = (
"'self'",
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://*.mozilla.net',
'https://*.mozilla.net',
)
CSP_STYLE_SRC = (
"'self'",
"'unsafe-inline'",
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://*.mozilla.net',
'https://*.mozilla.net',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
SESSION_COOKIE_SECURE = not DEBUG
| mpl-2.0 | 7,963,321,101,873,390,000 | 23.910345 | 80 | 0.688261 | false |
eayunstack/neutron | neutron/services/auto_allocate/db.py | 1 | 16534 |
# Copyright 2015-2016 Hewlett Packard Enterprise Development Company, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import network as net_def
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import exceptions as n_exc
from neutron_lib.objects import exceptions as obj_exc
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from oslo_log import log as logging
from neutron._i18n import _
from neutron.common import exceptions as c_exc
from neutron.db import _resource_extend as resource_extend
from neutron.db import _utils as db_utils
from neutron.db import api as db_api
from neutron.db import common_db_mixin
from neutron.extensions import l3
from neutron.objects import auto_allocate as auto_allocate_obj
from neutron.objects import base as base_obj
from neutron.objects import network as net_obj
from neutron.plugins.common import utils as p_utils
from neutron.services.auto_allocate import exceptions
LOG = logging.getLogger(__name__)
IS_DEFAULT = 'is_default'
CHECK_REQUIREMENTS = 'dry-run'
@db_api.retry_if_session_inactive()
def _ensure_external_network_default_value_callback(
resource, event, trigger, context, request, network, **kwargs):
"""Ensure the is_default db field matches the create/update request."""
is_default = request.get(IS_DEFAULT)
if is_default is None:
return
if is_default:
# ensure there is only one default external network at any given time
pager = base_obj.Pager(limit=1)
objs = net_obj.ExternalNetwork.get_objects(context,
_pager=pager, is_default=True)
if objs:
if objs[0] and network['id'] != objs[0].network_id:
raise exceptions.DefaultExternalNetworkExists(
net_id=objs[0].network_id)
orig = kwargs.get('original_network')
if orig and orig.get(IS_DEFAULT) == is_default:
return
network[IS_DEFAULT] = is_default
# Reflect the status of the is_default on the create/update request
obj = net_obj.ExternalNetwork.get_object(context,
network_id=network['id'])
if obj:
obj.is_default = is_default
obj.update()
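# Because the callback is registered for PRECOMMIT_CREATE/PRECOMMIT_UPDATE
# (see AutoAllocatedTopologyMixin.__new__ below), it runs inside the network
# create/update transaction, so raising DefaultExternalNetworkExists aborts
# the whole commit rather than leaving a half-updated default flag.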
@resource_extend.has_resource_extenders
class AutoAllocatedTopologyMixin(common_db_mixin.CommonDbMixin):
def __new__(cls, *args, **kwargs):
# NOTE(kevinbenton): we subscribe on object construction because
# the tests blow away the callback manager for each run
new = super(AutoAllocatedTopologyMixin, cls).__new__(cls, *args,
**kwargs)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_UPDATE)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_CREATE)
return new
# TODO(armax): if a tenant modifies auto allocated resources under
# the hood the behavior of the get_auto_allocated_topology API is
# undetermined. Consider adding callbacks to deal with the following
# situations:
# - insert subnet -> plug router interface
# - delete router -> remove the entire topology
# - update subnet -> prevent operation
# - update router gateway -> prevent operation
# - ...
@property
def core_plugin(self):
if not getattr(self, '_core_plugin', None):
self._core_plugin = directory.get_plugin()
return self._core_plugin
@property
def l3_plugin(self):
if not getattr(self, '_l3_plugin', None):
self._l3_plugin = directory.get_plugin(constants.L3)
return self._l3_plugin
@staticmethod
@resource_extend.extends([net_def.COLLECTION_NAME])
def _extend_external_network_default(net_res, net_db):
"""Add is_default field to 'show' response."""
if net_db.external is not None:
net_res[IS_DEFAULT] = net_db.external.is_default
return net_res
def get_auto_allocated_topology(self, context, tenant_id, fields=None):
"""Return tenant's network associated to auto-allocated topology.
The topology will be provisioned upon return, if network is missing.
"""
fields = fields or []
tenant_id = self._validate(context, tenant_id)
if CHECK_REQUIREMENTS in fields:
# for dry-run requests, simply validates that subsequent
# requests can be fulfilled based on a set of requirements
# such as existence of default networks, pools, etc.
return self._check_requirements(context, tenant_id)
elif fields:
raise n_exc.BadRequest(resource='auto_allocate',
msg=_("Unrecognized field"))
# Check for an existent topology
network_id = self._get_auto_allocated_network(context, tenant_id)
if network_id:
return self._response(network_id, tenant_id, fields=fields)
# See if we indeed have an external network to connect to, otherwise
# we will fail fast
default_external_network = self._get_default_external_network(
context)
# If we reach this point, then we got some work to do!
network_id = self._build_topology(
context, tenant_id, default_external_network)
return self._response(network_id, tenant_id, fields=fields)
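# For reference, this method backs Neutron's "get-me-a-network" API; a
# dry-run round trip looks roughly like (illustrative, exact request syntax
# may differ between client versions):
#
#   GET /v2.0/auto-allocated-topology/{project_id}?fields=dry-run
#
# which exercises _check_requirements() without provisioning anything.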
def delete_auto_allocated_topology(self, context, tenant_id):
tenant_id = self._validate(context, tenant_id)
topology = self._get_auto_allocated_topology(context, tenant_id)
if topology:
subnets = self.core_plugin.get_subnets(
context,
filters={'network_id': [topology['network_id']]})
self._cleanup(
context, network_id=topology['network_id'],
router_id=topology['router_id'], subnets=subnets)
def _build_topology(self, context, tenant_id, default_external_network):
"""Build the network topology and returns its network UUID."""
try:
subnets = self._provision_tenant_private_network(
context, tenant_id)
network_id = subnets[0]['network_id']
router = self._provision_external_connectivity(
context, default_external_network, subnets, tenant_id)
network_id = self._save(
context, tenant_id, network_id, router['id'], subnets)
return network_id
except exceptions.UnknownProvisioningError as e:
# Clean partially provisioned topologies, and reraise the
# error. If it can be retried, so be it.
LOG.error("Unknown error while provisioning topology for "
"tenant %(tenant_id)s. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
self._cleanup(
context, network_id=e.network_id,
router_id=e.router_id, subnets=e.subnets)
raise e.error
def _check_requirements(self, context, tenant_id):
"""Raise if requirements are not met."""
self._get_default_external_network(context)
try:
self._get_supported_subnetpools(context)
except n_exc.NotFound:
raise exceptions.AutoAllocationFailure(
reason=_("No default subnetpools defined"))
return {'id': 'dry-run=pass', 'tenant_id': tenant_id}
def _validate(self, context, tenant_id):
"""Validate and return the tenant to be associated to the topology."""
if tenant_id == 'None':
# NOTE(HenryG): the client might be sending us astray by
# passing no tenant; this is really meant to be the tenant
# issuing the request, therefore let's get it from the context
tenant_id = context.tenant_id
if not context.is_admin and tenant_id != context.tenant_id:
raise n_exc.NotAuthorized()
return tenant_id
def _get_auto_allocated_topology(self, context, tenant_id):
"""Return the auto allocated topology record if present or None."""
return auto_allocate_obj.AutoAllocatedTopology.get_object(
context, project_id=tenant_id)
def _get_auto_allocated_network(self, context, tenant_id):
"""Get the auto allocated network for the tenant."""
network = self._get_auto_allocated_topology(context, tenant_id)
if network:
return network['network_id']
@staticmethod
def _response(network_id, tenant_id, fields=None):
"""Build response for auto-allocated network."""
res = {
'id': network_id,
'tenant_id': tenant_id
}
return db_utils.resource_fields(res, fields)
def _get_default_external_network(self, context):
"""Get the default external network for the deployment."""
default_external_networks = net_obj.ExternalNetwork.get_objects(
context, is_default=True)
if not default_external_networks:
LOG.error("Unable to find default external network "
"for deployment, please create/assign one to "
"allow auto-allocation to work correctly.")
raise exceptions.AutoAllocationFailure(
reason=_("No default router:external network"))
if len(default_external_networks) > 1:
LOG.error("Multiple external default networks detected. "
"Network %s is true 'default'.",
default_external_networks[0]['network_id'])
return default_external_networks[0].network_id
def _get_supported_subnetpools(self, context):
"""Return the default subnet pools available."""
default_subnet_pools = [
self.core_plugin.get_default_subnetpool(
context, ver) for ver in (4, 6)
]
available_pools = [
s for s in default_subnet_pools if s
]
if not available_pools:
LOG.error("No default pools available")
raise n_exc.NotFound()
return available_pools
def _provision_tenant_private_network(self, context, tenant_id):
"""Create a tenant private network/subnets."""
network = None
try:
network_args = {
'name': 'auto_allocated_network',
'admin_state_up': False,
'tenant_id': tenant_id,
'shared': False
}
network = p_utils.create_network(
self.core_plugin, context, {'network': network_args})
subnets = []
for pool in self._get_supported_subnetpools(context):
subnet_args = {
'name': 'auto_allocated_subnet_v%s' % pool['ip_version'],
'network_id': network['id'],
'tenant_id': tenant_id,
'ip_version': pool['ip_version'],
'subnetpool_id': pool['id'],
}
subnets.append(p_utils.create_subnet(
self.core_plugin, context, {'subnet': subnet_args}))
return subnets
except (c_exc.SubnetAllocationError, ValueError,
n_exc.BadRequest, n_exc.NotFound) as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s due to missing or unmet "
"requirements. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
if network:
self._cleanup(context, network['id'])
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide tenant private network"))
except Exception as e:
network_id = network['id'] if network else None
raise exceptions.UnknownProvisioningError(e, network_id=network_id)
def _provision_external_connectivity(
self, context, default_external_network, subnets, tenant_id):
"""Uplink tenant subnet(s) to external network."""
router_args = {
'name': 'auto_allocated_router',
l3.EXTERNAL_GW_INFO: {'network_id': default_external_network},
'tenant_id': tenant_id,
'admin_state_up': True
}
router = None
attached_subnets = []
try:
router = self.l3_plugin.create_router(
context, {'router': router_args})
for subnet in subnets:
self.l3_plugin.add_router_interface(
context, router['id'], {'subnet_id': subnet['id']})
attached_subnets.append(subnet)
return router
except n_exc.BadRequest as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s because of router errors. "
"Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
router_id = router['id'] if router else None
self._cleanup(context,
network_id=subnets[0]['network_id'],
router_id=router_id, subnets=attached_subnets)
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide external connectivity"))
except Exception as e:
router_id = router['id'] if router else None
raise exceptions.UnknownProvisioningError(
e, network_id=subnets[0]['network_id'],
router_id=router_id, subnets=subnets)
def _save(self, context, tenant_id, network_id, router_id, subnets):
"""Save auto-allocated topology, or revert in case of DB errors."""
try:
auto_allocate_obj.AutoAllocatedTopology(
context, project_id=tenant_id, network_id=network_id,
router_id=router_id).create()
self.core_plugin.update_network(
context, network_id,
{'network': {'admin_state_up': True}})
except obj_exc.NeutronDbObjectDuplicateEntry:
LOG.debug("Multiple auto-allocated networks detected for "
"tenant %s. Attempting clean up for network %s "
"and router %s.",
tenant_id, network_id, router_id)
self._cleanup(
context, network_id=network_id,
router_id=router_id, subnets=subnets)
network_id = self._get_auto_allocated_network(context, tenant_id)
except Exception as e:
raise exceptions.UnknownProvisioningError(
e, network_id=network_id,
router_id=router_id, subnets=subnets)
return network_id
def _cleanup(self, context, network_id=None, router_id=None, subnets=None):
"""Clean up auto allocated resources."""
# Concurrent attempts to delete the topology may interleave and
# cause some operations to fail with NotFound exceptions. Rather
# than fail partially, the exceptions should be ignored and the
# cleanup should proceed uninterrupted.
if router_id:
for subnet in subnets or []:
ignore_notfound(
self.l3_plugin.remove_router_interface,
context, router_id, {'subnet_id': subnet['id']})
ignore_notfound(self.l3_plugin.delete_router, context, router_id)
if network_id:
ignore_notfound(
self.core_plugin.delete_network, context, network_id)
def ignore_notfound(func, *args, **kwargs):
"""Call the given function and pass if a `NotFound` exception is raised."""
try:
return func(*args, **kwargs)
except n_exc.NotFound:
pass
| apache-2.0 | 3,746,684,057,430,448,000 | 42.740741 | 79 | 0.610016 | false |
min2209/dwt | WTN/depth_model.py | 1 | 6876 | import numpy as np
from math import ceil
import tensorflow as tf
import math
import scipy.io as sio
VGG_MEAN = [103.939, 116.779, 123.68]
class Network:
def __init__(self, params, wd=5e-5, modelWeightPaths=None):
self._params = params
self._images = tf.placeholder("float")
self._batch_images = tf.expand_dims(self._images, 0)
self._gt = tf.placeholder("float")
self._batch_gt = tf.expand_dims(self._gt, 0)
self._wd = wd
self.modelDict = {}
if modelWeightPaths is not None:
for path in modelWeightPaths:
self.modelDict.update(sio.loadmat(path))
def build(self, inputData, ss, keepProb=1):
self.conv1_1 = self._conv_layer(inputData, params=self._params["depth/conv1_1"])
self.conv1_2 = self._conv_layer(self.conv1_1, params=self._params["depth/conv1_2"])
self.pool1 = self._average_pool(self.conv1_2, 'depth/pool')
self.conv2_1 = self._conv_layer(self.pool1, params=self._params["depth/conv2_1"])
self.conv2_2 = self._conv_layer(self.conv2_1, params=self._params["depth/conv2_2"])
self.conv2_3 = self._conv_layer(self.conv2_2, params=self._params["depth/conv2_3"])
self.conv2_4 = self._conv_layer(self.conv2_3, params=self._params["depth/conv2_4"])
self.pool2 = self._average_pool(self.conv2_4, 'depth/pool')
self.fcn1 = self._conv_layer_dropout(self.pool2, params=self._params["depth/fcn1"], keepProb=keepProb)
self.fcn2 = self._conv_layer_dropout(self.fcn1, params=self._params["depth/fcn2"], keepProb=keepProb)
self.outputData = self._upscore_layer(self.fcn2, params=self._params["depth/upscore"],
shape=tf.shape(inputData))
self.outputDataArgMax = tf.argmax(input=self.outputData, dimension=3)
def _max_pool(self, bottom, name):
return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME', name=name)
def _average_pool(self, bottom, name):
return tf.nn.avg_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME', name=name)
def _conv_layer(self, bottom, params):
with tf.variable_scope(params["name"]) as scope:
filt = self.get_conv_filter(params)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(params)
if params["act"] == "relu":
activation = tf.nn.relu(tf.nn.bias_add(conv, conv_biases))
elif params["act"] == "lin":
activation = tf.nn.bias_add(conv, conv_biases)
elif params["act"] == "tanh":
activation = tf.nn.tanh(tf.nn.bias_add(conv, conv_biases))
return activation
def _conv_layer_dropout(self, bottom, params, keepProb):
with tf.variable_scope(params["name"]) as scope:
filt = self.get_conv_filter(params)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(params)
if params["act"] == "relu":
activation = tf.nn.relu(tf.nn.bias_add(conv, conv_biases))
elif params["act"] == "lin":
activation = tf.nn.bias_add(conv, conv_biases)
elif params["act"] == "tanh":
activation = tf.nn.tanh(tf.nn.bias_add(conv, conv_biases))
activation = tf.nn.dropout(activation, keepProb, seed=0)
return activation
# WEIGHTS GENERATION
def get_bias(self, params):
if params["name"]+"/biases" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/biases"], dtype=tf.float32)
print "loaded " + params["name"] + "/biases"
else:
init = tf.constant_initializer(value=0.0)
print "generated " + params["name"] + "/biases"
var = tf.get_variable(name="biases", initializer=init, shape=params["shape"][3])
return var
def get_conv_filter(self, params):
if params["name"]+"/weights" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/weights"], dtype=tf.float32)
var = tf.get_variable(name="weights", initializer=init, shape=params["shape"])
print "loaded " + params["name"]+"/weights"
else:
if params["std"]:
stddev = params["std"]
else:
fanIn = params["shape"][0]*params["shape"][1]*params["shape"][2]
stddev = (2/float(fanIn))**0.5
init = tf.truncated_normal(shape=params["shape"], stddev=stddev, seed=0)
var = tf.get_variable(name="weights", initializer=init)
print "generated " + params["name"] + "/weights"
if not tf.get_variable_scope().reuse:
weightDecay = tf.mul(tf.nn.l2_loss(var), self._wd,
name='weight_loss')
tf.add_to_collection('losses', weightDecay)
return var
def _upscore_layer(self, bottom, shape, params):
strides = [1, params["stride"], params["stride"], 1]
with tf.variable_scope(params["name"]):
in_features = bottom.get_shape()[3].value
new_shape = [shape[0], shape[1], shape[2], params["outputChannels"]]
output_shape = tf.pack(new_shape)
f_shape = [params["ksize"], params["ksize"], params["outputChannels"], in_features]
weights = self.get_deconv_filter(f_shape, params)
deconv = tf.nn.conv2d_transpose(bottom, weights, output_shape,
strides=strides, padding='SAME')
return deconv
def get_deconv_filter(self, f_shape, params):
if params["name"]+"/up_filter" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/up_filter"], dtype=tf.float32)
print "loaded " + params["name"]+"/up_filter"
else:
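            # Descriptive note: the block below builds a bilinear-interpolation
            # kernel. Each spatial weight is the product of two triangular
            # (tent) profiles, and the weights are placed on the channel
            # diagonal so every channel is upsampled independently.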
width = f_shape[0]
height = f_shape[0]
f = ceil(width / 2.0)
c = (2 * f - 1 - f % 2) / (2.0 * f)
bilinear = np.zeros([f_shape[0], f_shape[1]])
for x in range(width):
for y in range(height):
value = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
bilinear[x, y] = value
weights = np.zeros(f_shape)
for i in range(f_shape[2]):
weights[:, :, i, i] = bilinear
init = tf.constant_initializer(value=weights,
dtype=tf.float32)
print "generated " + params["name"] + "/up_filter"
return tf.get_variable(name="up_filter", initializer=init, shape=f_shape) | mit | -6,771,102,239,200,756,000 | 41.98125 | 111 | 0.5605 | false |
foxdog-studios/pyddp | ddp/messages/server/connected_message_serializer.py | 1 | 1037 | # -*- coding: utf-8 -*-
# Copyright 2014 Foxdog Studios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .constants import MSG_CONNECTED
from .server_message_serializer import ServerMessageSerializer
__all__ = ['ConnectedMessageSerializer']
class ConnectedMessageSerializer(ServerMessageSerializer):
MESSAGE_TYPE = MSG_CONNECTED
def serialize_fields(self, message):
return {'session': message.session}
| apache-2.0 | -4,293,238,818,802,430,000 | 31.40625 | 74 | 0.756027 | false |
ngageoint/scale | scale/storage/move_files_job.py | 1 | 4159 | """Defines the functions necessary to move a file to a different workspace/uri"""
from __future__ import unicode_literals
import logging
import os
import sys
from error.exceptions import ScaleError, get_error_by_exception
from messaging.manager import CommandMessageManager
from storage.brokers.broker import FileDownload, FileMove, FileUpload
from storage.messages.move_files import create_move_file_message
from storage.models import ScaleFile
logger = logging.getLogger(__name__)
GENERAL_FAIL_EXIT_CODE = 1
def move_files(file_ids, new_workspace=None, new_file_path=None):
"""Moves the given files to a different workspace/uri
:param file_ids: List of ids of ScaleFile objects to move; should all be from the same workspace
:type file_ids: [int]
:param new_workspace: New workspace to move files to
:type new_workspace: `storage.models.Workspace`
:param new_file_path: New path for files
:type new_file_path: string
"""
try:
messages = []
files = ScaleFile.objects.all()
files = files.select_related('workspace')
files = files.defer('workspace__json_config')
files = files.filter(id__in=file_ids).only('id', 'file_name', 'file_path', 'workspace')
old_files = []
old_workspace = files[0].workspace
if new_workspace:
# We need a local path to copy the file, try to get a direct path from the broker, if that fails we must
# download the file and copy from there
# TODO: a future refactor should make the brokers work off of file objects instead of paths so the extra
# download is not necessary
            paths = old_workspace.get_file_system_paths(files)
local_paths = []
if paths:
local_paths = paths
else:
file_downloads = []
for file in files:
local_path = os.path.join('/tmp', file.file_name)
file_downloads.append(FileDownload(file, local_path, False))
local_paths.append(local_path)
ScaleFile.objects.download_files(file_downloads)
uploads = []
for file, path in zip(files, local_paths):
old_path = file.file_path
old_files.append(ScaleFile(file_name=file.file_name, file_path=file.file_path))
file.file_path = new_file_path if new_file_path else file.file_path
logger.info('Copying %s in workspace %s to %s in workspace %s', old_path, file.workspace.name,
file.file_path, new_workspace.name)
file_upload = FileUpload(file, path)
uploads.append(file_upload)
message = create_move_file_message(file_id=file.id)
messages.append(message)
ScaleFile.objects.upload_files(new_workspace, uploads)
elif new_file_path:
moves = []
for file in files:
logger.info('Moving %s to %s in workspace %s', file.file_path, new_file_path,
file.workspace.name)
moves.append(FileMove(file, new_file_path))
message = create_move_file_message(file_id=file.id)
messages.append(message)
ScaleFile.objects.move_files(moves)
else:
logger.info('No new workspace or file path. Doing nothing')
CommandMessageManager().send_messages(messages)
if new_workspace:
# Copied files to new workspace, so delete file in old workspace (if workspace provides local path to do so)
old_workspace.delete_files(old_files, update_model=False)
except ScaleError as err:
err.log()
sys.exit(err.exit_code)
except Exception as ex:
exit_code = GENERAL_FAIL_EXIT_CODE
err = get_error_by_exception(ex.__class__.__name__)
if err:
err.log()
exit_code = err.exit_code
else:
logger.exception('Error performing move_files steps')
sys.exit(exit_code)
| apache-2.0 | -5,702,555,513,659,372,000 | 40.178218 | 120 | 0.605674 | false |
bnewbold/diffoscope | tests/comparators/test_fonts.py | 1 | 2131 | # -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2015 Jérémy Bobbio <[email protected]>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <http://www.gnu.org/licenses/>.
import os.path
import pytest
from diffoscope.comparators import specialize
from diffoscope.comparators.binary import FilesystemFile, NonExistingFile
from diffoscope.comparators.fonts import TtfFile
from diffoscope.config import Config
from conftest import tool_missing
TEST_FILE1_PATH = os.path.join(os.path.dirname(__file__), '../data/Samyak-Malayalam1.ttf')
TEST_FILE2_PATH = os.path.join(os.path.dirname(__file__), '../data/Samyak-Malayalam2.ttf')
@pytest.fixture
def ttf1():
return specialize(FilesystemFile(TEST_FILE1_PATH))
@pytest.fixture
def ttf2():
return specialize(FilesystemFile(TEST_FILE2_PATH))
def test_identification(ttf1):
assert isinstance(ttf1, TtfFile)
def test_no_differences(ttf1):
difference = ttf1.compare(ttf1)
assert difference is None
@pytest.fixture
def differences(ttf1, ttf2):
return ttf1.compare(ttf2).details
@pytest.mark.skipif(tool_missing('showttf'), reason='missing showttf')
def test_diff(differences):
expected_diff = open(os.path.join(os.path.dirname(__file__), '../data/ttf_expected_diff')).read()
assert differences[0].unified_diff == expected_diff
def test_compare_non_existing(monkeypatch, ttf1):
monkeypatch.setattr(Config, 'new_file', True)
difference = ttf1.compare(NonExistingFile('/nonexisting', ttf1))
assert difference.source2 == '/nonexisting'
| gpl-3.0 | -1,024,706,894,903,309,300 | 35.689655 | 101 | 0.753759 | false |
cuducos/filterss | filterss/helpers.py | 1 | 4366 | import re
from .forms import FilterForm
from email.utils import parsedate_tz
from filterss import app
from flask import request
from textwrap import wrap
from urllib.parse import urlencode
from urllib.request import Request, urlopen
from werkzeug.local import LocalProxy
from xml.dom.minidom import parse
def set_filter(value):
"""
Return filter as lower case string (for case-insensitive search) or
return None for blank/False values
"""
if value:
return str(value).strip().lower()
return None
def get_filters(obj):
"""
    Get an object (form, request, etc.) and return a dictionary with the filters
"""
# if it is a FilterForm object with keys
if type(obj) is FilterForm:
d = obj.data
d['url'] = d['rss_url']
# if it is a GET request
elif type(obj) is LocalProxy:
keys = app.config['FILTERS']
d = dict(zip(keys, map((lambda k: request.args.get(k)), keys)))
# error
else:
return False
# return a dictionary without empty items
return clean_filters(d)
def clean_filters(d):
"""
Delete empty fields from the filters dictionary, strip and convert strings
to lower case
"""
return dict((k, set_filter(v)) for k, v in d.items() if v)
def url_vars(d):
"""
Returns a string with the URL encoded (GET) vars
"""
cleaned = clean_filters(d)
cleaned.pop('rss_url', None)
return urlencode(cleaned)
def connect_n_parse(url):
"""
Connect to a given URL and return the parse of the result
"""
try:
ua = 'Mozilla/5.0'
accept = 'application/rss+xml,application/xhtml+xml,application/xml'
hdr = {'User-Agent': ua, 'Accept': accept}
req = Request(url, headers=hdr)
doc = urlopen(req)
    except Exception:
doc = urlopen(url)
return parse(doc)
def test_conditions(d, title, link):
"""
    Get a dictionary with the filters and test them against the values
    from the RSS (title and link)
"""
# iterate through the filters
for k in d.keys():
# check if it is a title, link or none (skip)
if k[0:1] == 't':
rss_content = title
elif k[0:1] == 'l':
rss_content = link
else:
rss_content = False
# test the conditions only for title and link
if rss_content:
inclusive = True if k[-3:] == 'inc' else False
cond = test_single_condition(d[k], rss_content, inclusive)
# return false if a match is found
if not cond:
return False
# else, return true
return True
def test_single_condition(condition, value, inclusive):
"""
    Separate multiple conditions separated by commas (filters) and test them
    against a given value; the inclusive boolean decides whether the condition
    should or should not be present in the given value. It always returns a boolean.
"""
if condition is None:
return True
    conditions = condition.split(',')
    for c in conditions:
c = c.strip()
if c and c in value.lower():
return inclusive
return not inclusive
def remove_tags(string):
"""
    Return str with certain html/xml tags removed (title, link and pubDate)
"""
tags = ['title', 'link', 'pubDate']
tags_re = '({})'.format('|'.join(tags))
    starttag_re = re.compile(r'<{}(/?>|(\s+[^>]*>))'.format(tags_re), re.U)
endtag_re = re.compile('</{}>'.format(tags_re))
string = starttag_re.sub('', string)
string = endtag_re.sub('', string)
string = string.replace('<![CDATA[', '')
string = string.replace(']]>', '')
return string.strip()
def word_wrap(txt, length=120):
"""
    Return the paragraph truncated at the last word that fits within a given
    number of characters (length), with an ellipsis appended
"""
if len(txt) <= length or length == 0:
return txt
new_txt = wrap(txt, length)
return new_txt[0] + u'…'
def format_date(string):
"""
    Return a date & time (dd/mm/yyyy hh:mm) from an RFC 822 date string
"""
new_date = parsedate_tz(string)
y = new_date[0]
m = '{0:0>2}'.format(new_date[1])
d = '{0:0>2}'.format(new_date[2])
H = '{0:0>2}'.format(new_date[3])
i = '{0:0>2}'.format(new_date[4])
return '{}/{}/{} {}:{}'.format(d, m, y, H, i)
| mit | 3,528,421,072,841,698,000 | 26.446541 | 79 | 0.601971 | false |
uprm-research-resto/coliform-project | Coliform/GUI.py | 1 | 42197 | #!/usr/bin/env python3
#
# This is the main GUI function for Coliform Project
#
# This file is part of Coliform. https://github.com/Regendor/coliform-project
# (C) 2016
# Author: Osvaldo E Duran
# Licensed under the GNU General Public License version 3.0 (GPL-3.0)
import os
import time
import sys
try:
import matplotlib
matplotlib.use('Qt5Agg')
from PyQt5.QtCore import QTimer, Qt, QCoreApplication, QObject, pyqtSignal
from PyQt5.QtGui import QColor, QPalette
from PyQt5.QtWidgets import QApplication, QDesktopWidget, QMessageBox, QVBoxLayout, QHBoxLayout
from PyQt5.QtWidgets import QLabel, QMainWindow, QWidget, QGroupBox, QPushButton, QRadioButton, QLineEdit, QFileDialog
except ImportError:
from tkinter import messagebox
    messagebox.showinfo(message='Please close this dialog and install dependencies by typing the following in a terminal:\n'
'python3\n'
'from Coliform import InitialSetup\n'
'InitialSetup.addShortcuts()\n'
'InitialSetup.installDependencies()\n')
from Coliform import OneWire, MultiPlot, RPiGPIO, RPiCamera, RGBSensor
import threading
# from datetime import datetime
'''
import as:
from Coliform import GUI
use as:
GUI.startGUI()
GUI.startCameraGUI()
GUI.startRGBSensorGUI()
'''
class GUICenterWidget(QWidget):
def __init__(self):
super(GUICenterWidget, self).__init__()
self.initUI()
self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.createTopLeftGroupBox()
self.createTopRightGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QHBoxLayout()
topLayout.addWidget(self.topLeftGroupBox)
topLayout.addWidget(self.topRightGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopLeftGroupBox(self):
self.topLeftGroupBox = QGroupBox("Temperature Sensor")
tempLabel = QLabel('Temperature: ')
self.tempValLabel = QLabel('NULL')
plotButton = QPushButton("Show Plot")
plotButton.clicked.connect(self.tempPlot)
saveDataButton = QPushButton('Save Data File')
saveDataButton.clicked.connect(self.savefile)
vbox1 = QVBoxLayout()
vbox1.addWidget(tempLabel)
vbox1.addWidget(self.tempValLabel)
vbox2 = QVBoxLayout()
vbox2.addWidget(plotButton)
vbox2.addWidget(saveDataButton)
layout = QHBoxLayout()
layout.addLayout(vbox1)
layout.addLayout(vbox2)
layout.addStretch(1)
self.topLeftGroupBox.setLayout(layout)
def createTopRightGroupBox(self):
self.topRightGroupBox = QGroupBox('Heater')
heatLabel = QLabel('Target Temperature(C):')
heatEntry = QLineEdit()
heatEntry.textChanged[str].connect(self.tempOnChanged)
heatEntry.setText('41')
self.heatButton = QPushButton('Heater ON')
self.heatButton.clicked.connect(self.heaterPower)
hbox1 = QHBoxLayout()
hbox1.addWidget(heatLabel)
hbox1.addWidget(heatEntry)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.heatButton)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.topRightGroupBox.setLayout(layout)
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Pump')
self.pumpPowerButton = QPushButton('Power ON')
self.pumpPowerButton.clicked.connect(self.pumpPower)
pumpEntry = QLineEdit()
pumpEntry.textChanged[str].connect(self.pumpOnChanged)
pumpValChangeButton = QPushButton('Submit')
pumpValChangeButton.clicked.connect(self.pumppowerchange)
layout = QVBoxLayout()
layout.addWidget(self.pumpPowerButton)
layout.addWidget(pumpEntry)
layout.addWidget(pumpValChangeButton)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def tempOnChanged(self, text):
if text != '':
self.tempTarget = int(float(text))
def pumpOnChanged(self, text):
if text:
self.pumppwmvalue = int(float(text))
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Status')
self.tempSensorLbl = QLabel('Temp. Sensor OFF')
self.pumpLbl = QLabel('Pump OFF')
self.heatLbl = QLabel('Heater OFF')
layout = QVBoxLayout()
layout.addWidget(self.tempSensorLbl)
layout.addWidget(self.pumpLbl)
layout.addWidget(self.heatLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
def statusOnChanged(self, text):
if 'Temp. Sensor' in text:
self.tempSensorStatus = text
self.tempSensorLbl.adjustSize()
elif 'Pump' in text:
self.pumpStatus = text
self.pumpLbl.adjustSize()
elif 'Heater' in text:
self.heatStatus = text
self.heatLbl.adjustSize()
def onewireOn(self):
try:
self.ids = OneWire.getOneWireID()
TemperatureDegrees, self.TemperatureNumber = OneWire.getTempList()
self.tempValLabel.setText(TemperatureDegrees)
MultiPlot.GeneratePlotDataFile(self.tf, self.TemperatureNumber, self.start_time)
if not self.ids:
self.tempSensorLbl.setText('Temp. Sensor OFF')
self.tempValLabel.setText('NULL')
self.tempValLabel.adjustSize()
else:
self.tempSensorLbl.setText('Temp. Sensor ON')
self.tempValLabel.adjustSize()
except IndexError:
pass
def tempPlot(self):
try:
self.y_title_axis = ['Temperature Plot', 'Temperature vs Time', 't(s)', 'T(C)', 'Sensor']
MultiPlot.Plot(self.tf, len(self.ids), self.y_title_axis)
except KeyError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('No temperature sensor connected.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def pumpPower(self):
if 'OFF' in self.pumpLbl.text():
self.PUMPPWM = RPiGPIO.Controller(11, 100)
self.PUMPPWM.startup()
self.pumpLbl.setText('Pump ON')
self.pumpPowerButton.setText('Power OFF')
elif 'ON' in self.pumpLbl.text():
self.PUMPPWM.shutdown()
self.pumpLbl.setText('Pump OFF')
self.pumpPowerButton.setText('Power ON')
def savefile(self):
tempfilename = 'TemperatureData.csv'
filepath = QFileDialog.getExistingDirectory(self, 'Choose Directory', os.sep.join((os.path.expanduser('~'), 'Desktop')))
self.y_variablename = 'TemperatureSensor'
MultiPlot.SaveToCsv(self.tf, tempfilename, filepath, len(self.ids), self.y_variablename)
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Information')
mb.setText('File saved to directory.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def heaterPower(self):
if 'OFF' in self.heatLbl.text():
self.heatLbl.setText('Heater ON')
self.heatButton.setText('Power OFF')
self.HEATPWM = RPiGPIO.Controller(12, 100)
self.HEATPWM.startup()
elif 'ON' in self.heatLbl.text():
self.HEATPWM.shutdown()
self.heatLbl.setText('Heater OFF')
self.heatButton.setText('Power ON')
def heaterinput(self):
if self.heatLbl.text() != 'Heater OFF':
value = float(self.tempTarget)
sensor = float(self.TemperatureNumber[1])
self.HEATPWM.HeaterPID(value, sensor)
def pumppowerchange(self):
try:
if self.pumppwmvalue > 100:
raise ValueError
else:
self.PUMPPWM.setIntensity(self.pumppwmvalue)
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Please type in a value between 0-100.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
class GUIMainWindow(QMainWindow):
def __init__(self):
super(GUIMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = GUICenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.statusBar().showMessage('Ready')
self.center()
self.setWindowTitle('Coliform Control GUI')
self.onewiretimer = QTimer(self)
self.onewiretimer.timeout.connect(self.cwidget.onewireOn)
self.onewiretimer.start(1000)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
def quitApp(self):
QCoreApplication.instance().quit()
class CameraCenterWidget(QWidget):
def __init__(self):
super(CameraCenterWidget, self).__init__()
self.initUI()
# self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.statusbar = 'Ready'
self.createTopGroupBox()
self.createMidTopGroupBox()
self.createMidBottomGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QVBoxLayout()
topLayout.addWidget(self.topGroupBox)
topLayout.addWidget(self.midTopGroupBox)
topLayout.addWidget(self.midBottomGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopGroupBox(self):
self.topGroupBox = QGroupBox("Camera Capture Parameters")
delayLbl = QLabel('Delay:')
self.delayEntry = QLineEdit()
self.delayEntry.setText('5')
brightLbl = QLabel('Brightness:')
self.brightEntry = QLineEdit()
self.brightEntry.setText('50')
contrastLbl = QLabel('Contrast:')
self.contrastEntry = QLineEdit()
self.contrastEntry.setText('0')
        shutterLbl = QLabel('Shutter Speed (μs):')
self.shutterEntry = QLineEdit()
self.shutterEntry.setText('0')
# Line 2
isoLbl = QLabel('ISO:')
self.isoEntry = QLineEdit()
self.isoEntry.setText('0')
prevTimeLbl = QLabel('Preview Timeout:')
self.prevTimeEntry = QLineEdit()
self.prevTimeEntry.setText('10')
resLbl = QLabel('Resolution:')
self.resEntry = QLineEdit()
self.resEntry.setText('2592x1944')
zoomLbl = QLabel('Zoom:')
self.zoomEntry = QLineEdit()
self.zoomEntry.setText('0.0, 0.0, 1.0, 1.0')
hbox1 = QHBoxLayout()
hbox1.addWidget(delayLbl)
hbox1.addWidget(self.delayEntry)
hbox1.addWidget(brightLbl)
hbox1.addWidget(self.brightEntry)
hbox1.addWidget(contrastLbl)
hbox1.addWidget(self.contrastEntry)
hbox1.addWidget(shutterLbl)
hbox1.addWidget(self.shutterEntry)
hbox2 = QHBoxLayout()
hbox2.addWidget(isoLbl)
hbox2.addWidget(self.isoEntry)
hbox2.addWidget(prevTimeLbl)
hbox2.addWidget(self.prevTimeEntry)
hbox2.addWidget(resLbl)
hbox2.addWidget(self.resEntry)
hbox2.addWidget(zoomLbl)
hbox2.addWidget(self.zoomEntry)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.topGroupBox.setLayout(layout)
def createMidTopGroupBox(self):
self.midTopGroupBox = QGroupBox('Auto White Balance Modes')
self.autoAwb = QRadioButton()
self.autoAwb.setText('auto')
self.autoAwb.toggled.connect(lambda: self.abtnstate(self.autoAwb))
self.fluorAwb = QRadioButton()
self.fluorAwb.setText('fluorescent')
self.fluorAwb.toggled.connect(lambda: self.abtnstate(self.fluorAwb))
self.incanAwb = QRadioButton()
self.incanAwb.setText('incandescent')
self.incanAwb.toggled.connect(lambda: self.abtnstate(self.incanAwb))
self.offAwb = QRadioButton()
self.offAwb.setText('off')
self.offAwb.toggled.connect(lambda: self.abtnstate(self.offAwb))
self.defaultAwb = QRadioButton()
self.defaultAwb.setText('default')
self.defaultAwb.toggled.connect(lambda: self.abtnstate(self.defaultAwb))
self.sunAwb = QRadioButton()
self.sunAwb.setText('sun')
self.sunAwb.toggled.connect(lambda: self.abtnstate(self.sunAwb))
self.cloudAwb = QRadioButton()
self.cloudAwb.setText('cloud')
self.cloudAwb.toggled.connect(lambda: self.abtnstate(self.cloudAwb))
self.shadeAwb = QRadioButton()
self.shadeAwb.setText('shade')
self.shadeAwb.toggled.connect(lambda: self.abtnstate(self.shadeAwb))
self.tungsAwb = QRadioButton()
self.tungsAwb.setText('tungsten')
self.tungsAwb.toggled.connect(lambda: self.abtnstate(self.tungsAwb))
self.flashAwb = QRadioButton()
self.flashAwb.setText('flash')
self.flashAwb.toggled.connect(lambda: self.abtnstate(self.flashAwb))
self.horizonAwb = QRadioButton()
self.horizonAwb.setText('horizon')
self.horizonAwb.toggled.connect(lambda: self.abtnstate(self.horizonAwb))
self.defaultAwb.setChecked(True)
hbox1 = QHBoxLayout()
hbox1.addWidget(self.autoAwb)
hbox1.addWidget(self.fluorAwb)
hbox1.addWidget(self.incanAwb)
hbox1.addWidget(self.offAwb)
hbox1.addWidget(self.defaultAwb)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.sunAwb)
hbox2.addWidget(self.cloudAwb)
hbox2.addWidget(self.shadeAwb)
hbox2.addWidget(self.tungsAwb)
hbox2.addWidget(self.flashAwb)
hbox2.addWidget(self.horizonAwb)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.midTopGroupBox.setLayout(layout)
def createMidBottomGroupBox(self):
self.midBottomGroupBox = QGroupBox('Exposure Modes')
self.autoExp = QRadioButton()
self.autoExp.setText('auto')
self.autoExp.toggled.connect(lambda: self.btnstate(self.autoExp))
self.nightExp = QRadioButton()
self.nightExp.setText('night')
self.nightExp.toggled.connect(lambda: self.btnstate(self.nightExp))
self.offExp = QRadioButton()
self.offExp.setText('off')
self.offExp.toggled.connect(lambda: self.btnstate(self.offExp))
self.defaultExp = QRadioButton()
self.defaultExp.setText('default')
self.defaultExp.toggled.connect(lambda: self.btnstate(self.defaultExp))
self.sportsExp = QRadioButton()
self.sportsExp.setText('sports')
self.sportsExp.toggled.connect(lambda: self.btnstate(self.sportsExp))
self.longExp = QRadioButton()
self.longExp.setText('verylong')
self.longExp.toggled.connect(lambda: self.btnstate(self.longExp))
self.spotExp = QRadioButton()
self.spotExp.setText('spotlight')
self.spotExp.toggled.connect(lambda: self.btnstate(self.spotExp))
self.backExp = QRadioButton()
self.backExp.setText('backlight')
self.backExp.toggled.connect(lambda: self.btnstate(self.backExp))
self.fireExp = QRadioButton()
self.fireExp.setText('fireworks')
self.fireExp.toggled.connect(lambda: self.btnstate(self.fireExp))
self.antiExp = QRadioButton()
self.antiExp.setText('antishake')
self.antiExp.toggled.connect(lambda: self.btnstate(self.antiExp))
self.fixedExp = QRadioButton()
self.fixedExp.setText('fixedfps')
self.fixedExp.toggled.connect(lambda: self.btnstate(self.fixedExp))
self.beachExp = QRadioButton()
self.beachExp.setText('beach')
self.beachExp.toggled.connect(lambda: self.btnstate(self.beachExp))
self.snowExp = QRadioButton()
self.snowExp.setText('snow')
self.snowExp.toggled.connect(lambda: self.btnstate(self.snowExp))
self.nightpExp = QRadioButton()
self.nightpExp.setText('nightpreview')
self.nightpExp.toggled.connect(lambda: self.btnstate(self.nightpExp))
self.defaultExp.setChecked(True)
hbox1 = QHBoxLayout()
hbox1.addWidget(self.autoExp)
hbox1.addWidget(self.longExp)
hbox1.addWidget(self.nightExp)
hbox1.addWidget(self.defaultExp)
hbox1.addWidget(self.spotExp)
hbox1.addWidget(self.sportsExp)
hbox1.addWidget(self.offExp)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.backExp)
hbox2.addWidget(self.fireExp)
hbox2.addWidget(self.antiExp)
hbox2.addWidget(self.fixedExp)
hbox2.addWidget(self.beachExp)
hbox2.addWidget(self.snowExp)
hbox2.addWidget(self.nightpExp)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.midBottomGroupBox.setLayout(layout)
def abtnstate(self, state):
if state.text() == 'auto':
if state.isChecked():
self.awbvar = 'auto'
elif state.text() == 'fluorescent':
if state.isChecked():
self.awbvar = 'fluorescent'
elif state.text() == 'incandescent':
if state.isChecked():
self.awbvar = 'incandescent'
elif state.text() == 'off':
if state.isChecked():
self.awbvar = 'off'
elif state.text() == 'default':
if state.isChecked():
self.awbvar = ''
elif state.text() == 'sun':
if state.isChecked():
self.awbvar = 'sun'
elif state.text() == 'cloud':
if state.isChecked():
self.awbvar = 'cloud'
elif state.text() == 'shade':
if state.isChecked():
self.awbvar = 'shade'
elif state.text() == 'tungsten':
if state.isChecked():
self.awbvar = 'tungsten'
elif state.text() == 'flash':
if state.isChecked():
self.awbvar = 'flash'
elif state.text() == 'horizon':
if state.isChecked():
self.awbvar = 'horizon'
def btnstate(self, state):
if state.text() == 'auto':
if state.isChecked():
self.expvar = 'auto'
elif state.text() == 'night':
if state.isChecked():
self.expvar = 'night'
elif state.text() == 'verylong':
if state.isChecked():
self.expvar = 'verylong'
elif state.text() == 'off':
if state.isChecked():
self.expvar = 'off'
elif state.text() == 'default':
if state.isChecked():
self.expvar = ''
elif state.text() == 'sports':
if state.isChecked():
self.expvar = 'sports'
elif state.text() == 'spotlight':
if state.isChecked():
self.expvar = 'spotlight'
elif state.text() == 'backlight':
if state.isChecked():
self.expvar = 'backlight'
elif state.text() == 'fireworks':
if state.isChecked():
self.expvar = 'fireworks'
elif state.text() == 'antishake':
if state.isChecked():
self.expvar = 'antishake'
        elif state.text() == 'fixedfps':
            if state.isChecked():
                self.expvar = 'fixedfps'
        elif state.text() == 'beach':
if state.isChecked():
self.expvar = 'beach'
elif state.text() == 'snow':
if state.isChecked():
self.expvar = 'snow'
elif state.text() == 'nightpreview':
if state.isChecked():
self.expvar = 'nightpreview'
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Camera Options')
captureBtn = QPushButton('Take Picture')
captureBtn.clicked.connect(self.takePictureThread)
setNormOptionsBtn = QPushButton('Set Normal Options')
setNormOptionsBtn.clicked.connect(self.normalSettings)
setDarkOptionsBtn = QPushButton('Set Low Light Options')
setDarkOptionsBtn.clicked.connect(self.darkSettings)
previewBtn = QPushButton('Camera Preview')
previewBtn.clicked.connect(self.cameraPreviewThread)
showPlotsBtn = QPushButton('Show Plots')
showPlotsBtn.clicked.connect(self.showPlots)
showImageBtn = QPushButton('Show Image')
showImageBtn.clicked.connect(lambda: self.showImage(showImageBtn.text()))
importImageBtn = QPushButton('Import Image')
importImageBtn.clicked.connect(self.importImageThread)
saveImageBtn = QPushButton('Save Image')
saveImageBtn.clicked.connect(self.saveImage)
showRedImageBtn = QPushButton('Show Red')
showRedImageBtn.clicked.connect(lambda: self.showImage(showRedImageBtn.text()))
showBlueImageBtn = QPushButton('Show Blue')
showBlueImageBtn.clicked.connect(lambda: self.showImage(showBlueImageBtn.text()))
showGreenImageBtn = QPushButton('Show Green')
showGreenImageBtn.clicked.connect(lambda: self.showImage(showGreenImageBtn.text()))
saveAllBtn = QPushButton('Save All')
saveAllBtn.clicked.connect(self.saveAllThread)
vbox1 = QVBoxLayout()
vbox1.addWidget(captureBtn)
vbox1.addWidget(setNormOptionsBtn)
vbox1.addWidget(setDarkOptionsBtn)
vbox1.addWidget(previewBtn)
vbox2 = QVBoxLayout()
vbox2.addWidget(showImageBtn)
vbox2.addWidget(showPlotsBtn)
vbox2.addWidget(importImageBtn)
vbox2.addWidget(saveImageBtn)
vbox3 = QVBoxLayout()
vbox3.addWidget(showRedImageBtn)
vbox3.addWidget(showGreenImageBtn)
vbox3.addWidget(showBlueImageBtn)
vbox3.addWidget(saveAllBtn)
layout = QHBoxLayout()
layout.addLayout(vbox1)
layout.addLayout(vbox2)
layout.addLayout(vbox3)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def takePictureThread(self):
self.statusbar = 'Taking Picture...'
captureThread = threading.Thread(target=self.takePicture)
captureThread.start()
def importImageThread(self):
self.statusbar = 'Importing Image...'
self.image = QFileDialog.getOpenFileName(self, 'Choose Image', os.sep.join((os.path.expanduser('~'), 'Desktop')),
'Image Files (*.png *.jpg *.jpeg)')
importThread = threading.Thread(target=self.importImage)
importThread.start()
def cameraPreviewThread(self):
self.statusbar = 'Loading Preview...'
previewThread = threading.Thread(target=self.cameraPreview)
previewThread.start()
def saveAllThread(self):
self.statusbar = 'Saving Files..'
self.directory = QFileDialog.getExistingDirectory(self, 'Choose Directory', os.path.expanduser('~'))
saveThread = threading.Thread(target=self.saveAll)
saveThread.start()
def takePicture(self):
iso = int(float(self.isoEntry.text()))
resolution_string = self.resEntry.text().split('x')
resolution = (int(float(resolution_string[0])), int(float(resolution_string[1])))
delay = int(float(self.delayEntry.text()))
brightness = int(float(self.brightEntry.text()))
contrast = int(float(self.contrastEntry.text()))
shutterspeed = int(float(self.shutterEntry.text()))
zoom = tuple(map(float, self.zoomEntry.text().split(',')))
exposuremode = self.expvar
awbmode = self.awbvar
self.rgb_array = RPiCamera.takePicture(iso=iso, timeout=delay, resolution=resolution, exposure=exposuremode,
brightness=brightness, contrast=contrast, shutterspeed=shutterspeed,
zoom=zoom, awb_mode=awbmode)
red_intensity, green_intensity, blue_intensity, intensity = RPiCamera.returnIntensity(self.rgb_array)
intensity_array = '\n'.join(['R:' + '{:.3f}'.format(red_intensity),
'G:' + '{:.3f}'.format(green_intensity),
'B:' + '{:.3f}'.format(blue_intensity),
'I:' + '{:.3f}'.format(intensity)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def normalSettings(self):
self.delayEntry.setText('5')
self.prevTimeEntry.setText('10')
self.shutterEntry.setText('0')
def darkSettings(self):
self.delayEntry.setText('50')
self.prevTimeEntry.setText('50')
self.shutterEntry.setText('6000000')
def cameraPreview(self):
iso = int(float(self.isoEntry.text()))
resolution_string = self.resEntry.text().split('x')
resolution = (int(float(resolution_string[0])), int(float(resolution_string[1])))
delay = int(float(self.prevTimeEntry.text()))
brightness = int(float(self.brightEntry.text()))
contrast = int(float(self.contrastEntry.text()))
shutterspeed = int(float(self.shutterEntry.text()))
zoom = tuple(map(float, self.zoomEntry.text().split(',')))
exposuremode = self.expvar
awbmode = self.awbvar
RPiCamera.startPreview(iso=iso, timeout=delay, resolution=resolution, exposure=exposuremode,
brightness=brightness, contrast=contrast, shutterspeed=shutterspeed,
zoom=zoom, awb_mode=awbmode)
self.statusbar = 'Ready'
def showPlots(self):
try:
RPiCamera.showPlot(self.rgb_array)
self.statusbar = 'Ready'
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Array not loaded, make sure you take picture or import an image first.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def showImage(self, text):
try:
if text == 'Show Red':
RPiCamera.showImage(self.rgb_array, 'r')
elif text == 'Show Green':
RPiCamera.showImage(self.rgb_array, 'g')
elif text == 'Show Blue':
RPiCamera.showImage(self.rgb_array, 'b')
else:
RPiCamera.showImage(self.rgb_array)
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Array not loaded, make sure you take picture or import an image first.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def saveImage(self):
filename = QFileDialog.getSaveFileName(self, 'Save Image As', os.sep.join((os.path.expanduser('~'), 'Desktop')), 'Image Files (*.png *.jpg *.jpeg)')
RPiCamera.saveImage(self.rgb_array, filename[0])
def saveAll(self):
foldername = 'ISO={}-Delay={}-Resolution={}-Brightness={}-Contrast={}-ShutterSpeed={}' \
'-Exposure={}-AutoWhiteBalance={}-' \
'Zoom={}'.format(self.isoEntry.text(), self.delayEntry.text(), self.resEntry.text(),
self.brightEntry.text(), self.contrastEntry.text(),
self.shutterEntry.text(), self.expvar, self.awbvar, self.zoomEntry.text())
RPiCamera.saveAllImages(self.rgb_array, self.directory, foldername)
self.statusbar = 'Ready'
def importImage(self):
self.rgb_array = RPiCamera.importImage(self.image[0])
red_intensity, green_intensity, blue_intensity, intensity = RPiCamera.returnIntensity(self.rgb_array)
intensity_array = '\n'.join(['R:' + '{:.3f}'.format(red_intensity),
'G:' + '{:.3f}'.format(green_intensity),
'B:' + '{:.3f}'.format(blue_intensity),
'I:' + '{:.3f}'.format(intensity)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Image Intensity Data')
self.intensityLbl = QLabel('Not Taken')
layout = QHBoxLayout()
layout.addWidget(self.intensityLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
class CameraMainWindow(QMainWindow):
def __init__(self):
super(CameraMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = CameraCenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.center()
self.setWindowTitle('Camera Control GUI')
self.statusBarTimer = QTimer(self)
self.statusBarTimer.timeout.connect(self.statusUpdate)
self.statusBarTimer.start(100)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def statusUpdate(self):
self.statusBar().showMessage(self.cwidget.statusbar)
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
# def quitApp(self):
# QCoreApplication.instance().quit()
class RGBCenterWidget(QWidget):
def __init__(self):
super(RGBCenterWidget, self).__init__()
self.initUI()
# self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.statusbar = 'Ready'
self.createTopGroupBox()
self.createMidGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QVBoxLayout()
topLayout.addWidget(self.topGroupBox)
topLayout.addWidget(self.midGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopGroupBox(self):
self.topGroupBox = QGroupBox('Integration Time')
self.it2_4ms = QRadioButton()
self.it2_4ms.setText('2.4ms')
self.it2_4ms.toggled.connect(lambda: self.itstate(self.it2_4ms))
self.it24ms = QRadioButton()
self.it24ms.setText('24ms')
self.it24ms.toggled.connect(lambda: self.itstate(self.it24ms))
self.it50ms = QRadioButton()
self.it50ms.setText('50ms')
self.it50ms.toggled.connect(lambda: self.itstate(self.it50ms))
self.it101ms = QRadioButton()
self.it101ms.setText('101ms')
self.it101ms.toggled.connect(lambda: self.itstate(self.it101ms))
self.it154ms = QRadioButton()
self.it154ms.setText('154ms')
self.it154ms.toggled.connect(lambda: self.itstate(self.it154ms))
self.it700ms = QRadioButton()
self.it700ms.setText('700ms')
self.it700ms.toggled.connect(lambda: self.itstate(self.it700ms))
self.it2_4ms.setChecked(True)
layout = QHBoxLayout()
layout.addWidget(self.it2_4ms)
layout.addWidget(self.it24ms)
layout.addWidget(self.it50ms)
layout.addWidget(self.it101ms)
layout.addWidget(self.it154ms)
layout.addWidget(self.it700ms)
layout.addStretch(1)
self.topGroupBox.setLayout(layout)
def createMidGroupBox(self):
self.midGroupBox = QGroupBox('Gain')
self.gain1 = QRadioButton()
self.gain1.setText('1X')
self.gain1.toggled.connect(lambda: self.gnstate(self.gain1))
self.gain4 = QRadioButton()
self.gain4.setText('4X')
self.gain4.toggled.connect(lambda: self.gnstate(self.gain4))
self.gain16 = QRadioButton()
self.gain16.setText('16X')
self.gain16.toggled.connect(lambda: self.gnstate(self.gain16))
self.gain60 = QRadioButton()
self.gain60.setText('60X')
self.gain60.toggled.connect(lambda: self.gnstate(self.gain60))
self.gain1.setChecked(True)
layout = QHBoxLayout()
layout.addWidget(self.gain1)
layout.addWidget(self.gain4)
layout.addWidget(self.gain16)
layout.addWidget(self.gain60)
layout.addStretch(1)
self.midGroupBox.setLayout(layout)
def itstate(self, state):
if state.text() == '2.4ms':
if state.isChecked():
self.itvar = '2.4'
elif state.text() == '24ms':
if state.isChecked():
self.itvar = '24'
elif state.text() == '50ms':
if state.isChecked():
self.itvar = '50'
elif state.text() == '101ms':
if state.isChecked():
self.itvar = '101'
elif state.text() == '154ms':
if state.isChecked():
self.itvar = '154'
elif state.text() == '700ms':
if state.isChecked():
self.itvar = '700'
def gnstate(self, state):
if state.text() == '1X':
if state.isChecked():
self.gainvar = '1'
elif state.text() == '4X':
if state.isChecked():
self.gainvar = '4'
elif state.text() == '16X':
if state.isChecked():
self.gainvar = '16'
elif state.text() == '60X':
if state.isChecked():
self.gainvar = '60'
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Sensor Options')
captureBtn = QPushButton('Capture Data')
captureBtn.clicked.connect(self.captureDataThread)
setNormOptionsBtn = QPushButton('Set Normal Options')
setNormOptionsBtn.clicked.connect(self.normalSettings)
setDarkOptionsBtn = QPushButton('Set Low Light Options')
setDarkOptionsBtn.clicked.connect(self.darkSettings)
saveBtn = QPushButton('Save Data')
saveBtn.clicked.connect(self.saveData)
layout = QVBoxLayout()
layout.addWidget(captureBtn)
layout.addWidget(setNormOptionsBtn)
layout.addWidget(setDarkOptionsBtn)
layout.addWidget(saveBtn)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def captureDataThread(self):
self.statusbar = 'Capturing Data...'
captureThread = threading.Thread(target=self.captureData)
captureThread.start()
def captureData(self):
self.red_intensity, self.green_intensity, self.blue_intensity, self.clear_unfiltered, self.lux,\
self.color_temperature = RGBSensor.Capture(integrationtime=float(self.itvar), gain=int(self.gainvar))
intensity_array = '\n'.join(['R:' + '{}'.format(self.red_intensity),
'G:' + '{}'.format(self.green_intensity),
'B:' + '{}'.format(self.blue_intensity),
'Clear:' + '{}'.format(self.clear_unfiltered),
'Luminosity:{} lux'.format(self.lux),
'Color Temperature:{} K'.format(self.color_temperature)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def normalSettings(self):
self.gain1.setChecked(True)
self.it2_4ms.setChecked(True)
def darkSettings(self):
self.gain60.setChecked(True)
self.it700ms.setChecked(True)
def saveData(self):
RGBSensor.saveData(self.red_intensity, self.green_intensity, self.blue_intensity, self.clear_unfiltered,
self.lux, self.color_temperature)
self.statusbar = 'Ready'
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Sensor Data')
self.intensityLbl = QLabel('Not Taken')
layout = QHBoxLayout()
layout.addWidget(self.intensityLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
class RGBMainWindow(QMainWindow):
def __init__(self):
super(RGBMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = RGBCenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.center()
self.setWindowTitle('RGB Sensor GUI')
self.statusBarTimer = QTimer(self)
self.statusBarTimer.timeout.connect(self.statusUpdate)
self.statusBarTimer.start(100)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def statusUpdate(self):
self.statusBar().showMessage(self.cwidget.statusbar)
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
# def quitApp(self):
# QCoreApplication.instance().quit()
def startGUI():
app = QApplication(sys.argv)
mw = GUIMainWindow()
# cw = GUICenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
def startCameraGUI():
app = QApplication(sys.argv)
mw = CameraMainWindow()
# cw = CameraCenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
def startRGBSensorGUI():
app = QApplication(sys.argv)
mw = RGBMainWindow()
# cw = RGBCenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
| gpl-3.0 | 4,477,484,512,611,405,000 | 34.548441 | 156 | 0.614324 | false |
andersonresende/django | django/test/client.py | 2 | 24532 | from __future__ import unicode_literals
import sys
import os
import re
import mimetypes
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest, ISO_8859_1, UTF_8
from django.core.signals import (request_started, request_finished,
got_request_exception)
from django.db import close_old_connections
from django.http import SimpleCookie, HttpRequest, QueryDict
from django.template import TemplateDoesNotExist
from django.test import signals
from django.utils.functional import curry, SimpleLazyObject
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils import six
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
from django.test.utils import ContextList
__all__ = ('Client', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be seeked and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after he's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
"""
    An HTTP handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
store.setdefault('context', ContextList()).append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
is_file = lambda thing: hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend([to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
]])
else:
lines.extend([to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
]])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
if hasattr(file, 'content_type'):
content_type = file.content_type
else:
content_type = mimetypes.guess_type(file.name)[0]
if content_type is None:
content_type = 'application/octet-stream'
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, os.path.basename(file.name))),
to_bytes('Content-Type: %s' % content_type),
b'',
file.read()
]
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(path)
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
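# Illustrative sketch (added for clarity): a RequestFactory request is handed
# straight to a view callable; no middleware or URLconf is involved.
# ``_example_view`` and the path are hypothetical, and configured Django
# settings are assumed when this is actually called.
def _example_request_factory_usage():
    from django.http import HttpResponse

    def _example_view(request):
        return HttpResponse('hello, %s' % request.method)

    rf = RequestFactory()
    request = rf.get('/hello/')
    response = _example_view(request)
    return response.status_code  # 200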
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
if apps.is_installed('django.contrib.sessions'):
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
if cookie:
return engine.SessionStore(cookie.value)
else:
s = engine.SessionStore()
s.save()
self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
return s
return {}
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
        and passes it to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
got_request_exception.connect(self.store_exc_info, dispatch_uid="request-exception")
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(
lambda: urlresolvers.resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid="request-exception")
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect, or the user is inactive, or if the sessions framework is
not available.
"""
from django.contrib.auth import authenticate, login
user = authenticate(**credentials)
if (user and user.is_active and
apps.is_installed('django.contrib.sessions')):
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
return True
else:
return False
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((url, response.status_code))
url = urlsplit(url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
# Prevent loops
if response.redirect_chain[-1] in response.redirect_chain[0:-1]:
break
return response
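# Illustrative sketch (added for clarity): typical Client usage, including
# redirect following and session-backed login. The URL and credentials are
# hypothetical; a configured Django project is assumed when this is called.
def _example_client_usage():
    client = Client()
    response = client.get('/redirect-me/', follow=True)
    # Each followed hop is recorded as an (url, status_code) pair.
    chain = response.redirect_chain
    logged_in = client.login(username='jane', password='secret')
    return chain, logged_in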
| bsd-3-clause | -1,923,242,841,704,961,000 | 37.451411 | 112 | 0.587111 | false |
cul-it/Invenio | modules/bibrank/lib/bibrank_tag_based_indexer.py | 1 | 22836 | # -*- coding: utf-8 -*-
## Ranking of records using different parameters and methods.
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import os
import sys
import time
import string
import ConfigParser
from invenio.config import \
CFG_SITE_LANG, \
CFG_ETCDIR, \
CFG_PREFIX
from invenio.search_engine import perform_request_search
from invenio.bibrank_citation_indexer import get_citation_weight, print_missing, get_cit_dict, insert_into_cit_db
from invenio.bibrank_downloads_indexer import *
from invenio.dbquery import run_sql, serialize_via_marshal, deserialize_via_marshal, \
wash_table_column_name, get_table_update_time
from invenio.errorlib import register_exception
from invenio.bibtask import task_get_option, write_message, task_sleep_now_if_required
from invenio.bibindex_engine import create_range_list
from invenio.intbitset import intbitset
options = {}
def remove_auto_cites(dic):
"""Remove auto-cites and dedupe."""
for key in dic.keys():
new_list = dic.fromkeys(dic[key]).keys()
try:
new_list.remove(key)
except ValueError:
pass
dic[key] = new_list
return dic
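# Illustrative sketch (added for clarity): self-citations are dropped and
# duplicates collapse; the record ids below are hypothetical.
def _example_remove_auto_cites():
    dic = {10: [10, 11, 11, 12]}
    return remove_auto_cites(dic)  # {10: [11, 12]} (list order not guaranteed)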
def citation_repair_exec():
"""Repair citation ranking method"""
## repair citations
for rowname in ["citationdict","reversedict"]:
## get dic
dic = get_cit_dict(rowname)
## repair
write_message("Repairing %s" % rowname)
dic = remove_auto_cites(dic)
        ## store the repaired citation dict
insert_into_cit_db(dic, rowname)
return
def download_weight_filtering_user_repair_exec ():
"""Repair download weight filtering user ranking method"""
write_message("Repairing for this ranking method is not defined. Skipping.")
return
def download_weight_total_repair_exec():
"""Repair download weight total ranking method"""
write_message("Repairing for this ranking method is not defined. Skipping.")
return
def file_similarity_by_times_downloaded_repair_exec():
"""Repair file similarity by times downloaded ranking method"""
write_message("Repairing for this ranking method is not defined. Skipping.")
return
def single_tag_rank_method_repair_exec():
"""Repair single tag ranking method"""
write_message("Repairing for this ranking method is not defined. Skipping.")
return
def citation_exec(rank_method_code, name, config):
"""Rank method for citation analysis"""
#first check if this is a specific task
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
if task_get_option("cmd") == "print-missing":
num = task_get_option("num")
print_missing(num)
else:
dict = get_citation_weight(rank_method_code, config)
if dict:
if task_get_option("id") or task_get_option("collection") or \
task_get_option("modified"):
# user have asked to citation-index specific records
# only, so we should not update citation indexer's
# last run time stamp information
begin_date = None
intoDB(dict, begin_date, rank_method_code)
else:
write_message("No need to update the indexes for citations.")
def download_weight_filtering_user(run):
return bibrank_engine(run)
def download_weight_total(run):
return bibrank_engine(run)
def file_similarity_by_times_downloaded(run):
return bibrank_engine(run)
def download_weight_filtering_user_exec (rank_method_code, name, config):
"""Ranking by number of downloads per User.
Only one full Text Download is taken in account for one
specific userIP address"""
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
time1 = time.time()
dic = fromDB(rank_method_code)
last_updated = get_lastupdated(rank_method_code)
keys = new_downloads_to_index(last_updated)
filter_downloads_per_hour(keys, last_updated)
dic = get_download_weight_filtering_user(dic, keys)
intoDB(dic, begin_date, rank_method_code)
time2 = time.time()
return {"time":time2-time1}
def download_weight_total_exec(rank_method_code, name, config):
"""rankink by total number of downloads without check the user ip
if users downloads 3 time the same full text document it has to be count as 3 downloads"""
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
time1 = time.time()
dic = fromDB(rank_method_code)
last_updated = get_lastupdated(rank_method_code)
keys = new_downloads_to_index(last_updated)
filter_downloads_per_hour(keys, last_updated)
dic = get_download_weight_total(dic, keys)
intoDB(dic, begin_date, rank_method_code)
time2 = time.time()
return {"time":time2-time1}
def file_similarity_by_times_downloaded_exec(rank_method_code, name, config):
"""update dictionnary {recid:[(recid, nb page similarity), ()..]}"""
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
time1 = time.time()
dic = fromDB(rank_method_code)
last_updated = get_lastupdated(rank_method_code)
keys = new_downloads_to_index(last_updated)
filter_downloads_per_hour(keys, last_updated)
dic = get_file_similarity_by_times_downloaded(dic, keys)
intoDB(dic, begin_date, rank_method_code)
time2 = time.time()
return {"time":time2-time1}
def single_tag_rank_method_exec(rank_method_code, name, config):
"""Creating the rank method data"""
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
rnkset = {}
rnkset_old = fromDB(rank_method_code)
rnkset_new = single_tag_rank(config)
rnkset = union_dicts(rnkset_old, rnkset_new)
intoDB(rnkset, begin_date, rank_method_code)
def single_tag_rank(config):
"""Connect the given tag with the data from the kb file given"""
write_message("Loading knowledgebase file", verbose=9)
kb_data = {}
records = []
write_message("Reading knowledgebase file: %s" % \
config.get(config.get("rank_method", "function"), "kb_src"))
input = open(config.get(config.get("rank_method", "function"), "kb_src"), 'r')
data = input.readlines()
for line in data:
if not line[0:1] == "#":
            parts = string.split(string.strip(line), "---")
            kb_data[string.strip(parts[0])] = parts[1]
write_message("Number of lines read from knowledgebase file: %s" % len(kb_data))
tag = config.get(config.get("rank_method", "function"), "tag")
tags = config.get(config.get("rank_method", "function"), "check_mandatory_tags").split(", ")
if tags == ['']:
tags = ""
records = []
for (recids, recide) in options["recid_range"]:
task_sleep_now_if_required(can_stop_too=True)
write_message("......Processing records #%s-%s" % (recids, recide))
recs = run_sql("SELECT id_bibrec, value FROM bib%sx, bibrec_bib%sx WHERE tag=%%s AND id_bibxxx=id and id_bibrec >=%%s and id_bibrec<=%%s" % (tag[0:2], tag[0:2]), (tag, recids, recide))
valid = intbitset(trailing_bits=1)
valid.discard(0)
for key in tags:
newset = intbitset()
newset += [recid[0] for recid in (run_sql("SELECT id_bibrec FROM bib%sx, bibrec_bib%sx WHERE id_bibxxx=id AND tag=%%s AND id_bibxxx=id and id_bibrec >=%%s and id_bibrec<=%%s" % (tag[0:2], tag[0:2]), (key, recids, recide)))]
valid.intersection_update(newset)
if tags:
recs = filter(lambda x: x[0] in valid, recs)
records = records + list(recs)
write_message("Number of records found with the necessary tags: %s" % len(records))
records = filter(lambda x: x[0] in options["validset"], records)
rnkset = {}
for key, value in records:
if kb_data.has_key(value):
if not rnkset.has_key(key):
rnkset[key] = float(kb_data[value])
else:
                # keep the highest weight seen for this record
                if float(kb_data[value]) > rnkset[key]:
                    rnkset[key] = float(kb_data[value])
else:
rnkset[key] = 0
write_message("Number of records available in rank method: %s" % len(rnkset))
return rnkset
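# Illustrative sketch (added for clarity): the knowledgebase file read above
# is a plain-text file of "value---rank" lines, with '#' marking comments.
# This mirrors the string.split(..., "---") parsing in single_tag_rank;
# the sample line is hypothetical.
def _example_kb_line():
    line = "Phys.Rev.Lett.---9.452\n"
    parts = string.split(string.strip(line), "---")
    return string.strip(parts[0]), parts[1]  # ('Phys.Rev.Lett.', '9.452')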
def get_lastupdated(rank_method_code):
"""Get the last time the rank method was updated"""
res = run_sql("SELECT rnkMETHOD.last_updated FROM rnkMETHOD WHERE name=%s", (rank_method_code, ))
if res:
return res[0][0]
else:
raise Exception("Is this the first run? Please do a complete update.")
def intoDB(dict, date, rank_method_code):
"""Insert the rank method data into the database"""
mid = run_sql("SELECT id from rnkMETHOD where name=%s", (rank_method_code, ))
del_rank_method_codeDATA(rank_method_code)
    serdata = serialize_via_marshal(dict)
    midstr = str(mid[0][0])
run_sql("INSERT INTO rnkMETHODDATA(id_rnkMETHOD, relevance_data) VALUES (%s,%s)", (midstr, serdata,))
if date:
run_sql("UPDATE rnkMETHOD SET last_updated=%s WHERE name=%s", (date, rank_method_code))
# FIXME: the following is a workaround for the citation indexer
# memory troubles, when Apache WSGI daemon processes may end up
# doubling the memory after citation dictionary is updated;
# therefore let us restart the WSGI daemon application after the
# citation indexer finished, which relieves this problem. The
# restart is done via touching invenio.wsgi file. The proper fix
# for this problem would be strict separation between citation
# indexer updating dicts and citation searcher loading dicts.
if rank_method_code == 'citation':
os.system('touch ' + os.path.join(CFG_PREFIX, 'var', 'www-wsgi',
'invenio.wsgi'))
def fromDB(rank_method_code):
"""Get the data for a rank method"""
id = run_sql("SELECT id from rnkMETHOD where name=%s", (rank_method_code, ))
res = run_sql("SELECT relevance_data FROM rnkMETHODDATA WHERE id_rnkMETHOD=%s", (id[0][0], ))
if res:
return deserialize_via_marshal(res[0][0])
else:
return {}
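# Illustrative sketch (added for clarity): rank data is stored as a
# marshal-serialized {recid: weight} mapping, so a round trip through the
# two helpers is the identity. The sample mapping is hypothetical.
def _example_rank_data_round_trip():
    rnkset = {77: 0.25, 78: 1.0}
    blob = serialize_via_marshal(rnkset)
    return deserialize_via_marshal(blob) == rnkset  # True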
def del_rank_method_codeDATA(rank_method_code):
"""Delete the data for a rank method"""
id = run_sql("SELECT id from rnkMETHOD where name=%s", (rank_method_code, ))
run_sql("DELETE FROM rnkMETHODDATA WHERE id_rnkMETHOD=%s", (id[0][0], ))
def del_recids(rank_method_code, range_rec):
"""Delete some records from the rank method"""
id = run_sql("SELECT id from rnkMETHOD where name=%s", (rank_method_code, ))
res = run_sql("SELECT relevance_data FROM rnkMETHODDATA WHERE id_rnkMETHOD=%s", (id[0][0], ))
if res:
rec_dict = deserialize_via_marshal(res[0][0])
write_message("Old size: %s" % len(rec_dict))
for (recids, recide) in range_rec:
for i in range(int(recids), int(recide)):
if rec_dict.has_key(i):
del rec_dict[i]
write_message("New size: %s" % len(rec_dict))
intoDB(rec_dict, begin_date, rank_method_code)
else:
write_message("Create before deleting!")
def union_dicts(dict1, dict2):
"Returns union of the two dicts."
union_dict = {}
for (key, value) in dict1.iteritems():
union_dict[key] = value
for (key, value) in dict2.iteritems():
union_dict[key] = value
return union_dict
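# Illustrative sketch (added for clarity): on key collisions the second dict
# wins, which is what lets freshly computed ranks override stored ones in
# single_tag_rank_method_exec above.
def _example_union_dicts():
    return union_dicts({1: 'old', 2: 'kept'}, {1: 'new'})  # {1: 'new', 2: 'kept'}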
def rank_method_code_statistics(rank_method_code):
"""Print statistics"""
method = fromDB(rank_method_code)
    max = -999999
    maxcount = 0
    min = 999999
    mincount = 0
for (recID, value) in method.iteritems():
if value < min and value > 0:
min = value
if value > max:
max = value
for (recID, value) in method.iteritems():
if value == min:
mincount += 1
if value == max:
maxcount += 1
write_message("Showing statistic for selected method")
write_message("Method name: %s" % getName(rank_method_code))
write_message("Short name: %s" % rank_method_code)
write_message("Last run: %s" % get_lastupdated(rank_method_code))
write_message("Number of records: %s" % len(method))
write_message("Lowest value: %s - Number of records: %s" % (min, mincount))
write_message("Highest value: %s - Number of records: %s" % (max, maxcount))
write_message("Divided into 10 sets:")
for i in range(1, 11):
setcount = 0
distinct_values = {}
lower = -1.0 + ((float(max + 1) / 10)) * (i - 1)
upper = -1.0 + ((float(max + 1) / 10)) * i
for (recID, value) in method.iteritems():
if value >= lower and value <= upper:
setcount += 1
distinct_values[value] = 1
write_message("Set %s (%s-%s) %s Distinct values: %s" % (i, lower, upper, len(distinct_values), setcount))
def check_method(rank_method_code):
write_message("Checking rank method...")
if len(fromDB(rank_method_code)) == 0:
write_message("Rank method not yet executed, please run it to create the necessary data.")
else:
if len(add_recIDs_by_date(rank_method_code)) > 0:
write_message("Records modified, update recommended")
else:
write_message("No records modified, update not necessary")
def bibrank_engine(run):
"""Run the indexing task.
Return 1 in case of success and 0 in case of failure.
"""
try:
import psyco
psyco.bind(single_tag_rank)
psyco.bind(single_tag_rank_method_exec)
psyco.bind(serialize_via_marshal)
psyco.bind(deserialize_via_marshal)
except StandardError, e:
pass
startCreate = time.time()
try:
options["run"] = []
options["run"].append(run)
for rank_method_code in options["run"]:
task_sleep_now_if_required(can_stop_too=True)
cfg_name = getName(rank_method_code)
write_message("Running rank method: %s." % cfg_name)
file = CFG_ETCDIR + "/bibrank/" + rank_method_code + ".cfg"
config = ConfigParser.ConfigParser()
try:
config.readfp(open(file))
except StandardError, e:
write_message("Cannot find configurationfile: %s" % file, sys.stderr)
raise StandardError
cfg_short = rank_method_code
cfg_function = config.get("rank_method", "function") + "_exec"
cfg_repair_function = config.get("rank_method", "function") + "_repair_exec"
cfg_name = getName(cfg_short)
options["validset"] = get_valid_range(rank_method_code)
if task_get_option("collection"):
l_of_colls = string.split(task_get_option("collection"), ", ")
recIDs = perform_request_search(c=l_of_colls)
recIDs_range = []
for recID in recIDs:
recIDs_range.append([recID, recID])
options["recid_range"] = recIDs_range
elif task_get_option("id"):
options["recid_range"] = task_get_option("id")
elif task_get_option("modified"):
options["recid_range"] = add_recIDs_by_date(rank_method_code, task_get_option("modified"))
elif task_get_option("last_updated"):
options["recid_range"] = add_recIDs_by_date(rank_method_code)
else:
write_message("No records specified, updating all", verbose=2)
min_id = run_sql("SELECT min(id) from bibrec")[0][0]
max_id = run_sql("SELECT max(id) from bibrec")[0][0]
options["recid_range"] = [[min_id, max_id]]
if task_get_option("quick") == "no":
write_message("Recalculate parameter not used, parameter ignored.", verbose=9)
if task_get_option("cmd") == "del":
del_recids(cfg_short, options["recid_range"])
elif task_get_option("cmd") == "add":
func_object = globals().get(cfg_function)
func_object(rank_method_code, cfg_name, config)
elif task_get_option("cmd") == "stat":
rank_method_code_statistics(rank_method_code)
elif task_get_option("cmd") == "check":
check_method(rank_method_code)
elif task_get_option("cmd") == "print-missing":
func_object = globals().get(cfg_function)
func_object(rank_method_code, cfg_name, config)
elif task_get_option("cmd") == "repair":
func_object = globals().get(cfg_repair_function)
func_object()
else:
write_message("Invalid command found processing %s" % rank_method_code, sys.stderr)
raise StandardError
except StandardError, e:
write_message("\nException caught: %s" % e, sys.stderr)
register_exception()
raise StandardError
if task_get_option("verbose"):
showtime((time.time() - startCreate))
return 1
def get_valid_range(rank_method_code):
"""Return a range of records"""
write_message("Getting records from collections enabled for rank method.", verbose=9)
res = run_sql("SELECT collection.name FROM collection, collection_rnkMETHOD, rnkMETHOD WHERE collection.id=id_collection and id_rnkMETHOD=rnkMETHOD.id and rnkMETHOD.name=%s", (rank_method_code, ))
l_of_colls = []
for coll in res:
l_of_colls.append(coll[0])
if len(l_of_colls) > 0:
recIDs = perform_request_search(c=l_of_colls)
else:
recIDs = []
valid = intbitset()
valid += recIDs
return valid
def add_recIDs_by_date(rank_method_code, dates=""):
"""Return recID range from records modified between DATES[0] and DATES[1].
If DATES is not set, then add records modified since the last run of
the ranking method RANK_METHOD_CODE.
"""
if not dates:
try:
dates = (get_lastupdated(rank_method_code), '')
except Exception:
dates = ("0000-00-00 00:00:00", '')
if dates[0] is None:
dates = ("0000-00-00 00:00:00", '')
query = """SELECT b.id FROM bibrec AS b WHERE b.modification_date >= %s"""
if dates[1]:
query += " and b.modification_date <= %s"
query += " ORDER BY b.id ASC"""
if dates[1]:
res = run_sql(query, (dates[0], dates[1]))
else:
res = run_sql(query, (dates[0], ))
alist = create_range_list([row[0] for row in res])
if not alist:
write_message("No new records added since last time method was run")
return alist
def getName(rank_method_code, ln=CFG_SITE_LANG, type='ln'):
"""Returns the name of the method if it exists"""
try:
rnkid = run_sql("SELECT id FROM rnkMETHOD where name=%s", (rank_method_code, ))
if rnkid:
rnkid = str(rnkid[0][0])
res = run_sql("SELECT value FROM rnkMETHODNAME where type=%s and ln=%s and id_rnkMETHOD=%s", (type, ln, rnkid))
if not res:
res = run_sql("SELECT value FROM rnkMETHODNAME WHERE ln=%s and id_rnkMETHOD=%s and type=%s", (CFG_SITE_LANG, rnkid, type))
if not res:
return rank_method_code
return res[0][0]
else:
raise Exception
except Exception:
write_message("Cannot run rank method, either given code for method is wrong, or it has not been added using the webinterface.")
raise Exception
def single_tag_rank_method(run):
return bibrank_engine(run)
def showtime(timeused):
"""Show time used for method"""
write_message("Time used: %d second(s)." % timeused, verbose=9)
def citation(run):
return bibrank_engine(run)
# Hack to put index-based sorting here, but this is very similar to the
# tag-based method and should re-use a lot of this code, so better to have
# it here than separate.
#
def index_term_count_exec(rank_method_code, name, config):
"""Creating the rank method data"""
write_message("Recreating index weighting data")
begin_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# we must recalculate these every time for all records, since the
# weighting of a record is determined by the index entries of _other_
# records
rnkset = calculate_index_term_count(config)
intoDB(rnkset, begin_date, rank_method_code)
def calculate_index_term_count(config):
"""Calculate the weight of a record set based on number of enries of a
tag from the record in another index...useful for authority files"""
records = []
if config.has_section("index_term_count"):
index = config.get("index_term_count","index_table_name")
tag = config.get("index_term_count","index_term_value_from_tag")
# check against possible SQL injection:
dummy = get_table_update_time(index)
tag = wash_table_column_name(tag)
else:
raise Exception("Config file " + config + " does not have index_term_count section")
return()
task_sleep_now_if_required(can_stop_too=True)
write_message("......Processing all records")
query = "SELECT id_bibrec, value FROM bib%sx, bibrec_bib%sx WHERE tag=%%s AND id_bibxxx=id" % \
(tag[0:2], tag[0:2]) # we checked that tag is safe
records = list(run_sql(query, (tag,)))
write_message("Number of records found with the necessary tags: %s" % len(records))
rnkset = {}
for key, value in records:
hits = 0
if len(value):
query = "SELECT hitlist from %s where term = %%s" % index # we checked that index is a table
row = run_sql(query, (value,))
if row and row[0] and row[0][0]:
#has to be prepared for corrupted data!
try:
hits = len(intbitset(row[0][0]))
                except StandardError:
hits = 0
rnkset[key] = hits
write_message("Number of records available in rank method: %s" % len(rnkset))
return rnkset
def index_term_count(run):
return bibrank_engine(run)
| gpl-2.0 | -8,720,981,345,532,647,000 | 40.294756 | 235 | 0.623621 | false |
silverapp/silver | silver/models/payment_methods.py | 1 | 8577 | # Copyright (c) 2017 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals
from typing import Union
from itertools import chain
from annoying.functions import get_object_or_None
from cryptography.fernet import InvalidToken, Fernet
from django.core.serializers.json import DjangoJSONEncoder
from django_fsm import TransitionNotAllowed
from model_utils.managers import InheritanceManager
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.utils import timezone
from silver import payment_processors
from silver.models import Invoice, Proforma
from silver.models.billing_entities import Customer
from silver.models.transactions import Transaction
class PaymentMethodInvalid(Exception):
pass
class PaymentMethod(models.Model):
class PaymentProcessors:
@classmethod
def as_choices(cls):
for name in settings.PAYMENT_PROCESSORS.keys():
yield (name, name)
@classmethod
def as_list(cls):
return [name for name in settings.PAYMENT_PROCESSORS.keys()]
payment_processor = models.CharField(choices=PaymentProcessors.as_choices(),
blank=False, null=False, max_length=256)
customer = models.ForeignKey(Customer, models.CASCADE)
added_at = models.DateTimeField(default=timezone.now)
data = models.JSONField(blank=True, null=True, default=dict, encoder=DjangoJSONEncoder)
verified = models.BooleanField(default=False)
canceled = models.BooleanField(default=False)
valid_until = models.DateTimeField(null=True, blank=True)
display_info = models.CharField(max_length=256, null=True, blank=True)
objects = InheritanceManager()
class Meta:
ordering = ['-id']
@property
def final_fields(self):
return ['payment_processor', 'customer', 'added_at']
@property
def irreversible_fields(self):
return ['verified', 'canceled']
def __init__(self, *args, **kwargs):
super(PaymentMethod, self).__init__(*args, **kwargs)
if self.id:
try:
payment_method_class = self.get_payment_processor().payment_method_class
if payment_method_class:
self.__class__ = payment_method_class
except AttributeError:
pass
@property
def transactions(self):
return self.transaction_set.all()
def get_payment_processor(self):
return payment_processors.get_instance(self.payment_processor)
def delete(self, using=None):
if not self.state == self.States.Uninitialized:
self.remove()
super(PaymentMethod, self).delete(using=using)
def encrypt_data(self, data: Union[str, bytes]) -> str:
if isinstance(data, str):
data = data.encode(encoding="utf-8")
key = settings.PAYMENT_METHOD_SECRET
return Fernet(key).encrypt(data).decode('utf-8')
def decrypt_data(self, crypted_data: Union[str, bytes]) -> str:
if not crypted_data:
return ""
if isinstance(crypted_data, str):
crypted_data = crypted_data.encode(encoding="utf-8")
key = settings.PAYMENT_METHOD_SECRET
return Fernet(key).decrypt(crypted_data).decode("utf-8")
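    @staticmethod
    def _example_fernet_round_trip():
        # Illustrative sketch (added for clarity, not part of the original
        # API): encrypt_data/decrypt_data above are a plain Fernet round
        # trip keyed by settings.PAYMENT_METHOD_SECRET. A throwaway key is
        # generated here so the sketch is self-contained; the payload is
        # hypothetical.
        key = Fernet.generate_key()
        token = Fernet(key).encrypt(b"4111111111111111")
        return Fernet(key).decrypt(token).decode("utf-8")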
def cancel(self):
if self.canceled:
raise ValidationError("You can't cancel a canceled payment method.")
cancelable_states = [Transaction.States.Initial,
Transaction.States.Pending]
transactions = self.transactions.filter(state__in=cancelable_states)
errors = []
for transaction in transactions:
if transaction.state == Transaction.States.Initial:
try:
transaction.cancel()
except TransitionNotAllowed:
errors.append("Transaction {} couldn't be canceled".format(transaction.uuid))
if transaction.state == Transaction.States.Pending:
payment_processor = self.get_payment_processor()
if (hasattr(payment_processor, 'void_transaction') and
not payment_processor.void_transaction(transaction)):
errors.append("Transaction {} couldn't be voided".format(transaction.uuid))
transaction.save()
if errors:
return errors
self.canceled = True
self.save()
return None
def clean_with_previous_instance(self, previous_instance):
if not previous_instance:
return
for field in self.final_fields:
old_value = getattr(previous_instance, field, None)
current_value = getattr(self, field, None)
if old_value != current_value:
raise ValidationError(
"Field '%s' may not be changed." % field
)
for field in self.irreversible_fields:
old_value = getattr(previous_instance, field, None)
current_value = getattr(self, field, None)
if old_value and old_value != current_value:
raise ValidationError(
"Field '%s' may not be changed anymore." % field
)
def full_clean(self, *args, **kwargs):
previous_instance = kwargs.pop('previous_instance', None)
super(PaymentMethod, self).full_clean(*args, **kwargs)
self.clean_with_previous_instance(previous_instance)
# this assumes that nobody calls clean and then modifies this object
# without calling clean again
setattr(self, '.cleaned', True)
@property
def allowed_currencies(self):
return self.get_payment_processor().allowed_currencies
@property
def public_data(self):
return {}
def __str__(self):
return u'{} - {} - {}'.format(self.customer,
self.get_payment_processor_display(),
self.pk)
def create_transactions_for_issued_documents(payment_method):
customer = payment_method.customer
if payment_method.canceled or not payment_method.verified:
return []
transactions = []
for document in chain(
Proforma.objects.filter(related_document=None, customer=customer,
state=Proforma.STATES.ISSUED),
Invoice.objects.filter(state=Invoice.STATES.ISSUED, customer=customer)
):
try:
transactions.append(Transaction.objects.create(
document=document, payment_method=payment_method
))
except ValidationError:
continue
return transactions
@receiver(pre_save)
def pre_payment_method_save(sender, instance=None, **kwargs):
if not isinstance(instance, PaymentMethod):
return
payment_method = instance
previous_instance = get_object_or_None(PaymentMethod, pk=payment_method.pk)
setattr(payment_method, '.previous_instance', previous_instance)
if not getattr(payment_method, '.cleaned', False):
payment_method.full_clean(previous_instance=previous_instance)
@receiver(post_save)
def post_payment_method_save(sender, instance, **kwargs):
if not isinstance(instance, PaymentMethod):
return
payment_method = instance
if hasattr(payment_method, '.cleaned'):
delattr(payment_method, '.cleaned')
previous_instance = getattr(payment_method, '.previous_instance', None)
if not (settings.SILVER_AUTOMATICALLY_CREATE_TRANSACTIONS or
not payment_method.verified or
(not payment_method.get_payment_processor().type ==
payment_processors.Types.Triggered)):
return
if not previous_instance or not previous_instance.verified:
create_transactions_for_issued_documents(payment_method)
| apache-2.0 | -5,897,001,218,787,682,000 | 32.244186 | 97 | 0.647779 | false |
Goldcap/django-selenium-testing | ppfa/selenium_tests/webdriver.py | 1 | 8427 | import os
import json
import redis
import shutil
import datetime
from ws4redis.redis_store import RedisMessage
from ws4redis.publisher import RedisPublisher
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, NoAlertPresentException
from django.test import TestCase
from django.conf import settings
from selenium_tests.models import PpfaTestAssertion
class PpfaWebDriver(TestCase):
browser = None
profile = None
logger = None
testObject = None
runObject = None
errors = []
publisher = None
profile_path = None
redis_key = "proxy_request"
redis_session = 1
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
assert_failed_requests = True
def __init__(self, *args, **kw):
self.r = redis.StrictRedis(host=self.REDIS_HOST, port=self.REDIS_PORT, db=0)
session = self.r.get("proxy_request")
if session:
self.redis_session = session
#print self.redis_session
super(PpfaWebDriver, self).__init__(*args, **kw)
def set_up(self):
self.clearSession(200)
self.clearSession(404)
self.clearSession(500)
self.publisher = RedisPublisher(facility='foobar', broadcast=True)
self.broadcast("Starting Test '"+self.runObject.ppfa_test.name+"'")
self.startup()
def count_requests(self):
requests_200 = self.getSession(200)
self.broadcast("Total Requests (200): %s" % len(requests_200))
requests_404 = self.getSession(404)
self.broadcast("Total Requests (404): %s" % len(requests_404))
if len(requests_404) > 0 and self.assert_failed_requests:
self.failassertion( "Assets Missing", "from", "pageload" )
for failure in requests_404:
print failure
self.broadcast(failure)
requests_500 = self.getSession(500)
self.broadcast("Total Requests (500): %s" % len(requests_500))
if len(requests_500) > 0 and self.assert_failed_requests:
self.failassertion( "Assets Broken", "from", "pageload" )
for failure in requests_500:
print failure
self.broadcast(failure)
def tear_down(self):
self.count_requests()
if self.shut_down():
return self.runObject
def startup(self):
self.broadcast("Starting Xvfb Display")
display = Display(visible=0, size=(1024, 768))
display.start()
self.broadcast("Starting Firefox Browser")
self.profile = webdriver.FirefoxProfile()
# Direct = 0, Manual = 1, PAC = 2, AUTODETECT = 4, SYSTEM = 5
self.profile.set_preference("network.proxy.type", 1)
self.profile.set_preference("network.proxy.http",settings.PROXY_HOST)
self.profile.set_preference("network.proxy.http_port",int(settings.PROXY_PORT))
self.profile.set_preference("network.proxy.ssl",settings.PROXY_HOST)
self.profile.set_preference("network.proxy.ssl_port",int(settings.PROXY_PORT))
self.profile.set_preference("general.useragent.override","ppfa_test_runner")
self.profile.update_preferences()
self.profile_path = os.path.join('tmp',self.profile.profile_dir)
#print "Destination is in %s" % self.profile_path
source = os.path.join(os.path.dirname(__file__),'cert8.db')
#print "Source is in %s" % source
shutil.copy2(source, self.profile_path)
self.browser = webdriver.Firefox(self.profile)
def setSession( self, id ):
self.redis_session = id
self.r.set(self.redis_key,self.redis_session)
def clearSession( self, status ):
self.r.zremrangebyrank(self.redis_key+"::"+str(status)+"::"+str(self.redis_session),0,-1)
def getSession( self, status ):
print "Looking for %s" % (self.redis_key+"::"+str(status)+"::"+str(self.redis_session))
results = self.r.zrange(self.redis_key+"::"+str(status)+"::"+str(self.redis_session),0,-1)
return results
def page_source(self):
time = datetime.datetime.now().strftime("%I%M%p_%B_%d_%Y")
path = "screens/"+str(self.runObject.id)
if not os.path.exists(path):
os.mkdir(path)
filename = self.redis_key+"_"+str(self.redis_session)+"_"+str(time)+".html"
with open(os.path.join(path, filename), 'wb') as temp_file:
temp_file.write(self.browser.page_source.encode('ascii','replace'))
def screencap(self):
time = datetime.datetime.now().strftime("%I%M%p_%B_%d_%Y")
path = "screens/"+str(self.runObject.id)
if not os.path.exists(path):
os.mkdir(path)
filename = self.redis_key+"_"+str(self.redis_session)+"_"+str(time)+".png"
print filename
self.browser.save_screenshot(os.path.join(path, filename))
def broadcast( self, message ):
print message
if self.publisher:
message = {"message":message}
self.publisher.publish_message(RedisMessage(json.dumps(message)))
def runassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
)
result = assertion.run_assertion(self.browser)
status_type = 'success'
if not result:
self.errors.append(assertion.id)
status_type = 'error'
self.logger.log("'%s' %s %s:: %s",[subject, verb, object, assertion.status_string],status_type)
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return result
def passassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
status=True
)
status_type = 'success'
self.logger.log("'%s' %s %s:: %s",[subject, verb, object, assertion.status_string],status_type)
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return False
def failassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
status=False
)
self.errors.append(assertion.id)
        status_type = 'error'
        self.logger.log("'%s' %s %s:: %s", [subject, verb, object, assertion.status_string], status_type)
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return False
def is_element_present(self, how, what):
try: self.browser.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.browser.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.browser.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def shut_down(self):
self.broadcast("Done Testing")
self.browser.quit()
for root, dirs, files in os.walk(self.profile_path, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
return True
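# Illustrative sketch (added for clarity): the proxy bookkeeping above stores
# request URLs in per-status Redis sorted sets named
# "<key>::<status>::<session>". This helper only recomposes that name; the
# arguments are hypothetical.
def _example_redis_key(status, session):
    return "proxy_request" + "::" + str(status) + "::" + str(session)
# _example_redis_key(404, 1) -> "proxy_request::404::1"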
| mit | 5,567,993,571,545,259,000 | 36.959459 | 103 | 0.568055 | false |
F5Networks/f5-common-python | f5/bigip/tm/asm/policies/parameters.py | 1 | 4409 | # coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from distutils.version import LooseVersion
from f5.bigip.resource import AsmResource
from f5.bigip.resource import Collection
class UrlParametersCollection(Collection):
"""BIG-IP® ASM Urls Parameters sub-collection."""
def __init__(self, urls_s):
self.__class__.__name__ = 'Parameters_s'
super(UrlParametersCollection, self).__init__(urls_s)
self._meta_data['object_has_stats'] = False
self._meta_data['allowed_lazy_attributes'] = [Parameter]
self._meta_data['required_json_kind'] = 'tm:asm:policies:urls:parameters:parametercollectionstate'
self._meta_data['attribute_registry'] = {
'tm:asm:policies:urls:parameters:parameterstate': Parameter
}
class ParametersCollection(Collection):
"""BIG-IP® ASM Policies Parameters sub-collection."""
def __init__(self, policy):
self.__class__.__name__ = 'Parameters_s'
super(ParametersCollection, self).__init__(policy)
self._meta_data['object_has_stats'] = False
self._meta_data['allowed_lazy_attributes'] = [Parameter]
self._meta_data['required_json_kind'] = 'tm:asm:policies:parameters:parametercollectionstate'
self._meta_data['attribute_registry'] = {
'tm:asm:policies:parameters:parameterstate': Parameter
}
class Parameters_s(object):
"""As Parameters classes are used twice as a sub-collection.
We need to utilize __new__ method in order to keep the user
interface consistent.
"""
def __new__(cls, container):
from f5.bigip.tm.asm.policies import Policy
from f5.bigip.tm.asm.policies.urls import Url
if isinstance(container, Policy):
return ParametersCollection(container)
if isinstance(container, Url):
return UrlParametersCollection(container)
class Parameter(object):
"""As Parameter classes are used twice as a sub-collection.
We need to utilize __new__ method in order to keep the user
interface consistent.
"""
def __new__(cls, container):
if isinstance(container, ParametersCollection):
return ParametersResource(container)
if isinstance(container, UrlParametersCollection):
return UrlParametersResource(container)
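# Illustrative sketch (added for clarity): the two factory classes above use
# __new__ to hand back a different concrete type depending on the container,
# so callers keep a single entry point. A minimal standalone version of the
# same pattern, with hypothetical names:
class _ExampleDictHandler(object):
    pass

class _ExampleDefaultHandler(object):
    pass

class _ExampleFactory(object):
    def __new__(cls, container):
        if isinstance(container, dict):
            return _ExampleDictHandler()
        return _ExampleDefaultHandler()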
class UrlParametersResource(AsmResource):
"""BIG-IP® ASM Urls Parameters resource."""
def __init__(self, urls_s):
self.__class__.__name__ = 'Parameter'
super(UrlParametersResource, self).__init__(urls_s)
self.tmos_v = urls_s._meta_data['bigip']._meta_data['tmos_version']
self._meta_data['required_json_kind'] = 'tm:asm:policies:urls:parameters:parameterstate'
def create(self, **kwargs):
"""Custom create method for v12.x and above.
Change of behavior in v12 where the returned selfLink is different
from target resource, requires us to append URI after object is
created. So any modify() calls will not lead to json kind
inconsistency when changing the resource attribute.
See issue #844
"""
if LooseVersion(self.tmos_v) < LooseVersion('12.0.0'):
return self._create(**kwargs)
else:
new_instance = self._create(**kwargs)
tmp_name = str(new_instance.id)
tmp_path = new_instance._meta_data['container']._meta_data['uri']
finalurl = tmp_path + tmp_name
new_instance._meta_data['uri'] = finalurl
return new_instance
class ParametersResource(AsmResource):
"""BIG-IP® ASM Urls Parameters resource."""
def __init__(self, policy):
self.__class__.__name__ = 'Parameter'
super(ParametersResource, self).__init__(policy)
self._meta_data['required_json_kind'] = 'tm:asm:policies:parameters:parameterstate'
| apache-2.0 | -8,019,605,598,075,704,000 | 37.304348 | 106 | 0.664018 | false |
ivbeg/lazyscraper | docs/conf.py | 1 | 8502 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# lazyscraper documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import lazyscraper
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'lazyscraper'
copyright = u'2018, Ivan Begtin'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = lazyscraper.__version__
# The full version, including alpha/beta/rc tags.
release = lazyscraper.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'lazyscraperdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'lazyscraper.tex',
u'lazyscraper documentation',
u'Ivan Begtin', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'lazyscraper',
u'lazyscraper documentation',
[u'Ivan Begtin'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'lazyscraper',
u'lazyscraper documentation',
u'Ivan Begtin',
'lazyscraper',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# sphinx.ext.intersphinx confs
intersphinx_mapping = {'python': ('https://docs.python.org/2', None)}
| apache-2.0 | -2,493,372,146,225,627,600 | 29.693141 | 84 | 0.705834 | false |
cheery/essence | essence2/graphics/patch9.py | 1 | 1727 | from surface import Surface
def borders(surface):
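    # Scan the left-most column and the top-most row for the run of opaque
    # (alpha > 0) guide pixels that mark the stretchable region of a 9-patch
    # image, returning the three horizontal and vertical cut positions.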
width, height = surface.size
y0 = 0
y1 = 0
x0 = 0
x1 = 0
i = 0
while i < height:
r,g,b,a = surface.at((0,i))
if a > 0:
y0 = i
break
i += 1
while i < height:
r,g,b,a = surface.at((0,i))
if a == 0:
y1 = i
break
i += 1
i = 0
while i < width:
r,g,b,a = surface.at((i,0))
if a > 0:
x0 = i
break
i += 1
while i < width:
r,g,b,a = surface.at((i,0))
if a == 0:
x1 = i
break
i += 1
return [1, x0, x1, width], [1, y0, y1, height]
class Patch9(object):
def __init__(self, surface):
self.surface = surface
self.subsurfaces = []
h, v = borders(surface)
for y in range(3):
row = []
for x in range(3):
area = (h[x], v[y]), (h[x+1]-h[x], v[y+1]-v[y])
row.append(surface.subsurface(area))
self.subsurfaces.append(row)
self.padding = h[1]-h[0], v[1]-v[0], h[3]-h[2], v[3]-v[2]
@staticmethod
def load(path):
return Patch9(Surface.load(path))
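    # Example usage (a sketch; the file name and draw callback are
    # hypothetical; note that comm_duck below relies on Python 2
    # tuple-parameter unpacking in its signature):
    #   patch = Patch9.load('button9.png')
    #   patch.comm_duck(draw, ((10, 10), (200, 50)))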
def comm_duck(self, target, ((x,y), (w,h))):
area = x,y,w,h
left, top, right, bottom = self.padding
h0, v0 = area[0], area[1]
h3, v3 = area[2] + h0, area[3] + v0
h = [h0, h0+left, h3-right, h3]
v = [v0, v0+top, v3-bottom, v3]
for y, row in enumerate(self.subsurfaces):
for x, surface in enumerate(row):
sector = (h[x], v[y]), (h[x+1]-h[x], v[y+1]-v[y])
target(surface, sector)
| gpl-3.0 | -2,912,530,288,493,200,000 | 25.984375 | 65 | 0.437174 | false |
debianitram/w2p-acreditacion | languages/es.py | 1 | 25147 | # -*- coding: utf-8 -*-
{
'!=': '!=',
'!langcode!': 'es',
'!langname!': 'Español',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualice" es una expresión opcional como "campo1=\'nuevo_valor\'". No se puede actualizar o eliminar resultados de un JOIN',
'%(nrows)s records found': '%(nrows)s registros encontrados',
'%s %%{position}': '%s %%{posición}',
'%s %%{row} deleted': '%s %%{fila} %%{eliminada}',
'%s %%{row} updated': '%s %%{fila} %%{actualizada}',
'%s selected': '%s %%{seleccionado}',
'%Y-%m-%d': '%d/%m/%Y',
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
'(something like "it-it")': '(algo como "it-it")',
'+ And': '+ And',
'+ Or': '+ Or',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'@markmin\x01An error occured, please [[reload %s]] the page': 'Ha ocurrido un error, por favor [[recargar %s]] la página',
'@markmin\x01Number of entries: **%s**': 'Número de entradas: **%s**',
'[Wiki]': '[Wiki]',
'A new version of web2py is available': 'Hay una nueva versión de web2py disponible',
'A new version of web2py is available: %s': 'Hay una nueva versión de web2py disponible: %s',
'About': 'Acerca de',
'about': 'acerca de',
'About application': 'Acerca de la aplicación',
'Abreviatura': 'Abreviatura',
'Access Control': 'Control de Acceso',
'Acreditado': 'Acreditado',
'Add': 'Añadir',
'Add Record': 'Add Record',
'Add record to database': 'Agrego un registro a la base de datos',
'Add this to the search as an AND term': 'Add this to the search as an AND term',
'Add this to the search as an OR term': 'Add this to the search as an OR term',
'additional code for your application': 'código adicional para su aplicación',
'admin': 'admin',
'admin disabled because no admin password': 'admin deshabilitado por falta de contraseña',
'admin disabled because not supported on google app engine': 'admin deshabilitado, no es soportado en GAE',
'admin disabled because unable to access password file': 'admin deshabilitado, imposible acceder al archivo con la contraseña',
'Admin is disabled because insecure channel': 'Admin deshabilitado, el canal no es seguro',
'Admin is disabled because unsecure channel': 'Admin deshabilitado, el canal no es seguro',
'Administración': 'Administración',
'Administrative interface': 'Interfaz administrativa',
'Administrative Interface': 'Interfaz Administrativa',
'Administrator Password:': 'Contraseña del Administrador:',
'Ajax Recipes': 'Recetas AJAX',
'An error occured, please %s the page': 'Ha ocurrido un error, por favor %s la página',
'And': 'Y',
'and rename it (required):': 'y renómbrela (requerido):',
'and rename it:': ' y renómbrelo:',
'Apellido': 'Apellido',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'admin deshabilitado, el canal no es seguro',
'application "%s" uninstalled': 'aplicación "%s" desinstalada',
'application compiled': 'aplicación compilada',
'application is compiled and cannot be designed': 'la aplicación está compilada y no puede ser modificada',
'Apply changes': 'Aplicar cambios',
'Appointment': 'Nombramiento',
'Are you sure you want to delete file "%s"?': '¿Está seguro que desea eliminar el archivo "%s"?',
'Are you sure you want to delete this object?': '¿Está seguro que desea borrar este objeto?',
'Are you sure you want to uninstall application "%s"': '¿Está seguro que desea desinstalar la aplicación "%s"',
'Are you sure you want to uninstall application "%s"?': '¿Está seguro que desea desinstalar la aplicación "%s"?',
'at': 'en',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCION: Inicio de sesión requiere una conexión segura (HTTPS) o localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCION: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.',
'ATTENTION: you cannot edit the running application!': 'ATENCION: no puede modificar la aplicación que está ejecutandose!',
'Authentication': 'Autenticación',
'Authentication failed at client DB!': '¡La autenticación ha fallado en la BDD cliente!',
'Authentication failed at main DB!': '¡La autenticación ha fallado en la BDD principal!',
'Available Databases and Tables': 'Bases de datos y tablas disponibles',
'Back': 'Atrás',
'Buy this book': 'Compra este libro',
"Buy web2py's book": "Buy web2py's book",
'Cache': 'Caché',
'cache': 'caché',
'Cache Keys': 'Llaves de la Caché',
'cache, errors and sessions cleaned': 'caché, errores y sesiones eliminados',
'Cannot be empty': 'No puede estar vacío',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se puede compilar: hay errores en su aplicación. Depure, corrija errores y vuelva a intentarlo.',
'cannot create file': 'no es posible crear archivo',
'cannot upload file "%(filename)s"': 'no es posible subir archivo "%(filename)s"',
'Cfecha': 'Cfecha',
'Change Password': 'Cambie la Contraseña',
'Change password': 'Cambie la contraseña',
'change password': 'cambie la contraseña',
'check all': 'marcar todos',
'Check to delete': 'Marque para eliminar',
'choose one': 'escoja uno',
'clean': 'limpiar',
'Clear': 'Limpiar',
'Clear CACHE?': '¿Limpiar CACHÉ?',
'Clear DISK': 'Limpiar DISCO',
'Clear RAM': 'Limpiar RAM',
'Click on the link %(link)s to reset your password': 'Pulse en el enlace %(link)s para reiniciar su contraseña',
'click to check for upgrades': 'haga clic para buscar actualizaciones',
'client': 'cliente',
'Client IP': 'IP del Cliente',
'Close': 'Cerrar',
'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export': 'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export',
'Comma-separated export of visible columns. Fields from other tables are exported as they appear on-screen but this may be slow for many rows': 'Comma-separated export of visible columns. Fields from other tables are exported as they appear on-screen but this may be slow for many rows',
'Community': 'Comunidad',
'compile': 'compilar',
'compiled application removed': 'aplicación compilada eliminada',
'Components and Plugins': 'Componentes y Plugins',
'Config.ini': 'Config.ini',
'Confirm Password': 'Confirm Password',
'Consultas Docente': 'Consultas Docente',
'contains': 'contiene',
'Controller': 'Controlador',
'Controllers': 'Controladores',
'controllers': 'controladores',
'Copyright': 'Copyright',
'create file with filename:': 'cree archivo con nombre:',
'Create new application': 'Cree una nueva aplicación',
'create new application:': 'nombre de la nueva aplicación:',
'Create New Page': 'Create New Page',
'Created By': 'Creado Por',
'Created On': 'Creado En',
'CSV': 'CSV',
'CSV (hidden cols)': 'CSV (columnas ocultas)',
'Current request': 'Solicitud en curso',
'Current response': 'Respuesta en curso',
'Current session': 'Sesión en curso',
'currently saved or': 'actualmente guardado o',
'Curso': 'Curso',
'Curso Persona': 'Curso Persona',
'Cursos': 'Cursos',
'customize me!': '¡Adáptame!',
'data uploaded': 'datos subidos',
'Database': 'Base de datos',
'Database %s select': 'selección en base de datos %s',
'database administration': 'administración de base de datos',
'Database Administration (appadmin)': 'Administración de Base de Datos (appadmin)',
'Date and Time': 'Fecha y Hora',
'DB': 'BDD',
'db': 'bdd',
'DB Model': 'Modelo BDD',
'defines tables': 'define tablas',
'Delete': 'Eliminar',
'delete': 'eliminar',
'delete all checked': 'eliminar marcados',
'Delete:': 'Eliminar:',
'Demo': 'Demostración',
'Deploy on Google App Engine': 'Despliegue en Google App Engine',
'Deployment Recipes': 'Recetas de despliegue',
'Description': 'Descripción',
'design': 'diseño',
'DESIGN': 'DISEÑO',
'Design': 'Design',
'Design for': 'Diseño por',
'detecting': 'detectando',
'DISK': 'DISCO',
'Disk Cache Keys': 'Llaves de Caché en Disco',
'Disk Cleared': 'Disco limpiado',
'Dni': 'Dni',
'DNI': 'DNI',
'DNI Tipo': 'DNI Tipo',
'Dni Tipo': 'Dni Tipo',
'Docente': 'Docente',
'Docs': 'Docs',
'Documentation': 'Documentación',
'Domicilio': 'Domicilio',
"Don't know what to do?": '¿No sabe que hacer?',
'done!': '¡hecho!',
'Download': 'Descargas',
'E-mail': 'Correo electrónico',
'edit': 'editar',
'EDIT': 'EDITAR',
'Edit': 'Editar',
'Edit application': 'Editar aplicación',
'edit controller': 'editar controlador',
'Edit current record': 'Edite el registro actual',
'Edit Page': 'Edit Page',
'Edit Page Media': 'Edit Page Media',
'Edit Profile': 'Editar Perfil',
'edit profile': 'editar perfil',
'Edit This App': 'Edite esta App',
'Editing file': 'Editando archivo',
'Editing file "%s"': 'Editando archivo "%s"',
'Email': 'Email',
'Email and SMS': 'Correo electrónico y SMS',
'Email sent': 'Correo electrónico enviado',
'End of impersonation': 'Fin de suplantación',
'enter a number between %(min)g and %(max)g': 'introduzca un número entre %(min)g y %(max)g',
'Enter a number between %(min)g and %(max)g': 'Valor entre %(min)g y %(max)g',
'Enter a valid email address': 'Correo no válido',
'enter a value': 'introduzca un valor',
'Enter a value': 'Ingrese un valor',
'Enter an integer between %(min)g and %(max)g': 'Valor entero entre %(min)g y %(max)g',
'enter an integer between %(min)g and %(max)g': 'introduzca un entero entre %(min)g y %(max)g',
'enter date and time as %(format)s': 'introduzca fecha y hora como %(format)s',
'Enter from %(min)g to %(max)g characters': 'Desde %(min)g a %(max)g caractéres',
'Error logs for "%(app)s"': 'Bitácora de errores en "%(app)s"',
'errors': 'errores',
'Errors': 'Errores',
'Errors in form, please check it out.': 'Hay errores en el formulario, por favor comprúebelo.',
'export as csv file': 'exportar como archivo CSV',
'Export:': 'Exportar:',
'exposes': 'expone',
'extends': 'extiende',
'failed to reload module': 'la recarga del módulo ha fallado',
'FAQ': 'FAQ',
'Fecha': 'Fecha',
'Fecha Fin': 'Fecha Fin',
'Fecha Inicio': 'Fecha Inicio',
'Fecha Inscripcion': 'Fecha Inscripción',
'file "%(filename)s" created': 'archivo "%(filename)s" creado',
'file "%(filename)s" deleted': 'archivo "%(filename)s" eliminado',
'file "%(filename)s" uploaded': 'archivo "%(filename)s" subido',
'file "%(filename)s" was not deleted': 'archivo "%(filename)s" no fué eliminado',
'file "%s" of %s restored': 'archivo "%s" de %s restaurado',
'file changed on disk': 'archivo modificado en el disco',
'file does not exist': 'archivo no existe',
'file saved on %(time)s': 'archivo guardado %(time)s',
'file saved on %s': 'archivo guardado %s',
'Finalizo': 'Finalizo',
'First name': 'Nombre',
'First Name': 'Nombre',
'Forgot username?': '¿Olvidó el nombre de usuario?',
'Forms and Validators': 'Formularios y validadores',
'Free Applications': 'Aplicaciones Libres',
'Fsearch': 'Fsearch',
'Functions with no doctests will result in [passed] tests.': 'Funciones sin doctests equivalen a pruebas [aceptadas].',
'Gestión Cursos': 'Gestión Cursos',
'Gestión Personas': 'Gestión Personas',
'Gestión Profesiones': 'Gestión Profesiones',
'Group %(group_id)s created': 'Grupo %(group_id)s creado',
'Group ID': 'ID de Grupo',
'Group uniquely assigned to user %(id)s': 'Grupo asignado únicamente al usuario %(id)s',
'Groups': 'Grupos',
'Hello World': 'Hola Mundo',
'help': 'ayuda',
'Helping web2py': 'Helping web2py',
'Home': 'Inicio',
'How did you get here?': '¿Cómo llegaste aquí?',
'HTML': 'HTML',
'HTML export of visible columns': 'HTML export of visible columns',
'htmledit': 'htmledit',
'Id': 'Id',
'Impersonate': 'Suplantar',
'import': 'importar',
'Import/Export': 'Importar/Exportar',
'in': 'en',
'includes': 'incluye',
'Index': 'Índice',
'Inicio': 'Inicio',
'Inscripto': 'Inscripto',
'insert new': 'inserte nuevo',
'insert new %s': 'inserte nuevo %s',
'Installed applications': 'Aplicaciones instaladas',
'Insufficient privileges': 'Privilegios insuficientes',
'internal error': 'error interno',
'Internal State': 'Estado Interno',
'Introduction': 'Introducción',
'Invalid action': 'Acción inválida',
'Invalid email': 'Correo electrónico inválido',
'invalid expression': 'expresión inválida',
'Invalid login': 'Inicio de sesión inválido',
'invalid password': 'contraseña inválida',
'Invalid Query': 'Consulta inválida',
'invalid request': 'solicitud inválida',
'Invalid reset password': 'Reinicio de contraseña inválido',
'invalid ticket': 'tiquete inválido',
'Is Active': 'Está Activo',
'JSON': 'JSON',
'JSON export of visible columns': 'JSON export of visible columns',
'Key': 'Llave',
'language file "%(filename)s" created/updated': 'archivo de lenguaje "%(filename)s" creado/actualizado',
'Language files (static strings) updated': 'Archivos de lenguaje (cadenas estáticas) actualizados',
'languages': 'lenguajes',
'Languages': 'Lenguajes',
'languages updated': 'lenguajes actualizados',
'Last name': 'Apellido',
'Last saved on:': 'Guardado en:',
'Layout': 'Diseño de página',
'Layout Plugins': 'Plugins de diseño',
'Layouts': 'Diseños de páginas',
'License for': 'Licencia para',
'Live Chat': 'Chat en vivo',
'loading...': 'cargando...',
'Log In': 'Log In',
'Log Out': 'Log Out',
'Logged in': 'Sesión iniciada',
'Logged out': 'Sesión finalizada',
'Login': 'Inicio de sesión',
'login': 'inicio de sesión',
'Login disabled by administrator': 'Inicio de sesión deshabilitado por el administrador',
'Login to the Administrative Interface': 'Inicio de sesión para la Interfaz Administrativa',
'logout': 'fin de sesión',
'Logout': 'Fin de sesión',
'Lost Password': 'Contraseña perdida',
'Lost password?': '¿Olvidó la contraseña?',
'lost password?': '¿olvidó la contraseña?',
'Lugar': 'Lugar',
'Main Menu': 'Menú principal',
'Manage Cache': 'Gestionar la Caché',
'Matricula': 'Matrícula',
'Menu Model': 'Modelo "menu"',
'merge': 'combinar',
'Models': 'Modelos',
'models': 'modelos',
'Modified By': 'Modificado Por',
'Modified On': 'Modificado En',
'Modules': 'Módulos',
'modules': 'módulos',
'Monto': 'Monto',
'must be YYYY-MM-DD HH:MM:SS!': '¡debe ser DD/MM/YYYY HH:MM:SS!',
'must be YYYY-MM-DD!': '¡debe ser DD/MM/YYYY!',
'My Sites': 'Mis Sitios',
'Name': 'Nombre',
'New': 'Nuevo',
'New %(entity)s': 'Nuevo %(entity)s',
'new application "%s" created': 'nueva aplicación "%s" creada',
'New password': 'Contraseña nueva',
'New Record': 'Registro nuevo',
'new record inserted': 'nuevo registro insertado',
'New Search': 'Nueva búsqueda',
'next %s rows': 'next %s rows',
'next 100 rows': '100 filas siguientes',
'NO': 'NO',
'No databases in this application': 'No hay bases de datos en esta aplicación',
'No records found': 'No se han encontrado registros',
'Nombre': 'Nombre',
'Nombre Apellido': 'Nombre Apellido',
'Not authorized': 'No autorizado',
'not authorized': 'No está autorizado',
'Not Authorized': 'NO ESTÁ AUTORIZADO',
'not in': 'no en',
'Nro Recibo': 'Nro Recibo',
'Object or table name': 'Nombre del objeto o tabla',
'Old password': 'Contraseña vieja',
'Online book': 'Online book',
'Online examples': 'Ejemplos en línea',
'Or': 'O',
'or import from csv file': 'o importar desde archivo CSV',
'or provide application url:': 'o provea URL de la aplicación:',
'Origin': 'Origen',
'Original/Translation': 'Original/Traducción',
'Other Plugins': 'Otros Plugins',
'Other Recipes': 'Otras Recetas',
'Overview': 'Resumen',
'pack all': 'empaquetar todo',
'pack compiled': 'empaquetar compilados',
'Pago': 'Pago',
'Password': 'Contraseña',
'Password changed': 'Contraseña cambiada',
"Password fields don't match": 'Los campos de contraseña no coinciden',
'Password reset': 'Reinicio de contraseña',
'Peeking at file': 'Visualizando archivo',
'Person': 'Person',
'Persona': 'Persona',
'Personas': 'Personas',
'Phone': 'Teléfono',
'please input your password again': 'por favor introduzca su contraseña otra vez',
'Plugins': 'Plugins',
'Powered by': 'Este sitio usa',
'Precio': 'Precio',
'Preface': 'Prefacio',
'previous 100 rows': '100 filas anteriores',
'Profesion': 'Profesión',
'Profile': 'Perfil',
'Profile updated': 'Perfil actualizado',
'Python': 'Python',
'Query Not Supported: %s': 'Consulta No Soportada: %s',
'Query:': 'Consulta:',
'Quick Examples': 'Ejemplos Rápidos',
'RAM': 'RAM',
'RAM Cache Keys': 'Llaves de la Caché en RAM',
'Ram Cleared': 'Ram Limpiada',
'Recipes': 'Recetas',
'Record': 'Registro',
'Record %(id)s created': 'Registro %(id)s creado',
'Record Created': 'Registro Creado',
'record does not exist': 'el registro no existe',
'Record ID': 'ID de Registro',
'Record id': 'Id de registro',
'register': 'regístrese',
'Register': 'Regístrese',
'Registration identifier': 'Identificador de Registro',
'Registration key': 'Llave de registro',
'Registration successful': 'Registro con éxito',
'reload': 'recargar',
'Remember me (for 30 days)': 'Recuérdame (durante 30 días)',
'remove compiled': 'eliminar compiladas',
'Reportes': 'Reportes',
'Request Reset Password': 'Request Reset Password',
'Request reset password': 'Solicitar reinicio de contraseña',
'Reset password': 'Reiniciar contraseña',
'Reset Password key': 'Restaurar Llave de la Contraseña',
'Resolve Conflict file': 'archivo Resolución de Conflicto',
'restore': 'restaurar',
'Retrieve username': 'Recuperar nombre de usuario',
'revert': 'revertir',
'Role': 'Rol',
'Rows in Table': 'Filas en la tabla',
'Rows selected': 'Filas seleccionadas',
'save': 'guardar',
'Saved file hash:': 'Hash del archivo guardado:',
'Search': 'Buscar',
'Search Pages': 'Search Pages',
'Semantic': 'Semántica',
'Services': 'Servicios',
'session expired': 'sesión expirada',
'shell': 'terminal',
'Sign Up': 'Sign Up',
'site': 'sitio',
'Size of cache:': 'Tamaño de la Caché:',
'some files could not be removed': 'algunos archivos no pudieron ser removidos',
'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow': 'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow',
'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.': 'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.',
'Sprint': 'Sprint',
'start': 'inicio',
'Start building a new search': 'Start building a new search',
'starts with': 'comienza por',
'state': 'estado',
'static': 'estáticos',
'Static files': 'Archivos estáticos',
'Statistics': 'Estadísticas',
'Stylesheet': 'Hoja de estilo',
'Submit': 'Enviar',
'submit': 'enviar',
'Success!': '¡Correcto!',
'Sugerencia': 'Sugerencia',
'Support': 'Soporte',
'Sure you want to delete this object?': '¿Está seguro que desea eliminar este objeto?',
'Surname': 'Surname',
'Table': 'tabla',
'Table name': 'Nombre de la tabla',
'Telefono': 'Teléfono',
'test': 'probar',
'Testing application': 'Probando aplicación',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" es una condición como "db.tabla1.campo1==\'valor\'". Algo como "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'la lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador',
'The Core': 'El Núcleo',
'the data representation, define database tables and sets': 'la representación de datos, define tablas y conjuntos de base de datos',
'The output of the file is a dictionary that was rendered by the view %s': 'La salida de dicha función es un diccionario que es desplegado por la vista %s',
'the presentations layer, views are also known as templates': 'la capa de presentación, las vistas también son llamadas plantillas',
'The Views': 'Las Vistas',
'There are no controllers': 'No hay controladores',
'There are no models': 'No hay modelos',
'There are no modules': 'No hay módulos',
'There are no static files': 'No hay archivos estáticos',
'There are no translators, only default language is supported': 'No hay traductores, sólo el lenguaje por defecto es soportado',
'There are no views': 'No hay vistas',
'these files are served without processing, your images go here': 'estos archivos son servidos sin procesar, sus imágenes van aquí',
'This App': 'Esta Aplicación',
'This email already has an account': 'Este correo electrónico ya tiene una cuenta',
'This is a copy of the scaffolding application': 'Esta es una copia de la aplicación de andamiaje',
'This is the %(filename)s template': 'Esta es la plantilla %(filename)s',
'Ticket': 'Tiquete',
'Time in Cache (h:m:s)': 'Tiempo en Caché (h:m:s)',
'Timestamp': 'Marca de tiempo',
'Titulo': 'Título',
'to previous version.': 'a la versión previa.',
'To emulate a breakpoint programatically, write:': 'Emular un punto de ruptura programáticamente, escribir:',
'to use the debugger!': '¡usar el depurador!',
'toggle breakpoint': 'alternar punto de ruptura',
'Toggle comment': 'Alternar comentario',
'Toggle Fullscreen': 'Alternar pantalla completa',
'Too short': 'Demasiado corto',
'too short': 'demasiado corto',
'Total Abonado': 'Total Abonado',
'Traceback': 'Traceback',
'translation strings for the application': 'cadenas de caracteres de traducción para la aplicación',
'try': 'intente',
'try something like': 'intente algo como',
'TSV (Excel compatible)': 'TSV (compatible Excel)',
'TSV (Excel compatible, hidden cols)': 'TSV (compatible Excel, columnas ocultas)',
'TSV (Spreadsheets)': 'TSV (Spreadsheets)',
'TSV (Spreadsheets, hidden cols)': 'TSV (Spreadsheets, hidden cols)',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'No es posible verificar la existencia de actualizaciones',
'unable to create application "%s"': 'no es posible crear la aplicación "%s"',
'unable to delete file "%(filename)s"': 'no es posible eliminar el archivo "%(filename)s"',
'Unable to download': 'No es posible la descarga',
'Unable to download app': 'No es posible descargar la aplicación',
'unable to parse csv file': 'no es posible analizar el archivo CSV',
'unable to uninstall "%s"': 'no es posible desinstalar "%s"',
'uncheck all': 'desmarcar todos',
'uninstall': 'desinstalar',
'unknown': 'desconocido',
'update': 'actualizar',
'update all languages': 'actualizar todos los lenguajes',
'Update:': 'Actualice:',
'upload application:': 'subir aplicación:',
'Upload existing application': 'Suba una aplicación existente',
'upload file:': 'suba archivo:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para crear consultas más complejas.',
'User %(id)s is impersonating %(other_id)s': 'El usuario %(id)s está suplantando %(other_id)s',
'User %(id)s Logged-in': 'El usuario %(id)s inició la sesión',
'User %(id)s Logged-out': 'El usuario %(id)s finalizó la sesión',
'User %(id)s Password changed': 'Contraseña del usuario %(id)s cambiada',
'User %(id)s Password reset': 'Contraseña del usuario %(id)s reiniciada',
'User %(id)s Profile updated': 'Actualizado el perfil del usuario %(id)s',
'User %(id)s Registered': 'Usuario %(id)s Registrado',
'User %(id)s Username retrieved': 'Se ha recuperado el nombre de usuario del usuario %(id)s',
'User %(username)s Logged-in': 'El usuario %(username)s inició la sesión',
"User '%(username)s' Logged-in": "El usuario '%(username)s' inició la sesión",
"User '%(username)s' Logged-out": "El usuario '%(username)s' finalizó la sesión",
'User Id': 'Id de Usuario',
'User ID': 'ID de Usuario',
'User Logged-out': 'El usuario finalizó la sesión',
'Username': 'Nombre de usuario',
'Username retrieve': 'Recuperar nombre de usuario',
'Valor': 'Valor',
'Value already in database or empty': 'El valor se encuentra en la Base de Datos o está vacío',
'value already in database or empty': 'el valor ya existe en la base de datos o está vacío',
'value not allowed': 'valor no permitido',
'Value not in database': 'Valor no encontrado en la Base de Datos',
'value not in database': 'el valor no está en la base de datos',
'Verify Password': 'Verificar Contraseña',
'Version': 'Versión',
'versioning': 'versiones',
'Videos': 'Vídeos',
'View': 'Vista',
'view': 'vista',
'View %(entity)s': 'Ver %(entity)s',
'Views': 'Vistas',
'views': 'vistas',
'web2py is up to date': 'web2py está actualizado',
'web2py Recent Tweets': 'Tweets Recientes de web2py',
'Welcome': 'Bienvenido',
'Welcome %s': 'Bienvenido %s',
'Welcome to web2py': 'Bienvenido a web2py',
'Welcome to web2py!': '¡Bienvenido a web2py!',
'Which called the function %s located in the file %s': 'La cual llamó la función %s localizada en el archivo %s',
'Working...': 'Trabajando...',
'XML': 'XML',
'XML export of columns shown': 'XML export of columns shown',
'YES': 'SÍ',
'You are successfully running web2py': 'Usted está ejecutando web2py exitosamente',
'You can modify this application and adapt it to your needs': 'Usted puede modificar esta aplicación y adaptarla a sus necesidades',
'You visited the url %s': 'Usted visitó la url %s',
'Your username is: %(username)s': 'Su nombre de usuario es: %(username)s',
}
| gpl-2.0 | -836,672,160,750,387,600 | 45.297398 | 287 | 0.70387 | false |
oinume/tomahawk | tomahawk/base.py | 1 | 13671 | # -*- coding: utf-8 -*-
import multiprocessing
import os
import re
import platform
from six import print_
import six
import string
import sys
from tomahawk import (
__version__,
TimeoutError,
)
from tomahawk.color import (
create_coloring_object
)
from tomahawk.constants import (
DEFAULT_TIMEOUT,
DEFAULT_COMMAND_OUTPUT_FORMAT,
DEFAULT_EXPECT_DELAY,
DEFAULT_EXPECT_ENCODING,
OUTPUT_FORMAT_CONTROLL_CHARS,
)
from tomahawk.log import create_logger
from tomahawk.utils import (
check_hosts,
get_options_from_conf,
read_login_password,
read_login_password_from_stdin,
read_sudo_password,
read_sudo_password_from_stdin
)
class BaseContext(object):
def __init__(self, options = {}, out = sys.stdout, err = sys.stderr):
self.options = options
self.out = out
self.err = err
self.arguments, self.source, self.destination = None, None, None
class BaseMain(object):
def __init__(self, script_path):
self.script_path = script_path
self.arg_parser = self.create_argument_parser(script_path)
self.options = self.arg_parser.parse_args(sys.argv[1:])
conf_options = None
if self.options.conf:
conf_options = get_options_from_conf(
os.path.basename(script_path),
self.options.conf
)
args = conf_options + sys.argv[1:]
# Re-parse command line options because conf_options added
self.options = self.arg_parser.parse_args(args)
self.log = create_logger(
None,
self.options.debug or self.options.deep_debug,
self.options.deep_debug
)
if conf_options:
self.log.debug("Applying options %s from %s" % (str(conf_options), self.options.conf))
def run(self):
try:
if self.options.profile:
file = '%s.prof.%d' % (os.path.basename(self.script_path), os.getpid())
cProfile = __import__('cProfile')
pstats = __import__('pstats')
cProfile.runctx("self.do_run()", globals(), locals(), file)
p = pstats.Stats(file)
p.strip_dirs()
p.sort_stats('time', 'calls')
p.print_stats()
return 0 # TODO: return exit status
else:
return self.do_run()
except KeyboardInterrupt:
print_()
print_('Keyboard interrupt. exiting...')
def do_run(self):
raise Exception("This is a template method implemented by sub-class")
def check_hosts(self):
return check_hosts(self.options.__dict__, self.log, self.arg_parser.format_usage)
def confirm_execution_on_production(self, message):
if os.environ.get('TOMAHAWK_ENV') != 'production':
return
        # six.moves.input maps to raw_input on Python 2 and input on Python 3
        answer = six.moves.input(message)
        if answer == 'yes':
print_()
else:
print_('Command execution was cancelled.')
sys.exit(0)
@classmethod
def add_common_arguments(cls, parser):
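        # NOTE: registering both '-h/--hosts' and '-H/--hosts' below only works
        # if the parser was created with add_help=False and
        # conflict_handler='resolve' (assumed to happen in
        # create_argument_parser); with plain argparse defaults this would
        # raise a conflicting-option error.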
parser.add_argument(
'-h', '--hosts', metavar='HOSTS',
            help='DEPRECATED. Use -H instead. (Will be deleted in v0.8.0)',
)
parser.add_argument(
'-H', '--hosts', metavar='HOSTS',
            help='Host names to send commands to. (split with ",")',
)
parser.add_argument(
'-f', '--hosts-files', metavar='HOSTS_FILES',
            help='Files listing host names. (split with ",")'
)
parser.add_argument(
'-c', '--continue-on-error', action='store_true', default=None,
            help='Command execution continues despite any errors.'
)
parser.add_argument(
'-p', '--parallel', metavar='NUM', type=int, default=1,
            help='Number of processes to use for parallel command execution. (default: 1)'
)
parser.add_argument(
'-l', '--prompt-login-password', action='store_true',
help='Prompt a password for ssh authentication.'
)
parser.add_argument(
'--login-password-stdin', action='store_true',
help='Read a password for ssh authentication from stdin.'
)
parser.add_argument(
'-t', '--timeout', metavar='SECONDS', type=int, default=DEFAULT_TIMEOUT,
help='Specify expect timeout in seconds. (default: %d)' % (DEFAULT_TIMEOUT)
)
parser.add_argument(
'--expect-encoding', metavar='ENCODING', default=DEFAULT_EXPECT_ENCODING,
help='Expect encoding for password prompt. (default: %s)' % (DEFAULT_EXPECT_ENCODING)
)
parser.add_argument(
'-d', '--delay', type=int, default=0,
help='Command delay time in seconds. (default: 0)'
)
parser.add_argument(
'--expect-delay', type=float, default=DEFAULT_EXPECT_DELAY,
help='Expect delay time in seconds. (default: 0.05)'
)
parser.add_argument(
'-C', '--conf', metavar='FILE', default=None,
help='Configuration file path.'
)
parser.add_argument(
'-D', '--debug', action='store_true', default=False,
help='Enable debug output.',
)
parser.add_argument(
'--deep-debug', action='store_true', default=False,
help='Enable deeper debug output.',
)
parser.add_argument(
'--profile', action='store_true', help='Enable profiling.'
)
parser.add_argument(
'--version', action='version',
version='%(prog)s ' + __version__
+ ' with Python ' + '.'.join(map(str, sys.version_info[0:3]))
+ ' (' + platform.platform() + ')'
)
return parser
class BaseExecutor(object):
"""
A base class for CommandExecutor, RsyncExecutor
"""
def __init__(self, context, log, hosts=[], **kwargs):
"""
Constructor
Args:
            context -- a BaseContext carrying the parsed options and output streams
            log -- a logger instance
            hosts -- list of target host names (must be non-empty)
"""
self.processes_terminated = False
if context is None:
raise RuntimeError('Argument "context" required.')
if len(hosts) == 0:
raise RuntimeError('Argument "hosts" length must be > 0')
options = context.options
newline = False
login_password = None
if 'login_password' in kwargs:
login_password = kwargs['login_password']
elif options.get('prompt_login_password'):
login_password = read_login_password()
newline = True
elif options.get('login_password_stdin'):
login_password = read_login_password_from_stdin()
sudo_password = None
if 'sudo_password' in kwargs:
sudo_password = kwargs['sudo_password']
elif options.get('prompt_sudo_password'):
sudo_password = read_sudo_password()
elif options.get('sudo_password_stdin'):
sudo_password = read_sudo_password_from_stdin()
if newline:
print_()
self.context = context
self.log = log
self.hosts = hosts
self.login_password = login_password
self.sudo_password = sudo_password
self.raise_error = True
if options.get('continue_on_error'):
self.raise_error = False
self.process_pool = multiprocessing.Pool(processes = options.get('parallel', 1))
def process_async_results(
self,
async_results,
create_output,
create_timeout_message,
create_timeout_raise_error_message,
create_failure_message,
create_failure_raise_error_message,
create_failure_last_message,
):
out, err = self.context.out, self.context.err
color = create_coloring_object(out)
options = self.context.options
hosts_count = len(self.hosts)
finished = 0
error_hosts_count = 0
output_format = self.output_format(options.get('output_format', DEFAULT_COMMAND_OUTPUT_FORMAT))
if six.PY2:
output_format = output_format.decode(DEFAULT_EXPECT_ENCODING)
output_format_template = string.Template(output_format)
timeout = options.get('timeout', DEFAULT_TIMEOUT)
error_prefix = color.red(color.bold('[error]')) # insert newline for error messages
execution_info = {}
# Main loop continues until all processes are done
while finished < hosts_count:
for dict in async_results:
host = dict['host']
command = dict['command']
async_result = dict['async_result']
if not async_result.ready():
continue
exit_status = 1
command_output = ''
timeout_detail = None
try:
exit_status, command_output = async_result.get(timeout = timeout)
self.log.debug("host = %s, exit_status = %d" % (host, exit_status))
except (TimeoutError, multiprocessing.TimeoutError):
error = sys.exc_info()[1]
timeout_detail = str(error)
execution_info[host] = { 'timeout': 1 }
async_results.remove(dict)
finished += 1
output = create_output(color, output_format_template, command, host, exit_status, command_output)
execution_info[host] = {
'exit_status': exit_status,
'command_output': command_output,
'timeout': False,
}
if command_output == '':
                # if command_output is empty, chomp the trailing newline to avoid ugly output
output = re.sub(os.linesep + r'\Z', '', output)
if exit_status == 0:
if six.PY2:
output = output.encode(DEFAULT_EXPECT_ENCODING)
print_(output, file=out)
elif timeout_detail is not None:
print_('%s %s\n' % (
error_prefix,
create_timeout_message(color, output, timeout)
), file=out)
execution_info[host]['timeout'] = True
error_hosts_count += 1
if self.raise_error:
print_('%s %s\n' % (
error_prefix,
create_timeout_raise_error_message(color, command, host, timeout)
), file=err)
return 1
else:
print_('%s %s\n' % (
error_prefix,
create_failure_message(color, output, exit_status)
), file=out)
error_hosts_count += 1
if self.raise_error:
print_('%s %s' % (
error_prefix,
create_failure_raise_error_message(color, command, host)
), file=err)
return 1
# Free process pool
self.terminate_processes()
if error_hosts_count > 0:
hosts = ''
for h in self.hosts:
if execution_info[h]['exit_status'] != 0:
hosts += ' %s\n' % (h)
hosts = hosts.rstrip()
print_('%s %s' % (
error_prefix,
create_failure_last_message(color, command, hosts)
), file=err)
return 1
if options.get('verify_output'):
has_different_output = False
prev_output = None
hosts = ''
for h in self.hosts:
output = execution_info[h]['command_output']
self.log.debug("host: '%s', prev_output: '%s', output = '%s'" % (h, prev_output, output))
if prev_output != None and output != prev_output:
hosts += ' %s\n' % (h)
has_different_output = True
prev_output = output
hosts = hosts.rstrip()
if has_different_output:
print_("%s Detected different command output on following hosts.\n%s" \
% (color.red(error_prefix), hosts), file=err)
return 3
else:
print_(color.green('Verified output of all hosts.'), file=out)
return 0
def output_format(self, format):
seq = []
prev, prev_prev = None, None
for char in format:
controll_char = OUTPUT_FORMAT_CONTROLL_CHARS.get(char)
if controll_char and prev == '\\' and prev_prev == '\\':
pass
elif controll_char and prev == '\\':
seq.pop(len(seq) - 1)
seq.append(controll_char)
prev_prev = prev
prev = char
continue
seq.append(char)
prev_prev = prev
prev = char
return ''.join(seq)
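    # output_format() converts backslash escapes in a user-supplied format
    # string into real control characters. A sketch, assuming
    # OUTPUT_FORMAT_CONTROLL_CHARS maps 'n' and 't' to newline and tab:
    #   output_format(r'${host}\t${output}') puts a real tab between fields,
    #   while an escaped backslash (r'\\t') suppresses the conversion.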
def terminate_processes(self):
if hasattr(self, 'process_pool') and not self.processes_terminated:
#self.process_pool.close()
self.log.debug("terminating processes")
self.process_pool.terminate()
self.process_pool.join()
self.processes_terminated = True
def __del__(self):
self.terminate_processes()
| lgpl-2.1 | 3,294,858,065,523,848,700 | 35.849057 | 113 | 0.526443 | false |
ThreatConnect-Inc/tcex | tcex/threat_intelligence/mappings/indicator/indicator_types/url.py | 1 | 1845 | """ThreatConnect TI URL"""
# standard library
from urllib.parse import quote_plus
from ..indicator import Indicator
class URL(Indicator):
"""Unique API calls for URL API Endpoints"""
    def __init__(self, ti: 'ThreatIntelligence', **kwargs):
"""Initialize Class Properties.
Args:
text (str, kwargs): [Required for Create] The URL value for this Indicator.
active (bool, kwargs): If False the indicator is marked "inactive" in TC.
confidence (str, kwargs): The threat confidence for this Indicator.
date_added (str, kwargs): [Read-Only] The date timestamp the Indicator was created.
last_modified (str, kwargs): [Read-Only] The date timestamp the Indicator was last
modified.
private_flag (bool, kwargs): If True the indicator is marked as private in TC.
rating (str, kwargs): The threat rating for this Indicator.
xid (str, kwargs): The external id for this Indicator.
"""
super().__init__(ti, sub_type='URL', api_entity='url', api_branch='urls', **kwargs)
self.unique_id = kwargs.get('unique_id', kwargs.get('text'))
self.data['text'] = self.unique_id
if self.unique_id:
self.unique_id = quote_plus(self.fully_decode_uri(self.unique_id))
def can_create(self):
"""Return True if address can be created.
If the text has been provided returns that the URL can be created, otherwise
returns that the URL cannot be created.
"""
return not self.data.get('text') is None
def _set_unique_id(self, json_response):
"""Set the unique_id provided a json response.
Args:
json_response:
"""
self.unique_id = quote_plus(self.fully_decode_uri(json_response.get('text', '')))
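# Example usage (a sketch; `ti` stands for an already-initialized
# ThreatIntelligence instance and the URL value is illustrative):
#   url = URL(ti, text='https://example.com/landing-page')
#   if url.can_create():
#       ...  # submit the indicator through the ThreatConnect API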
| apache-2.0 | -6,489,890,639,290,726,000 | 40 | 95 | 0.62168 | false |
MenschMarcus/master_HistoGlobe | HistoGlobe_server/models/Hivent.py | 1 | 4233 | # ==============================================================================
# Hivent represents a significant historical happening (historical event).
# It is the only representation of the temporal dimension in the data model
# and therefore the main organisational dimension.
# An Hivent may contain one or many EditOperations to the areas of the world.
#
# ------------------------------------------------------------------------------
# Hivent 1:n EditOperation
#
# ==============================================================================
from django.db import models
from django.utils import timezone
from django.contrib import gis
from djgeojson.fields import *
from django.forms.models import model_to_dict
# ------------------------------------------------------------------------------
class Hivent(models.Model):
name = models.CharField (max_length=150, default='')
date = models.DateTimeField (default=timezone.now)
location = models.CharField (null=True, max_length=150)
description = models.CharField (null=True, max_length=1000)
link = models.CharField (null=True, max_length=300)
# ============================================================================
def __unicode__(self):
return self.name
# ============================================================================
  # given a set of validated (!) hivent data, update the Hivent properties
# ============================================================================
def update(self, hivent_data):
## save in database
self.name = hivent_data['name'] # CharField
self.date = hivent_data['date'] # DateTimeField
self.location = hivent_data['location'] # CharField
self.description = hivent_data['description'] # CharField
self.link = hivent_data['link'] # CharField
    self.save()
    return self
# ============================================================================
# return Hivent with all its associated Changes
# ============================================================================
def prepare_output(self):
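    # Builds a plain-dict representation of the Hivent. A sketch of the
    # returned structure (field values are illustrative only):
    #   { 'name': ..., 'date': 'YYYY-MM-DD', ...,
    #     'edit_operations': [ { ..., 'hivent_operations': [
    #       { ..., 'old_areas': [...], 'new_areas': [...], 'update_area': {...} }
    #     ] } ] }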
from HistoGlobe_server.models import EditOperation, HiventOperation, OldArea, NewArea, UpdateArea
from HistoGlobe_server import utils
import chromelogger as console
# get original Hivent with all properties
# -> except for change
hivent = model_to_dict(self)
# get all EditOperations associated to the Hivent
hivent['edit_operations'] = []
for edit_operation_model in EditOperation.objects.filter(hivent=self):
edit_operation = model_to_dict(edit_operation_model)
# get all HiventOperations associated to the EditOperation
edit_operation['hivent_operations'] = []
for hivent_operation_model in HiventOperation.objects.filter(edit_operation=edit_operation_model):
hivent_operation = model_to_dict(hivent_operation_model)
# get all OldAreas, NewAreas and UpdateArea associated to the HiventOperation
hivent_operation['old_areas'] = []
hivent_operation['new_areas'] = []
hivent_operation['update_area'] = None
for old_area_model in OldArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['old_areas'].append(model_to_dict(old_area_model))
for new_area_model in NewArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['new_areas'].append(model_to_dict(new_area_model))
for update_area_model in UpdateArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['update_area'] = model_to_dict(update_area_model)
edit_operation['hivent_operations'].append(hivent_operation)
hivent['edit_operations'].append(edit_operation)
# prepare date for output
hivent['date'] = utils.get_date_string(hivent['date'])
return hivent
# ============================================================================
class Meta:
ordering = ['-date'] # descending order (2000 -> 0 -> -2000 -> ...)
app_label = 'HistoGlobe_server'
| gpl-2.0 | -4,437,588,823,208,093,000 | 41.33 | 104 | 0.550201 | false |
will-moore/jstreedemo | views.py | 1 | 1157 |
#
# Copyright (c) 2015 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# from django.shortcuts import render
from omeroweb.webclient.decorators import login_required, render_response
@login_required()
@render_response()
def index(request, fileId=None, conn=None, **kwargs):
"""
    Single-page view that renders the jstreedemo index template
    (jstreedemo/index.html).
"""
template = "jstreedemo/index.html"
context = {"template": template}
return context
| gpl-2.0 | -2,332,945,118,847,090,700 | 33.029412 | 74 | 0.741573 | false |
proversity-org/edx-platform | lms/djangoapps/verify_student/views.py | 1 | 49530 | """
Views for the verification flow
"""
import datetime
import decimal
import json
import logging
import urllib
import analytics
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.views.generic.base import View
from edx_rest_api_client.exceptions import SlumberBaseException
from eventtracking import tracker
from ipware.ip import get_ip
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from pytz import UTC
from course_modes.models import CourseMode
from edxmako.shortcuts import render_to_response, render_to_string
from lms.djangoapps.commerce.utils import EcommerceService, is_account_activation_requirement_disabled
from lms.djangoapps.verify_student.image import InvalidImageData, decode_image_data
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification, VerificationDeadline, get_verify_student_settings
from lms.djangoapps.verify_student.ssencrypt import has_valid_signature
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.djangoapps.embargo import api as embargo_api
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.accounts import NAME_MIN_LENGTH
from openedx.core.djangoapps.user_api.accounts.api import update_account_settings
from openedx.core.djangoapps.user_api.errors import AccountValidationError, UserNotFound
from openedx.core.lib.log_utils import audit_log
from shoppingcart.models import CertificateItem, Order
from shoppingcart.processors import get_purchase_endpoint, get_signed_purchase_params
from student.models import CourseEnrollment
from util.db import outer_atomic
from util.json_request import JsonResponse
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
class PayAndVerifyView(View):
"""
View for the "verify and pay" flow.
This view is somewhat complicated, because the user
can enter it from a number of different places:
* From the "choose your track" page.
* After completing payment.
* From the dashboard in order to complete verification.
* From the dashboard in order to upgrade to a verified track.
The page will display different steps and requirements
depending on:
* Whether the user has submitted a photo verification recently.
* Whether the user has paid for the course.
* How the user reached the page (mostly affects messaging)
We are also super-paranoid about how users reach this page.
If they somehow aren't enrolled, or the course doesn't exist,
or they've unenrolled, or they've already paid/verified,
... then we try to redirect them to the page with the
most appropriate messaging (including the dashboard).
Note that this page does NOT handle re-verification
(photo verification that was denied or had an error);
that is handled by the "reverify" view.
"""
# Step definitions
#
# These represent the numbered steps a user sees in
# the verify / payment flow.
#
# Steps can either be:
# - displayed or hidden
# - complete or incomplete
#
# For example, when a user enters the verification/payment
# flow for the first time, the user will see steps
# for both payment and verification. As the user
# completes these steps (for example, submitting a photo)
# the steps will be marked "complete".
#
# If a user has already verified for another course,
# then the verification steps will be hidden,
# since the user has already completed them.
#
# If a user re-enters the flow from another application
# (for example, after completing payment through
# a third-party payment processor), then the user
# will resume the flow at an intermediate step.
#
INTRO_STEP = 'intro-step'
MAKE_PAYMENT_STEP = 'make-payment-step'
PAYMENT_CONFIRMATION_STEP = 'payment-confirmation-step'
FACE_PHOTO_STEP = 'face-photo-step'
ID_PHOTO_STEP = 'id-photo-step'
REVIEW_PHOTOS_STEP = 'review-photos-step'
ENROLLMENT_CONFIRMATION_STEP = 'enrollment-confirmation-step'
ALL_STEPS = [
INTRO_STEP,
MAKE_PAYMENT_STEP,
PAYMENT_CONFIRMATION_STEP,
FACE_PHOTO_STEP,
ID_PHOTO_STEP,
REVIEW_PHOTOS_STEP,
ENROLLMENT_CONFIRMATION_STEP
]
PAYMENT_STEPS = [
MAKE_PAYMENT_STEP,
PAYMENT_CONFIRMATION_STEP
]
VERIFICATION_STEPS = [
FACE_PHOTO_STEP,
ID_PHOTO_STEP,
REVIEW_PHOTOS_STEP,
ENROLLMENT_CONFIRMATION_STEP
]
# These steps can be skipped using the ?skip-first-step GET param
SKIP_STEPS = [
INTRO_STEP,
]
STEP_TITLES = {
INTRO_STEP: ugettext_lazy("Intro"),
MAKE_PAYMENT_STEP: ugettext_lazy("Make payment"),
PAYMENT_CONFIRMATION_STEP: ugettext_lazy("Payment confirmation"),
FACE_PHOTO_STEP: ugettext_lazy("Take photo"),
ID_PHOTO_STEP: ugettext_lazy("Take a photo of your ID"),
REVIEW_PHOTOS_STEP: ugettext_lazy("Review your info"),
ENROLLMENT_CONFIRMATION_STEP: ugettext_lazy("Enrollment confirmation"),
}
# Messages
#
    # Depending on how the user reached the page,
# we will display different text messaging.
# For example, we show users who are upgrading
# slightly different copy than users who are verifying
# for the first time.
#
FIRST_TIME_VERIFY_MSG = 'first-time-verify'
VERIFY_NOW_MSG = 'verify-now'
VERIFY_LATER_MSG = 'verify-later'
UPGRADE_MSG = 'upgrade'
PAYMENT_CONFIRMATION_MSG = 'payment-confirmation'
# Requirements
#
# These explain to the user what he or she
# will need to successfully pay and/or verify.
#
# These are determined by the steps displayed
# to the user; for example, if the user does not
# need to complete the verification steps,
# then the photo ID and webcam requirements are hidden.
#
ACCOUNT_ACTIVATION_REQ = "account-activation-required"
PHOTO_ID_REQ = "photo-id-required"
WEBCAM_REQ = "webcam-required"
STEP_REQUIREMENTS = {
ID_PHOTO_STEP: [PHOTO_ID_REQ, WEBCAM_REQ],
FACE_PHOTO_STEP: [WEBCAM_REQ],
}
# Deadline types
VERIFICATION_DEADLINE = "verification"
UPGRADE_DEADLINE = "upgrade"
def _get_user_active_status(self, user):
"""
Returns the user's active status to the caller
Overrides the actual value if account activation has been disabled via waffle switch
Arguments:
user (User): Current user involved in the onboarding/verification flow
"""
return user.is_active or is_account_activation_requirement_disabled()
@method_decorator(login_required)
def get(
self, request, course_id,
always_show_payment=False,
current_step=None,
message=FIRST_TIME_VERIFY_MSG
):
"""
Render the payment and verification flow.
Arguments:
request (HttpRequest): The request object.
course_id (unicode): The ID of the course the user is trying
to enroll in.
Keyword Arguments:
always_show_payment (bool): If True, show the payment steps
even if the user has already paid. This is useful
for users returning to the flow after paying.
current_step (string): The current step in the flow.
message (string): The messaging to display.
Returns:
HttpResponse
Raises:
Http404: The course does not exist or does not
have a verified mode.
"""
# Parse the course key
# The URL regex should guarantee that the key format is valid.
course_key = CourseKey.from_string(course_id)
course = modulestore().get_course(course_key)
# Verify that the course exists
if course is None:
log.warn(u"Could not find course with ID %s.", course_id)
raise Http404
# Check whether the user has access to this course
# based on country access rules.
redirect_url = embargo_api.redirect_if_blocked(
course_key,
user=request.user,
ip_address=get_ip(request),
url=request.path
)
if redirect_url:
return redirect(redirect_url)
# If the verification deadline has passed
# then show the user a message that he/she can't verify.
#
# We're making the assumptions (enforced in Django admin) that:
#
# 1) Only verified modes have verification deadlines.
#
# 2) If set, verification deadlines are always AFTER upgrade deadlines, because why would you
# let someone upgrade into a verified track if they can't complete verification?
#
verification_deadline = VerificationDeadline.deadline_for_course(course.id)
response = self._response_if_deadline_passed(course, self.VERIFICATION_DEADLINE, verification_deadline)
if response is not None:
log.info(u"Verification deadline for '%s' has passed.", course.id)
return response
# Retrieve the relevant course mode for the payment/verification flow.
#
# WARNING: this is technical debt! A much better way to do this would be to
# separate out the payment flow and use the product SKU to figure out what
# the user is trying to purchase.
#
# Nonetheless, for the time being we continue to make the really ugly assumption
# that at some point there was a paid course mode we can query for the price.
relevant_course_mode = self._get_paid_mode(course_key)
# If we can find a relevant course mode, then log that we're entering the flow
# Otherwise, this course does not support payment/verification, so respond with a 404.
if relevant_course_mode is not None:
if CourseMode.is_verified_mode(relevant_course_mode):
log.info(
u"Entering payment and verification flow for user '%s', course '%s', with current step '%s'.",
request.user.id, course_id, current_step
)
else:
log.info(
u"Entering payment flow for user '%s', course '%s', with current step '%s'",
request.user.id, course_id, current_step
)
else:
# Otherwise, there has never been a verified/paid mode,
# so return a page not found response.
log.warn(
u"No paid/verified course mode found for course '%s' for verification/payment flow request",
course_id
)
raise Http404
# If the user is trying to *pay* and the upgrade deadline has passed,
# then they shouldn't be able to enter the flow.
#
# NOTE: This should match the availability dates used by the E-Commerce service
# to determine whether a user can purchase a product. The idea is that if the service
# won't fulfill the order, we shouldn't even let the user get into the payment flow.
#
user_is_trying_to_pay = message in [self.FIRST_TIME_VERIFY_MSG, self.UPGRADE_MSG]
if user_is_trying_to_pay:
upgrade_deadline = relevant_course_mode.expiration_datetime
response = self._response_if_deadline_passed(course, self.UPGRADE_DEADLINE, upgrade_deadline)
if response is not None:
log.info(u"Upgrade deadline for '%s' has passed.", course.id)
return response
# Check whether the user has verified, paid, and enrolled.
# A user is considered "paid" if he or she has an enrollment
# with a paid course mode (such as "verified").
# For this reason, every paid user is enrolled, but not
# every enrolled user is paid.
# If the course mode is not verified(i.e only paid) then already_verified is always True
already_verified = (
self._check_already_verified(request.user)
if CourseMode.is_verified_mode(relevant_course_mode)
else True
)
already_paid, is_enrolled = self._check_enrollment(request.user, course_key)
# Redirect the user to a more appropriate page if the
# messaging won't make sense based on the user's
# enrollment / payment / verification status.
sku_to_use = relevant_course_mode.sku
purchase_workflow = request.GET.get('purchase_workflow', 'single')
if purchase_workflow == 'bulk' and relevant_course_mode.bulk_sku:
sku_to_use = relevant_course_mode.bulk_sku
redirect_response = self._redirect_if_necessary(
message,
already_verified,
already_paid,
is_enrolled,
course_key,
user_is_trying_to_pay,
request.user,
sku_to_use
)
if redirect_response is not None:
return redirect_response
display_steps = self._display_steps(
always_show_payment,
already_verified,
already_paid,
relevant_course_mode
)
        # Override the actual value if account activation has been disabled.
        # Also see the reference to this parameter in the context dictionary further down.
user_is_active = self._get_user_active_status(request.user)
requirements = self._requirements(display_steps, user_is_active)
if current_step is None:
current_step = display_steps[0]['name']
# Allow the caller to skip the first page
# This is useful if we want the user to be able to
# use the "back" button to return to the previous step.
        # This parameter should only work for known skippable steps.
if request.GET.get('skip-first-step') and current_step in self.SKIP_STEPS:
display_step_names = [step['name'] for step in display_steps]
current_step_idx = display_step_names.index(current_step)
if (current_step_idx + 1) < len(display_steps):
current_step = display_steps[current_step_idx + 1]['name']
courseware_url = ""
if not course.start or course.start < datetime.datetime.today().replace(tzinfo=UTC):
courseware_url = reverse(
'course_root',
kwargs={'course_id': unicode(course_key)}
)
full_name = (
request.user.profile.name
if request.user.profile.name
else ""
)
# If the user set a contribution amount on another page,
# use that amount to pre-fill the price selection form.
contribution_amount = request.session.get(
'donation_for_course', {}
).get(unicode(course_key), '')
# Remember whether the user is upgrading
# so we can fire an analytics event upon payment.
request.session['attempting_upgrade'] = (message == self.UPGRADE_MSG)
# Determine the photo verification status
verification_good_until = self._verification_valid_until(request.user)
# get available payment processors
if relevant_course_mode.sku:
try:
processors = ecommerce_api_client(request.user).payment.processors.get()
except Exception as e:
log.info(str(e))
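                # Fall back to a hardcoded processor list if the E-Commerce API call fails.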
processors = ["cybersource","paypal","stripe"]
else:
# transaction will be conducted using legacy shopping cart
processors = [settings.CC_PROCESSOR_NAME]
default_currency = configuration_helpers.get_value('PAID_COURSE_REGISTRATION_CURRENCY', settings.PAID_COURSE_REGISTRATION_CURRENCY) or ['usd', '$']
# Render the top-level page
context = {
'contribution_amount': contribution_amount,
'course': course,
'course_key': unicode(course_key),
'checkpoint_location': request.GET.get('checkpoint'),
'course_mode': relevant_course_mode,
'default_currency': default_currency,
'courseware_url': courseware_url,
'current_step': current_step,
'disable_courseware_js': True,
'display_steps': display_steps,
'is_active': json.dumps(user_is_active),
'user_email': request.user.email,
'message_key': message,
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'processors': processors,
'requirements': requirements,
'user_full_name': full_name,
'verification_deadline': verification_deadline or "",
'already_verified': already_verified,
'verification_good_until': verification_good_until,
'capture_sound': staticfiles_storage.url("audio/camera_capture.wav"),
'nav_hidden': True,
'is_ab_testing': 'begin-flow' in request.path,
}
return render_to_response("verify_student/pay_and_verify.html", context)
def add_utm_params_to_url(self, url):
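        """Append any utm_* query parameters on the current request to `url`.

        For example (hypothetical values), a url of '/dashboard' on a request
        carrying ?utm_content=course-v1:IDBx+IDB20.1x+1T2017 becomes
        '/dashboard?utm_content=course-v1:IDBx+IDB20.1x+1T2017'.
        """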
# utm_params is [(u'utm_content', u'course-v1:IDBx IDB20.1x 1T2017'),...
utm_params = [item for item in self.request.GET.items() if 'utm_' in item[0]]
# utm_params is utm_content=course-v1%3AIDBx+IDB20.1x+1T2017&...
utm_params = urllib.urlencode(utm_params, True)
# utm_params is utm_content=course-v1:IDBx+IDB20.1x+1T2017&...
# (course-keys do not have url encoding)
utm_params = urllib.unquote(utm_params)
if utm_params:
if '?' in url:
url = url + '&' + utm_params
else:
url = url + '?' + utm_params
return url
def _redirect_if_necessary(
self, message, already_verified, already_paid, is_enrolled, course_key, # pylint: disable=bad-continuation
user_is_trying_to_pay, user, sku # pylint: disable=bad-continuation
):
"""Redirect the user to a more appropriate page if necessary.
In some cases, a user may visit this page with
verification / enrollment / payment state that
we don't anticipate. For example, a user may unenroll
from the course after paying for it, then visit the
"verify now" page to complete verification.
When this happens, we try to redirect the user to
the most appropriate page.
Arguments:
message (string): The messaging of the page. Should be a key
in `MESSAGES`.
already_verified (bool): Whether the user has submitted
a verification request recently.
already_paid (bool): Whether the user is enrolled in a paid
course mode.
is_enrolled (bool): Whether the user has an active enrollment
in the course.
course_key (CourseKey): The key for the course.
Returns:
HttpResponse or None
"""
url = None
course_kwargs = {'course_id': unicode(course_key)}
if already_verified and already_paid:
# If they've already paid and verified, there's nothing else to do,
# so redirect them to the dashboard.
if message != self.PAYMENT_CONFIRMATION_MSG:
url = reverse('dashboard')
elif message in [self.VERIFY_NOW_MSG, self.VERIFY_LATER_MSG, self.PAYMENT_CONFIRMATION_MSG]:
if is_enrolled:
# If the user is already enrolled but hasn't yet paid,
# then the "upgrade" messaging is more appropriate.
if not already_paid:
url = reverse('verify_student_upgrade_and_verify', kwargs=course_kwargs)
else:
# If the user is NOT enrolled, then send him/her
# to the first time verification page.
url = reverse('verify_student_start_flow', kwargs=course_kwargs)
elif message == self.UPGRADE_MSG:
if is_enrolled:
if already_paid:
# If the student has paid, but not verified, redirect to the verification flow.
url = reverse('verify_student_verify_now', kwargs=course_kwargs)
else:
url = reverse('verify_student_start_flow', kwargs=course_kwargs)
if user_is_trying_to_pay and self._get_user_active_status(user) and not already_paid:
            # If the user is trying to pay, has activated their account, and the
            # ecommerce service is enabled, redirect them to the ecommerce checkout page.
ecommerce_service = EcommerceService()
if ecommerce_service.is_enabled(user):
url = ecommerce_service.get_checkout_page_url(sku)
# Redirect if necessary, otherwise implicitly return None
if url is not None:
url = self.add_utm_params_to_url(url)
return redirect(url)
def _get_paid_mode(self, course_key):
"""
Retrieve the paid course mode for a course.
The returned course mode may or may not be expired.
Unexpired modes are preferred to expired modes.
Arguments:
course_key (CourseKey): The location of the course.
Returns:
CourseMode tuple
"""
# Retrieve all the modes at once to reduce the number of database queries
all_modes, unexpired_modes = CourseMode.all_and_unexpired_modes_for_courses([course_key])
# Retrieve the first mode that matches the following criteria:
# * Unexpired
# * Price > 0
# * Not credit
for mode in unexpired_modes[course_key]:
if mode.min_price > 0 and not CourseMode.is_credit_mode(mode):
return mode
        # Otherwise, find the first expired, non-credit paid mode.
for mode in all_modes[course_key]:
if mode.min_price > 0 and not CourseMode.is_credit_mode(mode):
return mode
        # Otherwise, return None so the view knows to respond with a 404.
return None
def _display_steps(self, always_show_payment, already_verified, already_paid, course_mode):
"""Determine which steps to display to the user.
Includes all steps by default, but removes steps
if the user has already completed them.
Arguments:
always_show_payment (bool): If True, display the payment steps
even if the user has already paid.
already_verified (bool): Whether the user has submitted
a verification request recently.
already_paid (bool): Whether the user is enrolled in a paid
course mode.
Returns:
list
"""
display_steps = self.ALL_STEPS
remove_steps = set()
if already_verified or not CourseMode.is_verified_mode(course_mode):
remove_steps |= set(self.VERIFICATION_STEPS)
if already_paid and not always_show_payment:
remove_steps |= set(self.PAYMENT_STEPS)
else:
# The "make payment" step doubles as an intro step,
# so if we're showing the payment step, hide the intro step.
remove_steps |= set([self.INTRO_STEP])
return [
{
'name': step,
'title': unicode(self.STEP_TITLES[step]),
}
for step in display_steps
if step not in remove_steps
]
def _requirements(self, display_steps, is_active):
"""Determine which requirements to show the user.
For example, if the user needs to submit a photo
verification, tell the user that she will need
a photo ID and a webcam.
Arguments:
display_steps (list): The steps to display to the user.
is_active (bool): If False, adds a requirement to activate the user account.
Returns:
dict: Keys are requirement names, values are booleans
indicating whether to show the requirement.
"""
all_requirements = {
self.ACCOUNT_ACTIVATION_REQ: not is_active,
self.PHOTO_ID_REQ: False,
self.WEBCAM_REQ: False,
}
# Remove the account activation requirement if disabled via waffle
if is_account_activation_requirement_disabled():
all_requirements.pop(self.ACCOUNT_ACTIVATION_REQ)
display_steps = set(step['name'] for step in display_steps)
for step, step_requirements in self.STEP_REQUIREMENTS.iteritems():
if step in display_steps:
for requirement in step_requirements:
all_requirements[requirement] = True
return all_requirements
def _verification_valid_until(self, user, date_format="%m/%d/%Y"):
"""
Check whether the user has a valid or pending verification.
Arguments:
            user (User): The user whose verification status to check.
            date_format (str): Optional format string used to render the
                expiration datetime in the response.
        Returns:
            Expiration datetime as a formatted string, or '' if the user has
            no valid or pending verification.
"""
photo_verifications = SoftwareSecurePhotoVerification.verification_valid_or_pending(user)
# return 'expiration_datetime' of latest photo verification if found,
# otherwise implicitly return ''
if photo_verifications:
return photo_verifications[0].expiration_datetime.strftime(date_format)
return ''
def _check_already_verified(self, user):
"""Check whether the user has a valid or pending verification.
Note that this includes cases in which the user's verification
has not been accepted (either because it hasn't been processed,
or there was an error).
This should return True if the user has done their part:
submitted photos within the expiration period.
"""
return SoftwareSecurePhotoVerification.user_has_valid_or_pending(user)
def _check_enrollment(self, user, course_key):
"""Check whether the user has an active enrollment and has paid.
If a user is enrolled in a paid course mode, we assume
that the user has paid.
Arguments:
user (User): The user to check.
course_key (CourseKey): The key of the course to check.
Returns:
Tuple `(has_paid, is_active)` indicating whether the user
has paid and whether the user has an active account.
"""
enrollment_mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_key)
has_paid = False
if enrollment_mode is not None and is_active:
all_modes = CourseMode.modes_for_course_dict(course_key, include_expired=True)
course_mode = all_modes.get(enrollment_mode)
has_paid = (course_mode and course_mode.min_price > 0)
return (has_paid, bool(is_active))
def _response_if_deadline_passed(self, course, deadline_name, deadline_datetime):
"""
Respond with some error messaging if the deadline has passed.
Arguments:
course (Course): The course the user is trying to enroll in.
deadline_name (str): One of the deadline constants.
deadline_datetime (datetime): The deadline.
Returns: HttpResponse or None
"""
if deadline_name not in [self.VERIFICATION_DEADLINE, self.UPGRADE_DEADLINE]:
log.error("Invalid deadline name %s. Skipping check for whether the deadline passed.", deadline_name)
return None
deadline_passed = (
deadline_datetime is not None and
deadline_datetime < datetime.datetime.now(UTC)
)
if deadline_passed:
context = {
'course': course,
'deadline_name': deadline_name,
'deadline': deadline_datetime
}
return render_to_response("verify_student/missed_deadline.html", context)
def checkout_with_ecommerce_service(user, course_key, course_mode, processor):
""" Create a new basket and trigger immediate checkout, using the E-Commerce API. """
course_id = unicode(course_key)
try:
api = ecommerce_api_client(user)
# Make an API call to create the order and retrieve the results
result = api.baskets.post({
'products': [{'sku': course_mode.sku}],
'checkout': True,
'payment_processor_name': processor
})
# Pass the payment parameters directly from the API response.
return result.get('payment_data')
except SlumberBaseException:
params = {'username': user.username, 'mode': course_mode.slug, 'course_id': course_id}
log.exception('Failed to create order for %(username)s %(mode)s mode of %(course_id)s', params)
raise
finally:
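        # The finally clause runs whether or not the API call succeeded,
        # so every checkout attempt is audited.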
audit_log(
'checkout_requested',
course_id=course_id,
mode=course_mode.slug,
processor_name=processor,
user_id=user.id
)
def checkout_with_shoppingcart(request, user, course_key, course_mode, amount):
""" Create an order and trigger checkout using shoppingcart."""
cart = Order.get_cart_for_user(user)
cart.clear()
enrollment_mode = course_mode.slug
CertificateItem.add_to_order(cart, course_key, amount, enrollment_mode)
# Change the order's status so that we don't accidentally modify it later.
# We need to do this to ensure that the parameters we send to the payment system
# match what we store in the database.
# (Ordinarily we would do this client-side when the user submits the form, but since
# the JavaScript on this page does that immediately, we make the change here instead.
# This avoids a second AJAX call and some additional complication of the JavaScript.)
# If a user later re-enters the verification / payment flow, she will create a new order.
cart.start_purchase()
callback_url = request.build_absolute_uri(
reverse("shoppingcart.views.postpay_callback")
)
payment_data = {
'payment_processor_name': settings.CC_PROCESSOR_NAME,
'payment_page_url': get_purchase_endpoint(),
'payment_form_data': get_signed_purchase_params(
cart,
callback_url=callback_url,
extra_data=[unicode(course_key), course_mode.slug]
),
}
return payment_data
@require_POST
@login_required
def create_order(request):
"""
This endpoint is named 'create_order' for backward compatibility, but its
actual use is to add a single product to the user's cart and request
immediate checkout.
"""
course_id = request.POST['course_id']
course_id = CourseKey.from_string(course_id)
donation_for_course = request.session.get('donation_for_course', {})
contribution = request.POST.get("contribution", donation_for_course.get(unicode(course_id), 0))
try:
amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
except decimal.InvalidOperation:
return HttpResponseBadRequest(_("Selected price is not valid number."))
current_mode = None
sku = request.POST.get('sku', None)
if sku:
try:
current_mode = CourseMode.objects.get(sku=sku)
except CourseMode.DoesNotExist:
log.exception(u'Failed to find CourseMode with SKU [%s].', sku)
if not current_mode:
        # If more than one paid mode (i.e. a mode with min_price > 0, such as
        # verified/professional/no-id-professional) exists for the course, choose the first one.
paid_modes = CourseMode.paid_modes_for_course(course_id)
if paid_modes:
if len(paid_modes) > 1:
log.warn(u"Multiple paid course modes found for course '%s' for create order request", course_id)
current_mode = paid_modes[0]
# Make sure this course has a paid mode
if not current_mode:
log.warn(u"Create order requested for course '%s' without a paid mode.", course_id)
return HttpResponseBadRequest(_("This course doesn't support paid certificates"))
if CourseMode.is_professional_mode(current_mode):
amount = current_mode.min_price
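    # For professional modes the contribution is pinned to the minimum price
    # above, so the price-floor check below can only reject other paid modes.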
if amount < current_mode.min_price:
return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))
if current_mode.sku:
# if request.POST doesn't contain 'processor' then the service's default payment processor will be used.
payment_data = checkout_with_ecommerce_service(
request.user,
course_id,
current_mode,
request.POST.get('processor')
)
else:
payment_data = checkout_with_shoppingcart(request, request.user, course_id, current_mode, amount)
if 'processor' not in request.POST:
# (XCOM-214) To be removed after release.
# the absence of this key in the POST payload indicates that the request was initiated from
# a stale js client, which expects a response containing only the 'payment_form_data' part of
# the payment data result.
payment_data = payment_data['payment_form_data']
return HttpResponse(json.dumps(payment_data), content_type="application/json")
class SubmitPhotosView(View):
"""
End-point for submitting photos for verification.
"""
@method_decorator(transaction.non_atomic_requests)
def dispatch(self, *args, **kwargs): # pylint: disable=missing-docstring
return super(SubmitPhotosView, self).dispatch(*args, **kwargs)
@method_decorator(login_required)
@method_decorator(outer_atomic(read_committed=True))
def post(self, request):
"""
Submit photos for verification.
This end-point is used for the following cases:
* Initial verification through the pay-and-verify flow.
* Initial verification initiated from a checkpoint within a course.
* Re-verification initiated from a checkpoint within a course.
POST Parameters:
face_image (str): base64-encoded image data of the user's face.
photo_id_image (str): base64-encoded image data of the user's photo ID.
full_name (str): The user's full name, if the user is requesting a name change as well.
course_key (str): Identifier for the course, if initiated from a checkpoint.
checkpoint (str): Location of the checkpoint in the course.
"""
# If the user already has an initial verification attempt, we can re-use the photo ID
# the user submitted with the initial attempt.
initial_verification = SoftwareSecurePhotoVerification.get_initial_verification(request.user)
# Validate the POST parameters
params, response = self._validate_parameters(request, bool(initial_verification))
if response is not None:
return response
# If necessary, update the user's full name
if "full_name" in params:
response = self._update_full_name(request.user, params["full_name"])
if response is not None:
return response
# Retrieve the image data
# Validation ensures that we'll have a face image, but we may not have
# a photo ID image if this is a reverification.
face_image, photo_id_image, response = self._decode_image_data(
params["face_image"], params.get("photo_id_image")
)
        # If we have a photo_id, we do not want to use the initial verification image.
if photo_id_image is not None:
initial_verification = None
if response is not None:
return response
# Submit the attempt
attempt = self._submit_attempt(request.user, face_image, photo_id_image, initial_verification)
self._fire_event(request.user, "edx.bi.verify.submitted", {"category": "verification"})
self._send_confirmation_email(request.user)
return JsonResponse({})
def _validate_parameters(self, request, has_initial_verification):
"""
Check that the POST parameters are valid.
Arguments:
request (HttpRequest): The request object.
has_initial_verification (bool): Whether the user has an initial verification attempt.
Returns:
HttpResponse or None
"""
# Pull out the parameters we care about.
params = {
param_name: request.POST[param_name]
for param_name in [
"face_image",
"photo_id_image",
"course_key",
"full_name"
]
if param_name in request.POST
}
# If the user already has an initial verification attempt, then we don't
# require the user to submit a photo ID image, since we can re-use the photo ID
# image from the initial attempt.
# If we don't have an initial verification OR a photo ID image, something has gone
# terribly wrong in the JavaScript. Log this as an error so we can track it down.
if "photo_id_image" not in params and not has_initial_verification:
log.error(
(
"User %s does not have an initial verification attempt "
"and no photo ID image data was provided. "
"This most likely means that the JavaScript client is not "
"correctly constructing the request to submit photos."
), request.user.id
)
return None, HttpResponseBadRequest(
_("Photo ID image is required if the user does not have an initial verification attempt.")
)
# The face image is always required.
if "face_image" not in params:
msg = _("Missing required parameter face_image")
return None, HttpResponseBadRequest(msg)
# If provided, parse the course key and checkpoint location
if "course_key" in params:
try:
params["course_key"] = CourseKey.from_string(params["course_key"])
except InvalidKeyError:
return None, HttpResponseBadRequest(_("Invalid course key"))
return params, None
def _update_full_name(self, user, full_name):
"""
Update the user's full name.
Arguments:
user (User): The user to update.
full_name (unicode): The user's updated full name.
Returns:
HttpResponse or None
"""
try:
update_account_settings(user, {"name": full_name})
except UserNotFound:
return HttpResponseBadRequest(_("No profile found for user"))
except AccountValidationError:
msg = _(
"Name must be at least {min_length} characters long."
).format(min_length=NAME_MIN_LENGTH)
return HttpResponseBadRequest(msg)
def _decode_image_data(self, face_data, photo_id_data=None):
"""
Decode image data sent with the request.
Arguments:
face_data (str): base64-encoded face image data.
Keyword Arguments:
photo_id_data (str): base64-encoded photo ID image data.
Returns:
tuple of (str, str, HttpResponse)
"""
try:
# Decode face image data (used for both an initial and re-verification)
face_image = decode_image_data(face_data)
# Decode the photo ID image data if it's provided
photo_id_image = (
decode_image_data(photo_id_data)
if photo_id_data is not None else None
)
return face_image, photo_id_image, None
except InvalidImageData:
msg = _("Image data is not valid.")
return None, None, HttpResponseBadRequest(msg)
def _submit_attempt(self, user, face_image, photo_id_image=None, initial_verification=None):
"""
Submit a verification attempt.
Arguments:
user (User): The user making the attempt.
face_image (str): Decoded face image data.
Keyword Arguments:
photo_id_image (str or None): Decoded photo ID image data.
initial_verification (SoftwareSecurePhotoVerification): The initial verification attempt.
"""
attempt = SoftwareSecurePhotoVerification(user=user)
# We will always have face image data, so upload the face image
attempt.upload_face_image(face_image)
# If an ID photo wasn't submitted, re-use the ID photo from the initial attempt.
# Earlier validation rules ensure that at least one of these is available.
if photo_id_image is not None:
attempt.upload_photo_id_image(photo_id_image)
elif initial_verification is None:
# Earlier validation should ensure that we never get here.
log.error(
"Neither a photo ID image or initial verification attempt provided. "
"Parameter validation in the view should prevent this from happening!"
)
# Submit the attempt
attempt.mark_ready()
attempt.submit(copy_id_photo_from=initial_verification)
return attempt
def _send_confirmation_email(self, user):
"""
Send an email confirming that the user submitted photos
for initial verification.
"""
context = {
'full_name': user.profile.name,
'platform_name': configuration_helpers.get_value("PLATFORM_NAME", settings.PLATFORM_NAME)
}
subject = _("Verification photos received")
message = render_to_string('emails/photo_submission_confirmation.txt', context)
from_address = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
to_address = user.email
try:
send_mail(subject, message, from_address, [to_address], fail_silently=False)
except: # pylint: disable=bare-except
# We catch all exceptions and log them.
# It would be much, much worse to roll back the transaction due to an uncaught
# exception than to skip sending the notification email.
log.exception("Could not send notification email for initial verification for user %s", user.id)
def _fire_event(self, user, event_name, parameters):
"""
Fire an analytics event.
Arguments:
user (User): The user who submitted photos.
event_name (str): Name of the analytics event.
parameters (dict): Event parameters.
Returns: None
"""
if settings.LMS_SEGMENT_KEY:
tracking_context = tracker.get_tracker().resolve_context()
context = {
'ip': tracking_context.get('ip'),
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
analytics.track(user.id, event_name, parameters, context=context)
@require_POST
@csrf_exempt # SS does its own message signing, and their API won't have a cookie value
def results_callback(request):
"""
Software Secure will call this callback to tell us whether a user is
verified to be who they said they are.
"""
body = request.body
try:
body_dict = json.loads(body)
except ValueError:
log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body))
return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body))
if not isinstance(body_dict, dict):
log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body))
return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body))
headers = {
"Authorization": request.META.get("HTTP_AUTHORIZATION", ""),
"Date": request.META.get("HTTP_DATE", "")
}
VERIFY_STUDENT = get_verify_student_settings()
api_access_key = VERIFY_STUDENT["API_ACCESS_KEY"]
api_secret_key = VERIFY_STUDENT["API_SECRET_KEY"]
body_for_signature = {"EdX-ID": body_dict["EdX-ID"]}
has_valid_signature(
"POST",
headers,
body_for_signature,
api_access_key,
api_secret_key
)
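    # The Authorization header is expected to look like
    # "<scheme> <access_key>:<signature>", which the two splits below pick apart.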
_response, access_key_and_sig = headers["Authorization"].split(" ")
access_key = access_key_and_sig.split(":")[0]
# This is what we should be doing...
#if not sig_valid:
# return HttpResponseBadRequest("Signature is invalid")
# This is what we're doing until we can figure out why we disagree on sigs
if access_key != api_access_key:
return HttpResponseBadRequest("Access key invalid")
receipt_id = body_dict.get("EdX-ID")
result = body_dict.get("Result")
reason = body_dict.get("Reason", "")
error_code = body_dict.get("MessageType", "")
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
log.error("Software Secure posted back for receipt_id %s, but not found", receipt_id)
return HttpResponseBadRequest("edX ID {} not found".format(receipt_id))
if result == "PASS":
log.debug("Approving verification for %s", receipt_id)
attempt.approve()
status = "approved"
elif result == "FAIL":
log.debug("Denying verification for %s", receipt_id)
attempt.deny(json.dumps(reason), error_code=error_code)
status = "denied"
elif result == "SYSTEM FAIL":
log.debug("System failure for %s -- resetting to must_retry", receipt_id)
attempt.system_error(json.dumps(reason), error_code=error_code)
status = "error"
log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason)
else:
log.error("Software Secure returned unknown result %s", result)
return HttpResponseBadRequest(
"Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL".format(result)
)
return HttpResponse("OK!")
class ReverifyView(View):
"""
Reverification occurs when a user's initial verification is denied
or expires. When this happens, users can re-submit photos through
the re-verification flow.
Unlike in-course reverification, this flow requires users to submit
*both* face and ID photos. In contrast, during in-course reverification,
students submit only face photos, which are matched against the ID photo
the user submitted during initial verification.
"""
@method_decorator(login_required)
def get(self, request):
"""
Render the reverification flow.
Most of the work is done client-side by composing the same
Backbone views used in the initial verification flow.
"""
status, __ = SoftwareSecurePhotoVerification.user_status(request.user)
expiration_datetime = SoftwareSecurePhotoVerification.get_expiration_datetime(request.user)
can_reverify = False
if expiration_datetime:
if SoftwareSecurePhotoVerification.is_verification_expiring_soon(expiration_datetime):
# The user has an active verification, but the verification
# is set to expire within "EXPIRING_SOON_WINDOW" days (default is 4 weeks).
# In this case user can resubmit photos for reverification.
can_reverify = True
        # If the user has no initial verification, or if the verification
        # process is still ongoing ('pending') or has expired, then allow the
        # user to submit photo verification.
# A photo verification is marked as 'pending' if its status is either
# 'submitted' or 'must_retry'.
if status in ["none", "must_reverify", "expired", "pending"] or can_reverify:
context = {
"user_full_name": request.user.profile.name,
"platform_name": configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
"capture_sound": staticfiles_storage.url("audio/camera_capture.wav"),
}
return render_to_response("verify_student/reverify.html", context)
else:
context = {
"status": status
}
return render_to_response("verify_student/reverify_not_allowed.html", context)
| agpl-3.0 | -2,058,498,461,421,042,700 | 39.008078 | 155 | 0.633919 | false |
runekaagaard/django-contrib-locking | tests/backends/tests.py | 1 | 49290 | # -*- coding: utf-8 -*-
# Unit and doctests for specific database backends.
from __future__ import unicode_literals
import copy
import datetime
from decimal import Decimal, Rounded
import re
import threading
import unittest
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import no_style
from django.db import (connection, connections, DEFAULT_DB_ALIAS,
DatabaseError, IntegrityError, reset_queries, transaction)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.signals import connection_created
from django.db.backends.postgresql_psycopg2 import version as pg_version
from django.db.backends.utils import format_number, CursorWrapper
from django.db.models import Sum, Avg, Variance, StdDev
from django.db.models.sql.constants import CURSOR
from django.db.utils import ConnectionHandler
from django.test import (TestCase, TransactionTestCase, mock, override_settings,
skipUnlessDBFeature, skipIfDBFeature)
from django.test.utils import ignore_warnings, str_prefix
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.six.moves import range
from . import models
class DummyBackendTest(TestCase):
def test_no_databases(self):
"""
Test that empty DATABASES setting default to the dummy backend.
"""
DATABASES = {}
conns = ConnectionHandler(DATABASES)
self.assertEqual(conns[DEFAULT_DB_ALIAS].settings_dict['ENGINE'],
'django.db.backends.dummy')
with self.assertRaises(ImproperlyConfigured):
conns[DEFAULT_DB_ALIAS].ensure_connection()
@unittest.skipUnless(connection.vendor == 'oracle', "Test only for Oracle")
class OracleTests(unittest.TestCase):
def test_quote_name(self):
# Check that '%' chars are escaped for query execution.
name = '"SOME%NAME"'
quoted_name = connection.ops.quote_name(name)
self.assertEqual(quoted_name % (), name)
def test_dbms_session(self):
# If the backend is Oracle, test that we can call a standard
# stored procedure through our cursor wrapper.
from django.db.backends.oracle.base import convert_unicode
with connection.cursor() as cursor:
cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
[convert_unicode('_django_testing!')])
def test_cursor_var(self):
# If the backend is Oracle, test that we can pass cursor variables
# as query parameters.
from django.db.backends.oracle.base import Database
with connection.cursor() as cursor:
var = cursor.var(Database.STRING)
cursor.execute("BEGIN %s := 'X'; END; ", [var])
self.assertEqual(var.getvalue(), 'X')
def test_long_string(self):
# If the backend is Oracle, test that we can save a text longer
# than 4000 chars and read it properly
with connection.cursor() as cursor:
cursor.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
long_str = ''.join(six.text_type(x) for x in range(4000))
cursor.execute('INSERT INTO ltext VALUES (%s)', [long_str])
cursor.execute('SELECT text FROM ltext')
row = cursor.fetchone()
self.assertEqual(long_str, row[0].read())
cursor.execute('DROP TABLE ltext')
def test_client_encoding(self):
# If the backend is Oracle, test that the client encoding is set
# correctly. This was broken under Cygwin prior to r14781.
connection.ensure_connection()
self.assertEqual(connection.connection.encoding, "UTF-8")
self.assertEqual(connection.connection.nencoding, "UTF-8")
def test_order_of_nls_parameters(self):
# an 'almost right' datetime should work with configured
# NLS parameters as per #18465.
with connection.cursor() as cursor:
query = "select 1 from dual where '1936-12-29 00:00' < sysdate"
# Test that the query succeeds without errors - pre #18465 this
# wasn't the case.
cursor.execute(query)
self.assertEqual(cursor.fetchone()[0], 1)
@unittest.skipUnless(connection.vendor == 'sqlite', "Test only for SQLite")
class SQLiteTests(TestCase):
longMessage = True
def test_autoincrement(self):
"""
Check that auto_increment fields are created with the AUTOINCREMENT
keyword in order to be monotonically increasing. Refs #10164.
"""
with connection.schema_editor(collect_sql=True) as editor:
editor.create_model(models.Square)
statements = editor.collected_sql
match = re.search('"id" ([^,]+),', statements[0])
self.assertIsNotNone(match)
self.assertEqual('integer NOT NULL PRIMARY KEY AUTOINCREMENT',
match.group(1), "Wrong SQL used to create an auto-increment "
"column on SQLite")
def test_aggregation(self):
"""
#19360: Raise NotImplementedError when aggregating on date/time fields.
"""
for aggregate in (Sum, Avg, Variance, StdDev):
self.assertRaises(NotImplementedError,
models.Item.objects.all().aggregate, aggregate('time'))
self.assertRaises(NotImplementedError,
models.Item.objects.all().aggregate, aggregate('date'))
self.assertRaises(NotImplementedError,
models.Item.objects.all().aggregate, aggregate('last_modified'))
@unittest.skipUnless(connection.vendor == 'postgresql', "Test only for PostgreSQL")
class PostgreSQLTests(TestCase):
def assert_parses(self, version_string, version):
self.assertEqual(pg_version._parse_version(version_string), version)
def test_parsing(self):
"""Test PostgreSQL version parsing from `SELECT version()` output"""
self.assert_parses("PostgreSQL 9.3 beta4", 90300)
self.assert_parses("PostgreSQL 9.3", 90300)
self.assert_parses("EnterpriseDB 9.3", 90300)
self.assert_parses("PostgreSQL 9.3.6", 90306)
self.assert_parses("PostgreSQL 9.4beta1", 90400)
self.assert_parses("PostgreSQL 9.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)", 90301)
def test_version_detection(self):
"""Test PostgreSQL version detection"""
# Helper mocks
class CursorMock(object):
"Very simple mock of DB-API cursor"
def execute(self, arg):
pass
def fetchone(self):
return ["PostgreSQL 9.3"]
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
class OlderConnectionMock(object):
"Mock of psycopg2 (< 2.0.12) connection"
def cursor(self):
return CursorMock()
# psycopg2 < 2.0.12 code path
conn = OlderConnectionMock()
self.assertEqual(pg_version.get_version(conn), 90300)
def test_connect_and_rollback(self):
"""
PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
transaction is rolled back (#17062).
"""
databases = copy.deepcopy(settings.DATABASES)
new_connections = ConnectionHandler(databases)
new_connection = new_connections[DEFAULT_DB_ALIAS]
try:
            # Ensure the database default time zone is different from
# the time zone in new_connection.settings_dict. We can
# get the default time zone by reset & show.
cursor = new_connection.cursor()
cursor.execute("RESET TIMEZONE")
cursor.execute("SHOW TIMEZONE")
db_default_tz = cursor.fetchone()[0]
new_tz = 'Europe/Paris' if db_default_tz == 'UTC' else 'UTC'
new_connection.close()
# Fetch a new connection with the new_tz as default
# time zone, run a query and rollback.
new_connection.settings_dict['TIME_ZONE'] = new_tz
new_connection.set_autocommit(False)
cursor = new_connection.cursor()
new_connection.rollback()
# Now let's see if the rollback rolled back the SET TIME ZONE.
cursor.execute("SHOW TIMEZONE")
tz = cursor.fetchone()[0]
self.assertEqual(new_tz, tz)
finally:
new_connection.close()
def test_connect_non_autocommit(self):
"""
The connection wrapper shouldn't believe that autocommit is enabled
after setting the time zone when AUTOCOMMIT is False (#21452).
"""
databases = copy.deepcopy(settings.DATABASES)
databases[DEFAULT_DB_ALIAS]['AUTOCOMMIT'] = False
new_connections = ConnectionHandler(databases)
new_connection = new_connections[DEFAULT_DB_ALIAS]
try:
# Open a database connection.
new_connection.cursor()
self.assertFalse(new_connection.get_autocommit())
finally:
new_connection.close()
def _select(self, val):
with connection.cursor() as cursor:
cursor.execute("SELECT %s", (val,))
return cursor.fetchone()[0]
def test_select_ascii_array(self):
a = ["awef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_select_unicode_array(self):
a = ["ᄲawef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_lookup_cast(self):
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
do = DatabaseOperations(connection=None)
for lookup in ('iexact', 'contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
self.assertIn('::text', do.lookup_cast(lookup))
def test_correct_extraction_psycopg2_version(self):
from django.db.backends.postgresql_psycopg2.base import DatabaseWrapper
version_path = 'django.db.backends.postgresql_psycopg2.base.Database.__version__'
with mock.patch(version_path, '2.6.9'):
self.assertEqual(DatabaseWrapper.psycopg2_version.__get__(self), (2, 6, 9))
with mock.patch(version_path, '2.5.dev0'):
self.assertEqual(DatabaseWrapper.psycopg2_version.__get__(self), (2, 5))
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
Test the custom ``django_date_trunc method``, in particular against
fields which clash with strings passed to it (e.g. 'year') - see
#12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
years = models.SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
Test the custom ``django_date_extract method``, in particular against fields
which clash with strings passed to it (e.g. 'day') - see #12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
classes = models.SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
cursor = connection.cursor()
try:
connection.ops.last_executed_query(cursor, '', ())
except Exception:
self.fail("'last_executed_query' should not raise an exception.")
def test_debug_sql(self):
list(models.Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]['sql'].lower()
self.assertIn("select", sql)
self.assertIn(models.Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""
        Test that last_executed_query() returns a Unicode string
"""
data = models.RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
sql, params = data.query.sql_with_params()
cursor = data.query.get_compiler('default').execute_sql(CURSOR)
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, six.text_type)
@unittest.skipUnless(connection.vendor == 'sqlite',
"This test is specific to SQLite.")
def test_no_interpolation_on_sqlite(self):
# Regression for #17158
# This shouldn't raise an exception
query = "SELECT strftime('%Y', 'now');"
connection.cursor().execute(query)
self.assertEqual(connection.queries[-1]['sql'],
str_prefix("QUERY = %(_)s\"SELECT strftime('%%Y', 'now');\" - PARAMS = ()"))
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
cursor = connection.cursor()
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.table_name_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
self.assertRaises(Exception, cursor.executemany, query, [(1, 2, 3)])
self.assertRaises(Exception, cursor.executemany, query, [(1,)])
# Unfortunately, the following tests would be a good test to run on all
# backends, but it breaks MySQL hard. Until #13711 is fixed, it can't be run
# everywhere (although it would be an effective test of #13711).
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
    we check here that it does. Refs #8901.
"""
available_apps = ['backends']
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
        # A full flush is expensive for the test suite, so we dig into the
# internals to generate the likely offending SQL and run it manually
# Some convenience aliases
VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sequences = [
{
'column': VLM._meta.pk.column,
'table': VLM._meta.db_table
},
]
cursor = connection.cursor()
for statement in connection.ops.sql_flush(no_style(), tables, sequences):
cursor.execute(statement)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
models.Post.objects.create(id=10, name='1st post', text='hello world')
# Reset the sequences for the database
cursor = connection.cursor()
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [models.Post])
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = models.Post.objects.create(name='New post', text='goodbye world')
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
connection.close()
connection.cursor()
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
connection.cursor()
self.assertEqual(data, {})
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
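    # The suffix makes a bare SELECT valid on backends that require a FROM
    # clause (e.g. " FROM DUAL" on Oracle).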
def test_paramless_no_escaping(self):
cursor = connection.cursor()
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], '%s')
def test_parameter_escaping(self):
cursor = connection.cursor()
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
@unittest.skipUnless(connection.vendor == 'sqlite',
"This is an sqlite-specific issue")
def test_sqlite_parameter_escaping(self):
#13648: '%s' escaping support for sqlite3
cursor = connection.cursor()
cursor.execute("select strftime('%s', date('now'))")
response = cursor.fetchall()[0][0]
        # response should be a non-zero integer
self.assertTrue(int(response))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
def create_squares(self, args, paramstyle, multiple):
cursor = connection.cursor()
opts = models.Square._meta
tbl = connection.introspection.table_name_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
#4896: Test cursor.executemany
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
#4765: executemany with params=[] does nothing
args = []
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
#10320: executemany accepts iterators
args = iter((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 5)
args = iter((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
#10070: Support pyformat style passing of parameters
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(models.Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
#10070: Support pyformat style passing of parameters
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = iter({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 5)
args = iter({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 9)
def test_unicode_fetches(self):
#6254: fetchone, fetchmany, fetchall return strings as unicode objects
qn = connection.ops.quote_name
models.Person(first_name="John", last_name="Doe").save()
models.Person(first_name="Jane", last_name="Doe").save()
models.Person(first_name="Mary", last_name="Agnelline").save()
models.Person(first_name="Peter", last_name="Parker").save()
models.Person(first_name="Clark", last_name="Kent").save()
opts2 = models.Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
% (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
qn(f3.column)))
cursor = connection.cursor()
cursor.execute(query2)
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "françois"
try:
connection.cursor()
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail("Unexpected error raised with unicode password: %s" % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, 'ops'))
self.assertTrue(hasattr(connection.ops, 'connection'))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
Test that DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.supports_stddev, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Test that creating an existing table returns a DatabaseError """
cursor = connection.cursor()
query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
Test that cursors can be used as a context manager
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
# Both InterfaceError and ProgrammingError seem to be used when
# accessing closed cursor (psycopg2 has InterfaceError, rest seem
# to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(connection.vendor == 'postgresql',
"Psycopg2 specific cursor.closed attribute needed")
def test_cursor_contextmanager_closing(self):
# There isn't a generic way to test that cursors are closed, but
# psycopg2 offers us a way to check that by closed attribute.
# So, run only on psycopg2 for that reason.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_is_usable_after_database_disconnects(self):
"""
Test that is_usable() doesn't crash when the database disconnects.
Regression for #21553.
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
with connection.cursor() as cursor:
reset_queries()
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
six.assertCountEqual(self, connection.queries[0].keys(), ['sql', 'time'])
reset_queries()
self.assertEqual(0, len(connection.queries))
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
Test that the backend doesn't store an unlimited number of queries.
Regression for #12581.
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connections = ConnectionHandler(settings.DATABASES)
new_connection = new_connections[DEFAULT_DB_ALIAS]
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(3, len(new_connection.queries))
self.assertEqual(1, len(w))
self.assertEqual(str(w[0].message), "Limit for query logging "
"exceeded, only the last 3 queries will be returned.")
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
# We don't make these tests conditional because that means we would need to
# check and differentiate between:
# * MySQL+InnoDB, MySQL+MYISAM (something we currently can't do).
# * if sqlite3 (if/once we get #14204 fixed) has referential integrity turned
# on or not, something that would be controlled by runtime support and user
# preference.
# Instead, the tests below simply write bad data and verify that the resulting
# exception's type is django.db.IntegrityError.
class FkConstraintsTests(TransactionTestCase):
available_apps = ['backends']
def setUp(self):
# Create a Reporter.
self.r = models.Reporter.objects.create(first_name='John', last_name='Smith')
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = models.Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks we make sure
# constraints are also enforced for proxy models. Refs #17519
a2 = models.Article(headline='This is another test', reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30)
self.assertRaises(IntegrityError, a2.save)
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a1 = models.Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks we make sure
# constraints are also enforced for proxy models. Refs #17519
# Create another article
r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
models.Article.objects.create(headline='Another article',
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r, reporter_proxy=r_proxy)
        # Retrieve the second article from the DB
a2 = models.Article.objects.get(headline='Another article')
a2.reporter_proxy_id = 30
self.assertRaises(IntegrityError, a2.save)
def test_disable_constraint_checks_manually(self):
"""
        When constraint checks are disabled, we should be able to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
When constraint checks are disabled (using context manager), should be able to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
with self.assertRaises(IntegrityError):
connection.check_constraints()
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ['backends']
def test_default_connection_thread_local(self):
"""
Ensure that the default connection (i.e. django.db.connection) is
different for each thread.
Refs #17258.
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
connection.cursor()
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.allow_thread_sharing = True
connection.cursor()
connections_dict[id(connection)] = connection
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
# Check that each created connection got different inner connection.
self.assertEqual(
len(set(conn.connection for conn in connections_dict.values())),
3)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_connections_thread_local(self):
"""
Ensure that the connections are different for each thread.
Refs #17258.
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.allow_thread_sharing = True
connections_dict[id(conn)] = conn
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEqual(len(connections_dict), 6)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_pass_connection_between_threads(self):
"""
Ensure that a connection can be passed from one thread to the other.
Refs #17258.
"""
models.Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
models.Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching allow_thread_sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to False
connections['default'].allow_thread_sharing = False
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to True
connections['default'].allow_thread_sharing = True
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
def test_closing_non_shared_connections(self):
"""
Ensure that a connection that is not explicitly shareable cannot be
closed by another thread.
Refs #17258.
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections['default'].allow_thread_sharing = True
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
class MySQLPKZeroTests(TestCase):
"""
    Zero as id for AutoField should raise an exception in MySQL, because
    MySQL does not allow zero as an autoincrement primary key.
"""
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
models.Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = models.Object.objects.create()
ref = models.ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = models.ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(models.Object.objects.filter(id=12345).exists())
ref = models.ObjectReference.objects.create(obj_id=12345)
ref_new = models.ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(models.Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = models.Object.objects.create()
obj.related_objects.create()
self.assertEqual(models.Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = models.Object._meta.get_field("related_objects").rel.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
class BackendUtilTests(TestCase):
def test_format_number(self):
"""
Test the format_number converter utility
"""
def equal(value, max_d, places, result):
self.assertEqual(format_number(Decimal(value), max_d, places), result)
equal('0', 12, 3,
'0.000')
equal('0', 12, 8,
'0.00000000')
equal('1', 12, 9,
'1.000000000')
equal('0.00000000', 12, 8,
'0.00000000')
equal('0.000000004', 12, 8,
'0.00000000')
equal('0.000000008', 12, 8,
'0.00000001')
equal('0.000000000000000000999', 10, 8,
'0.00000000')
equal('0.1234567890', 12, 10,
'0.1234567890')
equal('0.1234567890', 12, 9,
'0.123456789')
equal('0.1234567890', 12, 8,
'0.12345679')
equal('0.1234567890', 12, 5,
'0.12346')
equal('0.1234567890', 12, 3,
'0.123')
equal('0.1234567890', 12, 1,
'0.1')
equal('0.1234567890', 12, 0,
'0')
equal('0.1234567890', None, 0,
'0')
equal('1234567890.1234567890', None, 0,
'1234567890')
equal('1234567890.1234567890', None, 2,
'1234567890.12')
equal('0.1234', 5, None,
'0.1234')
equal('123.12', 5, None,
'123.12')
with self.assertRaises(Rounded):
equal('0.1234567890', 5, None,
'0.12346')
with self.assertRaises(Rounded):
equal('1234567890.1234', 5, None,
'1234600000')
@ignore_warnings(category=UserWarning,
message="Overriding setting DATABASES can lead to unexpected behavior")
class DBTestSettingsRenamedTests(TestCase):
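    # Background for the cases below: the handler treats the legacy TEST_*
    # settings (TEST_NAME, TEST_CREATE, ...) as aliases for keys inside the
    # TEST dict (NAME, CREATE_DB, ...); any mismatch between the two
    # spellings must raise ImproperlyConfigured.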
mismatch_msg = ("Connection 'test-deprecation' has mismatched TEST "
"and TEST_* database settings.")
def setUp(self):
super(DBTestSettingsRenamedTests, self).setUp()
self.handler = ConnectionHandler()
self.db_settings = {'default': {}}
def test_mismatched_database_test_settings_1(self):
# if the TEST setting is used, all TEST_* keys must appear in it.
self.db_settings.update({
'test-deprecation': {
'TEST': {},
'TEST_NAME': 'foo',
}
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_mismatched_database_test_settings_2(self):
# if the TEST setting is used, all TEST_* keys must match.
self.db_settings.update({
'test-deprecation': {
'TEST': {'NAME': 'foo'},
'TEST_NAME': 'bar',
},
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_mismatched_database_test_settings_3(self):
# Verifies the mapping of an aliased key.
self.db_settings.update({
'test-deprecation': {
'TEST': {'CREATE_DB': 'foo'},
'TEST_CREATE': 'bar',
},
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_mismatched_database_test_settings_4(self):
# Verifies the mapping of an aliased key when the aliased key is missing.
self.db_settings.update({
'test-deprecation': {
'TEST': {},
'TEST_CREATE': 'bar',
},
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_mismatched_settings_old_none(self):
self.db_settings.update({
'test-deprecation': {
'TEST': {'CREATE_DB': None},
'TEST_CREATE': '',
},
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_mismatched_settings_new_none(self):
self.db_settings.update({
'test-deprecation': {
'TEST': {},
'TEST_CREATE': None,
},
})
with override_settings(DATABASES=self.db_settings):
with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
self.handler.prepare_test_settings('test-deprecation')
def test_matched_test_settings(self):
# should be able to define new settings and the old, if they match
self.db_settings.update({
'test-deprecation': {
'TEST': {'NAME': 'foo'},
'TEST_NAME': 'foo',
},
})
with override_settings(DATABASES=self.db_settings):
self.handler.prepare_test_settings('test-deprecation')
def test_new_settings_only(self):
# should be able to define new settings without the old
self.db_settings.update({
'test-deprecation': {
'TEST': {'NAME': 'foo'},
},
})
with override_settings(DATABASES=self.db_settings):
self.handler.prepare_test_settings('test-deprecation')
@ignore_warnings(category=RemovedInDjango19Warning)
def test_old_settings_only(self):
# should be able to define old settings without the new
self.db_settings.update({
'test-deprecation': {
'TEST_NAME': 'foo',
},
})
with override_settings(DATABASES=self.db_settings):
self.handler.prepare_test_settings('test-deprecation')
def test_empty_settings(self):
with override_settings(DATABASES=self.db_settings):
self.handler.prepare_test_settings('default')
@unittest.skipUnless(connection.vendor == 'sqlite', 'SQLite specific test.')
@skipUnlessDBFeature('can_share_in_memory_db')
class TestSqliteThreadSharing(TransactionTestCase):
available_apps = ['backends']
def test_database_sharing_in_threads(self):
def create_object():
models.Object.objects.create()
create_object()
thread = threading.Thread(target=create_object)
thread.start()
thread.join()
self.assertEqual(models.Object.objects.count(), 2)
| bsd-3-clause | 8,455,467,329,355,840,000 | 39.463875 | 161 | 0.621021 | false |
cherrypy/magicbus | magicbus/plugins/loggers.py | 1 | 1912 | """Logging plugins for magicbus."""
from magicbus.compat import ntob, unicodestr
import datetime
import sys
from magicbus.plugins import SimplePlugin
class StreamLogger(SimplePlugin):
default_format = '[%(timestamp)s] (Bus %(bus)s) %(message)s\n'
def __init__(self, bus, stream, level=None, format=None, encoding='utf-8'):
SimplePlugin.__init__(self, bus)
self.stream = stream
self.level = level
self.format = format or self.default_format
self.encoding = encoding
def log(self, msg, level):
if self.level is None or self.level <= level:
params = {
'timestamp': ntob(datetime.datetime.now().isoformat()),
'bus': self.bus.id,
'message': msg,
'level': level
}
complete_msg = self.format % params
if self.encoding is not None:
if isinstance(complete_msg, unicodestr):
complete_msg = complete_msg.encode(self.encoding)
self.stream.write(complete_msg)
self.stream.flush()
class StdoutLogger(StreamLogger):
def __init__(self, bus, level=None, format=None, encoding='utf-8'):
StreamLogger.__init__(self, bus, sys.stdout, level, format, encoding)
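# Minimal usage sketch (assumptions flagged: a ``bus`` instance such as the
# package-level ``magicbus.bus``, and plugin activation via ``subscribe()``):
#
#     from magicbus import bus
#     StdoutLogger(bus).subscribe()
#     bus.log('engine starting')  # rendered with default_format to stdout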
class StderrLogger(StreamLogger):
def __init__(self, bus, level=None, format=None, encoding='utf-8'):
StreamLogger.__init__(self, bus, sys.stderr, level, format, encoding)
class FileLogger(StreamLogger):
def __init__(self, bus, filename=None, file=None,
                 level=None, format=None, encoding='utf-8'):
self.filename = filename
if file is None:
if filename is None:
raise ValueError('Either file or filename MUST be supplied.')
file = open(filename, 'ab')
StreamLogger.__init__(self, bus, file, level, format, encoding)
| bsd-3-clause | 5,570,195,874,519,741,000 | 30.866667 | 79 | 0.599895 | false |
nlgranger/LazyProc | seqtools/indexing.py | 1 | 12939 | from numbers import Integral
import itertools
import bisect
from array import array
from future.builtins import range
from .utils import isint, basic_getitem, basic_setitem, normalize_slice, \
get_logger
class Arange:
def __init__(self, start, stop=None, step=None):
if stop is None and step is None:
stop = start
start = 0
if step is None:
step = 1
if (stop - start) / step < 0:
stop = start
size = abs(stop - start) - 1
abs_step = abs(step)
numel = (size + abs_step - (size % abs_step)) // abs_step
stop = start + step * numel
self.start, self.stop, self.step = start, stop, step
def __len__(self):
return abs(self.stop - self.start) // abs(self.step)
def __iter__(self):
return iter(range(self.start, self.stop, self.step))
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = normalize_slice(
key.start, key.stop, key.step, len(self))
numel = abs(stop - start) // abs(step)
start = self.start + self.step * start
step = self.step * step
stop = start + step * numel
return Arange(start, stop, step)
        elif not isinstance(key, Integral):
            raise TypeError(
                self.__class__.__name__ + " indices must be integers or "
                "slices, not " + key.__class__.__name__)
        if key < 0:  # support negative indexing, like the other views
            key += len(self)
        if key < 0 or key >= len(self):
            raise IndexError(self.__class__.__name__ + " index out of range")
        return self.start + self.step * key
def arange(start, stop=None, step=None):
"""Sequential equivalent of Python built-in :class:`python:range`."""
return Arange(start, stop, step)
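# A quick illustration of arange semantics (mirrors the built-in range,
# including negative steps):
#
#     >>> list(arange(10, 0, -3))
#     [10, 7, 4, 1]
#     >>> len(arange(5))
#     5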
class Gathering(object):
def __init__(self, sequence, indexes):
if isinstance(sequence, Gathering): # optimize nested subsets
indexes = array('l', (sequence.indexes[i] for i in indexes))
sequence = sequence.sequence
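            # e.g. gather(gather(seq, [3, 1, 2]), [2, 0]) collapses here to
            # gather(seq, [2, 3]), keeping lookups one indirection deep.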
self.sequence = sequence
self.indexes = indexes
def __len__(self):
return len(self.indexes)
def __iter__(self):
for i in self.indexes:
yield self.sequence[i]
def __getitem__(self, key):
if isinstance(key, slice):
return gather(self.sequence, self.indexes[key])
elif isint(key):
if key < -len(self) or key >= len(self):
raise IndexError(
self.__class__.__name__ + " index out of range")
if key < 0:
key = len(self) + key
return self.sequence[self.indexes[key]]
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def __setitem__(self, key, value):
if isinstance(key, slice):
indexes = self.indexes[key]
if len(indexes) != len(value):
raise ValueError(self.__class__.__name__ + " only support "
"one-to-one assignment")
for i, val in zip(indexes, value):
self.sequence[i] = val
elif isint(key):
if key < -len(self) or key >= len(self):
raise IndexError(
self.__class__.__name__ + " index out of range")
if key < 0:
key = len(self) + key
self.sequence[self.indexes[key]] = value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def gather(sequence, indexes):
"""Return a view on the sequence reordered by indexes.
.. image:: _static/gather.png
:alt: gather
:width: 15%
:align: center
Example:
>>> arr = ['d', 'e', 'h', 'l', 'o', 'r', 'w', ' ']
>>> idx = [2, 1, 3, 3, 4, 7, 6, 4, 5, 3, 0]
>>> list(seqtools.gather(arr, idx))
['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd']
"""
return Gathering(sequence, indexes)
def take(sequence, indexes):
"""Alias for :func:`seqtools.gather`."""
return gather(sequence, indexes)
def reindex(sequence, indexes):
    # logging.Logger.warning() does not accept the category/stacklevel
    # keywords; emit a real DeprecationWarning via the warnings module.
    import warnings
    warnings.warn(
        "Call to deprecated function reindex, use gather instead",
        category=DeprecationWarning,
        stacklevel=2)
return gather(sequence, indexes)
class Cycle:
def __init__(self, sequence, size):
self.sequence = sequence
self.size = int(size)
def __len__(self):
return self.size
def __iter__(self):
i = 0
while True:
for v in self.sequence:
yield v
i += 1
if i == self.size:
return
@basic_getitem
def __getitem__(self, key):
return self.sequence[key % len(self.sequence)]
@basic_setitem
def __setitem__(self, key, value):
self.sequence[key % len(self.sequence)] = value
class InfiniteCycle:
def __init__(self, sequence):
self.sequence = sequence
def __iter__(self):
while True:
for v in self.sequence:
yield v
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = key.start, key.stop, key.step
if start is None:
start = 0
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
offset = start - start % len(self.sequence)
start -= offset
stop -= offset
return Cycle(self.sequence, stop)[start:stop:step]
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
return self.sequence[key % len(self.sequence)]
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def cycle(sequence, limit=None):
"""Return repeated view of a sequence.
Args:
sequence (Sequence): The sequence to be repeated.
limit (Optional[int]): An optional size limit.
.. image:: _static/cycle.png
:alt: collate
:width: 10%
:align: center
Example:
>>> data = ['a', 'b', 'c']
>>> loop = seqtools.cycle(data)
>>> loop[3]
'a'
>>> loop[3 * 10 ** 9 + 1] # unbounded sequence
'b'
>>> loop = seqtools.cycle(data, 7)
>>> list(loop)
['a', 'b', 'c', 'a', 'b', 'c', 'a']
"""
return InfiniteCycle(sequence) if limit is None else Cycle(sequence, limit)
class Interleaving(object):
def __init__(self, sequences):
offsets_in = [0] # end of sequences in input indexing
offsets_out = [0] # end of sequences in output indexing
whose_offset = sorted(range(len(sequences)),
key=lambda k: len(sequences[k]))
for i, n_seq_left in zip(whose_offset, range(len(sequences), 0, -1)):
n_new_out_items = (len(sequences[i]) - offsets_in[-1]) * n_seq_left
offsets_out.append(offsets_out[-1] + n_new_out_items)
offsets_in.append(len(sequences[i]))
self.sequences = sequences
self.n_seqs = len(sequences)
self.offsets_in = array('i', offsets_in)
self.offsets_out = array('i', offsets_out)
self.remaining_seqs = [sorted(whose_offset[i:])
for i in range(len(sequences))]
def __len__(self):
return sum(map(len, self.sequences))
def _convert_1d_key(self, key):
# given index in interleaved sequences, return sequence and offset
n_exhausted = bisect.bisect(self.offsets_out, key) - 1
n_remaining_seqs = self.n_seqs - n_exhausted
key -= self.offsets_out[n_exhausted]
seq = self.remaining_seqs[n_exhausted][key % n_remaining_seqs]
idx = self.offsets_in[n_exhausted] + key // n_remaining_seqs
return seq, idx
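    # Worked example: with sequences of lengths (3, 5), offsets_in is
    # [0, 3, 5] and offsets_out is [0, 6, 8]; output index 6 falls past the
    # exhausted shorter sequence and maps to item 3 of the longer one.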
def __iter__(self):
iterators = [iter(seq) for seq in self.sequences]
i = -1
while len(iterators) > 0:
i = (i + 1) % len(iterators)
try:
yield next(iterators[i])
except StopIteration:
del iterators[i]
i -= 1
@basic_getitem
def __getitem__(self, key):
seq, idx = self._convert_1d_key(key)
return self.sequences[seq][idx]
@basic_setitem
def __setitem__(self, key, value):
seq, idx = self._convert_1d_key(key)
self.sequences[seq][idx] = value
def interleave(*sequences):
"""Interleave elements from several sequences into one.
Sequences don't need to have the same length, the cycling will
operate between whatever sequences are left.
.. image:: _static/interleaving.png
:alt: interleaving
:width: 30%
:align: center
Example:
>>> arr1 = [ 1, 2, 3, 4, 5]
>>> arr2 = ['a', 'b', 'c']
>>> arr3 = [.1, .2, .3, .4]
>>> list(interleave(arr1, arr2, arr3))
[1, 'a', 0.1, 2, 'b', 0.2, 3, 'c', 0.3, 4, 0.4, 5]
"""
return Interleaving(sequences)
class Repetition(object):
def __init__(self, item, times):
self.object = item
self.times = times
def __len__(self):
return self.times
def __iter__(self):
return itertools.repeat(self.object, self.times)
@basic_getitem
def __getitem__(self, item):
return self.object
@basic_setitem
def __setitem__(self, key, value):
self.object = value
class InfiniteRepetition(object):
def __init__(self, value):
self.value = value
def __iter__(self):
return itertools.repeat(self.value)
def __len__(self):
return 0
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = key.start, key.stop, key.step
start = 0 if start is None else start
step = 1 if step is None else step
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
if step == 0:
raise ValueError("slice step cannot be 0")
if (stop - start) * step <= 0:
return []
if step > 0:
stop += (step + stop - start) % step
else:
stop -= (-step + start - stop) % -step
return repeat(self.value, (stop - start) // step)
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
return self.value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def __setitem__(self, key, value):
if isinstance(key, slice):
            start, stop, step = key.start, key.stop, key.step
            start = 0 if start is None else start  # guard against a None start
            step = 1 if step is None else step
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
if step == 0:
raise ValueError("slice step cannot be 0")
if (stop - start) * step > 0:
self.value = value[-1]
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
self.value = value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def repeat(value, times=None):
"""Make a sequence by repeating a value.
Args:
value (Any): Value to be (virtually) replicated.
times (Optional[int]): Optional size limit.
.. image:: _static/repeat.png
:alt: repeat
:width: 10%
:align: center
Example:
>>> item = 3
>>> repetition = seqtools.repeat(item, 10)
>>> list(repetition)
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
"""
    if isint(times) and times > 0:  # a single repetition is still valid
return Repetition(value, times)
elif times is None:
return InfiniteRepetition(value)
else:
raise TypeError("times must be a positive integer or None")
| mpl-2.0 | 9,185,350,080,690,769,000 | 28.076404 | 79 | 0.515341 | false |
ArcherSys/ArcherSys | Lib/site-packages/github/tests/GitTag.py | 1 | 8993 |
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import datetime
from . import Framework
class GitTag(Framework.TestCase):
def setUp(self):
Framework.TestCase.setUp(self)
self.tag = self.g.get_user().get_repo("PyGithub").get_git_tag("f5f37322407b02a80de4526ad88d5f188977bc3c")
def testAttributes(self):
self.assertEqual(self.tag.message, "Version 0.6\n")
self.assertEqual(self.tag.object.sha, "4303c5b90e2216d927155e9609436ccb8984c495")
self.assertEqual(self.tag.object.type, "commit")
self.assertEqual(self.tag.object.url, "https://api.github.com/repos/jacquev6/PyGithub/git/commits/4303c5b90e2216d927155e9609436ccb8984c495")
self.assertEqual(self.tag.sha, "f5f37322407b02a80de4526ad88d5f188977bc3c")
self.assertEqual(self.tag.tag, "v0.6")
self.assertEqual(self.tag.tagger.date, datetime.datetime(2012, 5, 10, 18, 14, 15))
self.assertEqual(self.tag.tagger.email, "[email protected]")
self.assertEqual(self.tag.tagger.name, "Vincent Jacques")
self.assertEqual(self.tag.url, "https://api.github.com/repos/jacquev6/PyGithub/git/tags/f5f37322407b02a80de4526ad88d5f188977bc3c")
| mit | 8,142,942,190,424,459,000 | 61.451389 | 148 | 0.526521 | false |
dj-stripe/dj-stripe | djstripe/signals.py | 1 | 7264 | """
signals are sent for each event Stripe sends to the app
Stripe docs for Webhooks: https://stripe.com/docs/webhooks
"""
from django.dispatch import Signal
webhook_processing_error = Signal(providing_args=["data", "exception"])
# A signal for each Event type. See https://stripe.com/docs/api/events/types
WEBHOOK_SIGNALS = dict(
[
(hook, Signal(providing_args=["event"]))
for hook in [
# Update this by copy-pasting the "enabled_events" enum values from
# https://raw.githubusercontent.com/stripe/openapi/master/openapi/spec3.json
"account.application.authorized",
"account.application.deauthorized",
"account.external_account.created",
"account.external_account.deleted",
"account.external_account.updated",
"account.updated",
"application_fee.created",
"application_fee.refund.updated",
"application_fee.refunded",
"balance.available",
"capability.updated",
"charge.captured",
"charge.dispute.closed",
"charge.dispute.created",
"charge.dispute.funds_reinstated",
"charge.dispute.funds_withdrawn",
"charge.dispute.updated",
"charge.expired",
"charge.failed",
"charge.pending",
"charge.refund.updated",
"charge.refunded",
"charge.succeeded",
"charge.updated",
"checkout.session.async_payment_failed",
"checkout.session.async_payment_succeeded",
"checkout.session.completed",
"coupon.created",
"coupon.deleted",
"coupon.updated",
"credit_note.created",
"credit_note.updated",
"credit_note.voided",
"customer.created",
"customer.deleted",
"customer.discount.created",
"customer.discount.deleted",
"customer.discount.updated",
"customer.source.created",
"customer.source.deleted",
"customer.source.expiring",
"customer.source.updated",
"customer.subscription.created",
"customer.subscription.deleted",
"customer.subscription.pending_update_applied",
"customer.subscription.pending_update_expired",
"customer.subscription.trial_will_end",
"customer.subscription.updated",
"customer.tax_id.created",
"customer.tax_id.deleted",
"customer.tax_id.updated",
"customer.updated",
"file.created",
"invoice.created",
"invoice.deleted",
"invoice.finalization_failed",
"invoice.finalized",
"invoice.marked_uncollectible",
"invoice.paid",
"invoice.payment_action_required",
"invoice.payment_failed",
"invoice.payment_succeeded",
"invoice.sent",
"invoice.upcoming",
"invoice.updated",
"invoice.voided",
"invoiceitem.created",
"invoiceitem.deleted",
"invoiceitem.updated",
"issuing_authorization.created",
"issuing_authorization.request",
"issuing_authorization.updated",
"issuing_card.created",
"issuing_card.updated",
"issuing_cardholder.created",
"issuing_cardholder.updated",
"issuing_dispute.closed",
"issuing_dispute.created",
"issuing_dispute.funds_reinstated",
"issuing_dispute.submitted",
"issuing_dispute.updated",
"issuing_transaction.created",
"issuing_transaction.updated",
"mandate.updated",
"order.created",
"order.payment_failed",
"order.payment_succeeded",
"order.updated",
"order_return.created",
"payment_intent.amount_capturable_updated",
"payment_intent.canceled",
"payment_intent.created",
"payment_intent.payment_failed",
"payment_intent.processing",
"payment_intent.requires_action",
"payment_intent.succeeded",
"payment_method.attached",
"payment_method.automatically_updated",
"payment_method.detached",
"payment_method.updated",
"payout.canceled",
"payout.created",
"payout.failed",
"payout.paid",
"payout.updated",
"person.created",
"person.deleted",
"person.updated",
"plan.created",
"plan.deleted",
"plan.updated",
"price.created",
"price.deleted",
"price.updated",
"product.created",
"product.deleted",
"product.updated",
"promotion_code.created",
"promotion_code.updated",
"radar.early_fraud_warning.created",
"radar.early_fraud_warning.updated",
"recipient.created",
"recipient.deleted",
"recipient.updated",
"reporting.report_run.failed",
"reporting.report_run.succeeded",
"reporting.report_type.updated",
"review.closed",
"review.opened",
"setup_intent.canceled",
"setup_intent.created",
"setup_intent.requires_action",
"setup_intent.setup_failed",
"setup_intent.succeeded",
"sigma.scheduled_query_run.created",
"sku.created",
"sku.deleted",
"sku.updated",
"source.canceled",
"source.chargeable",
"source.failed",
"source.mandate_notification",
"source.refund_attributes_required",
"source.transaction.created",
"source.transaction.updated",
"subscription_schedule.aborted",
"subscription_schedule.canceled",
"subscription_schedule.completed",
"subscription_schedule.created",
"subscription_schedule.expiring",
"subscription_schedule.released",
"subscription_schedule.updated",
"tax_rate.created",
"tax_rate.updated",
"topup.canceled",
"topup.created",
"topup.failed",
"topup.reversed",
"topup.succeeded",
"transfer.created",
"transfer.failed",
"transfer.paid",
"transfer.reversed",
"transfer.updated",
# deprecated (no longer in events_types list) - TODO can be deleted?
"checkout_beta.session_succeeded",
"issuer_fraud_record.created",
"payment_intent.requires_capture",
"payment_method.card_automatically_updated",
"issuing_dispute.created",
"issuing_dispute.updated",
"issuing_settlement.created",
"issuing_settlement.updated",
# special case? - TODO can be deleted?
"ping",
]
]
)
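# Receiver sketch (hedged: assumes dj-stripe dispatches the ``event`` keyword
# argument declared in providing_args above, and that your Stripe webhook
# endpoint is configured to deliver "charge.succeeded"):
#
#     from djstripe.signals import WEBHOOK_SIGNALS
#
#     def on_charge_succeeded(sender, event, **kwargs):
#         print("charge succeeded:", event.id)
#
#     WEBHOOK_SIGNALS["charge.succeeded"].connect(on_charge_succeeded)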
| mit | -7,294,412,924,146,498,000 | 36.251282 | 88 | 0.541024 | false |
tuanvu216/udacity-course | full_stack_foundations/full_stack_foundations_master/lesson_3/15_delete_menu_item_solution/project.py | 1 | 2369 | from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
@app.route('/')
@app.route('/restaurants/<int:restaurant_id>/menu')
def restaurantMenu(restaurant_id):
restaurant = session.query(Restaurant).filter_by(id = restaurant_id).one()
items = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)
return render_template('menu.html', restaurant=restaurant, items = items, restaurant_id = restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/new', methods=['GET','POST'])
def newMenuItem(restaurant_id):
if request.method == 'POST':
newItem = MenuItem(name = request.form['name'], description = request.form['description'], price = request.form['price'], course = request.form['course'], restaurant_id = restaurant_id)
session.add(newItem)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('newmenuitem.html', restaurant_id = restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/edit', methods = ['GET', 'POST'])
def editMenuItem(restaurant_id, menu_id):
editedItem = session.query(MenuItem).filter_by(id = menu_id).one()
if request.method == 'POST':
if request.form['name']:
editedItem.name = request.form['name']
session.add(editedItem)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('editmenuitem.html', restaurant_id = restaurant_id, menu_id = menu_id, item = editedItem)
#DELETE MENU ITEM SOLUTION
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/delete', methods = ['GET','POST'])
def deleteMenuItem(restaurant_id, menu_id):
itemToDelete = session.query(MenuItem).filter_by(id = menu_id).one()
if request.method == 'POST':
session.delete(itemToDelete)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('deleteconfirmation.html', item = itemToDelete)
if __name__ == '__main__':
app.debug = True
app.run(host = '0.0.0.0', port = 5000)
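# Example requests against the dev server above (paths as routed in this
# file; host/port assumed to match the app.run() call):
#   GET  http://localhost:5000/restaurants/1/menu      -> render the menu
#   POST http://localhost:5000/restaurants/1/new       -> create a menu item
#   POST http://localhost:5000/restaurants/1/2/delete  -> delete menu item 2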
| mit | -4,962,088,156,317,859,000 | 36.015625 | 187 | 0.726889 | false |
openai/universe | universe/envs/diagnostics.py | 1 | 24197 | import collections
import fastzbarlight
import itertools
import logging
from multiprocessing import pool
import numpy as np
import time
import threading
# import psutil
import sys
from collections import namedtuple
from gym.utils import reraise
import re
from universe import error, pyprofile, spaces
# TODO: prefix the loggers
logger = logging.getLogger(__name__)
extra_logger = logging.getLogger('universe.extra.'+__name__)
def show(ob):
from PIL import Image
Image.fromarray(ob).show()
def standard_error(ary, axis, scale=1):
ary = np.array(ary) * scale
if len(ary) > 1:
return np.std(ary, axis=axis) / np.sqrt(len(ary) - 1)
else:
return np.std(ary, axis=axis)
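# e.g. standard_error([1.0, 2.0, 3.0], axis=0) returns
# np.std([1, 2, 3]) / np.sqrt(2), since len(ary) - 1 == 2 for three samples.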
def extract_timestamp(observation):
total = 0
for byte in observation[0]:
total = 256 * total + byte
for byte in observation[1]:
total = 256 * total + byte
timestamp = total/1000.
return timestamp
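# Worked example: two RGB pixels (0, 0, 0) and (1, 0, 0) concatenate to the
# big-endian integer 0x010000 == 65536 milliseconds, so extract_timestamp
# returns 65.536 seconds.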
class MetadataDecoder(object):
@classmethod
def build(cls, metadata_encoding, pool, qr_pool, label):
metadata_encoding = metadata_encoding.copy()
type = metadata_encoding.pop('type')
if type == 'qrcode':
return QRCodeMetadataDecoder(label=label, pool=pool, qr_pool=qr_pool, **metadata_encoding)
elif type == 'pixels':
return PixelsMetadataDecoder(label=label)
else:
raise error.Error('Invalid encoding: {}'.format(type))
class AsyncDecode(object):
pool = None
def __init__(self, pool, qr_pool, method, x, y, width, height):
self.x = x
self.y = y
self.width = width
self.height = height
self._last_img = None
self.method = method
self.results = []
self.deque = collections.deque()
self.pool = pool
self.qr_pool = qr_pool
def __call__(self, img, available_at):
# Choose the return value
if len(self.deque) > 0 and self.deque[0].ready():
last = self.deque.popleft()
res = last.get()
if res is not None:
pyprofile.timing('vnc_env.diagnostics.async_decode.latency', time.time() - res['available_at'])
else:
res = False
pyprofile.gauge('vnc_env.diagnostics.async_decode.queue_depth', len(self.deque))
# Just grayscale it by keeping only one component. Should be
# good enough as this region is black and white anyway.
grayscale = img[self.y:self.y+self.height, self.x:self.x+self.width, 0]
# Apply processing if needed
match = np.array_equal(self._last_img, grayscale)
if not match:
pyprofile.incr('vnc_env.diagnostics.async_decode.schedule')
# sneakily copy if numpy hasn't, so it can be cached
self._last_img = np.ascontiguousarray(grayscale)
            # "async" became a reserved word in Python 3.7; use a neutral name.
            async_result = self.qr_pool.apply_async(self.method, (self._last_img, time.time(), available_at))
            self.deque.append(async_result)
else:
pyprofile.incr('vnc_env.diagnostics.async_decode.cache_hit')
return res
class QRCodeMetadataDecoder(MetadataDecoder):
def __init__(self, pool, qr_pool, x, y, width, height, label):
self.flag_synchronous = False
self.x = x
self.y = y
self.width = width
self.height = height
self.label = label
self.decode = AsyncDecode(pool, qr_pool, self._decode, x, y, width, height)
def _decode(self, observation, start, available_at):
# This method gets wrapped by AsyncDecode.__call__
with pyprofile.push('vnc_env.diagnostics.QRCodeMetadataDecoder.qr_code_scanner'):
encoded = fastzbarlight.qr_code_scanner(observation.tobytes(), self.width, self.height)
if encoded is None:
# Failed to parse!
return
if encoded.startswith(b'v1:'):
encoded = encoded.decode('utf-8')
if len(encoded) != len('v1:') + 12 + 12:
raise error.Error('Bad length for metadata from enviroment: {}'.format(encoded))
encoded = encoded[len('v1:'):]
last_update = int(encoded[:12], 16) / 1000.0
last_action = int(encoded[12:24], 16) / 1000.
return {
# Timestamp on the image
'now': last_update,
# When the last probe was received
'probe_received_at': last_action,
'processing_start': start,
'processing_end': time.time(),
'available_at': available_at,
}
else:
raise error.Error('Bad version string for metadata from environment: {}'.format(encoded))
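# Payload sketch (hypothetical timestamps): the QR encodes "v1:" plus two
# 12-digit hex millisecond clocks, e.g. b'v1:000000003e80000000001f40'
# decodes to now=16.0 and probe_received_at=8.0.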
class PixelsMetadataDecoder(MetadataDecoder):
def __init__(self, label):
self.flag_synchronous = True
self.anchor = np.array([
[(0x12, 0x34, 0x56), (0x78, 0x90, 0xab)],
[(0x23, 0x45, 0x67), (0x89, 0x0a, 0xbc)],
], dtype=np.uint8)
self.location = None
self.last_search_metadata = 0
self.label = label
def _check_location(self, observation, location):
y, x = location
return np.all(observation[y:y+2, x:x+2] == self.anchor)
def _find_metadata_location(self, observation):
ys, xs = np.where(np.all(observation == self.anchor[0, 0], axis=-1))
if len(ys) == 0:
extra_logger.info('[%s] Could not find metadata anchor pixel', self.label)
return False
# TODO: handle multiple hits
assert len(ys) == 1
location = (ys[0], xs[0])
assert self._check_location(observation, location)
extra_logger.info('[%s] Found metadata anchor pixel: %s', self.label, location)
return location
def _should_search_metadata(self):
return time.time() - self.last_search_metadata > 1
def decode(self, observation, available_at=None):
start = time.time()
# metadata pixel location hasn't been initialized or it has moved
if not self.location or not self._check_location(observation,
self.location):
# only search for metadata occasionally
if self._should_search_metadata():
self.location = self._find_metadata_location(observation)
self.last_search_metadata = time.time()
if not self.location:
return False # False translates to None in DiagnosticsInstance
y, x = self.location
now = extract_timestamp(observation[y, x+2:x+4])
probe_received_at = extract_timestamp(observation[y, x+4:x+6])
return {
'now': now,
'probe_received_at': probe_received_at,
'processing_start': start,
'processing_end': time.time(),
'available_at': available_at,
}
class Diagnostics(object):
def __init__(self, n, probe_key, ignore_clock_skew=False, metadata_encoding=None, disable_action_probes=False):
# Each QR code takes about 1ms (and updates at 5fps). We do
# our best to ensure the QR is processed in time for the next
# step call (n/16 would put us right at the threshold).
self.pool = pool.ThreadPool(max(int(n/4), 1))
self.qr_pool = pool.ThreadPool(max(int(n/8), 1))
self.lock = threading.RLock()
self.instance_n = [None] * n
self.ignore_clock_skew = ignore_clock_skew
self.disable_action_probes = disable_action_probes
self.metadata_encoding = metadata_encoding
self.update(probe_key=probe_key, metadata_encoding=metadata_encoding)
# only used in flashgames right now
def update(self, probe_key, metadata_encoding):
self.probe_key = probe_key
self.metadata_encoding = metadata_encoding
for instance in self.instance_n:
if instance is not None:
instance.update(probe_key=self.probe_key, metadata_encoding=self.metadata_encoding)
def connect(self, i, network=None, label=None):
# This should technically be synchronized
self.instance_n[i] = DiagnosticsInstance(i, network, self.probe_key, self.ignore_clock_skew, self.metadata_encoding, disable_action_probes=self.disable_action_probes, qr_pool=self.qr_pool, pool=self.pool, label=label)
def close(self, i=None):
if i is not None:
self.instance_n[i] = None
else:
self.pool.close()
self.qr_pool.close()
for i in range(len(self.instance_n)):
self.close(i)
self.instance_n = None
def add_probe(self, action_n, mask_n):
if self.disable_action_probes or self.instance_n is None:
return
for instance, action, mask in zip(self.instance_n, action_n, mask_n):
# Important that masking prevents us from adding probes. (This
# avoids us e.g. filling in backticks into text boxes as the
# environment boots.)
if mask and instance:
instance.add_probe(action)
def add_metadata(self, observation_n, info_n, available_at=None):
"""Mutates the info_n dictionary."""
if self.instance_n is None:
return
with pyprofile.push('vnc_env.diagnostics.Diagnostics.add_metadata'):
            # "async" became a reserved word in Python 3.7; use a neutral name.
            results = self.pool.imap_unordered(
                self._add_metadata_i,
                zip(self.instance_n, observation_n, info_n, [available_at] * len(observation_n)))
            list(results)  # drain the iterator so every instance is processed
def _add_metadata_i(self, args):
instance, observation, info, now = args
if instance is None or observation is None:
return
instance.add_metadata(observation, info, now)
def extract_metadata(self, observation_n):
return [instance._extract_metadata(observation)
for instance, observation in zip(self.instance_n, observation_n)]
def clear_probes_when_done(self, done_n):
if self.instance_n is None: # if we've been closed there's nothing to do
return
for instance, done in zip(self.instance_n, done_n):
if done:
instance.clear_probe()
class DiagnosticsInstance(object):
anchor = np.array([
[(0x12, 0x12, 0x12), (0x78, 0x78, 0x78)],
[(0x23, 0x23, 0x23), (0x89, 0x89, 0x89)],
], dtype=np.uint8)
zero_clock_skew = np.zeros([2])
def __init__(self, i, network, probe_key, ignore_clock_skew, metadata_encoding, disable_action_probes, pool, qr_pool, label=None):
'''
network - either Network() object used to get clock skew, or None.
If None, we skip measuring clock skew, and skip measuring
diagnostics which rely on clock skew.
'''
if network is None:
assert ignore_clock_skew
self.ignore_clock_skew = ignore_clock_skew
self.label = label
self.i = i
self.network = network
self.probe_sent_at = None # local time
self.probe_received_at = None # remote time
self.action_latency_skewed = None
self.last_observation_timestamp = None
self.disable_action_probes = disable_action_probes
self.pool = pool
self.qr_pool = qr_pool
self.could_read_metadata = None
self.update(probe_key=probe_key, metadata_encoding=metadata_encoding)
def update(self, probe_key, metadata_encoding):
self.probe = [
spaces.KeyEvent(probe_key, down=True).compile(),
spaces.KeyEvent(probe_key, down=False).compile(),
]
if metadata_encoding is not None:
self.metadata_decoder = MetadataDecoder.build(metadata_encoding, pool=self.pool, qr_pool=self.qr_pool, label=self.label)
else:
self.metadata_decoder = None
def clear_probe(self):
self.probe_sent_at = None
self.probe_received_at = None
def add_probe(self, action):
if self.network is not None and not self.network.active():
return
if self.probe_sent_at is not None and self.probe_sent_at + 10 < time.time():
extra_logger.warn('[%s] Probe to determine action latency timed out (was sent %s). (This is harmless, but worth knowing about.)', self.label, self.probe_sent_at)
self.probe_sent_at = None
if self.probe_sent_at is None:
extra_logger.debug('[%s] Sending out new action probe: %s', self.label, self.probe)
self.probe_sent_at = time.time()
action += self.probe
assert self.probe_sent_at is not None
    def add_metadata(self, observation, info, available_at=None):
        """Extract metadata from a pixel observation and add it to the info dict."""
observation = observation['vision']
if observation is None: return
if self.network is not None and not self.network.active():
return
elif self.metadata_decoder is None:
return
elif observation is None:
return
# should return a dict with now/probe_received_at keys
with pyprofile.push('vnc_env.diagnostics.DiagnosticsInstance.add_metadata.decode'):
metadata = self.metadata_decoder.decode(observation, available_at=available_at)
if metadata is False:
# No metadata ready, though it doesn't mean parsing failed
metadata = None
elif metadata is None:
if self.could_read_metadata:
self.could_read_metadata = False
extra_logger.info('[%s] Stopped being able to read metadata (expected when environment resets)', self.label)
elif not self.could_read_metadata:
self.could_read_metadata = True
extra_logger.info('[%s] Started being able to read metadata', self.label)
if self.metadata_decoder.flag_synchronous and metadata is not None:
info['diagnostics.image_remote_time'] = metadata['now']
local_now = time.time()
if self.network is None:
# Assume the clock skew is zero. Should only be run on the
# same machine as the VNC server, such as the universe
# instance inside of the environmenth containers.
real_clock_skew = self.zero_clock_skew
else:
# Note: this is a 2-length vector of (min, max), so anything added to
# it is also going to be a 2-length vector.
# Most of the diagnostics below are, but you have to look carefully.
real_clock_skew = self.network.reversed_clock_skew()
# Store real clock skew here
info['stats.gauges.diagnostics.clock_skew'] = real_clock_skew
if self.ignore_clock_skew:
clock_skew = self.zero_clock_skew
else:
clock_skew = real_clock_skew
if metadata is not None:
# We'll generally update the observation timestamp infrequently
if self.last_observation_timestamp == metadata['now']:
delta = None
else:
# We just got a new timestamp in the observation!
self.last_observation_timestamp = metadata['now']
observation_now = metadata['now']
delta = observation_now - metadata['available_at']
# Subtract *local* time it was received from the *remote* time
# displayed. Negate and reverse order to fix time ordering.
info['stats.gauges.diagnostics.lag.observation'] = -(delta + clock_skew)[[1, 0]]
# if self.network is None:
# # The rest of diagnostics need the network, so we're done here
# return
probe_received_at = metadata['probe_received_at']
if probe_received_at == 0 or self.disable_action_probes:
# Happens when the env first starts
self.probe_received_at = None
elif self.probe_received_at is None: # this also would work for the equality case
self.probe_received_at = probe_received_at
elif self.probe_received_at != probe_received_at and self.probe_sent_at is None:
logger.info('[%s] Probe is marked as received at %s, but probe_sent_at is None. This is surprising. (HINT: do you have multiple universe instances talking to the same environment?)', self.label, probe_received_at)
elif self.probe_received_at != probe_received_at:
extra_logger.debug('[%s] Next probe received: old=%s new=%s', self.label, self.probe_received_at, probe_received_at)
self.probe_received_at = probe_received_at
# Subtract the *local* time we sent it from the *remote* time it was received
self.action_latency_skewed = probe_received_at - self.probe_sent_at
self.probe_sent_at = None
if self.action_latency_skewed:
action_lag = self.action_latency_skewed + clock_skew
self.action_latency_skewed = None
else:
action_lag = None
info['stats.gauges.diagnostics.lag.action'] = action_lag
local_now = time.time()
# Look at when the remote believed it parsed the score (not
# all envs send this currently).
#
        # Also, if we received no new rewards, then this value is
# None. This could indicate a high reward latency (bad,
# uncommon), or that the agent is calling step faster than new
# rewards are coming in (good, common).
remote_score_now = info.get('rewarder.lag.observation.timestamp')
if remote_score_now is not None:
delta = remote_score_now - local_now
info['stats.gauges.diagnostics.lag.reward'] = -(delta + clock_skew)[[1, 0]]
        # Look at when the remote sent the message, so we know how
# long it's taking for messages to get to us.
rewarder_message_now = info.get('reward_buffer.remote_time')
if rewarder_message_now:
delta = rewarder_message_now - local_now
info['stats.gauges.diagnostics.lag.rewarder_message'] = -(delta + clock_skew)[[1, 0]]
def extract_n_m(dict_n_m, key):
output = []
for dict_n in dict_n_m:
layer = []
for dict in dict_n:
layer.append(dict[key])
output.append(layer)
return np.array(output)
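# e.g. extract_n_m([[{'a': 1}, {'a': 2}], [{'a': 3}, {'a': 4}]], 'a')
# returns array([[1, 2], [3, 4]]).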
# class ChromeProcessInfo(object):
# proc_regex = re.compile('.*(chrome|Chrome|nacl_helper).*')
# def add_system_stats(self, info, now):
# """TODO: This needs be moved to universe-envs and run there. Otherwise it only works if the env and agent
# are on the same machine. In addition a new rpc call, rpc.env.diagnostics, should be added to return
# data to the agent periodically.
# """
# start = time.time()
# # CPU
# cpu_percent = psutil.cpu_percent()
# info['diagnostics.env.cpu.percent'] = cpu_percent
# cpu_cores_percent = psutil.cpu_percent(percpu=True)
# num_cores = len(cpu_cores_percent)
# info['diagnostics.env.cpu.percent.all_cores'] = cpu_percent / num_cores
# info['diagnostics.env.cpu.percent.each_core'] = cpu_cores_percent
# info['diagnostics.env.cpu.num_cores'] = num_cores
# # MEMORY
# mem = psutil.virtual_memory()
# info['diagnostics.env.memory.percent'] = mem.percent
# info['diagnostics.env.memory.total'] = mem.total
# info['diagnostics.env.memory.available'] = mem.available
# # NETWORK
# if self.last_measured_at is not None:
# elapsed_ms = (now - self.last_measured_at) * 1000.
# current = psutil.net_io_counters()
# dl = (current.bytes_recv - self.system_network_counters.bytes_recv) / elapsed_ms
# ul = (current.bytes_sent - self.system_network_counters.bytes_sent) / elapsed_ms
# info['diagnostics.env.network.download_bytes_ps'] = dl * 1000.
# info['diagnostics.env.network.upload_bytes_ps'] = ul * 1000.
# self.system_network_counters = current
# # CHROME
# if self.chrome_last_measured_at is None or (time.time() - self.chrome_last_measured_at) > 30:
# # Fetch every 30 seconds
# self.chrome_last_measured_at = time.time()
# logger.info("Measuring Chrome process statistics")
# chrome_info = ChromeProcessInfo()
# chrome_info = best_effort(chrome_info.fetch, num_cores)
# if chrome_info is not None:
# self.chrome_info = chrome_info
# if self.chrome_info is not None:
# self._populate_chrome_info(self.chrome_info, info)
# # TODO: Add GPU stats
# pyprofile.push('diagnostics.system_stats')
# def _populate_chrome_info(self, chrome_info, info):
# pyprofile.push('diagnostics.chrome_process_info.process_iter')
# pyprofile.push('diagnostics.chrome_process_info.total')
# info['diagnostics.chrome.age'] = chrome_info.age
# info['diagnostics.chrome.cpu.time'] = chrome_info.cpu_time
# info['diagnostics.chrome.cpu.percent'] = chrome_info.cpu_percent
# info['diagnostics.chrome.cpu.percent.all_cores'] = chrome_info.cpu_percent_all_cores
# info['diagnostics.chrome.cpu.percent.all_cores_all_time'] = chrome_info.cpu_percent_all_cores_all_time
# info['diagnostics.chrome.num_processes'] = len(chrome_info.processes)
# def __init__(self):
# self.cpu_time = 0.
# self.cpu_percent = 0.
# self.min_create_time = None
# self.visited_pids = set()
# self.processes = []
# self.time_to_get_procs = None
# self.total_time_to_measure = None
# self.age = None
# self.cpu_percent_all_cores_all_time = None
# self.cpu_percent_all_cores = None
# def fetch(self, num_cores):
# start = time.time()
# start_process_iter = time.time()
# procs = list(psutil.process_iter())
# self.time_to_get_procs = time.time() - start_process_iter
# for proc in procs:
# try:
# name = proc.name()
# if self.proc_regex.match(name):
# self._fetch_single(proc, name)
# # N.B. Don't read children. defunct processes make this take 4ever.
# # Child processes are all uncovered by initial scan.
# except (psutil.AccessDenied, psutil.NoSuchProcess) as e:
# pass
# self.total_time_to_measure = time.time() - start
# if self.min_create_time is None:
# self.age = 0
# else:
# self.age = time.time() - self.min_create_time
# self.cpu_percent_all_cores_all_time = 100. * self.cpu_time / (self.age * num_cores)
# self.cpu_percent_all_cores = self.cpu_percent / num_cores
# return self
# def _fetch_single(self, proc, name):
# if proc.pid in self.visited_pids:
# return
# try:
# cpu_times = proc.cpu_times()
# cpu_percent = proc.cpu_percent()
# created = proc.create_time()
# if self.min_create_time is None:
# self.min_create_time = created
# else:
# self.min_create_time = min(created, self.min_create_time)
# cpu_time = cpu_times.user + cpu_times.system
# proc_info = namedtuple('proc_info', 'name cpu_time cpu_percent created age')
# proc_info.name = name
# proc_info.cpu_time = cpu_time
# proc_info.cpu_percent = cpu_percent
# proc_info.created = created
# proc_info.age = time.time() - created
# proc_info.pid = proc.pid
# self.processes.append(proc_info)
# # Totals
# self.cpu_time += cpu_time
# self.cpu_percent += cpu_percent
# self.visited_pids.add(proc.pid)
# except (psutil.AccessDenied, psutil.NoSuchProcess) as e:
# pass
| mit | 3,990,940,570,749,999,000 | 40.081494 | 229 | 0.598008 | false |
ujjwalgulecha/AdventOfCode	2015/Day_18/Part_1.py	1	1072	from copy import deepcopy
lights = []
with open("Day_18.input") as f:
for line in f:
data = line.strip()
light_temp = list(data)
lights.append(light_temp)
def getNeighbours(j, k, lights):
dx = [0, 1, -1]
dy = [0, 1, -1]
neighbours = 0
for x in dx:
for y in dy:
if j+x >= 0 and j+x < len(lights[0]) and k+y >= 0 and k+y < len(lights):
if lights[j+x][k+y] == on and (x or y):
neighbours+=1
return neighbours
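# Illustrative check (not part of the original solution): in the 3x3 grid
#   .#.
#   ###
#   .#.
# the centre light has four lit neighbours, so getNeighbours(1, 1, grid)
# would return 4. Note the bounds checks mix len(lights) and
# len(lights[0]); that is only safe because the puzzle grid is square.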
def number_of_lights_on():
return sum(1 for i in xrange(len(lights[0])) for j in xrange(len(lights)) if lights[i][j] == on)
iterations = 100
on = '#'
off = '.'
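# Summary of the update rules implemented below: a light that is off turns
# on iff exactly 3 of its neighbours are on; a light that is on stays on
# iff 2 or 3 neighbours are on; every other light ends up off.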
for i in xrange(iterations):
temp_lights = deepcopy(lights)
for j in xrange(len(lights[0])):
for k in xrange(len(lights)):
neighbours = getNeighbours(j, k, lights)
if lights[j][k] == off:
if neighbours == 3:
temp_lights[j][k] = on
else:
temp_lights[j][k] = off
else:
if neighbours == 2 or neighbours == 3:
temp_lights[j][k] = on
else:
temp_lights[j][k] = off
lights = deepcopy(temp_lights)
print number_of_lights_on() | mit | -6,348,467,895,201,080,000 | 22.844444 | 97 | 0.608209 | false |
yephper/django	tests/postgres_tests/test_array.py	1	53590	import decimal
import json
import unittest
import uuid
from django import forms
from django.core import exceptions, serializers, validators
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.test import TransactionTestCase, override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import (
ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel,
NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel,
PostgreSQLModel, Tag,
)
try:
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.forms import SimpleArrayField, SplitArrayField
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=['hello', 'goodbye'])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=['1'])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertEqual(loaded.field, None)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=[Tag(1), Tag(2), Tag(3)],
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.tags, loaded.tags)
def test_null_from_db_value_handling(self):
instance = OtherTypesArrayModel.objects.create(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=None,
)
instance.refresh_from_db()
self.assertIsNone(instance.tags)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field('field')
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
class TestQuerying(PostgreSQLTestCase):
def setUp(self):
self.objs = [
NullableIntegerArrayModel.objects.create(field=[1]),
NullableIntegerArrayModel.objects.create(field=[2]),
NullableIntegerArrayModel.objects.create(field=[2, 3]),
NullableIntegerArrayModel.objects.create(field=[20, 30, 40]),
NullableIntegerArrayModel.objects.create(field=None),
]
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]),
self.objs[:1]
)
def test_exact_charfield(self):
instance = CharArrayModel.objects.create(field=['text'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field=['text']),
[instance]
)
def test_exact_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]),
[instance]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True),
self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]),
self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]),
self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2]
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2]
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=['text']),
[]
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=['text']),
[]
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=['text']),
[]
)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2),
self.objs[1:3]
)
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3),
self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1),
[instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
[instance]
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3]
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2),
self.objs[0:3]
)
def test_len_empty_array(self):
obj = NullableIntegerArrayModel.objects.create(field=[])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len=0),
[obj]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
self.objs[2:3]
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
[instance]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]]
)
class TestDateTimeExactQuerying(PostgreSQLTestCase):
def setUp(self):
now = timezone.now()
self.datetimes = [now]
self.dates = [now.date()]
self.times = [now.time()]
self.objs = [
DateTimeArrayModel.objects.create(
datetimes=self.datetimes,
dates=self.dates,
times=self.times,
)
]
def test_exact_datetimes(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(datetimes=self.datetimes),
self.objs
)
def test_exact_dates(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(dates=self.dates),
self.objs
)
def test_exact_times(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(times=self.times),
self.objs
)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
def setUp(self):
self.ips = ['192.168.0.1', '::1']
self.uuids = [uuid.uuid4()]
self.decimals = [decimal.Decimal(1.25), 1.75]
self.tags = [Tag(1), Tag(2), Tag(3)]
self.objs = [
OtherTypesArrayModel.objects.create(
ips=self.ips,
uuids=self.uuids,
decimals=self.decimals,
tags=self.tags,
)
]
def test_exact_ip_addresses(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(ips=self.ips),
self.objs
)
def test_exact_uuids(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(uuids=self.uuids),
self.objs
)
def test_exact_decimals(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(decimals=self.decimals),
self.objs
)
def test_exact_tags(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(tags=self.tags),
self.objs
)
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField())
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, 'postgres.E002')
def test_nested_field_checks(self):
"""
Nested ArrayFields are permitted.
"""
class MyModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.CharField()))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
available_apps = ['postgres_tests']
def test_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(type(new.base_field), type(field.base_field))
def test_deconstruct_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.size, field.size)
def test_deconstruct_args(self):
field = ArrayField(models.CharField(max_length=20))
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.base_field.max_length, field.base_field.max_length)
def test_subclass_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')
field = ArrayFieldSubclass()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_default_migrations",
})
def test_adding_field_with_default(self):
# See #22962
table_name = 'postgres_tests_integerarraydefaultmodel'
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
self.assertIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_index_migrations",
})
def test_adding_arrayfield_with_index(self):
"""
ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
"""
table_name = 'postgres_tests_chartextarrayindexmodel'
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
like_constraint_field_names = [
c.rsplit('_', 2)[0][len(table_name) + 1:]
for c in connection.introspection.get_constraints(cursor, table_name)
if c.endswith('_like')
]
# Only the CharField should have a LIKE index.
self.assertEqual(like_constraint_field_names, ['char2'])
with connection.cursor() as cursor:
indexes = connection.introspection.get_indexes(cursor, table_name)
# All fields should have regular indexes.
self.assertIn('char', indexes)
self.assertIn('char2', indexes)
self.assertIn('text', indexes)
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLTestCase):
test_data = (
'[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
)
def test_dumping(self):
instance = IntegerArrayModel(field=[1, 2, None])
data = serializers.serialize('json', [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, [1, 2, None])
class TestValidation(PostgreSQLTestCase):
def test_unbounded(self):
field = ArrayField(models.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, None], None)
self.assertEqual(cm.exception.code, 'item_invalid')
self.assertEqual(
cm.exception.message % cm.exception.params,
'Item 1 in the array did not validate: This field cannot be null.'
)
def test_blank_true(self):
field = ArrayField(models.IntegerField(blank=True, null=True))
# This should not raise a validation error
field.clean([1, None], None)
def test_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
field.clean([1, 2, 3], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, 2, 3, 4], None)
self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')
def test_nested_array_mismatch(self):
field = ArrayField(ArrayField(models.IntegerField()))
field.clean([[1, 2], [3, 4]], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([[1, 2], [3, 4, 5]], None)
self.assertEqual(cm.exception.code, 'nested_array_mismatch')
self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')
def test_with_base_field_error_params(self):
field = ArrayField(models.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc'], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 0, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
def test_with_validators(self):
field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
field.clean([1, 2], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([0], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 0 in the array did not validate: Ensure this value is greater than or equal to 1.'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 0, 'value': 0, 'limit_value': 1, 'show_value': 0})
class TestSimpleFormField(PostgreSQLTestCase):
def test_valid(self):
field = SimpleArrayField(forms.CharField())
value = field.clean('a,b,c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_to_python_fail(self):
field = SimpleArrayField(forms.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,9')
self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a whole number.')
def test_validate_fail(self):
field = SimpleArrayField(forms.CharField(required=True))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,')
self.assertEqual(cm.exception.messages[0], 'Item 2 in the array did not validate: This field is required.')
def test_validate_fail_base_field_error_params(self):
field = SimpleArrayField(forms.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('abc,c,defg')
errors = cm.exception.error_list
self.assertEqual(len(errors), 2)
first_error = errors[0]
self.assertEqual(
first_error.message,
'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(first_error.code, 'item_invalid')
self.assertEqual(first_error.params, {'nth': 0, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
second_error = errors[1]
self.assertEqual(
second_error.message,
'Item 2 in the array did not validate: Ensure this value has at most 2 characters (it has 4).'
)
self.assertEqual(second_error.code, 'item_invalid')
self.assertEqual(second_error.params, {'nth': 2, 'value': 'defg', 'limit_value': 2, 'show_value': 4})
def test_validators_fail(self):
field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,bc,de')
self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a valid value.')
def test_delimiter(self):
field = SimpleArrayField(forms.CharField(), delimiter='|')
value = field.clean('a|b|c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_delimiter_with_nesting(self):
field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
value = field.clean('a,b|c,d')
self.assertEqual(value, [['a', 'b'], ['c', 'd']])
def test_prepare_value(self):
field = SimpleArrayField(forms.CharField())
value = field.prepare_value(['a', 'b', 'c'])
self.assertEqual(value, 'a,b,c')
def test_max_length(self):
field = SimpleArrayField(forms.CharField(), max_length=2)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')
def test_min_length(self):
field = SimpleArrayField(forms.CharField(), min_length=4)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')
def test_required(self):
field = SimpleArrayField(forms.CharField(), required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('')
self.assertEqual(cm.exception.messages[0], 'This field is required.')
def test_model_field_formfield(self):
model_field = ArrayField(models.CharField(max_length=27))
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertIsInstance(form_field.base_field, forms.CharField)
self.assertEqual(form_field.base_field.max_length, 27)
def test_model_field_formfield_size(self):
model_field = ArrayField(models.CharField(max_length=27), size=4)
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertEqual(form_field.max_length, 4)
class TestSplitFormField(PostgreSQLTestCase):
def test_valid(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})
def test_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), required=True, size=3)
data = {'array_0': '', 'array_1': '', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['This field is required.']})
def test_remove_trailing_nulls(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)
data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid(), form.errors)
self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})
def test_remove_trailing_nulls_not_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(
forms.CharField(required=False),
size=2,
remove_trailing_nulls=True,
required=False,
)
data = {'array_0': '', 'array_1': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': []})
def test_required_field(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['Item 2 in the array did not validate: This field is required.']})
def test_invalid_integer(self):
msg = 'Item 1 in the array did not validate: Ensure this value is less than or equal to 100.'
with self.assertRaisesMessage(exceptions.ValidationError, msg):
SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])
def test_rendering(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
self.assertHTMLEqual(str(SplitForm()), '''
<tr>
<th><label for="id_array_0">Array:</label></th>
<td>
<input id="id_array_0" name="array_0" type="text" />
<input id="id_array_1" name="array_1" type="text" />
<input id="id_array_2" name="array_2" type="text" />
</td>
</tr>
''')
def test_invalid_char_length(self):
field = SplitArrayField(forms.CharField(max_length=2), size=3)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc', 'c', 'defg'])
self.assertEqual(cm.exception.messages, [
'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).',
'Item 2 in the array did not validate: Ensure this value has at most 2 characters (it has 4).',
])
>>>>>>> 6448873197fa4e3df3f5f03201538dc57d7643d6
| bsd-3-clause | -778,933,330,977,688,600 | 36.764454 | 118 | 0.616029 | false |
breedlun/clearplot | doc/source/examples/curve_and_image_sequence/curve_and_image_sequence.py | 1 | 1576 | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 18 15:43:55 2015
@author: Ben
"""
import clearplot.plot_functions as pf
import matplotlib.pyplot
import os
import numpy as np
#Load global response
data_dir = os.path.join(os.path.dirname(pf.__file__), os.pardir, 'doc', \
'source', 'data')
path = os.path.join(data_dir, 's140302C-mechanical_response.csv')
data = np.loadtxt(path, delimiter = ',')
#Specify the indices of the field images to be plotted
ndx_list = [0, 85, 141, 196, 252]
#Specify the column indices to crop the images to
cols = range(470,470+340)
#Load the field images into an image sequence list
im_seq = []
for ndx in ndx_list:
#Load field image
im_filename = 's140302C-eqps_field-frame_%r.png' %(ndx)
im_path = os.path.join(data_dir, 'hi-rez_field_images', im_filename)
im = matplotlib.pyplot.imread(im_path)
#Crop the field image and add to list
im_seq.append(im[:,cols,:])
#Create labels
labels = []
for i in range(1, len(ndx_list) + 1):
labels.append(str(i))
#Plot curve
[fig, ax, curves] = pf.plot('', data[:,0], data[:,1], \
x_label = ['\varepsilon', '\%'], y_label = ['\sigma', 'GPa'])
ax.label_curve(curves[0], labels, ndx = ndx_list, angles = 60)
ax.plot_markers(data[ndx_list,0], data[ndx_list,1], colors = [0,0,0])
fig.save('curve_and_image_sequence-a.png');
#Plot image sequence
[fig, ax, im_obj] = pf.show_im('curve_and_image_sequence-b.png', \
im_seq, scale_im = 0.3, c_label = ['\bar{\varepsilon}^p', '\%'], \
c_lim = [0, 100], c_tick = 25, b_labels = True, im_interp = 'bicubic', \
c_bar = True); | mit | -1,595,391,377,537,425,700 | 34.044444 | 76 | 0.647843 | false |
uw-it-aca/spotseeker_server | spotseeker_server/test/spot_caching.py | 1 | 1190 | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.test import TestCase
from django.core.cache import cache
from spotseeker_server.models import Spot
class SpotCacheTest(TestCase):
def test_spot_caching(self):
spot = Spot.objects.create(name='foo')
spot_id = spot.pk
# Assert that a cache entry is created when we call
# json_data_structure()
js = spot.json_data_structure()
cached_js = cache.get(spot.json_cache_key())
self.assertEqual(js, cached_js)
# Assert that saving the spot removes the cache entry
spot.save()
self.assertNotIn(spot_id, cache)
# Assert that the spot now has a new etag
new_js = spot.json_data_structure()
self.assertNotEqual(js['etag'], new_js['etag'])
self.assertEqual(new_js['etag'], spot.etag)
# Assert the new cache entry reflects the updated etag
new_cached_js = cache.get(spot.json_cache_key())
self.assertEqual(new_js, new_cached_js)
# Assert that deleting the spot removes the cache entry
spot.delete()
self.assertNotIn(spot_id, cache)
| apache-2.0 | -5,387,262,768,862,895,000 | 31.162162 | 63 | 0.652941 | false |
DemocracyClub/yournextrepresentative | ynr/apps/uk_results/tests/test_smoke_test_views.py | 1 | 3868 | from django.test import TestCase
from django.urls import reverse
from django_webtest import WebTest
from candidates.models.popolo_extra import Ballot
from candidates.tests.auth import TestUserMixin
from candidates.tests.factories import MembershipFactory
from candidates.tests.uk_examples import UK2015ExamplesMixin
from people.tests.factories import PersonFactory
from uk_results.models import CandidateResult, ResultSet
class TestUKResults(TestUserMixin, UK2015ExamplesMixin, WebTest, TestCase):
def setUp(self):
super().setUp()
self.ballot = self.local_post.ballot_set.get()
self.ballot.voting_system = Ballot.VOTING_SYSTEM_FPTP
self.ballot.save()
self.result_set = ResultSet.objects.create(
ballot=self.ballot,
num_turnout_reported=10000,
num_spoilt_ballots=30,
user=self.user,
ip_address="127.0.0.1",
source="Example ResultSet for testing",
)
# Create three people:
self.people = [
PersonFactory.create(id=13, name="Alice"),
PersonFactory.create(id=14, name="Bob"),
PersonFactory.create(id=15, name="Carol"),
]
parties = [self.labour_party, self.conservative_party, self.ld_party]
# Create their candidacies:
candidacies = [
MembershipFactory.create(
ballot=self.ballot,
person=person,
post=self.local_post,
party=party,
)
for person, party in zip(self.people, parties)
]
# Create their CandidateResult objects:
votes = [2000, 5000, 3000]
winner = [False, True, False]
self.candidate_results = [
CandidateResult.objects.create(
result_set=self.result_set,
membership=c,
num_ballots=v,
is_winner=w,
)
for c, v, w in zip(candidacies, votes, winner)
]
self.expected = {
"ballot_paper_id": "local.maidstone.DIW:E05005004.2016-05-05",
"created": self.result_set.created.isoformat(),
"candidate_results": [
{
"is_winner": True,
"num_ballots": 5000,
"person_id": 14,
"person_name": "Bob",
},
{
"is_winner": False,
"num_ballots": 3000,
"person_id": 15,
"person_name": "Carol",
},
{
"is_winner": False,
"num_ballots": 2000,
"person_id": 13,
"person_name": "Alice",
},
],
"source": "Example ResultSet for testing",
"spoilt_ballots": 30,
"turnout": 10000,
"user": "john",
}
def test_form_view(self):
url = reverse(
"ballot_paper_results_form",
kwargs={
"ballot_paper_id": "local.maidstone.DIW:E05005004.2016-05-05"
},
)
resp = self.app.get(url, user=self.user_who_can_record_results)
self.assertEqual(resp.status_code, 200)
form = resp.forms[1]
form["memberships_13"] = 345
form.submit()
def test_form_view_cancelled_election(self):
url = reverse(
"ballot_paper_results_form",
kwargs={
"ballot_paper_id": "local.maidstone.DIW:E05005004.2016-05-05"
},
)
self.ballot.cancelled = True
self.ballot.save()
resp = self.app.get(
url, user=self.user_who_can_record_results, expect_errors=True
)
self.assertEqual(resp.status_code, 404)
| agpl-3.0 | -5,650,242,670,049,382,000 | 33.535714 | 77 | 0.526629 | false |
restful-open-annotation/eve-restoa | oaeve.py | 1 | 13383 | #!/usr/bin/env python
"""Open Annotation JSON-LD support functions for Eve."""
__author__ = 'Sampo Pyysalo'
__license__ = 'MIT'
import json
import urlparse
import hashlib
import re
import flask
import mimeparse
import oajson
import seqid
from settings import TARGET_RESOURCE
# whether to expand @id values to absolute URLs
ABSOLUTE_ID_URLS = True
# mapping from Eve JSON keys to JSON-LD ones
jsonld_key_rewrites = [
('_id', '@id'),
]
eve_to_jsonld_key_map = dict(jsonld_key_rewrites)
jsonld_to_eve_key_map = dict([(b,a) for a,b in jsonld_key_rewrites])
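# Illustrative example (assuming oajson.remap_keys rewrites top-level keys):
#   oajson.remap_keys({'_id': '1', 'body': 'b'}, eve_to_jsonld_key_map)
# would yield {'@id': '1', 'body': 'b'}; jsonld_to_eve_key_map undoes it.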
def dump_json(document, prettyprint=True):
if not prettyprint:
return json.dumps(document)
else:
return json.dumps(document, indent=2, sort_keys=True,
separators=(',', ': '))+'\n'
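# For example, dump_json({'b': 1, 'a': 2}) pretty-prints with sorted keys:
#   '{\n  "a": 2,\n  "b": 1\n}\n'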
def setup_callbacks(app):
# annotations
app.on_pre_POST_annotations += convert_incoming_jsonld
app.on_pre_PUT_annotations += convert_incoming_jsonld
app.on_post_GET_annotations += convert_outgoing_jsonld
app.on_post_PUT_annotations += convert_outgoing_jsonld
app.on_post_POST_annotations += convert_outgoing_jsonld
# annotations by document (separate Eve endpoint)
app.on_post_GET_annbydoc += convert_outgoing_jsonld
app.on_post_GET_annbydoc += rewrite_annbydoc_ids
# documents
app.on_post_GET_documents += rewrite_outgoing_document
# TODO: this doesn't seem to be firing, preventing the use of ETag
# in HEAD response to avoid roundtrips.
app.on_post_HEAD_documents += rewrite_outgoing_document
def eve_to_jsonld(document):
document = oajson.remap_keys(document, eve_to_jsonld_key_map)
if ABSOLUTE_ID_URLS:
ids_to_absolute_urls(document)
oajson.add_context(document)
oajson.add_types(document)
remove_meta(document)
remove_status(document)
remove_target_resources(document)
rewrite_links(document)
return document
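# Rough sketch of the overall conversion (illustrative values only; the
# exact '@context' and '@type' output depends on oajson):
#   {'_id': '1', '_links': {...}, '_meta': {...},
#    'target': 'http://ex.org/d#char=0,5'}
# becomes something like
#   {'@id': 'http://host/annotations/1', '@context': '<OA context URL>',
#    '@type': 'oa:Annotation', 'target': 'http://ex.org/d#char=0,5'}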
def eve_from_jsonld(document):
document = oajson.remap_keys(document, jsonld_to_eve_key_map)
# TODO: invert ids_to_absolute_urls() here
oajson.normalize(document)
oajson.remove_context(document)
oajson.remove_types(document)
add_target_resources(document)
return document
def add_target_resources(document):
"""Add fragmentless target URL values to make search easier."""
if oajson.is_collection(document):
for item in document.get(oajson.ITEMS, []):
add_target_resources(item)
else:
target = document.get('target')
if target is None:
return
assert TARGET_RESOURCE not in document
# TODO: support multiple and structured targets
if not isinstance(target, basestring):
raise NotImplementedError('multiple/structured targets')
document[TARGET_RESOURCE] = urlparse.urldefrag(target)[0]
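# Example (illustrative): a target of 'http://ex.org/doc.txt#char=0,5'
# yields document[TARGET_RESOURCE] == 'http://ex.org/doc.txt', i.e. the
# target URL with its fragment stripped by urldefrag().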
def remove_target_resources(document):
"""Remove fragmentless target URL values added to make search easier."""
if oajson.is_collection(document):
for item in document.get(oajson.ITEMS, []):
remove_target_resources(item)
else:
try:
del document[TARGET_RESOURCE]
except KeyError:
pass
def is_jsonld_response(response):
"""Return True if the given Response object should be treated as
JSON-LD, False otherwise."""
# TODO: reconsider "application/json" here
return response.mimetype in ['application/json', 'application/ld+json']
def convert_outgoing_jsonld(request, payload):
"""Event hook to run after executing a GET method.
Converts Eve payloads that should be interpreted as JSON-LD into
the Open Annotation JSON-LD representation.
"""
if not is_jsonld_response(payload):
return
doc = json.loads(payload.get_data())
jsonld_doc = eve_to_jsonld(doc)
payload.set_data(dump_json(jsonld_doc))
def _collection_ids_to_absolute_urls(document):
"""Rewrite @id values from relative to absolute URL form for collection."""
base = flask.request.base_url
# Eve responds to both "collection" and "collection/" variants
# of the same endpoint, but the join only works for the latter.
# We have to make sure the separator is present in the base.
if base and base[-1] != '/':
base = base + '/'
for item in document.get(oajson.ITEMS, []):
_item_ids_to_absolute_urls(item)
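# The trailing-slash fix above matters because of how urljoin resolves
# relative references:
#   urlparse.urljoin('http://host/annotations', '1')  -> 'http://host/1'
#   urlparse.urljoin('http://host/annotations/', '1') -> 'http://host/annotations/1'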
def _item_ids_to_absolute_urls(document, base=None):
"""Rewrite @id values from relative to absolute URL form for item."""
if base is None:
base = flask.request.base_url
try:
id_ = document['@id']
document['@id'] = urlparse.urljoin(base, id_)
    except KeyError:
print 'Warning: no @id: %s' % str(document)
def ids_to_absolute_urls(document):
"""Rewrite @id value from relative to absolute URL form."""
if oajson.is_collection(document):
return _collection_ids_to_absolute_urls(document)
else:
return _item_ids_to_absolute_urls(document)
def remove_meta(document):
"""Remove Eve pagination meta-information ("_meta") if present."""
try:
del document['_meta']
except KeyError:
pass
def remove_status(document):
"""Remove Eve status information ("_status") if present."""
try:
del document['_status']
except KeyError:
pass
def _rewrite_collection_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD for a collection.
Also rewrites links for items in the collection."""
links = document.get('_links')
assert links is not None, 'internal error'
# Eve generates RFC 5988 link relations ("next", "prev", etc.)
# for collections when appropriate. Move these to the collection
# level.
for key in ['start', 'last', 'next', 'prev', 'previous']:
if key not in links:
pass
elif 'href' not in links[key]:
print 'Warning: no href in Eve _links[%s]' % key
else:
assert key not in document, \
'Error: redundant %s links: %s' % (key, str(document))
# fill in relative links (e.g. "people?page=2")
url = links[key]['href']
url = urlparse.urljoin(flask.request.url_root, url)
# TODO: don't assume the RESTful OA keys match Eve ones. In
# particular, consider normalizing 'prev' vs. 'previous'.
document[key] = url
# Others assumed to be redundant with JSON-LD information and safe
# to delete.
del document['_links']
# Process _links in collection items. (At the moment, just
# delete them.)
for item in document.get(oajson.ITEMS, []):
try:
del item['_links']
except KeyError:
pass
return document
def _rewrite_item_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD for non-collection."""
links = document.get('_links')
assert links is not None, 'internal error'
    # Eve is expected to provide "collection" as a reference back to
# the collection of which the item is a member. We'll move this to
# the item level with the collection link relation (RFC 6573)
if 'collection' not in links or 'href' not in links['collection']:
print 'Warning: no collection in Eve _links.' # TODO use logging
else:
assert oajson.COLLECTION_KEY not in document, \
'Error: redundant collection links: %s' % str(document)
document[oajson.COLLECTION_KEY] = links['collection']['href']
    # Eve also generates a "self" link, which is redundant with
# JSON-LD "@id", and "parent", which is not defined in the RESTful
# OA spec. These can simply be removed.
del document['_links']
return document
def rewrite_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD."""
# HATEOAS is expected but not required, so _links may be absent.
    if '_links' not in document:
print "Warning: no _links in Eve document." # TODO use logging
return document
if oajson.is_collection(document):
return _rewrite_collection_links(document)
else:
return _rewrite_item_links(document)
def is_jsonld_request(request):
"""Return True if the given Request object should be treated as
JSON-LD, False otherwise."""
content_type = request.headers['Content-Type'].split(';')[0]
# TODO: reconsider "application/json" here
return content_type in ['application/json', 'application/ld+json']
def rewrite_content_type(request):
"""Rewrite JSON-LD content type to assure compatibility with Eve."""
if request.headers['Content-Type'].split(';')[0] != 'application/ld+json':
return # OK
# Eve doesn't currently support application/ld+json, so we'll
# pretend it's just json by changing the content-type header.
# werkzeug EnvironHeaders objects are immutable and disallow
# copy(), so hack around that. (This is probably a bad idea.)
headers = { key: value for key, value in request.headers }
parts = headers['Content-Type'].split(';')
if parts[0] == 'application/ld+json':
parts[0] = 'application/json'
headers['Content-Type'] = ';'.join(parts)
request.headers = headers
def _is_create_annotation_request(document, request):
# TODO: better logic for deciding if a document is an annotation.
return (request.method == 'POST' and
(request.url.endswith('/annotations') or
request.url.endswith('/annotations/')))
def add_new_annotation_id(document, request):
"""Add IDs for annotation documents when necessary."""
if _is_create_annotation_request(document, request):
# Creating new annotation; fill ID if one is not provided.
if '_id' not in document:
document['_id'] = str(seqid.next_id())
return document
def convert_incoming_jsonld(request, lookup=None):
# force=True because older versions of flask don't recognize the
# content type application/ld+json as JSON.
doc = request.get_json(force=True)
assert doc is not None, 'get_json() failed for %s' % request.mimetype
# NOTE: it's important that the following are in-place
# modifications of the JSON dict, as assigning to request.data
# doesn't alter the JSON (it's cached) and there is no set_json().
doc = eve_from_jsonld(doc)
# If the request is a post and no ID is provided, make one
doc = add_new_annotation_id(doc, request)
# Also, we'll need to avoid application/ld+json.
rewrite_content_type(request)
def accepts_mimetype(request, mimetype):
"""Return True if requests accepts mimetype, False otherwise."""
accepted = request.headers.get('Accept')
return mimeparse.best_match([mimetype], accepted) == mimetype
def is_document_collection_request(request):
parsed = urlparse.urlparse(request.url)
return parsed.path in ('/documents', '/documents/')
def text_etag(text):
return hashlib.sha1(text.encode('utf-8')).hexdigest()
def rewrite_outgoing_document_collection(request, payload):
collection = json.loads(payload.get_data())
for document in collection.get(oajson.ITEMS, []):
# Only include the bare minimum in collection-level requests
id_, modified = document['name'], document['serializedAt']
document.clear()
document['@id'], document['serializedAt'] = id_, modified
collection = eve_to_jsonld(collection)
payload.set_data(dump_json(collection))
def rewrite_outgoing_document(request, payload):
if not is_jsonld_response(payload):
pass # Can only rewrite JSON
elif is_document_collection_request(request):
rewrite_outgoing_document_collection(request, payload)
elif not accepts_mimetype(request, 'text/plain'):
pass # Just return whatever is prepared
else:
# Return the text of the document as text/plain
doc = json.loads(payload.get_data())
try:
text = doc['text']
        except KeyError:
text = 'Error: failed to load text: %s' % dump_json(doc)
payload.set_data(text)
payload.headers['Content-Type'] = 'text/plain; charset=utf-8'
payload.headers['ETag'] = text_etag(text)
def _rewrite_annbydoc_collection_ids(collection):
for item in collection.get(oajson.ITEMS, []):
_rewrite_annbydoc_item_id(item)
def _rewrite_annbydoc_item_id(document):
id_ = document['@id']
parts = urlparse.urlparse(id_)
m = re.match(r'^.*(/annotations/[^\/]+)$', parts.path)
if not m:
# TODO
print 'failed to rewrite ann-by-doc id %s' % id_
return
new_path = m.group(1)
rewritten = urlparse.urlunparse((parts.scheme, parts.netloc, new_path,
parts.params, parts.query, parts.fragment))
document['@id'] = rewritten
def rewrite_annbydoc_ids(request, payload):
"""Event hook to run after GET on annotations-by-document endpoint.
Removes extra "/documents/.../" from @id values. For example, an
@id of "http://ex.org/documents/1.txt/annotations/1" would be
rewritten as "http://ex.org/annotations/1".
"""
if not is_jsonld_response(payload):
return
doc = json.loads(payload.get_data())
if oajson.is_collection(doc):
_rewrite_annbydoc_collection_ids(doc)
else:
_rewrite_annbydoc_item_id(doc)
payload.set_data(dump_json(doc))
| mit | 7,361,483,527,524,593,000 | 36.805085 | 80 | 0.661063 | false |
thesgc/chembiohub_ws | chembl_business_model/models/mechanismAnnotation.py | 1 | 1567 | __author__ = 'mnowotka'
import chembl_core_model.models as core
#-----------------------------------------------------------------------------------------------------------------------
class PredictedBindingDomains(core.PredictedBindingDomains):
#api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class LigandEff(core.LigandEff):
#haystack_index = ['bei', 'sei']
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class ActionType(core.ActionType):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class DrugMechanism(core.DrugMechanism):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class MechanismRefs(core.MechanismRefs):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#----------------------------------------------------------------------------------------------------------------------- | gpl-3.0 | 8,144,624,463,717,007,000 | 28.584906 | 120 | 0.333121 | false |
mathiasmch/Krypton | lib/basisset.py | 1 | 4258 | #! /usr/bin/env python3.4
# -*- coding:utf-8 -*-
#
# Krypton - A little tool for GAMESS (US) users
#
# Copyright (C) 2012-20.. Mathias M.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os import listdir, path, makedirs
from lib.config import *
from lib.parser import extract_basis_set, extract_ECPs
################################################################################
def add_basis_set(bsid, basis_set_file):
"""
Add the basis set of a basis set file from the BSE portal into the folder
used as database.
bsid : ID to use for the basis set (STO-3G, 6-31G, etc.)
basis_set_file : GAMESS (US) input file from the BSE portal
"""
basis_set = extract_basis_set(basis_set_file)
ECPs = extract_ECPs(basis_set_file)
elements = list()
if bsid in listdir(DB):
elements = get_elements(bsid)
else:
makedirs(DB+"/"+bsid)
for element, coeffs in basis_set.items():
if element not in elements:
with open(DB+"/"+bsid+"/"+element+".txt", "w") as f:
for coeff in coeffs:
f.write(coeff+"\n")
if ECPs:
if "ECP" not in listdir(DB+"/"+bsid):
makedirs(DB+"/"+bsid+"/ECP")
elements = get_elements(bsid, True)
for element, coeffs in ECPs.items():
if element not in elements:
with open(DB+"/"+bsid+"/ECP/"+element+".txt", "w") as f:
for coeff in coeffs:
f.write(coeff+"\n")
################################################################################
def load_basis_set(bsid):
"""
Extract the basis set from the database.
bsid : ID of the basis set
return : dictionary = list of strings for each atom
example: {'H':['S 3','1 3.425 0.154','2 0.623 0.535'], 'C': ...}
"""
basis_set = dict()
if not path.isdir(DB):
raise Exception("ERROR: There is no database.")
if bsid not in listdir(DB):
raise Exception("ERROR: Basis set "+bsid+" does not exist.")
for element_file in listdir(DB+"/"+bsid):
if element_file != "ECP":
element = element_file.split(".")[0]
with open(DB+"/"+bsid+"/"+element_file) as f:
basis_set[element] = []
for line in f:
basis_set[element].append(line.rstrip())
return basis_set
################################################################################
def get_elements(bsid, ECP=False):
"""
Return the elements available in the database for the basis set bsid.
    bsid : ID of the basis set
    ECP : if True, list the elements that have ECP data instead
    return : list of elements
"""
elements = list()
if bsid not in listdir(DB):
raise Exception("ERROR: Basis set "+bsid+" does not exist.")
    bs_dir = DB+"/"+bsid
    if ECP:
        bs_dir += "/ECP"
    for element in listdir(bs_dir):
if element.endswith(".txt"):
elements.append(element.split(".")[0])
return elements
################################################################################
def list_basis_sets():
"""
Print the available basis sets in the database and their atoms.
"""
if not path.isdir(DB):
raise Exception("ERROR: There is no database.")
for bsid in listdir(DB):
line = bsid + " : "
for elements in get_elements(bsid):
line += elements
line += " "
if "ECP" in listdir(DB+"/"+bsid):
line += "(ECP :"
ECPs = get_elements(bsid, True)
for ECP in ECPs:
line += " "
line += ECP
line += ")"
print(line)
| gpl-3.0 | 104,122,611,344,023,140 | 28.365517 | 80 | 0.536637 | false |
reinforceio/tensorforce | tensorforce/core/parameters/exponential.py | 1 | 2429 | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import tf_util
from tensorforce.core.parameters import Decaying
class Exponential(Decaying):
"""
Exponentially decaying hyperparameter (specification key: `exponential`).
Args:
unit ("timesteps" | "episodes" | "updates"): Unit of decay schedule
(<span style="color:#C00000"><b>required</b></span>).
num_steps (int): Number of decay steps
(<span style="color:#C00000"><b>required</b></span>).
initial_value (float): Initial value
(<span style="color:#C00000"><b>required</b></span>).
decay_rate (float): Decay rate
(<span style="color:#C00000"><b>required</b></span>).
staircase (bool): Whether to apply decay in a discrete staircase, as opposed to continuous,
fashion (<span style="color:#00C000"><b>default</b></span>: false).
name (string): <span style="color:#0000C0"><b>internal use</b></span>.
dtype (type): <span style="color:#0000C0"><b>internal use</b></span>.
min_value (dtype-compatible value): <span style="color:#0000C0"><b>internal use</b></span>.
max_value (dtype-compatible value): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, unit, num_steps, initial_value, decay_rate, staircase=False, name=None, dtype=None,
min_value=None, max_value=None, **kwargs
):
super().__init__(
decay='exponential', unit=unit, num_steps=num_steps, initial_value=initial_value,
name=name, dtype=dtype, min_value=min_value, max_value=max_value, decay_rate=decay_rate,
staircase=staircase, **kwargs
)
| apache-2.0 | -670,260,956,132,704,100 | 45.711538 | 100 | 0.641005 | false |
vlegoff/tsunami | src/test/primaires/connex/fausse_instance_connexion.py | 1 | 2269 | # -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Module contenant une fausse instance de connexion (modk)."""
from primaires.connex.instance_connexion import InstanceConnexion
class FausseInstanceConnexion(InstanceConnexion):
"""Classe représentant une fausse instance de connexion.
Cette instance de connexion n'envoie pas les données sur le
réseau mais les conserve. Cela permet de tester le retour de
certaines commandes (ou actions) sans avoir besoin de créer une
connexion réseau.
"""
def __init__(self):
self.messages = []
InstanceConnexion.__init__(self, None, creer_contexte=True)
def envoyer(self, msg, nl=2):
"""Conserve le message."""
self.messages.append(msg)
| bsd-3-clause | 7,651,180,969,544,803,000 | 42.538462 | 79 | 0.756625 | false |
artursmet/django-payments | setup.py | 1 | 2256 | #!/usr/bin/env python
from setuptools import setup
from setuptools.command.test import test as TestCommand
import os
import sys
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_settings')
PACKAGES = [
'payments',
'payments.authorizenet',
'payments.braintree',
'payments.cybersource',
'payments.dummy',
'payments.dotpay',
'payments.paypal',
'payments.sagepay',
'payments.sofort',
'payments.stripe',
'payments.wallet']
REQUIREMENTS = [
'braintree>=3.14.0',
'Django>=1.5',
'pycryptodome>=3.3.1',
'PyJWT>=1.3.0',
'requests>=1.2.0',
'stripe>=1.9.8',
'suds-jurko>=0.6',
'xmltodict>=0.9.2']
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
test_args = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='django-payments',
author='Mirumee Software',
author_email='[email protected]',
description='Universal payment handling for Django',
version='0.8.0',
url='http://github.com/mirumee/django-payments',
packages=PACKAGES,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=REQUIREMENTS,
cmdclass={
'test': PyTest},
tests_require=[
'mock',
'pytest',
'pytest-django'],
zip_safe=False)
| bsd-3-clause | 6,459,327,838,618,010,000 | 26.851852 | 79 | 0.615248 | false |
sdgdsffdsfff/jumpserver | apps/assets/serializers/node.py | 1 | 1482 | # -*- coding: utf-8 -*-
from rest_framework import serializers
from django.utils.translation import ugettext as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ..models import Asset, Node
__all__ = [
'NodeSerializer', "NodeAddChildrenSerializer",
"NodeAssetsSerializer",
]
class NodeSerializer(BulkOrgResourceModelSerializer):
name = serializers.ReadOnlyField(source='value')
value = serializers.CharField(
required=False, allow_blank=True, allow_null=True, label=_("value")
)
class Meta:
model = Node
only_fields = ['id', 'key', 'value', 'org_id']
fields = only_fields + ['name', 'full_value']
read_only_fields = ['key', 'org_id']
def validate_value(self, data):
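        # Reject values that duplicate a sibling node's name: compare
        # against the node's siblings when updating, and against the org
        # root's children when creating.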
if self.instance:
instance = self.instance
siblings = instance.get_siblings()
else:
instance = Node.org_root()
siblings = instance.get_children()
if siblings.filter(value=data):
raise serializers.ValidationError(
_('The same level node name cannot be the same')
)
return data
class NodeAssetsSerializer(BulkOrgResourceModelSerializer):
assets = serializers.PrimaryKeyRelatedField(
many=True, queryset=Asset.objects
)
class Meta:
model = Node
fields = ['assets']
class NodeAddChildrenSerializer(serializers.Serializer):
nodes = serializers.ListField()
| gpl-2.0 | -4,152,244,283,092,600,300 | 26.962264 | 75 | 0.644399 | false |
SINGROUP/pycp2k | pycp2k/classes/_restart_averages1.py | 1 | 1738 | from pycp2k.inputsection import InputSection
class _restart_averages1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Itimes_start = None
self.Avecpu = None
self.Avehugoniot = None
self.Avetemp_baro = None
self.Avepot = None
self.Avekin = None
self.Avetemp = None
self.Avekin_qm = None
self.Avetemp_qm = None
self.Avevol = None
self.Avecell_a = None
self.Avecell_b = None
self.Avecell_c = None
self.Avealpha = None
self.Avebeta = None
self.Avegamma = None
self.Ave_econs = None
self.Ave_press = None
self.Ave_pxx = None
self.Ave_pv_vir = None
self.Ave_pv_tot = None
self.Ave_pv_kin = None
self.Ave_pv_cnstr = None
self.Ave_pv_xc = None
self.Ave_pv_fock_4c = None
self.Ave_colvars = None
self.Ave_mmatrix = None
self._name = "RESTART_AVERAGES"
self._keywords = {'Avehugoniot': 'AVEHUGONIOT', 'Ave_pv_kin': 'AVE_PV_KIN', 'Avepot': 'AVEPOT', 'Ave_pv_cnstr': 'AVE_PV_CNSTR', 'Avetemp_baro': 'AVETEMP_BARO', 'Avekin': 'AVEKIN', 'Ave_pv_xc': 'AVE_PV_XC', 'Avebeta': 'AVEBETA', 'Avealpha': 'AVEALPHA', 'Ave_pxx': 'AVE_PXX', 'Ave_press': 'AVE_PRESS', 'Ave_econs': 'AVE_ECONS', 'Ave_pv_fock_4c': 'AVE_PV_FOCK_4C', 'Ave_colvars': 'AVE_COLVARS', 'Ave_mmatrix': 'AVE_MMATRIX', 'Ave_pv_vir': 'AVE_PV_VIR', 'Avecell_c': 'AVECELL_C', 'Avegamma': 'AVEGAMMA', 'Avecell_a': 'AVECELL_A', 'Avekin_qm': 'AVEKIN_QM', 'Avevol': 'AVEVOL', 'Avecell_b': 'AVECELL_B', 'Itimes_start': 'ITIMES_START', 'Avetemp': 'AVETEMP', 'Avecpu': 'AVECPU', 'Avetemp_qm': 'AVETEMP_QM', 'Ave_pv_tot': 'AVE_PV_TOT'}
| lgpl-3.0 | 1,865,731,093,727,188,500 | 47.277778 | 735 | 0.587457 | false |
clarkzjw/COMM2TG | bot.py | 1 | 7355 | # -*- coding: utf-8 -*-
import os
import re
import sys
import time
import json
import ingrex
import logging
import inspect
import telegram
from selenium import webdriver
from pymongo import MongoClient
bot = None
BOT_TOKEN = ''
CHANNEL_NAME = ''
Email = ''
Passwd = ''
PhantomJSPath = ''
DBName = ''
DBUser = ''
DBPass = ''
DBHost = ''
BlockList = ''
LOG_FILENAME = 'voh.log'
TIME_ZONE='Asia/Shanghai'
minLngE6 = 0
minLatE6 = 0
maxLngE6 = 0
maxLatE6 = 0
class CookieException(Exception):
"""Intel Error"""
pass
def get_time():
return time.strftime('%x %X %Z')
def read_config():
global Email
global Passwd
global BOT_TOKEN
global CHANNEL_NAME
global PhantomJSPath
global DBName
global DBUser
global DBPass
global DBHost
global BlockList
global LOG_FILENAME
global minLngE6
global minLatE6
global maxLngE6
global maxLatE6
configfile = open("./config.json")
config = json.load(configfile)
Email = config["Email"]
Passwd = config["Passwd"]
BOT_TOKEN = config["BOT_TOKEN"]
CHANNEL_NAME = config["CHANNEL_NAME"]
PhantomJSPath = config["PhantomJSPath"]
DBName = config["DBName"]
DBUser = config["DBUser"]
DBPass = config["DBPass"]
DBHost = config["DBHost"]
BlockList = config["BlockList"]
minLngE6 = config["minLngE6"]
minLatE6 = config["minLatE6"]
maxLngE6 = config["maxLngE6"]
maxLatE6 = config["maxLatE6"]
os.environ['TZ'] = TIME_ZONE
time.tzset()
logging.basicConfig(level=logging.DEBUG,
filename=LOG_FILENAME,
filemode='w')
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-8s: %(levelname)-4s %(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
def fetch_cookie():
logger = logging.getLogger('fetch_cookie')
logger.info(get_time() + ': Fetching Cookie...')
driver = webdriver.PhantomJS(PhantomJSPath)
driver.get('https://www.ingress.com/intel')
# get login page
link = driver.find_elements_by_tag_name('a')[0].get_attribute('href')
driver.get(link)
driver.get_screenshot_as_file('1.png')
# simulate manual login
driver.set_page_load_timeout(10)
driver.set_script_timeout(20)
driver.find_element_by_id('Email').send_keys(Email)
driver.get_screenshot_as_file('2.png')
driver.find_element_by_css_selector('#next').click()
time.sleep(3)
driver.find_element_by_id('Passwd').send_keys(Passwd)
driver.get_screenshot_as_file('3.png')
driver.find_element_by_css_selector('#signIn').click()
time.sleep(3)
driver.get_screenshot_as_file('4.png')
# get cookies
cookies = driver.get_cookies()
csrftoken = ''
SACSID = ''
for key in cookies:
if key['name'] == 'csrftoken':
csrftoken = key['value']
if key['name'] == 'SACSID':
SACSID = key['value']
if csrftoken == '' or SACSID == '':
raise CookieException
with open('cookie', 'w') as file:
cookie = 'SACSID='+SACSID+'; csrftoken='+csrftoken+'; ingress.intelmap.shflt=viz; ingress.intelmap.lat=29.098418372855484; ingress.intelmap.lng=119.81689453125; ingress.intelmap.zoom=17'
file.write(cookie)
logger.info(get_time() + ': Fetching Cookie Succeed')
driver.quit()
return True
def send_message(bot, message, monitor=False):
logger = logging.getLogger('send_message')
while True:
try:
if monitor is True:
bot.sendMessage(chat_id="@voamonitor", text=message)
else:
                logger.debug("Message type: %s", type(message))
bot.sendMessage(chat_id=CHANNEL_NAME, text=message)
logger.info(get_time() + ": sendMsg " + message)
break
except telegram.TelegramError:
logger.error(get_time() + ": Send Message to Channel Failed")
time.sleep(1)
except Exception:
logger.error(get_time() + ": Unexpected error: " + str(sys.exc_info()[0]) + " Line: " + str(inspect.currentframe().f_lineno))
time.sleep(1)
def find_message_record(guid):
uri = 'mongodb://' + DBHost
conn = MongoClient(uri)
conn.api.authenticate(DBUser, DBPass, DBName)
database = conn[DBName]
collection = database.entries
    count = collection.find({"id": guid}).count()
conn.close()
if count == 0:
return False
else:
return True
def insert_message_to_database(time, id, msg):
uri = 'mongodb://' + DBHost
conn = MongoClient(uri)
conn.api.authenticate(DBUser, DBPass, DBName)
database = conn[DBName]
collection = database.entries
post = {"id": id, "time": time, "msg": msg}
collection.insert(post)
conn.close()
def main():
logger = logging.getLogger(__name__)
# Lat & Lng of fetch region
field = {
'minLngE6': minLngE6,
'minLatE6': minLatE6,
'maxLngE6': maxLngE6,
'maxLatE6': maxLatE6,
}
mints = -1
maxts = -1
reverse = False
tab = 'all'
# fetch cookie
while True:
try:
if fetch_cookie():
break
except CookieException:
logger.error(get_time() + ': Fetch Cookie Failed')
time.sleep(3)
except:
logger.error(get_time() + ": Unexpected error: " + str(sys.exc_info()[0]) + " Line: " + str(inspect.currentframe().f_lineno))
time.sleep(3)
# fetch message
count = 0
while True:
count += 1
logger.info(get_time() + ": {} Fetching from Intel...".format(str(count)))
with open('cookie') as cookies:
cookies = cookies.read().strip()
        # fetch one batch of messages, retrying until the request succeeds
while True:
try:
intel = ingrex.Intel(cookies, field)
result = intel.fetch_msg(mints, maxts, reverse, tab)
if result:
mints = result[0][1] + 1
break
except:
logger.error(get_time() + ": Unexpected error: " + str(sys.exc_info()[0]) + " Line: " + str(inspect.currentframe().f_lineno))
time.sleep(3)
for item in result[::-1]:
# Check spam message
pattern = re.compile(BlockList)
match = pattern.search(str(item))
if match:
continue
message = ingrex.Message(item)
if message.ptype == 'PLAYER_GENERATED':
if find_message_record(message.guid) is False:
insert_message_to_database(message.time, message.guid, message.msg)
send_message(bot, message.msg, False)
time.sleep(10)
if __name__ == '__main__':
read_config()
bot = telegram.Bot(BOT_TOKEN)
while True:
try:
main()
except Exception:
send_message(bot, 'Main Unexpected error' + str(sys.exc_info()[0]) + " Line: " + str(inspect.currentframe().f_lineno), True)
time.sleep(3) | mit | 2,498,215,814,560,425,500 | 26.626459 | 194 | 0.569409 | false |
saullocastro/pyNastran | pyNastran/bdf/dev_vectorized/cards/loads/static/grav.py | 1 | 4585 | from six.moves import zip
import numpy as np
from numpy import zeros, unique
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.bdf_interface.assign_type import (integer, integer_or_blank,
double, double_or_blank)
from pyNastran.bdf.dev_vectorized.cards.loads.vectorized_load import VectorizedLoad
class GRAV(VectorizedLoad):
"""
+------+-----+-----+------+-----+-----+------+-----+
| GRAV | SID | CID | A | N1 | N2 | N3 | MB |
+------+-----+-----+------+-----+-----+------+-----+
| GRAV | 1 | 3 | 32.2 | 0.0 | 0.0 | -1.0 | |
+------+-----+-----+------+-----+-----+------+-----+
"""
type = 'GRAV'
def __init__(self, model):
"""
Defines the GRAV object.
Parameters
----------
model : BDF
the BDF object
.. todo:: collapse loads
"""
VectorizedLoad.__init__(self, model)
#self.model = model
#self.n = 0
#self._cards = []
#self._comments = []
def __getitem__(self, i):
#unique_lid = unique(self.load_id)
if len(i):
f = GRAV(self.model)
f.load_id = self.load_id[i]
f.coord_id = self.coord_id[i]
f.scale = self.scale[i]
f.N = self.N[i]
f.mb = self.mb[i]
f.n = len(i)
return f
raise RuntimeError('len(i) = 0')
def __mul__(self, value):
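        # Scaling a GRAV load multiplies only the acceleration magnitude
        # (scale); the direction vector N is left unchanged.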
f = GRAV(self.model)
f.load_id = self.load_id
f.coord_id = self.coord_id
f.scale = self.scale * value
f.N = self.N
f.mb = self.mb
f.n = self.n
return f
def __rmul__(self, value):
return self.__mul__(value)
def allocate(self, card_count):
ncards = card_count[self.type]
if ncards:
self.n = ncards
float_fmt = self.model.float_fmt
#: Set identification number
self.load_id = zeros(ncards, 'int32')
#: Coordinate system identification number.
self.coord_id = zeros(ncards, 'int32')
#: scale factor
self.scale = zeros(ncards, float_fmt)
self.N = zeros((ncards, 3), float_fmt)
self.mb = zeros(ncards, 'int32')
def add_card(self, card, comment=''):
#self._cards.append(card)
#self._comments.append(comment)
i = self.i
self.load_id[i] = integer(card, 1, 'sid')
#self.node_id[i] = integer(card, 1, 'node_id')
self.coord_id[i] = integer_or_blank(card, 2, 'cid', 0)
self.scale[i] = double(card, 3, 'scale')
#: Acceleration vector components measured in coordinate system CID
self.N[i, :] = [double_or_blank(card, 4, 'N1', 0.0),
double_or_blank(card, 5, 'N2', 0.0),
double_or_blank(card, 6, 'N3', 0.0)]
#: Indicates whether the CID coordinate system is defined in the
#: main Bulk Data Section (MB = -1) or the partitioned superelement
#: Bulk Data Section (MB = 0). Coordinate systems referenced in the
#: main Bulk Data Section are considered stationary with respect to
#: the assembly basic coordinate system. See Remark 10.
#: (Integer; Default = 0)
self.mb[i] = integer_or_blank(card, 7, 'mb', 0)
assert len(card) <= 8, 'len(GRAV card) = %i\ncard=%s' % (len(card), card)
self.i += 1
def build(self):
"""
Parameters
----------
:param cards: the list of GRAV cards
"""
if self.n:
i = self.load_id.argsort()
self.load_id = self.load_id[i]
#self.node_id = self.node_id[i]
self.coord_id = self.coord_id[i]
self.scale = self.scale[i]
self.N = self.N[i]
self._cards = []
self._comments = []
def get_stats(self):
msg = []
if self.n:
msg.append(' %-8s: %i' % ('GRAV', self.n))
return msg
def write_card_by_index(self, bdf_file, size=8, is_double=False, i=None):
for (lid, cid, scale, N, mb) in zip(
self.load_id[i], self.coord_id[i], self.scale[i], self.N[i, :], self.mb[i]):
card = ['GRAV', lid, cid, scale, N[0], N[1], N[2], mb]
if size == 8:
bdf_file.write(print_card_8(card))
else:
bdf_file.write(print_card_16(card))
def get_load_ids(self):
return np.unique(self.load_id)
| lgpl-3.0 | 771,074,946,562,571,900 | 32.467153 | 89 | 0.499891 | false |
lenarother/moderna | moderna/modifications/ModificationAdder.py | 1 | 3806 | """
Add modifications to a residue
"""
from ResidueEditor import ResidueEditor
from BaseExchanger import BaseExchanger
from ModificationRemover import ModificationRemover
from moderna.util.Errors import AddModificationError
from moderna.util.LogFile import log
from moderna.Constants import ANY_RESIDUE, MISSING_RESIDUE, \
UNKNOWN_RESIDUE_SHORT, B_FACTOR_ADD_MODIF, \
ADDING_MODIFICATION_RULES_PATH
def parse_modification_rules(separator=' | '):
"""
    Prepares the rules for adding modifications.
    Rules describe which fragments to add and how to do this
    to obtain a residue with a given modification.
Returns dict of list of dicts with rules for adding a single fragment.
Keys in each rule dict: ['modification_name', 'original_base', 'remove',
'moved_link_atoms', 'fixed_link_atoms', 'fragment_file_name', 'pdb_abbrev']
"""
rules = {}
try:
infile = open(ADDING_MODIFICATION_RULES_PATH)
except IOError:
log.write_message('File does not exist: %s ' % ADDING_MODIFICATION_RULES_PATH)
return {}
for line in infile:
line = line.strip().split(separator)
if len(line) >= 7:
mod_name = line[0].strip()
rules.setdefault(mod_name, [])
rule = {}
rule['modification_name'] = line[0]
rule['original_base'] = line[1]
rule['remove'] = line[2]
rule['moved_link_atoms'] = line[3].split(',')
rule['fixed_link_atoms'] = line[4].split(',')
rule['fragment_file_name'] = line[5]
rule['pdb_abbrev'] = line[6]
rules[mod_name].append(rule)
return rules
MODIFICATION_RULES = parse_modification_rules()
class ModificationAdder(ResidueEditor):
def add_modification(self, resi, modification_name):
"""
Adds a modification to a residue.
It adds single fragments (add_single_fragment)
        according to the adding-modification rules (parse_modification_rules).
Arguments:
- modification name (as a long abbreviation)
"""
try:
if modification_name in [ANY_RESIDUE, MISSING_RESIDUE]:
raise AddModificationError('Residue %s: expected a modification name, instead got missing/any residue abbreviation "%s"'\
% (resi.identifier, modification_name))
else:
if resi.long_abbrev == UNKNOWN_RESIDUE_SHORT:
self.mutate_unknown_residue(resi)
if resi.modified:
rem = ModificationRemover()
rem.remove_modification(resi)
rules = MODIFICATION_RULES.get(modification_name, [])
if not rules:
raise AddModificationError('Residue %s: there is no rule for adding this modification. Check modification name "%s".' \
%(resi.identifier, modification_name))
else:
if rules[0]['original_base'] != resi.original_base:
bex = BaseExchanger()
bex.exchange_base(resi, rules[0]['original_base'])
for rule in rules:
self.add_single_fragment(resi, rule)
resi.change_name(modification_name)
self.set_bfactor(resi, B_FACTOR_ADD_MODIF)
except IOError:
raise AddModificationError('Residue %s: could not add modification.' % resi.identifier)
def add_modification(resi, long_abbrev):
"""Adds modification with given abbreviation"""
old_name = resi.long_abbrev
add = ModificationAdder()
add.add_modification(resi, long_abbrev)
log.write_message('Residue %s: modification added (%s ---> %s).' %(resi.identifier, old_name, long_abbrev))
| gpl-3.0 | 1,377,465,879,723,447,600 | 39.063158 | 139 | 0.607199 | false |
jessicachung/rna_seq_pipeline | pipeline_config.py | 1 | 8036 | #---------------------------------
# PIPELINE RUN
#---------------------------------
# The configuration settings to run the pipeline. These options are overwritten
# if a new setting is specified as an argument when running the pipeline.
# These settings include:
# - logDir: The directory where the batch queue scripts are stored, along with
# stdout and stderr dumps after the job is run.
# - logFile: Log file in logDir which all commands submitted are stored.
# - style: the style which the pipeline runs in. One of:
# - 'print': prints the stages which will be run to stdout,
# - 'run': runs the pipeline until the specified stages are finished, and
# - 'flowchart': outputs a flowchart of the pipeline stages specified and
# their dependencies.
# - procs: the number of python processes to run simultaneously. This
# determines the maximum parallelism of the pipeline. For distributed jobs
# it also constrains the maximum total jobs submitted to the queue at any one
# time.
# - verbosity: one of 0 (quiet), 1 (normal), 2 (chatty).
# - end: the desired tasks to be run. Rubra will also run all tasks which are
# dependencies of these tasks.
# - force: tasks which will be forced to run, regardless of timestamps.
# - rebuild: one of 'fromstart','fromend'. Whether to calculate which
# dependencies will be rerun by working back from an end task to the latest
# up-to-date task, or forward from the earliest out-of-date task. 'fromstart'
# is the most conservative and commonly used as it brings all intermediate
# tasks up to date.
# - manager: "pbs" or "slurm"
pipeline = {
"logDir": "log",
"logFile": "pipeline_commands.log",
"style": "print",
"procs": 16,
"verbose": 2,
"end": ["fastQCSummary", "voom", "edgeR", "qcSummary"],
"force": [],
"rebuild": "fromstart",
"manager": "slurm",
}
# This option specifies whether or not you are using VLSCI's Merri or Barcoo
# cluster. If True, this changes java's tmpdir to the job's tmp dir on
# /scratch ($TMPDIR) instead of using the default /tmp which has limited space.
using_merri = True
# Optional parameter governing how Ruffus determines which part of the
# pipeline is out-of-date and needs to be re-run. If set to False, Ruffus
# will work back from the end target tasks and only execute the pipeline
# after the first up-to-date tasks that it encounters.
# Warning: Use with caution! If you don't understand what this option does,
# keep this option as True.
maximal_rebuild_mode = True
#---------------------------------
# CONFIG
#---------------------------------
# Name of analysis. Changing the name will create new sub-directories for
# voom, edgeR, and cuffdiff analysis.
analysis_name = "analysis_v1"
# The directory containing *.fastq.gz read files.
raw_seq_dir = "/path_to_project/fastq_files/"
# Path to the CSV file with sample information regarding condition and
# covariates if available.
samples_csv = "/path_to_project/fastq_files/samples.csv"
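# An illustrative layout for samples.csv (assumed, not a required schema;
# adapt column names to your experiment design):
#   sample,condition,covariate
#   sample_01,control,batch1
#   sample_02,treated,batch1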
# Path to the CSV file with which comparisons to make.
comparisons_csv = "/path_to_project/fastq_files/comparisons.csv"
# The output directory.
output_dir = "/path_to_project/results/"
# Sequencing platform for read group information.
platform = "Illumina"
# If the experiment is paired-end or single-end: True (PE) or False (SE).
paired_end = False
# Whether the experiment is strand specific: "yes", "no", or "reverse".
stranded = "no"
#---------------------------------
# REFERENCE FILES
#---------------------------------
# Most reference files can be obtained from the Illumina iGenomes project:
# http://cufflinks.cbcb.umd.edu/igenomes.html
# Bowtie 2 index files: *.1.bt2, *.2.bt2, *.3.bt2, *.4.bt2, *.rev.1.bt2,
# *.rev.2.bt2.
genome_ref = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/bowtie_Indexed/human_g1k_v37"
# Genome reference FASTA. Also needs an indexed genome (.fai) and dictionary
# (.dict) file in the same directory.
genome_ref_fa = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/bowtie_Indexed/human_g1k_v37.fa"
# Gene set reference file (.gtf). Recommend using the GTF file obtained from
# Ensembl as Ensembl gene IDs are used for annotation (if specified).
gene_ref = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/TuxedoSuite_Ref_Files/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf"
# Either a rRNA reference fasta (ending in .fasta or .fa) or an GATK interval
# file (ending in .list) containing rRNA intervals to calculate the rRNA
# content. Can set as False if not available.
# rrna_ref = "/vlsci/VR0002/shared/Reference_Files/rRNA/human_all_rRNA.fasta"
rrna_ref = "/vlsci/VR0002/shared/jchung/human_reference_files/human_rRNA.list"
# Optional tRNA and rRNA sequences to filter out in Cuffdiff (.gtf or .gff).
# Set as False if not provided.
cuffdiff_mask_file = False
#---------------------------------
# TRIMMOMATIC PARAMETERS
#---------------------------------
# Parameters for Trimmomatic (a tool for trimming Illumina reads).
# http://www.usadellab.org/cms/index.php?page=trimmomatic
# Path of a FASTA file containing adapter sequences used in sequencing.
adapter_seq = "/vlsci/VR0002/shared/jchung/human_reference_files/TruSeqAdapters.fa"
# The maximum mismatch count which will still allow a full match to be
# performed.
seed_mismatches = 2
# How accurate the match between the two 'adapter ligated' reads must be for
# PE palindrome read alignment.
palendrome_clip_threshold = 30
# How accurate the match between any adapter etc. sequence must be against a
# read.
simple_clip_threshold = 10
# The minimum quality needed to keep a base and the minimum length of reads to
# be kept.
extra_parameters = "LEADING:3 TRAILING:3 SLIDINGWINDOW:4:15 MINLEN:36"
# Output Trimmomatic log file
write_trimmomatic_log = True
#---------------------------------
# R PARAMETERS
#---------------------------------
# Get annotations from Ensembl BioMart. GTF file needs to use IDs from Ensembl.
# Set as False to skip annotation, else
# provide the name of the dataset that will be queried. Attributes to be
# obtained include gene symbol, chromosome name, description, and gene biotype.
# Commonly used datasets:
# human: "hsapiens_gene_ensembl"
# mouse: "mmusculus_gene_ensembl"
# rat: "rnorvegicus_gene_ensembl"
# You can list all available datasets in R by using the listDatasets function:
# > library(biomaRt)
# > listDatasets(useMart("ensembl"))
# The gene symbol is obtained from the attribute "hgnc_symbol" (human) or
# "mgi_symbol" (mice/rats) if available. If not, the "external_gene_id" is used
# to obtain the gene symbol. You can change this by editing the script:
# scripts/combine_and_annotate.r
annotation_dataset = "hsapiens_gene_ensembl"
#---------------------------------
# SCRIPT PATHS
#---------------------------------
# Paths to other wrapper scripts needed to run the pipeline. Make sure these
# paths are relative to the directory where you plan to run the pipeline in or
# change them to absolute paths.
html_index_script = "scripts/html_index.py"
index_script = "scripts/build_index.sh"
tophat_script = "scripts/run_tophat.sh"
merge_tophat_script = "scripts/merge_tophat.sh"
fix_tophat_unmapped_reads_script = "scripts/fix_tophat_unmapped_reads.py"
htseq_script = "scripts/run_htseq.sh"
fastqc_parse_script = "scripts/fastqc_parse.py"
qc_parse_script = "scripts/qc_parse.py"
alignment_stats_script = "scripts/alignment_stats.sh"
combine_and_annotate_script = "scripts/combine_and_annotate.R"
de_analysis_script = "scripts/de_analysis.R"
#---------------------------------
# PROGRAM PATHS
#---------------------------------
trimmomatic_path = "/usr/local/trimmomatic/0.30/trimmomatic-0.30.jar"
reorder_sam_path = "/usr/local/picard/1.69/lib/ReorderSam.jar"
mark_duplicates_path = "/usr/local/picard/1.69/lib/MarkDuplicates.jar"
rnaseqc_path = "/usr/local/rnaseqc/1.1.7/RNA-SeQC_v1.1.7.jar"
add_or_replace_read_groups_path = "/usr/local/picard/1.69/lib/AddOrReplaceReadGroups.jar"
| mit | -6,517,727,467,118,857,000 | 39.791878 | 146 | 0.697113 | false |
HunterBaines/sudb | setup.py | 1 | 1155 | # Author: Hunter Baines <[email protected]>
# Copyright: (C) 2017 Hunter Baines
# License: GNU GPL version 3
import sys
from distutils.core import setup
import sudb
FAILURE = '\033[1;31m' + 'Install cannot proceed.' + '\033[00m'
if len(sys.argv) > 1 and sys.argv[1] == 'install':
# Check python version
if sys.version_info < (3, 4):
sys.exit(FAILURE + ' Sorry, Python 3.4 or above is required.')
setup(name='sudb',
description='Sudoku debugger',
long_description=sudb.__doc__,
author=sudb.__author__,
author_email=sudb.__email__,
license=sudb.__license__,
packages=['sudb'],
scripts=['scripts/sudb'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Games/Entertainment :: Puzzle Games'
]
)
| gpl-3.0 | -9,102,236,818,809,176,000 | 28.615385 | 77 | 0.592208 | false |
gersteinlab/AlleleDB | alleledb_pipeline/CombineSnpCounts.py | 1 | 5465 |
import gc, os, sys, string, re, pdb, scipy.stats, cPickle
import Mapping2, getNewer1000GSNPAnnotations, Bowtie, binom, GetCNVAnnotations, dictmerge, utils, InBindingSite
TABLE=string.maketrans('ACGTacgt', 'TGCAtgca')
USAGE="%s mindepth snpfile bindingsites cnvfile outfile logfile countfile [countfile ...]"
def reverseComplement(seq):
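    # Reverse the sequence, then complement each base via TABLE,
    # e.g. reverseComplement('AACG') -> 'CGTT'.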
tmp=seq[::-1]
return tmp.translate(TABLE)
def makeMappers(maptmplt):
mappers={}
cs=['chr%s' % str(c) for c in range(1,23)] + ['chrX', 'chrY', 'chrM']
for c in cs:
f=maptmplt % c
if os.path.exists(f):
mappers[c] = Mapping2.Mapping(f)
return mappers
THRESH1=0.90
THRESH2=0.05
SYMMETRIC="Sym"
ASYMMETRIC="Asym"
HOMOZYGOUS="Homo"
WEIRD="Weird"
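# IUPAC nucleotide codes mapped to their two constituent alleles
# (e.g. 'r' = purine = a/g, 'y' = pyrimidine = c/t); unambiguous
# bases map to a pair of identical alleles.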
tbl={
'a':('a','a'),
'c':('c','c'),
'g':('g','g'),
't':('t','t'),
'r':('a','g'),
'y':('c','t'),
's':('c','g'),
'w':('a','t'),
'k':('g','t'),
'm':('a','c')
}
def convert(a):
return tbl[a.lower()]
def testCounts(counts, chrm, snprec):
winningParent='?'
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
# first, make sure that the expected alleles are the bulk of the counts
total = counts['a']+counts['c']+counts['g']+counts['t']
a1,a2=convert(child_genotype)
if a1==a2:
allelecnts = counts[a1]
else:
allelecnts = counts[a1]+counts[a2]
both=counts[a1]+counts[a2]
sortedCounts=sorted([(counts['a'], 'a'), (counts['c'],'c'), (counts['g'], 'g'), (counts['t'], 't')], reverse=True)
majorAllele=sortedCounts[0][1]
smaller=min(counts[a1], counts[a2])
#pval=binomialDist.cdf(smaller, both, 0.5)*2 # This had problems for large sample sizes. Switched to using scipy
pval = binom.binomtest(smaller, both, 0.5) # scipy.binom_test was unstable for large counts
if float(allelecnts)/total < THRESH1:
print >>LOGFP, "WARNING %s:%d failed thresh 1 %d %d" % (chrm, ref_pos, allelecnts, total)
return (WEIRD, pval, a1, a2, counts, winningParent)
# if the snp was phased
if mat_allele and pat_allele:
if mat_allele.lower()==majorAllele.lower():
winningParent='M'
elif pat_allele.lower()==majorAllele.lower():
winningParent='P'
else:
winningParent='?'
if a1!=a2:
# we expect roughly 50/50.
if pval < THRESH2:
print >>LOGFP, "NOTE %s:%d Looks interesting: failed thresh 2 %d %d %f" % (chrm, ref_pos, both, smaller, pval)
print >>LOGFP, "SNPS %s/%s, COUNTS a:%d c:%d g:%d t:%d" % (a1, a2, counts['a'], counts['c'], counts['g'], counts['t'])
print >>LOGFP, "Phasing P:%s M:%s D:%s" % (pat_allele, mat_allele, snprec)
print >>LOGFP, "\n"
return (ASYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (SYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (HOMOZYGOUS, pval, a1, a2, counts, winningParent)
def process(chrm, snppos, counts, snprec, CNVhandler):
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
t, pval, a1, a2, counts, winningParent = testCounts(counts, chrm, snprec)
#if t==ASYMMETRIC or t==SYMMETRIC:
# hetSnps+=1
#if t==ASYMMETRIC:
# interestingSnps+=1
if BShandler:
inBS=1 if BShandler.check("chr%s"%chrm, snppos) else 0
else:
inBS=-1
cnv=CNVhandler.getAnnotation("chr%s"%chrm, snppos)
if cnv:
cnv=cnv[2]
else:
cnv='1.0'
#nd, np = scipy.stats.kstest(ksvals, 'uniform', (0.0, 1.0))
print >>OUTFP, utils.myFormat('\t', (chrm, snppos, ref, mat_genotype, pat_genotype, child_genotype, typ, mat_allele, pat_allele, counts['a'], counts['c'], counts['g'], counts['t'], winningParent, t, pval, inBS, cnv))
OUTFP.flush()
# This is used to order the chromosomes 1,2,3,...,22,X,Y. Tricky, eh?
def chrcmp(a, b):
try:
a=int(a)
except:
pass
try:
b=int(b)
except:
pass
return cmp(a,b)
if __name__=='__main__':
if len(sys.argv) < 7:
print USAGE % sys.argv[0]
sys.exit(-1)
mindepth=int(sys.argv[1])
snpfile=sys.argv[2]
BindingSitefile=sys.argv[3]
CNVFile=sys.argv[4]
OUTFP = open(sys.argv[5], 'w')
LOGFP = open(sys.argv[6], 'w')
countfiles=sys.argv[7:]
if os.access(BindingSitefile, os.R_OK):
BShandler=InBindingSite.BSHandler(BindingSitefile)
else:
BShandler=None
CNVhandler=GetCNVAnnotations.Handler(CNVFile)
hetSnps=0
interestingSnps=0
gc.disable()
pat=re.compile('chr(\w+)_([mp]aternal)')
print >>OUTFP, utils.myFormat('\t', ['chrm', 'snppos ', 'ref', 'mat_gtyp', 'pat_gtyp', 'c_gtyp', 'phase', 'mat_all', 'pat_all', 'cA', 'cC', 'cG', 'cT', 'winning', 'SymCls', 'SymPval', 'BindingSite', 'cnv'])
ref_1000G=getNewer1000GSNPAnnotations.Handler(snpfile, None, 'PAT', hasHeader=True, onlyHets=True)
counts={}
for countfile in countfiles:
temp=cPickle.load(open(countfile))
dictmerge.accum(counts, temp, lambda : 0, lambda a, b: a+b)
for chrm in sorted(counts.keys(), chrcmp):
for pos in sorted(counts[chrm].keys()):
total = sum(counts[chrm][pos].values())
if total >= mindepth:
process(chrm, pos, counts[chrm][pos], ref_1000G.getAnnotation(chrm, pos), CNVhandler)
| cc0-1.0 | -3,136,855,795,635,907,600 | 30.959064 | 220 | 0.598536 | false |
Tecnativa/docker-odoo-base | tests/__init__.py | 1 | 22433 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Run tests for this base image.
Each test must be a valid docker-compose.yaml file with a ``odoo`` service.
"""
import logging
import unittest
from itertools import product
from os import environ
from os.path import dirname, join
from subprocess import Popen
logging.basicConfig(level=logging.DEBUG)
DIR = dirname(__file__)
ODOO_PREFIX = ("odoo", "--stop-after-init", "--workers=0")
ODOO_VERSIONS = frozenset(
environ.get("DOCKER_TAG", "7.0 8.0 9.0 10.0 11.0 12.0 13.0 14.0").split()
)
PG_VERSIONS = frozenset(environ.get("PG_VERSIONS", "13").split())
SCAFFOLDINGS_DIR = join(DIR, "scaffoldings")
GEIOP_CREDENTIALS_PROVIDED = environ.get("GEOIP_LICENSE_KEY", False) and environ.get(
"GEOIP_ACCOUNT_ID", False
)
# This decorator skips tests that will fail until some branches and/or addons
# are migrated to the next release. It is used in situations where Doodba is
# preparing the pre-release for the next version of Odoo, which hasn't been
# released yet.
prerelease_skip = unittest.skipIf(
ODOO_VERSIONS == {"14.0"}, "Tests not supported in pre-release"
)
def matrix(
odoo=ODOO_VERSIONS, pg=PG_VERSIONS, odoo_skip=frozenset(), pg_skip=frozenset()
):
"""All possible combinations.
We compute the variable matrix here instead of in ``.travis.yml`` because
this generates faster builds, given the scripts found in ``hooks``
directory are already multi-version-build aware.
"""
return map(
dict,
product(
product(("ODOO_MINOR",), ODOO_VERSIONS & odoo - odoo_skip),
product(("DB_VERSION",), PG_VERSIONS & pg - pg_skip),
),
)
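# Illustrative result, assuming "13.0" is enabled via $DOCKER_TAG and "13"
# via $PG_VERSIONS:
#   list(matrix(odoo={"13.0"}, pg={"13"}))
#   == [{"ODOO_MINOR": "13.0", "DB_VERSION": "13"}]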
class ScaffoldingCase(unittest.TestCase):
def setUp(self):
super().setUp()
self.compose_run = ("docker-compose", "run", "--rm", "odoo")
def popen(self, *args, **kwargs):
"""Shortcut to open a subprocess and ensure it works."""
logging.info("Subtest execution: %s", self._subtest)
self.assertFalse(Popen(*args, **kwargs).wait())
def compose_test(self, workdir, sub_env, *commands):
"""Execute commands in a docker-compose environment.
:param workdir:
Path where the docker compose commands will be executed. It should
contain a valid ``docker-compose.yaml`` file.
:param dict sub_env:
Specific environment variables that will be appended to current
ones to execute the ``docker-compose`` tests.
You can set in this dict a ``COMPOSE_FILE`` key to choose different
docker-compose files in the same directory.
:param tuple()... commands:
List of commands to be tested in the odoo container.
"""
full_env = dict(environ, **sub_env)
with self.subTest(PWD=workdir, **sub_env):
try:
self.popen(("docker-compose", "build"), cwd=workdir, env=full_env)
for command in commands:
with self.subTest(command=command):
self.popen(
self.compose_run + command, cwd=workdir, env=full_env
)
finally:
self.popen(("docker-compose", "down", "-v"), cwd=workdir, env=full_env)
def test_addons_filtered(self):
"""Test addons filtering with ``ONLY`` keyword in ``addons.yaml``."""
project_dir = join(SCAFFOLDINGS_DIR, "dotd")
for sub_env in matrix():
self.compose_test(
project_dir,
dict(sub_env, DBNAME="prod"),
("test", "-e", "auto/addons/web"),
("test", "-e", "auto/addons/private_addon"),
(
"bash",
"-xc",
'test "$(addons list -p)" == disabled_addon,private_addon',
),
("bash", "-xc", 'test "$(addons list -ip)" == private_addon'),
("bash", "-xc", "addons list -c | grep ,crm,"),
# absent_addon is missing and should fail
("bash", "-xc", "! addons list -px"),
# Test addon inclusion, exclusion, dependencies...
(
"bash",
"-xc",
'test "$(addons list -dw private_addon)" == base,dummy_addon,website',
),
(
"bash",
"-xc",
'test "$(addons list -dwprivate_addon -Wwebsite)" == base,dummy_addon',
),
(
"bash",
"-xc",
'test "$(addons list -dw private_addon -W dummy_addon)" == base,website',
),
("bash", "-xc", 'test "$(addons list -nd)" == base,iap',),
(
"bash",
"-xc",
'test "$(addons list --enterprise)" == make_odoo_rich',
),
)
self.compose_test(
project_dir,
dict(sub_env, DBNAME="limited_private"),
("test", "-e", "auto/addons/web"),
("test", "!", "-e", "auto/addons/private_addon"),
("bash", "-xc", 'test -z "$(addons list -p)"'),
(
"bash",
"-xc",
'[ "$(addons list -s. -pwfake1 -wfake2)" == fake1.fake2 ]',
),
("bash", "-xc", "! addons list -wrepeat -Wrepeat"),
("bash", "-xc", "addons list -c | grep ,crm,"),
)
self.compose_test(
project_dir,
dict(sub_env, DBNAME="limited_core"),
("test", "!", "-e", "auto/addons/web"),
("test", "!", "-e", "auto/addons/private_addon"),
("bash", "-xc", 'test -z "$(addons list -p)"'),
("bash", "-xc", 'test "$(addons list -c)" == crm,sale'),
)
# Skip Odoo versions that don't support __manifest__.py files
for sub_env in matrix(odoo_skip={"7.0", "8.0", "9.0"}):
self.compose_test(
project_dir,
dict(sub_env, DBNAME="prod"),
("bash", "-xc", 'test "$(addons list -ped)" == base,web,website'),
# ``dummy_addon`` and ``private_addon`` exist
("test", "-d", "auto/addons/dummy_addon"),
("test", "-h", "auto/addons/dummy_addon"),
("test", "-f", "auto/addons/dummy_addon/__init__.py"),
("test", "-e", "auto/addons/dummy_addon"),
# Addon from extra repo takes higher priority than core version
("realpath", "auto/addons/product"),
(
"bash",
"-xc",
'test "$(realpath auto/addons/product)" == '
"/opt/odoo/custom/src/other-doodba/odoo/src/private/product",
),
("bash", "-xc", 'test "$(addons list -e)" == dummy_addon,product'),
)
self.compose_test(
project_dir,
dict(sub_env, DBNAME="limited_private"),
("test", "-e", "auto/addons/dummy_addon"),
("bash", "-xc", 'test "$(addons list -e)" == dummy_addon,product'),
)
self.compose_test(
project_dir,
dict(sub_env, DBNAME="limited_core"),
("test", "-e", "auto/addons/dummy_addon"),
(
"bash",
"-xc",
'[ "$(addons list -s. -pwfake1 -wfake2)" == fake1.fake2 ]',
),
("bash", "-xc", 'test "$(addons list -e)" == dummy_addon,product'),
("bash", "-xc", 'test "$(addons list -c)" == crm,sale'),
("bash", "-xc", 'test "$(addons list -cWsale)" == crm'),
)
@prerelease_skip
def test_qa(self):
"""Test that QA tools are in place and work as expected."""
folder = join(SCAFFOLDINGS_DIR, "settings")
commands = (
("./custom/scripts/qa-insider-test",),
("/qa/node_modules/.bin/eslint", "--version"),
("/qa/venv/bin/flake8", "--version"),
("/qa/venv/bin/pylint", "--version"),
("/qa/venv/bin/python", "--version"),
("/qa/venv/bin/python", "-c", "import pylint_odoo"),
("test", "-d", "/qa/mqt"),
)
for sub_env in matrix():
self.compose_test(folder, sub_env, *commands)
@prerelease_skip
def test_settings(self):
"""Test settings are filled OK"""
folder = join(SCAFFOLDINGS_DIR, "settings")
commands = (
# Odoo should install
("--stop-after-init",),
# Odoo settings work
("./custom/scripts/test_settings.py",),
)
if ODOO_VERSIONS & {"9.0", "10.0", "11.0"}:
commands += (
# Check Odoo settings using python-odoo-shell, which is available
# only for Odoo 9-11 (for 8 too, but it had no built-in shell)
("./custom/scripts/test_settings_python_odoo_shell.py",),
)
# --load-language doesn't work fine in Odoo 9.0
for sub_env in matrix(odoo={"9.0"}):
self.compose_test(folder, sub_env, *commands)
        # Extra tests for versions >= 10.0, which support --load-language properly
commands += (
# DB was created with the correct language
(
"bash",
"-xc",
"""test "$(psql -Atqc "SELECT code FROM res_lang
WHERE active = TRUE")" == es_ES""",
),
)
for sub_env in matrix(odoo_skip={"7.0", "8.0", "9.0"}):
self.compose_test(folder, sub_env, *commands)
def test_smallest(self):
"""Tests for the smallest possible environment."""
liberation = 'Liberation{0}-Regular.ttf: "Liberation {0}" "Regular"'
commands = (
# Must generate a configuration file
("test", "-f", "/opt/odoo/auto/odoo.conf"),
("test", "-d", "/opt/odoo/custom/src/private"),
("test", "-d", "/opt/odoo/custom/ssh"),
("addons", "list", "-cpix"),
("pg_activity", "--version"),
# Default fonts must be liberation
(
"bash",
"-xc",
"""test "$(fc-match monospace)" == '{}'""".format(
liberation.format("Mono")
),
),
(
"bash",
"-xc",
"""test "$(fc-match sans-serif)" == '{}'""".format(
liberation.format("Sans")
),
),
(
"bash",
"-xc",
"""test "$(fc-match serif)" == '{}'""".format(
liberation.format("Serif")
),
),
# Must be able to install base addon
ODOO_PREFIX + ("--init", "base"),
# Auto updater must work
("click-odoo-update",),
# Needed tools exist
("curl", "--version"),
("git", "--version"),
("pg_activity", "--version"),
("psql", "--version"),
("msgmerge", "--version"),
("ssh", "-V"),
("python", "-c", "import plumbum"),
# We are able to dump
("pg_dump", "-f/var/lib/odoo/prod.sql", "prod"),
# Geoip should not be activated
("bash", "-xc", 'test "$(which geoipupdate)" != ""'),
("test", "!", "-e", "/usr/share/GeoIP/GeoLite2-City.mmdb"),
("bash", "-xc", "! geoipupdate"),
)
smallest_dir = join(SCAFFOLDINGS_DIR, "smallest")
for sub_env in matrix(odoo_skip={"7.0", "8.0"}):
self.compose_test(
smallest_dir, sub_env, *commands, ("python", "-c", "import watchdog")
)
for sub_env in matrix(odoo={"8.0"}):
self.compose_test(
smallest_dir,
sub_env,
# Odoo <= 8.0 does not autocreate the database
("createdb",),
*commands,
)
# HACK https://github.com/itpp-labs/misc-addons/issues/1014
# TODO Remove decorator
@prerelease_skip
def test_addons_env(self):
"""Test environment variables in addons.yaml"""
        # Old versions are skipped because they don't support __manifest__.py,
# and the test is hacking ODOO_VERSION to pin a commit
for sub_env in matrix(odoo_skip={"7.0", "8.0", "9.0"}):
self.compose_test(
join(SCAFFOLDINGS_DIR, "addons_env"),
sub_env,
# check module from custom repo pattern
("test", "-d", "custom/src/misc-addons"),
("test", "-d", "custom/src/misc-addons/web_debranding"),
("test", "-e", "auto/addons/web_debranding"),
# Migrations folder is only in OpenUpgrade
("test", "-e", "auto/addons/crm"),
("test", "-d", "auto/addons/crm/migrations"),
)
def test_dotd(self):
"""Test environment with common ``*.d`` directories."""
for sub_env in matrix():
self.compose_test(
join(SCAFFOLDINGS_DIR, "dotd"),
sub_env,
# ``custom/build.d`` was properly executed
("test", "-f", "/home/odoo/created-at-build"),
# ``custom/entrypoint.d`` was properly executed
("test", "-f", "/home/odoo/created-at-entrypoint"),
# ``custom/conf.d`` was properly concatenated
("grep", "test-conf", "auto/odoo.conf"),
# ``custom/dependencies`` were installed
("test", "!", "-e", "/usr/sbin/sshd"),
("test", "!", "-e", "/var/lib/apt/lists/lock"),
("busybox", "whoami"),
("bash", "-xc", "echo $NODE_PATH"),
("node", "-e", "require('test-npm-install')"),
("aloha_world",),
("python", "-xc", "import Crypto; print(Crypto.__version__)"),
("sh", "-xc", "rst2html.py --version | grep 'Docutils 0.14'"),
# ``requirements.txt`` from addon repos were processed
("python", "-c", "import cfssl"),
# Local executable binaries found in $PATH
("sh", "-xc", "pip install --user -q flake8 && which flake8"),
# Addon cleanup works correctly
("test", "!", "-e", "custom/src/private/dummy_addon"),
("test", "!", "-e", "custom/src/dummy_repo/dummy_link"),
("test", "-d", "custom/src/private/private_addon"),
("test", "-f", "custom/src/private/private_addon/__init__.py"),
("test", "-e", "auto/addons/private_addon"),
# ``odoo`` command works
("odoo", "--version"),
# Implicit ``odoo`` command also works
("--version",),
)
# TODO Remove decorator when base_search_fuzzy is migrated to 14.0
@prerelease_skip
def test_dependencies(self):
"""Test dependencies installation."""
dependencies_dir = join(SCAFFOLDINGS_DIR, "dependencies")
for sub_env in matrix(odoo_skip={"7.0"}):
self.compose_test(
dependencies_dir,
sub_env,
("test", "!", "-f", "custom/dependencies/apt.txt"),
("test", "!", "-f", "custom/dependencies/gem.txt"),
("test", "!", "-f", "custom/dependencies/npm.txt"),
("test", "!", "-f", "custom/dependencies/pip.txt"),
# It should have base_search_fuzzy available
("test", "-d", "custom/src/server-tools/base_search_fuzzy"),
# Patched Werkzeug version
(
"bash",
"-xc",
(
'test "$(python -c "import werkzeug; '
'print(werkzeug.__version__)")" == 0.14.1'
),
),
# apt_build.txt
("test", "-f", "custom/dependencies/apt_build.txt"),
("test", "!", "-e", "/usr/sbin/sshd"),
# apt-without-sequence.txt
("test", "-f", "custom/dependencies/apt-without-sequence.txt"),
("test", "!", "-e", "/bin/busybox"),
# 070-apt-bc.txt
("test", "-f", "custom/dependencies/070-apt-bc.txt"),
("test", "-e", "/usr/bin/bc"),
# 150-npm-aloha_world-install.txt
(
"test",
"-f",
("custom/dependencies/" "150-npm-aloha_world-install.txt"),
),
("node", "-e", "require('test-npm-install')"),
# 200-pip-without-ext
("test", "-f", "custom/dependencies/200-pip-without-ext"),
("python", "-c", "import Crypto; print(Crypto.__version__)"),
("sh", "-xc", "rst2html.py --version | grep 'Docutils 0.14'"),
# 270-gem.txt
("test", "-f", "custom/dependencies/270-gem.txt"),
("aloha_world",),
)
def test_modified_uids(self):
"""tests if we can build an image with a custom uid and gid of odoo"""
uids_dir = join(SCAFFOLDINGS_DIR, "uids_1001")
for sub_env in matrix():
self.compose_test(
uids_dir,
sub_env,
# verify that odoo user has the given ids
("bash", "-xc", 'test "$(id -u)" == "1001"'),
("bash", "-xc", 'test "$(id -g)" == "1002"'),
("bash", "-xc", 'test "$(id -u -n)" == "odoo"'),
# all those directories need to belong to odoo (user or group odoo)
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /var/lib/odoo)" == "odoo:odoo"',
),
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /opt/odoo/auto/addons)" == "root:odoo"',
),
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /opt/odoo/custom/src)" == "root:odoo"',
),
)
def test_uids_mac_os(self):
"""tests if we can build an image with a custom uid and gid of odoo"""
uids_dir = join(SCAFFOLDINGS_DIR, "uids_mac_os")
for sub_env in matrix():
self.compose_test(
uids_dir,
sub_env,
# verify that odoo user has the given ids
("bash", "-c", 'test "$(id -u)" == "501"'),
("bash", "-c", 'test "$(id -g)" == "20"'),
("bash", "-c", 'test "$(id -u -n)" == "odoo"'),
# all those directories need to belong to odoo (user or group odoo/dialout)
(
"bash",
"-c",
'test "$(stat -c \'%U:%g\' /var/lib/odoo)" == "odoo:20"',
),
(
"bash",
"-c",
'test "$(stat -c \'%U:%g\' /opt/odoo/auto/addons)" == "root:20"',
),
(
"bash",
"-c",
'test "$(stat -c \'%U:%g\' /opt/odoo/custom/src)" == "root:20"',
),
)
def test_default_uids(self):
uids_dir = join(SCAFFOLDINGS_DIR, "uids_default")
for sub_env in matrix():
self.compose_test(
uids_dir,
sub_env,
# verify that odoo user has the given ids
("bash", "-xc", 'test "$(id -u)" == "1000"'),
("bash", "-xc", 'test "$(id -g)" == "1000"'),
("bash", "-xc", 'test "$(id -u -n)" == "odoo"'),
# all those directories need to belong to odoo (user or group odoo)
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /var/lib/odoo)" == "odoo:odoo"',
),
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /opt/odoo/auto/addons)" == "root:odoo"',
),
(
"bash",
"-xc",
'test "$(stat -c \'%U:%G\' /opt/odoo/custom/src)" == "root:odoo"',
),
)
@unittest.skipIf(
not GEIOP_CREDENTIALS_PROVIDED, "GeoIP credentials missing in environment"
)
def test_geoip(self):
geoip_dir = join(SCAFFOLDINGS_DIR, "geoip")
for sub_env in matrix():
self.compose_test(
geoip_dir,
sub_env,
# verify that geoipupdate works after waiting for entrypoint to finish its update
(
"bash",
"-c",
"timeout 60s bash -c 'while (ls -l /proc/*/exe 2>&1 | grep geoipupdate); do sleep 1; done' &&"
" geoipupdate",
),
# verify that geoip database exists after entrypoint finished its update
# using ls and /proc because ps is missing in image for 13.0
(
"bash",
"-c",
"timeout 60s bash -c 'while (ls -l /proc/*/exe 2>&1 | grep geoipupdate); do sleep 1; done' &&"
" test -e /opt/odoo/auto/geoip/GeoLite2-City.mmdb",
),
# verify that geoip database is configured
(
"grep",
"-R",
"geoip_database = /opt/odoo/auto/geoip/GeoLite2-City.mmdb",
"/opt/odoo/auto/odoo.conf",
),
)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 8,515,197,492,360,328,000 | 40.774674 | 114 | 0.447644 | false |
mschwager/CTFd | CTFd/admin/teams.py | 1 | 9765 | from flask import current_app as app, render_template, request, redirect, jsonify, url_for, Blueprint
from CTFd.utils import admins_only, is_admin, unix_time, get_config, \
set_config, sendmail, rmdir, create_image, delete_image, run_image, container_status, container_ports, \
container_stop, container_start, get_themes, cache, upload_file
from CTFd.models import db, Teams, Solves, Awards, Containers, Challenges, WrongKeys, Keys, Tags, Files, Tracking, Pages, Config, DatabaseError
from passlib.hash import bcrypt_sha256
from sqlalchemy.sql import not_
admin_teams = Blueprint('admin_teams', __name__)
@admin_teams.route('/admin/teams', defaults={'page': '1'})
@admin_teams.route('/admin/teams/<int:page>')
@admins_only
def admin_teams_view(page):
page = abs(int(page))
results_per_page = 50
page_start = results_per_page * (page - 1)
page_end = results_per_page * (page - 1) + results_per_page
teams = Teams.query.order_by(Teams.id.asc()).slice(page_start, page_end).all()
count = db.session.query(db.func.count(Teams.id)).first()[0]
pages = int(count / results_per_page) + (count % results_per_page > 0)
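    # worked example: with count=120 and results_per_page=50 this gives
    # int(120 / 50) + (120 % 50 > 0) -> 2 + True -> 3 pages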
return render_template('admin/teams.html', teams=teams, pages=pages, curr_page=page)
@admin_teams.route('/admin/team/<int:teamid>', methods=['GET', 'POST'])
@admins_only
def admin_team(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
if request.method == 'GET':
solves = Solves.query.filter_by(teamid=teamid).all()
solve_ids = [s.chalid for s in solves]
missing = Challenges.query.filter(not_(Challenges.id.in_(solve_ids))).all()
last_seen = db.func.max(Tracking.date).label('last_seen')
addrs = db.session.query(Tracking.ip, last_seen) \
.filter_by(team=teamid) \
.group_by(Tracking.ip) \
.order_by(last_seen.desc()).all()
wrong_keys = WrongKeys.query.filter_by(teamid=teamid).order_by(WrongKeys.date.asc()).all()
awards = Awards.query.filter_by(teamid=teamid).order_by(Awards.date.asc()).all()
score = user.score()
place = user.place()
return render_template('admin/team.html', solves=solves, team=user, addrs=addrs, score=score, missing=missing,
place=place, wrong_keys=wrong_keys, awards=awards)
elif request.method == 'POST':
admin_user = request.form.get('admin', None)
if admin_user:
admin_user = True if admin_user == 'true' else False
user.admin = admin_user
# Set user.banned to hide admins from scoreboard
user.banned = admin_user
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
verified = request.form.get('verified', None)
if verified:
verified = True if verified == 'true' else False
user.verified = verified
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
name = request.form.get('name', None)
password = request.form.get('password', None)
email = request.form.get('email', None)
website = request.form.get('website', None)
affiliation = request.form.get('affiliation', None)
country = request.form.get('country', None)
errors = []
name_used = Teams.query.filter(Teams.name == name).first()
if name_used and int(name_used.id) != int(teamid):
errors.append('That name is taken')
email_used = Teams.query.filter(Teams.email == email).first()
if email_used and int(email_used.id) != int(teamid):
errors.append('That email is taken')
if errors:
db.session.close()
return jsonify({'data': errors})
else:
user.name = name
user.email = email
if password:
user.password = bcrypt_sha256.encrypt(password)
user.website = website
user.affiliation = affiliation
user.country = country
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
@admin_teams.route('/admin/team/<int:teamid>/mail', methods=['POST'])
@admins_only
def email_user(teamid):
message = request.form.get('msg', None)
team = Teams.query.filter(Teams.id == teamid).first()
if message and team:
if sendmail(team.email, message):
return '1'
return '0'
@admin_teams.route('/admin/team/<int:teamid>/ban', methods=['POST'])
@admins_only
def ban(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
user.banned = True
db.session.commit()
db.session.close()
return redirect(url_for('admin_scoreboard.admin_scoreboard_view'))
@admin_teams.route('/admin/team/<int:teamid>/unban', methods=['POST'])
@admins_only
def unban(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
user.banned = False
db.session.commit()
db.session.close()
return redirect(url_for('admin_scoreboard.admin_scoreboard_view'))
@admin_teams.route('/admin/team/<int:teamid>/delete', methods=['POST'])
@admins_only
def delete_team(teamid):
try:
WrongKeys.query.filter_by(teamid=teamid).delete()
Solves.query.filter_by(teamid=teamid).delete()
Tracking.query.filter_by(team=teamid).delete()
Teams.query.filter_by(id=teamid).delete()
db.session.commit()
db.session.close()
except DatabaseError:
return '0'
else:
return '1'
@admin_teams.route('/admin/solves/<teamid>', methods=['GET'])
@admins_only
def admin_solves(teamid="all"):
if teamid == "all":
solves = Solves.query.all()
else:
solves = Solves.query.filter_by(teamid=teamid).all()
awards = Awards.query.filter_by(teamid=teamid).all()
db.session.close()
json_data = {'solves': []}
for x in solves:
json_data['solves'].append({
'id': x.id,
'chal': x.chal.name,
'chalid': x.chalid,
'team': x.teamid,
'value': x.chal.value,
'category': x.chal.category,
'time': unix_time(x.date)
})
for award in awards:
json_data['solves'].append({
'chal': award.name,
'chalid': None,
'team': award.teamid,
'value': award.value,
'category': award.category or "Award",
'time': unix_time(award.date)
})
json_data['solves'].sort(key=lambda k: k['time'])
return jsonify(json_data)
@admin_teams.route('/admin/fails/all', defaults={'teamid': 'all'}, methods=['GET'])
@admin_teams.route('/admin/fails/<int:teamid>', methods=['GET'])
@admins_only
def admin_fails(teamid):
if teamid == "all":
        fails = WrongKeys.query.join(Teams, WrongKeys.teamid == Teams.id).filter(not_(Teams.banned)).count()
        solves = Solves.query.join(Teams, Solves.teamid == Teams.id).filter(not_(Teams.banned)).count()
db.session.close()
json_data = {'fails': str(fails), 'solves': str(solves)}
return jsonify(json_data)
else:
fails = WrongKeys.query.filter_by(teamid=teamid).count()
solves = Solves.query.filter_by(teamid=teamid).count()
db.session.close()
json_data = {'fails': str(fails), 'solves': str(solves)}
return jsonify(json_data)
@admin_teams.route('/admin/solves/<int:teamid>/<int:chalid>/solve', methods=['POST'])
@admins_only
def create_solve(teamid, chalid):
solve = Solves(chalid=chalid, teamid=teamid, ip='127.0.0.1', flag='MARKED_AS_SOLVED_BY_ADMIN')
db.session.add(solve)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/solves/<int:keyid>/delete', methods=['POST'])
@admins_only
def delete_solve(keyid):
solve = Solves.query.filter_by(id=keyid).first_or_404()
db.session.delete(solve)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/wrong_keys/<int:keyid>/delete', methods=['POST'])
@admins_only
def delete_wrong_key(keyid):
wrong_key = WrongKeys.query.filter_by(id=keyid).first_or_404()
db.session.delete(wrong_key)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/awards/<int:award_id>/delete', methods=['POST'])
@admins_only
def delete_award(award_id):
award = Awards.query.filter_by(id=award_id).first_or_404()
db.session.delete(award)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/teams/<int:teamid>/awards', methods=['GET'])
@admins_only
def admin_awards(teamid):
awards = Awards.query.filter_by(teamid=teamid).all()
awards_list = []
for award in awards:
awards_list.append({
'id': award.id,
'name': award.name,
'description': award.description,
'date': award.date,
'value': award.value,
'category': award.category,
'icon': award.icon
})
json_data = {'awards': awards_list}
return jsonify(json_data)
@admin_teams.route('/admin/awards/add', methods=['POST'])
@admins_only
def create_award():
try:
teamid = request.form['teamid']
name = request.form.get('name', 'Award')
value = request.form.get('value', 0)
award = Awards(teamid, name, value)
award.description = request.form.get('description')
award.category = request.form.get('category')
db.session.add(award)
db.session.commit()
db.session.close()
return '1'
except Exception as e:
print(e)
return '0'
| apache-2.0 | -7,619,913,300,284,369,000 | 34.769231 | 143 | 0.611162 | false |
pmoleri/memorize-accesible | score.py | 1 | 2642 | # Copyright (C) 2006, 2007, 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import svglabel
import gtk
import os
import theme
class Score(svglabel.SvgLabel):
selected_color = "#818286"
default_color = "#4c4d4f"
status = False
def __init__(self, fill_color, stroke_color, pixbuf=None,
pixbuf_sel=None, status=False):
filename = os.path.join(os.path.dirname(__file__), "images/score.svg")
self.pixbuf_un = pixbuf
self.pixbuf_sel = pixbuf_sel
self.status = status
        if self.pixbuf_un is None:
self.pixbuf_un = svglabel.SvgLabel(filename, fill_color,
stroke_color, False,
self.default_color).get_pixbuf()
        if self.pixbuf_sel is None:
label = svglabel.SvgLabel(filename, fill_color, stroke_color,
False, self.selected_color)
self.pixbuf_sel = label.get_pixbuf()
if status:
self.pixbuf = self.pixbuf_sel
else:
self.pixbuf = self.pixbuf_un
svglabel.SvgLabel.__init__(self, filename, fill_color, stroke_color,
self.pixbuf, self.default_color, theme.SCORE_SIZE,
theme.SCORE_SIZE)
self.set_selected(status)
def set_selected(self, status):
self.status = status
if status:
self.pixbuf = self.pixbuf_sel
self.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse(self.selected_color))
else:
self.pixbuf = self.pixbuf_un
self.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse(self.default_color))
self.queue_draw()
def get_pixbuf_un(self):
return self.pixbuf_un
def get_pixbuf_sel(self):
return self.pixbuf_sel
| gpl-2.0 | -4,537,865,268,180,813,000 | 36.211268 | 79 | 0.589326 | false |
teodesson/Bookie | bookie/tests/test_api/test_popular_api.py | 1 | 5036 | import logging
import json
import pytest
import transaction
import unittest
from pyramid import testing
from bookie.models import DBSession
from bookie.models import Bmark
from bookie.models.auth import User
from bookie.tests import BOOKIE_TEST_INI
from bookie.tests import empty_db
from bookie.tests import gen_random_word
from random import randint
LOG = logging.getLogger(__name__)
API_KEY = None
MAX_CLICKS = 60
class BookiePopularAPITest(unittest.TestCase):
"""Test the Bookie API for retreiving popular bookmarks"""
def setUp(self):
from pyramid.paster import get_app
app = get_app(BOOKIE_TEST_INI, 'main')
from webtest import TestApp
self.testapp = TestApp(app)
testing.setUp()
global API_KEY
res = DBSession.execute(
"SELECT api_key FROM users WHERE username = 'admin'").fetchone()
API_KEY = res['api_key']
def tearDown(self):
"""We need to empty the bmarks table on each run"""
empty_db()
testing.tearDown()
def _check_cors_headers(self, res):
""" Make sure that the request has proper CORS headers."""
self.assertEqual(res.headers['access-control-allow-origin'], '*')
self.assertEqual(
res.headers['access-control-allow-headers'], 'X-Requested-With')
def _add_bookmark(self, user=None):
"""Add a bookmark for a particular user
with random click count.
If no user is specified, then admin is used
for the username"""
if user:
DBSession.add(user)
username = user.username
else:
username = 'admin'
b = Bmark(
url=gen_random_word(12),
username=username,
tags=gen_random_word(4),
)
b.clicks = randint(0, MAX_CLICKS)
b.hash_id = gen_random_word(5)
DBSession.add(b)
DBSession.flush()
b.hashed.clicks = b.clicks
DBSession.flush()
transaction.commit()
def test_bookmark_popular_user(self):
"""Test that we can get a list of bookmarks
added by admin and sorted by popularity."""
# Populating DB with some bookmarks of random users.
user_bmark_count = randint(1, 5)
for i in range(user_bmark_count):
user = User()
user.username = gen_random_word(10)
self._add_bookmark(user)
admin_bmark_count = randint(1, 5)
# Populating DB with some bookmarks of admin.
for i in range(admin_bmark_count):
self._add_bookmark()
res = self.testapp.get('/api/v1/admin/bmarks?sort=popular&api_key=' +
API_KEY,
status=200)
# make sure we can decode the body
bmarks = json.loads(res.unicode_body)['bmarks']
self.assertEqual(
len(bmarks),
admin_bmark_count,
"All admin bookmarks are retreived"
)
# Initializing number of clicks
previous_clicks = MAX_CLICKS
for bmark in bmarks:
self.assertEqual(
bmark['username'],
'admin',
"Only bookmarks by admin must be displayed")
self.assertTrue(
bmark['clicks'] <= previous_clicks,
'{0} < {1}'.format(bmark['clicks'], previous_clicks))
previous_clicks = bmark['clicks']
self._check_cors_headers(res)
empty_db()
@pytest.mark.skipif(
True,
reason=('Work in progress fixing queries to work in postgresql and'
'sqlite.'))
def test_bookmark_popular(self):
"""Test that we can get a list of all bookmarks
added by random users and sorted by popularity."""
# Populating DB with some bookmarks of random users.
user_bmark_count = randint(1, 5)
for i in range(user_bmark_count):
user = User()
user.username = gen_random_word(10)
self._add_bookmark(user)
admin_bmark_count = randint(1, 5)
# Populating DB with some bookmarks of admin.
for i in range(admin_bmark_count):
self._add_bookmark()
res = self.testapp.get('/api/v1/bmarks?sort=popular&api_key='
+ API_KEY,
status=200)
# make sure we can decode the body
bmarks = json.loads(res.unicode_body)['bmarks']
self.assertEqual(
len(bmarks),
admin_bmark_count + user_bmark_count,
"All bookmarks are retrieved"
)
# Initializing number of clicks
previous_clicks = MAX_CLICKS
for bmark in bmarks:
self.assertTrue(
bmark['total_clicks'] <= previous_clicks,
'{0} <= {1}'.format(bmark['total_clicks'], previous_clicks))
previous_clicks = bmark['total_clicks']
self._check_cors_headers(res)
empty_db()
| agpl-3.0 | -8,930,931,305,533,883,000 | 30.672956 | 77 | 0.573074 | false |
pythonbyexample/PBE | dbe/social/views.py | 1 | 3177 | # Imports {{{
from PIL import Image as PImage
from dbe.settings import MEDIA_ROOT, MEDIA_URL
from dbe.social.models import *
from dbe.shared.utils import *
from dbe.classviews.list_custom import ListView, ListRelated
from dbe.classviews.edit_custom import CreateView, UpdateView2
from forms import ProfileForm, PostForm
# }}}
class Main(ListView):
"""Main listing."""
model = Forum
context_object_name = "socials"
template_name = "social/list.html"
class ForumView(ListRelated):
"""Listing of threads in a social."""
model = Thread
related_model = Forum
foreign_key_field = "social"
context_object_name = "threads"
template_name = "social.html"
class ThreadView(ListRelated):
"""Listing of posts in a thread."""
model = Post
related_model = Thread
foreign_key_field = "thread"
context_object_name = "posts"
template_name = "thread.html"
class EditProfile(UpdateView2):
model = UserProfile
form_class = ProfileForm
success_url = '#'
template_name = "profile.html"
def form_valid(self, form):
"""Resize and save profile image."""
# remove old image if changed
name = form.cleaned_data.get("avatar", None)
old = UserProfile.objects.get( pk=self.kwargs.get("pk") ).avatar
if old.name and old.name != name:
old.delete()
# save new image to disk & resize new image
self.object = form.save()
if self.object.avatar:
img = PImage.open(self.object.avatar.path)
img.thumbnail((160,160), PImage.ANTIALIAS)
img.save(img.filename, "JPEG")
return redir(self.success_url)
def add_context(self):
img = ("/media/" + self.object.avatar.name) if self.object.avatar else None
return dict(img=img)
class NewTopic(CreateView):
model = Post
form_class = PostForm
title = "Start New Topic"
template_name = "social/post.html"
def increment_post_counter(self):
"""Increment counter of user's posts."""
profile = self.request.user.user_profile
profile.posts += 1
profile.save()
def get_thread(self, form):
data = form.cleaned_data
social = Forum.objects.get(pk=self.args[0])
return Thread.objects.create(social=social, title=data["title"], creator=self.request.user)
def form_valid(self, form):
"""Create new topic."""
data = form.cleaned_data
thread = self.get_thread(form)
Post.objects.create(thread=thread, title=data["title"], body=data["body"], creator=self.request.user)
self.increment_post_counter()
return self.get_success_url()
def get_success_url(self):
return redir("social", pk=self.args[0])
class Reply(NewTopic):
title = "Reply"
def get_success_url(self):
return redir(reverse2("thread", pk=self.args[0]) + "?page=last")
def get_thread(self, form):
return Thread.objects.get(pk=self.args[0])
def social_context(request):
return dict(media_url=MEDIA_URL)
| bsd-3-clause | 5,048,343,791,437,051,000 | 28.691589 | 109 | 0.619452 | false |
rplevka/robottelo | tests/foreman/endtoend/test_cli_endtoend.py | 1 | 12904 | """Smoke tests for the ``CLI`` end-to-end scenario.
:Requirement: Cli Endtoend
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Hammer
:Assignee: gtalreja
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import random
import pytest
from fauxfactory import gen_alphanumeric
from fauxfactory import gen_ipaddr
from .utils import AK_CONTENT_LABEL
from .utils import ClientProvisioningMixin
from robottelo import manifests
from robottelo import ssh
from robottelo.cli.activationkey import ActivationKey
from robottelo.cli.computeresource import ComputeResource
from robottelo.cli.contentview import ContentView
from robottelo.cli.domain import Domain
from robottelo.cli.factory import make_user
from robottelo.cli.host import Host
from robottelo.cli.hostgroup import HostGroup
from robottelo.cli.lifecycleenvironment import LifecycleEnvironment
from robottelo.cli.location import Location
from robottelo.cli.org import Org
from robottelo.cli.product import Product
from robottelo.cli.puppetmodule import PuppetModule
from robottelo.cli.repository import Repository
from robottelo.cli.repository_set import RepositorySet
from robottelo.cli.subnet import Subnet
from robottelo.cli.subscription import Subscription
from robottelo.cli.user import User
from robottelo.config import setting_is_set
from robottelo.config import settings
from robottelo.constants import DEFAULT_LOC
from robottelo.constants import DEFAULT_ORG
from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
from robottelo.constants import PRDS
from robottelo.constants import REPOS
from robottelo.constants import REPOSET
from robottelo.constants.repos import CUSTOM_RPM_REPO
from robottelo.constants.repos import FAKE_0_PUPPET_REPO
@pytest.fixture(scope='module')
def fake_manifest_is_set():
return setting_is_set('fake_manifest')
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_org():
"""Check if 'Default Organization' is present
:id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a
:expectedresults: 'Default Organization' is found
"""
result = Org.info({'name': DEFAULT_ORG})
assert result['name'] == DEFAULT_ORG
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_loc():
"""Check if 'Default Location' is present
:id: 11cf0d06-78ff-47e8-9d50-407a2ea31988
:expectedresults: 'Default Location' is found
"""
result = Location.info({'name': DEFAULT_LOC})
assert result['name'] == DEFAULT_LOC
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_admin_user():
"""Check if Admin User is present
:id: f6755189-05a6-4d2f-a3b8-98be0cfacaee
:expectedresults: Admin User is found and has Admin role
"""
result = User.info({'login': 'admin'})
assert result['login'] == 'admin'
assert result['admin'] == 'yes'
@pytest.mark.skip_if_not_set('compute_resources')
@pytest.mark.tier4
@pytest.mark.on_premises_provisioning
@pytest.mark.upgrade
@pytest.mark.skipif((not settings.repos_hosting_url), reason='Missing repos_hosting_url')
def test_positive_cli_end_to_end(fake_manifest_is_set):
"""Perform end to end smoke tests using RH and custom repos.
1. Create a new user with admin permissions
2. Using the new user from above
1. Create a new organization
2. Clone and upload manifest
3. Create a new lifecycle environment
4. Create a custom product
5. Create a custom YUM repository
6. Create a custom PUPPET repository
7. Enable a Red Hat repository
8. Synchronize the three repositories
9. Create a new content view
10. Associate the YUM and Red Hat repositories to new content view
11. Add a PUPPET module to new content view
12. Publish content view
13. Promote content view to the lifecycle environment
14. Create a new activation key
15. Add the products to the activation key
16. Create a new libvirt compute resource
17. Create a new subnet
18. Create a new domain
19. Create a new hostgroup and associate previous entities to it
20. Provision a client
:id: 8c8b3ffa-0d54-436b-8eeb-1a3542e100a8
:expectedresults: All tests should succeed and Content should be
successfully fetched by client.
"""
# step 1: Create a new user with admin permissions
password = gen_alphanumeric()
user = make_user({'admin': 'true', 'password': password})
user['password'] = password
# step 2.1: Create a new organization
org = _create(user, Org, {'name': gen_alphanumeric()})
# step 2.2: Clone and upload manifest
if fake_manifest_is_set:
with manifests.clone() as manifest:
ssh.upload_file(manifest.content, manifest.filename)
Subscription.upload({'file': manifest.filename, 'organization-id': org['id']})
# step 2.3: Create a new lifecycle environment
lifecycle_environment = _create(
user,
LifecycleEnvironment,
{'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'},
)
# step 2.4: Create a custom product
product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']})
repositories = []
# step 2.5: Create custom YUM repository
yum_repo = _create(
user,
Repository,
{
'content-type': 'yum',
'name': gen_alphanumeric(),
'product-id': product['id'],
'publish-via-http': 'true',
'url': CUSTOM_RPM_REPO,
},
)
repositories.append(yum_repo)
# step 2.6: Create custom PUPPET repository
puppet_repo = _create(
user,
Repository,
{
'content-type': 'puppet',
'name': gen_alphanumeric(),
'product-id': product['id'],
'publish-via-http': 'true',
'url': FAKE_0_PUPPET_REPO,
},
)
repositories.append(puppet_repo)
# step 2.7: Enable a Red Hat repository
if fake_manifest_is_set:
RepositorySet.enable(
{
'basearch': 'x86_64',
'name': REPOSET['rhva6'],
'organization-id': org['id'],
'product': PRDS['rhel'],
'releasever': '6Server',
}
)
rhel_repo = Repository.info(
{
'name': REPOS['rhva6']['name'],
'organization-id': org['id'],
'product': PRDS['rhel'],
}
)
repositories.append(rhel_repo)
# step 2.8: Synchronize the three repositories
for repo in repositories:
Repository.with_user(user['login'], user['password']).synchronize({'id': repo['id']})
# step 2.9: Create content view
content_view = _create(
user, ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']}
)
# step 2.10: Associate the YUM and Red Hat repositories to new content view
repositories.remove(puppet_repo)
for repo in repositories:
ContentView.add_repository(
{
'id': content_view['id'],
'organization-id': org['id'],
'repository-id': repo['id'],
}
)
# step 2.11: Add a PUPPET module to new content view
result = PuppetModule.with_user(user['login'], user['password']).list(
{'repository-id': puppet_repo['id'], 'per-page': False}
)
ContentView.with_user(user['login'], user['password']).puppet_module_add(
{'content-view-id': content_view['id'], 'id': random.choice(result)['id']}
)
# step 2.12: Publish content view
ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']})
# step 2.13: Promote content view to the lifecycle environment
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 1
ContentView.with_user(user['login'], user['password']).version_promote(
{'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']}
)
# check that content view exists in lifecycle
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 2
assert cv_version['lifecycle-environments'][-1]['id'] == lifecycle_environment['id']
# step 2.14: Create a new activation key
activation_key = _create(
user,
ActivationKey,
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': gen_alphanumeric(),
'organization-id': org['id'],
},
)
# step 2.15: Add the products to the activation key
subscription_list = Subscription.with_user(user['login'], user['password']).list(
{'organization-id': org['id']}, per_page=False
)
for subscription in subscription_list:
if subscription['name'] == DEFAULT_SUBSCRIPTION_NAME:
ActivationKey.with_user(user['login'], user['password']).add_subscription(
{
'id': activation_key['id'],
'quantity': 1,
'subscription-id': subscription['id'],
}
)
# step 2.15.1: Enable product content
if fake_manifest_is_set:
ActivationKey.with_user(user['login'], user['password']).content_override(
{
'content-label': AK_CONTENT_LABEL,
'id': activation_key['id'],
'organization-id': org['id'],
'value': '1',
}
)
# BONUS: Create a content host and associate it with promoted
# content view and last lifecycle where it exists
content_host_name = gen_alphanumeric()
content_host = Host.with_user(user['login'], user['password']).subscription_register(
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': content_host_name,
'organization-id': org['id'],
}
)
content_host = Host.with_user(user['login'], user['password']).info({'id': content_host['id']})
# check that content view matches what we passed
assert content_host['content-information']['content-view']['name'] == content_view['name']
# check that lifecycle environment matches
assert (
content_host['content-information']['lifecycle-environment']['name']
== lifecycle_environment['name']
)
# step 2.16: Create a new libvirt compute resource
_create(
user,
ComputeResource,
{
'name': gen_alphanumeric(),
'provider': 'Libvirt',
'url': f'qemu+ssh://root@{settings.compute_resources.libvirt_hostname}/system',
},
)
# step 2.17: Create a new subnet
subnet = _create(
user,
Subnet,
{
'name': gen_alphanumeric(),
'network': gen_ipaddr(ip3=True),
'mask': '255.255.255.0',
},
)
# step 2.18: Create a new domain
domain = _create(user, Domain, {'name': gen_alphanumeric()})
# step 2.19: Create a new hostgroup and associate previous entities to it
host_group = _create(
user,
HostGroup,
{'domain-id': domain['id'], 'name': gen_alphanumeric(), 'subnet-id': subnet['id']},
)
HostGroup.with_user(user['login'], user['password']).update(
{
'id': host_group['id'],
'organization-ids': org['id'],
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
}
)
# step 2.20: Provision a client
ClientProvisioningMixin().client_provisioning(activation_key['name'], org['label'])
def _create(user, entity, attrs):
"""Creates a Foreman entity and returns it.
:param dict user: A python dictionary representing a User
:param object entity: A valid CLI entity.
:param dict attrs: A python dictionary with attributes to use when
creating entity.
:return: A ``dict`` representing the Foreman entity.
:rtype: dict
"""
# Create new entity as new user
return entity.with_user(user['login'], user['password']).create(attrs)
| gpl-3.0 | 8,062,769,473,121,014,000 | 32.957895 | 99 | 0.629572 | false |
icoz/pysymo | config.py | 1 | 1684 | # -*- coding: utf-8 -*-
import sys
import os
__author__ = 'ilya-il'
# ==================================================
# PROGRAM CONFIG SECTION. DO NOT EDIT!
# ==================================================
# WTF forms
CSRF_ENABLED = True
SECRET_KEY = 'sifdjncs-dcqodicnpdscn[osncpas#vaidcjnsajcacbqisbccsbab-cdsacvalsdcb!alsjdbafdba'
# priority list
# WARNING! do not change item position in list
# and do not change list type 'list' :)
MSG_PRIORITY_LIST = ['emerg', 'alert', 'crit', 'err', 'warn', 'notice', 'info', 'debug']
# datetime format for search form
DATETIME_FORMAT = '%d.%m.%Y %H:%M:%S'
# pysymo version
PYSYMO_VERSION = 0.2
# log file
if sys.platform == 'win32':
basedir = os.path.abspath(os.path.dirname(__file__))
PYSYMO_LOG = os.path.join(basedir, 'python.log')
else:
PYSYMO_LOG = os.environ.get('PYSYMO_LOG') or '/var/log/pysymo/python.log'
# L10n
LANGUAGES = {
'en': 'English',
'ru': 'Russian'
}
# ==================================================
# USER EDITABLE SECTION
# ==================================================
# watch mode interval in seconds
WATCH_MODE_REFRESH_INTERVAL = 30
# allow registration (only for plain auth)
REGISTRATION_ENABLED = True
# Auth type - plain, ldap
AUTH_TYPE = 'plain'
# LDAP
LDAP_SERVER = os.environ.get('PYSYMO_LDAP_SERVER') or 'ldap://[ldap_server]'
LDAP_SEARCH_BASE = os.environ.get('PYSYMO_LDAP_BASE') or '[organisation]'
LDAP_SERVICE_USER = os.environ.get('PYSYMO_LDAP_USER') or '[service_user_dn]'
LDAP_SERVICE_PASSWORD = os.environ.get('PYSYMO_LDAP_PASSWORD') or '[password]'
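# illustrative example (placeholder values): these LDAP settings can be
# overridden via environment variables before starting the app, e.g.
#   export PYSYMO_LDAP_SERVER='ldap://ldap.example.org'
#   export PYSYMO_LDAP_BASE='dc=example,dc=org'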
# MEDB - message explanation database
MEDB_ENABLED = True
# Use fully qualified domain names (FQDN)
USE_FQDN = True
| gpl-2.0 | 3,612,526,265,983,320,600 | 25.730159 | 95 | 0.595606 | false |
MrJohz/K-Eight | ircutils/client.py | 1 | 13416 | """ This module provides a direct client interface for managing an IRC
connection. If you are trying to build a bot, :class:`ircutils.bot.SimpleBot`
inherits from :class:`SimpleClient` so it has the methods listed below.
"""
from __future__ import absolute_import
import collections
import pprint
from . import connection
from . import ctcp
from . import events
from . import format
from . import protocol
class SimpleClient(object):
""" SimpleClient is designed to provide a high level of abstraction
    of the IRC protocol. Its methods are structured in a way that allows
you to often bypass the need to send raw IRC commands. By default,
``auto_handle`` is set to ``True`` and allows the client to handle the
following:
* Client nickname changes
* Client channel tracking
* CTCP version requests
"""
software = "http://dev.guardedcode.com/projects/ircutils/"
version = (0,1,3)
custom_listeners = {}
def __init__(self, nick, real_name="A Python IRC Bot by Johz", mode="+B", auto_handle=True):
self.nickname = nick
self.user = nick
self.real_name = real_name
self.filter_formatting = True
self.channels = collections.defaultdict(protocol.Channel)
self.events = events.EventDispatcher()
self._prev_nickname = None
self._mode = mode
self._register_default_listeners()
if auto_handle:
self._add_built_in_handlers()
def __getitem__(self, name):
return self.events[name]
def __setitem__(self, name, value):
self.register_listener(name, value)
def _register_default_listeners(self):
""" Registers the default listeners to the names listed in events. """
# Connection events
for name in events.connection:
self.events.register_listener(name, events.connection[name]())
# Standard events
for name in events.standard:
self.events.register_listener(name, events.standard[name]())
# Message events
for name in events.messages:
self.events.register_listener(name, events.messages[name]())
# CTCP events
for name in events.ctcp:
self.events.register_listener(name, events.ctcp[name]())
# RPL_ events
for name in events.replies:
self.events.register_listener(name, events.replies[name]())
# Custom listeners
for name in self.custom_listeners:
self.events.register_listener(name, self.custom_listeners[name])
def _add_built_in_handlers(self):
""" Adds basic client handlers.
These handlers are bound to events that affect the data the the
client handles. It is required to have these in order to keep
track of things like client nick changes, joined channels,
and channel user lists.
"""
self.events["any"].add_handler(_update_client_info)
self.events["name_reply"].add_handler(_set_channel_names)
self.events["ctcp_version"].add_handler(_reply_to_ctcp_version)
self.events["part"].add_handler(_remove_channel_user_on_part)
self.events["quit"].add_handler(_remove_channel_user_on_quit)
self.events["join"].add_handler(_add_channel_user)
def _dispatch_event(self, prefix, command, params):
""" Given the parameters, dispatch an event.
After first building an event, this method sends the event(s) to the
primary event dispatcher.
This replaces :func:`connection.Connection.handle_line`
"""
try:
self._pending_events
except AttributeError:
self._pending_events = []
# TODO: Event parsing doesn't belong here.
if command in ["PRIVMSG", "NOTICE"]:
event = events.MessageEvent(prefix, command, params)
message_data = event.params[-1]
message_data = ctcp.low_level_dequote(message_data)
message_data, ctcp_requests = ctcp.extract(event.params[-1])
if self.filter_formatting:
message_data = format.filter(message_data)
if message_data.strip() != "":
event.message = message_data
self._pending_events.append(event)
for command, params in ctcp_requests:
ctcp_event = events.CTCPEvent()
ctcp_event.command = "CTCP_%s" % command
ctcp_event.params = params
ctcp_event.source = event.source
ctcp_event.target = event.target
self._pending_events.append(ctcp_event)
else:
self._pending_events.append(events.StandardEvent(prefix, command, params))
while self._pending_events:
event = self._pending_events.pop(0)
self.events.dispatch(self, event)
def connect(self, host, port=None, channel=None, use_ssl=False,
password=None):
""" Connect to an IRC server. """
self.conn = connection.Connection()
self.conn.handle_line = self._dispatch_event
self.conn.connect(host, port, use_ssl, password)
self.conn.execute("USER", self.user, self._mode, "*",
trailing=self.real_name)
self.conn.execute("NICK", self.nickname)
self.conn.handle_connect = self._handle_connect
self.conn.handle_close = self._handle_disconnect
if channel is not None:
# Builds a handler on-the-fly for joining init channels
if isinstance(channel, basestring):
channels = [channel]
else:
channels = channel
def _auto_joiner(client, event):
for channel in channels:
client.join_channel(channel)
self.events["welcome"].add_handler(_auto_joiner)
def is_connected(self):
return self.conn.connected
def _handle_connect(self):
connection.Connection.handle_connect(self.conn)
event = events.ConnectionEvent("CONN_CONNECT")
self.events.dispatch(self, event)
def _handle_disconnect(self):
connection.Connection.handle_close(self.conn)
event = events.ConnectionEvent("CONN_DISCONNECT")
self.events.dispatch(self, event)
def register_listener(self, event_name, listener):
""" Registers an event listener for a given event name.
In essence, this binds the event name to the listener and simply
provides an easier way to reference the listener.
::
client.register_listener("event_name", MyListener())
"""
self.events.register_listener(event_name, listener)
def identify(self, ns_password):
""" Identify yourself with the NickServ service on IRC.
This assumes that NickServ is present on the server.
"""
self.send_message("NickServ", "IDENTIFY {0}".format(ns_password))
def join_channel(self, channel, key=None):
""" Join the specified channel. Optionally, provide a key to the channel
if it requires one.
::
client.join_channel("#channel_name")
client.join_channel("#channel_name", "channelkeyhere")
"""
if channel == "0":
self.channels = []
self.execute("JOIN", "0")
else:
if key is not None:
params = [channel, key]
else:
params = [channel]
self.execute("JOIN", *params)
def part_channel(self, channel, message=None):
""" Leave the specified channel.
You may provide a message that shows up during departure.
"""
self.execute("PART", channel, trailing=message)
def send_message(self, target, message, to_service=False):
""" Sends a message to the specified target.
If it is a service, it uses SQUERY instead.
"""
message = ctcp.low_level_quote(message)
if to_service:
self.execute("SQUERY", target, message)
else:
self.execute("PRIVMSG", target, trailing=message)
def send_notice(self, target, message):
""" Sends a NOTICE to the specified target.
"""
message = ctcp.low_level_quote(message)
self.execute("NOTICE", target, trailing=message)
def send_ctcp(self, target, command, params=None):
""" Sends a CTCP (Client-to-Client-Protocol) message to the target.
"""
if params is not None:
params.insert(0, command)
self.send_message(target, ctcp.tag(" ".join(params)))
else:
self.send_message(target, ctcp.tag(command))
def send_ctcp_reply(self, target, command, params=None):
""" Sends a CTCP reply message to the target.
This differs from send_ctcp() because it uses NOTICE instead, as
specified by the CTCP documentation.
"""
if params is not None:
params.insert(0, command)
self.send_notice(target, ctcp.tag(" ".join(params)))
else:
self.send_notice(target, ctcp.tag(command))
def send_action(self, target, action_message):
""" Perform an "action". This is the same as when a person uses the
``/me is jumping up and down!`` command in their IRC client.
"""
self.send_ctcp(target, "ACTION", [action_message])
def set_nickname(self, nickname):
""" Attempts to set the nickname for the client. """
self._prev_nickname = self.nickname
self.execute("NICK", nickname)
def disconnect(self, message=None):
""" Disconnects from the IRC server.
If `message` is set, it is provided as a departing message.
Example::
client.disconnect("Goodbye cruel world!")
"""
self.execute("QUIT", trailing=message)
self.channels = []
self.conn.close_when_done()
def start(self):
""" Begin the client.
If you wish to run multiple clients at the same time, be sure to
use ``ircutils.start_all()`` instead.
"""
self.conn.start()
def execute(self, command, *args, **kwargs):
""" Execute an IRC command on the server.
Example::
self.execute("PRIVMSG", channel, trailing="Hello, world!")
"""
command, params = self.conn.execute(command, *args, **kwargs)
# Some less verbose aliases
join = join_channel
part = part_channel
notice = send_notice
action = send_action
quit = disconnect
# TODO: UPDATE EVERYTHING HERE.
def _reply_to_ctcp_version(client, event):
version_info = "IRCUtils:%s:Python" % ".".join(map(str, client.version))
client.send_ctcp_reply(event.source, "VERSION", [version_info])
def _update_client_info(client, event):
command = event.command
params = event.params
if command == "RPL_WELCOME":
if client.nickname != event.target:
client.nickname = event.target
if command == "ERR_ERRONEUSNICKNAME":
client.set_nickname(protocol.filter_nick(client.nickname))
elif command == "ERR_NICKNAMEINUSE":
client.set_nickname(client.nickname + "_")
elif command == "ERR_UNAVAILRESOURCE":
if not protocol.is_channel(event.params[0]):
client.nickname = client._prev_nickname
elif command == "NICK" and event.source == client.nickname:
client.nickname = event.target
if command in ["ERR_INVITEONLYCHAN", "ERR_CHANNELISFULL", "ERR_BANNEDFROMCHAN",
"ERR_BADCHANNELKEY", "ERR_TOOMANYCHANNELS", "ERR_NOSUCHCHANNEL"
"ERR_BADCHANMASK"]:
channel_name = params[0].lower()
if channel_name in client.channels:
del client.channels[channel_name]
elif command == "ERR_UNAVAILRESOURCE":
channel_name = params[0].lower()
if protocol.is_channel(channel_name) and channel_name in client.channels:
del client.channels[channel_name]
def _set_channel_names(client, name_event):
channel_name = name_event.channel.lower()
client.channels[channel_name].name = channel_name
client.channels[channel_name].user_list = name_event.name_list
def _remove_channel_user_on_part(client, event):
channel = event.target.lower()
if event.source == client.nickname:
del client.channels[channel]
elif event.source in client.channels[channel].user_list:
client.channels[channel].user_list.remove(event.source)
def _remove_channel_user_on_quit(client, event):
# TODO: This solution is slow. There might be a better one.
for channel in client.channels:
if event.source in client.channels[channel].user_list:
client.channels[channel].user_list.remove(event.source)
def _add_channel_user(client, event):
channel = event.target.lower()
    client.channels[channel].user_list.append(event.source)
| bsd-2-clause | 5,246,288,881,876,310,000 | 34.401055 | 96 | 0.599583 | false |
akrherz/iem | scripts/climodat/precip_days.py | 1 | 1747 | """
Generate a map of the number of days with measurable precipitation
"""
import sys
import datetime
from pyiem.plot import MapPlot
from pyiem.network import Table as NetworkTable
from pyiem.util import get_dbconn
import psycopg2.extras
def runYear(year):
"""Do as I say"""
# Grab the data
now = datetime.datetime.now()
nt = NetworkTable("IACLIMATE")
nt.sts["IA0200"]["lon"] = -93.4
nt.sts["IA5992"]["lat"] = 41.65
pgconn = get_dbconn("coop", user="nobody")
ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
lats = []
lons = []
vals = []
labels = []
ccursor.execute(
"""
SELECT station,
sum(case when precip > 0.009 then 1 else 0 end) as days, max(day)
from alldata_ia WHERE year = %s and substr(station,3,1) != 'C'
and station != 'IA0000' GROUP by station
""",
(year,),
)
for row in ccursor:
sid = row["station"].upper()
if sid not in nt.sts:
continue
labels.append(sid[2:])
lats.append(nt.sts[sid]["lat"])
lons.append(nt.sts[sid]["lon"])
vals.append(row["days"])
maxday = row["max"]
mp = MapPlot(
title="Days with Measurable Precipitation (%s)" % (year,),
subtitle="Map valid January 1 - %s" % (maxday.strftime("%b %d")),
axisbg="white",
)
mp.plot_values(
lons,
lats,
vals,
fmt="%.0f",
labels=labels,
labeltextsize=8,
labelcolor="tan",
)
mp.drawcounties()
pqstr = "plot m %s bogus %s/summary/precip_days.png png" % (
now.strftime("%Y%m%d%H%M"),
year,
)
mp.postprocess(pqstr=pqstr)
if __name__ == "__main__":
runYear(sys.argv[1])
| mit | 4,895,807,264,595,162,000 | 24.318841 | 73 | 0.557527 | false |
SurfasJones/icecream-info | icecream/lib/python2.7/site-packages/model_utils/choices.py | 1 | 4990 | from __future__ import unicode_literals
class Choices(object):
"""
A class to encapsulate handy functionality for lists of choices
for a Django model field.
Each argument to ``Choices`` is a choice, represented as either a
string, a two-tuple, or a three-tuple.
If a single string is provided, that string is used as the
database representation of the choice as well as the
human-readable presentation.
If a two-tuple is provided, the first item is used as the database
representation and the second the human-readable presentation.
If a triple is provided, the first item is the database
representation, the second a valid Python identifier that can be
used as a readable label in code, and the third the human-readable
presentation. This is most useful when the database representation
must sacrifice readability for some reason: to achieve a specific
ordering, to use an integer rather than a character field, etc.
Regardless of what representation of each choice is originally
given, when iterated over or indexed into, a ``Choices`` object
behaves as the standard Django choices list of two-tuples.
If the triple form is used, the Python identifier names can be
accessed as attributes on the ``Choices`` object, returning the
database representation. (If the single or two-tuple forms are
used and the database representation happens to be a valid Python
identifier, the database representation itself is available as an
attribute on the ``Choices`` object, returning itself.)
Option groups can also be used with ``Choices``; in that case each
argument is a tuple consisting of the option group name and a list
of options, where each option in the list is either a string, a
two-tuple, or a triple as outlined above.
"""
def __init__(self, *choices):
# list of choices expanded to triples - can include optgroups
self._triples = []
# list of choices as (db, human-readable) - can include optgroups
self._doubles = []
# dictionary mapping Python identifier to db representation
self._mapping = {}
# set of db representations
self._db_values = set()
self._process(choices)
def _store(self, triple, triple_collector, double_collector):
self._mapping[triple[1]] = triple[0]
self._db_values.add(triple[0])
triple_collector.append(triple)
double_collector.append((triple[0], triple[2]))
def _process(self, choices, triple_collector=None, double_collector=None):
if triple_collector is None:
triple_collector = self._triples
if double_collector is None:
double_collector = self._doubles
store = lambda c: self._store(c, triple_collector, double_collector)
for choice in choices:
if isinstance(choice, (list, tuple)):
if len(choice) == 3:
store(choice)
elif len(choice) == 2:
if isinstance(choice[1], (list, tuple)):
# option group
group_name = choice[0]
subchoices = choice[1]
tc = []
triple_collector.append((group_name, tc))
dc = []
double_collector.append((group_name, dc))
self._process(subchoices, tc, dc)
else:
store((choice[0], choice[0], choice[1]))
else:
raise ValueError(
"Choices can't take a list of length %s, only 2 or 3"
% len(choice)
)
else:
store((choice, choice, choice))
def __len__(self):
return len(self._doubles)
def __iter__(self):
return iter(self._doubles)
def __getattr__(self, attname):
try:
return self._mapping[attname]
except KeyError:
raise AttributeError(attname)
def __getitem__(self, index):
return self._doubles[index]
def __add__(self, other):
if isinstance(other, self.__class__):
other = other._triples
else:
other = list(other)
return Choices(*(self._triples + other))
def __radd__(self, other):
# radd is never called for matching types, so we don't check here
other = list(other)
return Choices(*(other + self._triples))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._triples == other._triples
return False
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join(("%s" % repr(i) for i in self._triples))
)
def __contains__(self, item):
return item in self._db_values
| mit | -3,453,485,655,542,937,000 | 33.652778 | 78 | 0.591383 | false |
donbright/piliko | experiment/experiment2.py | 1 | 3122 | from fractions import Fraction
import sys
# this program explores patterns of pythagorean triples
# in the stern brocot tree... or more specifically,
# the stern diatomic sequence, aka the 'denominators' of the
# stern brocot sequence, aka the farey sequence denominators,
# aka 1/sqrt(2*radius) of the ford circles
# it looks for 'adjacent' triple numbers, such as 3,4,5, which all 'touch'
# if you draw the stern diatomic sequence as a tree.
#
# 5,12,13 touch, so do others.
#
def pyth():
for i in range(1,100):
for j in range(1,100):
for k in range(1,100):
if i*i+j*j==k*k: print i,j,k
print
def checkp(x,y,z):
if x*x+y*y==z*z: return True
if z*z+y*y==x*x: return True
if x*x+z*z==y*y: return True
def newlayer(l1):
l2=[]
for i in range(len(l1)-1):
#newnumerator = l1[i].numerator + l1[i+1].numerator
#newdenominator = l1[i].denominator + l1[i+1].denominator
#l2+=[Fraction(newnumerator,newdenominator)]
l2 += [l1[i]+l1[i+1]]
return l2
def mixlayer(l1,l2):
l3=[]
for i in range(0,len(l1)-1):
l3+=[l1[i],l2[i]]
l3 += [l1[len(l1)-1]]
return l3
def checkpl(ml):
r=[]
for i in range(0,len(ml)-2):
x=ml[i]
y=ml[i+1]
z=ml[i+2]
if checkp(x,y,z): r+=[[x,y,z]]
return r
def checkpr(nlist):
primes=[]
for n in nlist:
prime=True
for i in range(2,n):
if n % i == 0: prime=False
if prime: primes += [n]
return primes
def dopyth1():
for m in range(0,20):
for n in range(0,20):
a=m*m-n*n # note - this is red quadrance
b=2*m*n # note - this is green quadrance
c=m*m+n*n # note - this is blue quadrance
print a,b,c,checkpl([a,b,c])
def dopyth2():
for m in range(1,110):
for n in range(1,110):
for k in range(1,110):
print m,n,k,checkpl([m,n,k]),checkpr([m,n,k])
import math
def dopyth3():
for m in range(1,110000):
msq = m*m+(m-2)*(m-2)
if checkp(m,m-2,math.trunc(math.sqrt(msq))): print m,m-2,math.sqrt(msq)
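# quick sanity check (illustrative sketch, using checkp from above): the
# 'legs 1 apart' pattern noted below appears to follow the closed form
# (2k+1, 2k^2+2k, 2k^2+2k+1), e.g. k=2 gives 5,12,13
def check_legs_one_apart(kmax=7):
	for k in range(1,kmax):
		a,b,c = 2*k+1, 2*k*k+2*k, 2*k*k+2*k+1
		assert checkp(a,b,c)
		print a,b,c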
dopyth3()
sys.exit()
# pattern 1.. legs 1 apart?
# adjacent in stern diatomic network
# one per odd row
# in stern numerator, accumulate in every row!
# 3 4 5
# 5 12 13
# 7 24 25
# 9 40 41
# 11 60 61
# 13 , ,
# depth in tree = directly related to first #, formula for 2nd two order(n^2)
# pattern 2..??? legs 2 apart
#-1,0,1
#4,3,5
#8,15,17
#12,35,37
#16,63,65
#20,99,101
#24,143,145
#28,195,197
# again, order n^2 (2n, n^2+/-1, n=0,2,4,6,8,10,12,..). row = ?
# pattern 3
# legs will be 3 apart? (sqrt blue quadrance - sqrt green q = 3 )????
# or... pattern 3, legs will be 9 apart?
# 5, -4, 3
# 9, 0, 9
# 17 8 15 (also 2 apart)
# 29 20 21
# 45 36 27 (also 3,4,5)
# or..... 7 apart?
# 12 5 13
# 15 8 17
# 28 21 35 (aleo 4 , 3, 5 )
# . . .
# note that pythagorean triangles with a prime leg do something weird.
#dopyth2()
#sys.exit()
#l1=[0,0] # zero
#l1=[0,1] # numerator
l1=[1,1] # denominator (stern diatomic)
prlen=1000
for j in range(0,21):
print l1[0:prlen],'...',len(l1)
nl = newlayer(l1)
ml = mixlayer(l1, nl)
l1 = ml
print nl[0:prlen], '...',len(nl)
print ml[0:prlen], '...',len(ml)
ptl = checkpl(ml)
print "pth:", ptl
#for sublist in ptl:
# print "prm:", checkpr(sublist)
print
| bsd-3-clause | 7,146,669,838,024,524,000 | 20.531034 | 77 | 0.614029 | false |
mtlynch/ndt-e2e-clientworker | client_wrapper/client_wrapper.py | 1 | 5534 | # Copyright 2016 Measurement Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import contextlib
import logging
import os
import banjo_driver
import filename
import html5_driver
import http_response
import http_server
import names
import result_encoder
import os_metadata
logger = logging.getLogger(__name__)
def main(args):
_configure_logging(args.verbose)
if args.client == names.BANJO:
with open(args.client_path) as replay_file:
replays = http_response.parse_yaml(replay_file.read())
with contextlib.closing(http_server.create_replay_server_manager(
replays, args.server)) as replay_server_manager:
replay_server_manager.start()
logger.info('replay server replaying %s on port %d',
args.client_path, replay_server_manager.port)
url = 'http://localhost:%d/banjo' % replay_server_manager.port
logger.info('starting tests against %s', url)
driver = banjo_driver.BanjoDriver(args.browser, url)
_run_test_iterations(driver, args.iterations, args.output)
elif args.client == names.NDT_HTML5:
driver = html5_driver.NdtHtml5SeleniumDriver(args.browser,
args.client_url)
_run_test_iterations(driver, args.iterations, args.output)
else:
raise ValueError('unsupported NDT client: %s' % args.client)
def _configure_logging(verbose):
"""Configure the root logger for log output."""
root_logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
root_logger.addHandler(handler)
if verbose:
root_logger.setLevel(logging.INFO)
else:
root_logger.setLevel(logging.WARNING)
def _run_test_iterations(driver, iterations, output_dir):
"""Use the given client driver to run the specified number of iterations.
Given an NDT client driver, run NDT tests for the given number of
iterations. On completion of each test, save the result to disk and print
the result to the console.
Args:
driver: An NDT client driver that supports the perform_test API.
iterations: The total number of test iterations to run.
        output_dir: Directory in which to write the result file.
"""
for i in range(iterations):
logger.info('starting iteration %d...', (i + 1))
print 'starting iteration %d...' % (i + 1)
result = driver.perform_test()
result.os, result.os_version = os_metadata.get_os_metadata()
print _jsonify_result(result)
_save_result(result, output_dir)
def _save_result(result, output_dir):
"""Saves an NdtResult instance to a file in output_dir.
Serializes an NdtResult to JSON format, automatically generates a
filename based on the NdtResult metadata, then saves it to output_dir.
Args:
result: NdtResult instance to save.
        output_dir: Directory in which to write the result file.
"""
output_filename = filename.create_result_filename(result)
output_path = os.path.join(output_dir, output_filename)
with open(output_path, 'w') as output_file:
output_file.write(_jsonify_result(result))
def _jsonify_result(result):
return result_encoder.NdtResultEncoder(indent=2,
sort_keys=True).encode(result)
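# Example invocation (hypothetical paths and hostname; the flags are the ones
# defined by the argparse setup below):
#   python client_wrapper.py --client banjo --browser chrome \
#       --client_path replays.yaml --server ndt.example.net \
#       --output /tmp/results --iterations 3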
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='NDT E2E Testing Client Wrapper',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--client',
help='NDT client implementation to run',
choices=(names.NDT_HTML5, names.BANJO),
required=True)
parser.add_argument('--browser',
help='Browser to run under (for browser-based client)',
choices=('chrome', 'firefox', 'safari', 'edge'))
parser.add_argument('--client_path',
help=('Path to client files. Depending on the type of '
'client, these can be replay files, static HTML '
'files (not implemented), or a client binary '
'(not implemented)'))
parser.add_argument('--client_url',
help='URL of NDT client (for server-hosted clients)')
parser.add_argument('--server', help='FQDN of NDT server to test against')
parser.add_argument('--output', help='Directory in which to write output')
parser.add_argument('-v',
'--verbose',
action='store_true',
help='Use verbose logging')
parser.add_argument('--iterations',
help='Number of iterations to run',
type=int,
default=1)
main(parser.parse_args())
| apache-2.0 | -7,116,811,765,872,529,000 | 39.101449 | 79 | 0.630466 | false |
Jucyio/Jucy | web/github_mixins.py | 1 | 13786 | import json
import urlparse
kwargs_issues_filters = {
'duplicates': { 'state': 'closed', 'label': 'duplicate' },
'rejected': { 'state': 'closed', 'label': 'rejected' },
'done': { 'state': 'closed', 'label': '-rejected,duplicate' },
'ready': { 'state': 'open', 'label': 'ready' },
'new': { 'state': 'open', 'label': '-ready' },
}
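# each entry above maps directly onto the q syntax used by
# GithubMixin.search_issues below, e.g. 'duplicates' searches with
# q='repo:<owner/repo> state:closed label:duplicate'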
class GithubException(Exception):
def __init__(self, status_code, data):
self.data = data
self.status = status_code
def __str__(self):
return json.dumps(self.data)
class GithubMixin(object):
def _wrap_error(self, expected_status, status_code, data):
""" Wraps Github API errors
Args:
expected_status (int): HTTP status code expected for the reply
data (dict): The data returned by the request
Function will raise a GithubException if the status_code isn't the same as expected
"""
if status_code != expected_status:
raise GithubException(status_code, data)
return data
def get_repos(self, *args, **kwargs):
""" Return all repositories available to the user
Github Reference:
path: /user/repos/
method: GET
reference: https://developer.github.com/v3/repos/#list-your-repositories
Args:
*args and **kwargs are passed as GET parameter to the request constructor
see available parameters in the Github API reference
"""
status_code, data = self.gh.user.repos.get(*args, **kwargs)
return self._wrap_error(200, status_code, data)
def get_paginated_repos(self, pagesize=200):
data = self.get_repos(per_page=pagesize)
headers = dict(self.gh.getheaders())
last_page = None
if 'link' in headers:
links = headers['link'].split(',')
for link in links:
content = link.strip().split('; ')
if content[1].strip() == 'rel="last"':
addr = content[0][1:-1]
query = urlparse.parse_qs(urlparse.urlparse(addr).query)
last_page = query['page'][0]
if last_page is not None:
for page in range(2, int(last_page) + 1):
print page
data = data + self.get_repos(per_page=pagesize, page=page)
return data
def get_user_repos(self, username):
""" Return all repositories available to the specified user
Github Reference:
path: /users/:username/repos
method: GET
reference: https://developer.github.com/v3/repos/#list-user-repositories
Args:
username (str) : Github username
"""
status_code, data = self.gh.users[username].repos.get()
return self._wrap_error(200, status_code, data)
def repo(self, username, repo):
""" Return a repository
Github Reference:
path: /repos/:owner/:repo
method: GET
reference: https://developer.github.com/v3/repos/#get
Args:
username (str) : Github username
repo (str) : Github repository name
"""
status_code, data = self.gh.repos[username][repo].get()
return self._wrap_error(200, status_code, data)
def is_collaborator_on_repo(self, owner, repo, username):
""" Return True is the user is collaborator for the specified repository, else False.
Github Reference:
path: /repos/:owner/:repo/collaborators/:username
method: GET
reference: https://developer.github.com/v3/repos/collaborators/#check-if-a-user-is-a-collaborator
Args:
owner (str) : Github username
repo (str) : Github repository name
"""
status_code, data = self.gh.repos[owner][repo].collaborators[username].get()
if status_code == 404:
return False
elif status_code == 204:
return True
else:
raise GithubException(status_code, data)
def search_issues(self, *args, **kwargs):
""" Do an issue search
Github Reference:
path: /search/issues
method: GET
reference: https://developer.github.com/v3/search/#search-issues
Args:
**kwargs are passed as search pattern according to the q syntax specified in the API reference.
For example, search_issues(state='open', label='bug') will search with q=state:open label:bug.
Negation for a pattern can be obtained by prefixing a value with '-':
Example: search_issues(label='-bug') will search with q=-label:bug
"""
q = ''
for key, value in kwargs.iteritems():
remove = value.startswith('-')
if remove:
value = value[1:]
if ',' in value:
values = value.split(',')
else:
values = [value]
for value in values:
q += ' ' + ('-' if remove else '') + '{}:{}'.format(key, value)
print q
status_code, data = self.gh.search.issues.get(q=q)
return self._wrap_error(200, status_code, data)
def get_issues(self, full_repository_name, issues_to_get=['ready'], context=None):
""" Return issues for the given repository.
Args:
full_repository_name (str) : Github repository full name
issues_to_get (array of str) : Type of issues to get (see list below)
            context (dict) : A dictionary that will be updated with the issues retrieved
        It will split the result into a dictionary, according to the following principles:
- If an issue is closed, and the duplicate label is set: 'duplicate'
- If an issue is closed, and the rejected label is set: 'rejected'
- If an issue is closed without the aforementioned labels: 'done'
- If an issue is open, with a ready label set: 'ready'
- If an issue is open without the ready label: 'new'
If a context object is given, it will populate it, else it will return a dictionary
"""
if not context:
context = {}
context['issues'] = []
for issue_type in issues_to_get:
try:
issues = self.search_issues(repo=full_repository_name, **kwargs_issues_filters[issue_type])
except KeyError:
continue
for issue in issues['items']:
issue['type'] = issue_type
context[issue_type] = issues
context['issues'] += issues['items']
return context
def get_comments(self, owner, repository, issue):
""" Return comments for a given issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/comments
method: GET
reference: https://developer.github.com/v3/repos/comments/#list-commit-comments-for-a-repository
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].comments.get()
return self._wrap_error(200, status_code, data)
def add_comment(self, owner, repository, issue, body):
""" Create a comment in the given issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/comments
method: POST
reference: https://developer.github.com/v3/issues/comments/#create-a-comment
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
body (str) : Comment content
"""
payload = {'body': body}
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].comments.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_hook(self, owner, repository, name, config, events):
""" Create a hook for the given repository
Github Reference:
path: /repos/:owner/:repo/hooks
method: POST
reference: https://developer.github.com/v3/repos/hooks/#create-a-hook
Args:
owner (str) : Github username
repository (str) : Github repository
name (str) : Webhook name
config (dict) : config object as specified in the Github API reference
events (list) : events to register to as specified in the Github API reference
"""
payload = {'config': config, 'events': events, 'name': name}
status_code, data = self.gh.repos[owner][repository].hooks.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_label(self, owner, repository, name, color):
""" Create a new label
Github Reference:
path: /repos/:owner/:repo/labels
method: POST
reference: https://developer.github.com/v3/issues/labels/#create-a-label
Args:
owner (str) : Github username
repository (str) : Github repository
name (str) : Label name
color (str) : Label color
"""
payload = {'name': name, 'color': color}
status_code, data = self.gh.repos[owner][repository].labels.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_issue(self, owner, repository, title, content, labels):
""" Create an issue
Github Reference:
path: /repos/:owner/:repo/issues
method: POST
reference: https://developer.github.com/v3/issues/#create-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
title (str) : Issue title
content (str) : Issue body
label : Issue label
"""
payload = {'title': title, 'body': content, 'labels': labels}
status_code, data = self.gh.repos[owner][repository].issues.post(body=payload)
return self._wrap_error(201, status_code, data)
def remove_label(self, owner, repository, issue, label):
""" Remove a label from an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels/:name
method: DELETE
reference: https://developer.github.com/v3/issues/labels/#remove-a-label-from-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
label (str) : Label
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels[label].delete()
return self._wrap_error(200, status_code, data)
def replace_labels(self, owner, repository, issue, labels):
""" Replace labels from an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels
method: PUT
reference: https://developer.github.com/v3/issues/labels/#replace-all-labels-for-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
labels (str list) : Labels
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels.put(body=labels)
return self._wrap_error(200, status_code, data)
def get_issue(self, owner, repository, issue):
""" get a single issue
github reference:
path: /repos/:owner/:repo/issues/:number
method: GET
reference: https://developer.github.com/v3/issues/#get-a-single-issue
args:
owner (str) : github username
repository (str) : github repository
issue (int) : issue number
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].get()
return self._wrap_error(200, status_code, data)
def add_labels(self, owner, repository, issue, labels):
""" Add labels to an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels
method: POST
            reference: https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
labels (str list) : Labels
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels.post(body=labels)
return self._wrap_error(200, status_code, data)
def add_as_collaborator_on_repo(self, owner, repository, username):
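        """ Add a user as collaborator on the given repository
        Github Reference:
            path: /repos/:owner/:repo/collaborators/:username
            method: PUT
            reference: https://developer.github.com/v3/repos/collaborators/#add-user-as-a-collaborator
        Args:
            owner (str) : Github username
            repository (str) : Github repository
            username (str) : Github username to add as collaborator
        """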
status_code, data = self.gh.repos[owner][repository].collaborators[username].put()
try:
return self._wrap_error(204, status_code, data)
        except GithubException:
            pass
def edit_issue(self, owner, repository, issue, payload):
""" Edit an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number
method: PATCH
reference: https://developer.github.com/v3/issues/#edit-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
payload (dict) : A dict containing the payload according to the API documentation
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].patch(body=payload)
return self._wrap_error(200, status_code, data)
| apache-2.0 | -6,902,052,481,229,804,000 | 35.762667 | 109 | 0.58204 | false |
cikelengfeng/HTTPIDL | Sources/Compiler/antlr4/tree/RuleTagToken.py | 2 | 1972 | #
# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
#
# A {@link Token} object representing an entire subtree matched by a parser
# rule; e.g., {@code <expr>}. These tokens are created for {@link TagChunk}
# chunks where the tag corresponds to a parser rule.
#
from antlr4.Token import Token
class RuleTagToken(Token):
#
# Constructs a new instance of {@link RuleTagToken} with the specified rule
# name, bypass token type, and label.
#
# @param ruleName The name of the parser rule this rule tag matches.
# @param bypassTokenType The bypass token type assigned to the parser rule.
# @param label The label associated with the rule tag, or {@code null} if
# the rule tag is unlabeled.
#
# @exception IllegalArgumentException if {@code ruleName} is {@code null}
# or empty.
def __init__(self, ruleName, bypassTokenType, label=None):
if ruleName is None or len(ruleName)==0:
raise Exception("ruleName cannot be null or empty.")
self.source = None
self.type = bypassTokenType # token type of the token
self.channel = Token.DEFAULT_CHANNEL # The parser ignores everything not on DEFAULT_CHANNEL
self.start = -1 # optional; return -1 if not implemented.
self.stop = -1 # optional; return -1 if not implemented.
self.tokenIndex = -1 # from 0..n-1 of the token object in the input stream
self.line = 0 # line=1..n of the 1st character
self.column = -1 # beginning of the line at which it occurs, 0..n-1
self.label = label
self._text = self.getText() # text of the token.
self.ruleName = ruleName
def getText(self):
if self.label is None:
return "<" + self.ruleName + ">"
else:
return "<" + self.label + ":" + self.ruleName + ">"
| mit | 460,632,960,464,036,600 | 39.244898 | 99 | 0.653144 | false |
kieranrimmer/vec_hsqc | vec_hsqc/post_proc.py | 1 | 2620 |
from __future__ import division
import numpy as np
import os
import vec_hsqc
import nmrglue as ng
class DataOut( object ):
def __init__(self):
self.master_array = None
def generate_master_peak_list( self, y, legend_array, cs_array, legend_columns = [0,5], cs_columns = [0,1] ):
predind = np.nonzero( y == 1 )[0]
self.master_array = np.hstack( ( legend_array[ np.ix_( predind, legend_columns )], cs_array[ np.ix_( predind, cs_columns )] ) )
def writeall_peak_lists( self, master_array, savedir, filestump ):
for sp in np.unique( master_array[:,0] ):
specind = np.nonzero( master_array[:,0] == sp )[0]
sp_peaks = np.array( master_array[ np.ix_( specind ) ][:, 1:], dtype = float )
            sp_peaks = sp_peaks[ sp_peaks[:,0].argsort() ] # sorts by first col, i.e. residue number
self.peak_list_out( savedir, filestump, sp, sp_peaks )
def peak_list_out( self, savedir, filestump, sp_name, sp_peaks ):
basic_list = [ [ str(int(c[0])), round(c[1], 3), round(c[2], 4)] for c in [list(b) for b in sp_peaks ]]
plist_as_string = ''
for entry in basic_list:
plist_as_string += entry[0].rjust(4) + 'N-H\t' + str(entry[1]) + '\t' + str(entry[2]) + '\n'
with open( os.path.join( savedir, '%s_predicted_%s.list' %( filestump, sp_name ) ), 'wb' ) as f:
f.write( plist_as_string )
class SimpleViewAssigned( object ):
def readin_spectrum_sparky( self, spectrumpath ):
"""Reads and processes Sparky 2D spectrum using nmrglue
"""
self.dic, self.data = ng.sparky.read( spectrumpath )
self.avgheight = np.mean(self.data)
self.thresh_height = np.mean(np.abs(self.data))
udic = ng.sparky.guess_udic( self.dic, self.data )
x, y = np.shape( self.data )
self.uc0 = ng.sparky.make_uc( self.dic, self.data, dim=0)
self.uc1 = ng.sparky.make_uc( self.dic, self.data, dim=1)
self.w0limits = [ self.uc0.ppm(0), self.uc0.ppm( self.data.shape[0] ) ]
self.w1limits = [ self.uc1.ppm(0), self.uc1.ppm( self.data.shape[1] ) ]
# the below enables conversion of peak linewidths from datapoint units into Hz
self.pt_2_Hz0 = self.dic['w1']['spectral_width'] / (self.dic['w1']['npoints'] - 1 )
self.pt_2_Hz1 = self.dic['w2']['spectral_width'] / (self.dic['w2']['npoints'] - 1 )
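        # e.g. a linewidth of lw points along w1 corresponds to roughly
        # lw * self.pt_2_Hz0 Hz (and lw * self.pt_2_Hz1 for w2)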
self.w0size = self.dic['w1']['size']
self.w1size = self.dic['w2']['size']
def quick_view( self, peaklistpath, savedir, title ):
with open( peaklistpath, 'rb') as f:
peaklist = [b.strip().split() for b in f]
vec_hsqc.view_data.plot_2D_predictions_assigned( self.data, peaklist, self.thresh_height * 3.0, self, title, savedir )
| bsd-3-clause | -1,594,254,006,062,670,600 | 33.025974 | 129 | 0.630534 | false |
piyushkant/Spam-Filter-Machine-Learning | spamfilter/fisher.py | 1 | 2184 | #####################################################
# Copyright (c) 2012 Piyush Kant #
# See the file license.txt for copying permission #
#####################################################
import classifier
import math
class fisherclassifier(classifier.classifier):
def __init__(self, getFeatures):
classifier.classifier.__init__(self, getFeatures)
self.minimum = {}
#Pr(category|feature)
def cProb(self, feat, cat):
# The frequency of this feature in this category
fp = self.featProb(feat, cat)
if fp == 0:
return 0
# The frequency of this feature in all the categories
freqSum = sum([self.featProb(feat, c) for c in self.catList()])
# The probability is the frequency in this category divided by the overall frequency
p = fp / freqSum
return p
def fisherProb(self, item, cat):
# Multiply all the probabilities together
p = 1
features = self.getFeatures(item)
for feat in features:
p *= (self.weightedProb(feat, cat, self.cProb))
# Take the natural log and multiply by -2
fscore = -2 * math.log(p)
# Use the inverse chi2 function to get a probability
return self.invchi2(fscore, len(features) * 2)
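    # Fisher's method: under the null hypothesis, -2 * sum(ln p_i) follows a
    # chi-squared distribution with 2k degrees of freedom (k = number of
    # features), which is why fisherProb passes fscore and
    # len(features) * 2 into invchi2 below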
def invchi2(self, chi, df):
m = chi / 2.0
sum = term = math.exp(-m)
for i in range(1, df // 2):
term *= m / i
sum += term
return min(sum, 1.0)
def setMinimum(self, cat, min):
self.minimum[cat] = min
def getMinimum(self, cat):
if cat not in self.minimum:
return 0
return self.minimum[cat]
def classify(self, item, default=None):
# Loop through looking for the best result
best = default
max = 0.0
for c in self.catList():
p = self.fisherProb(item, c)
# Make sure it exceeds its minimum
if p > self.getMinimum(c) and p > max:
best = c
max = p
return best | mit | -6,822,964,227,045,991,000 | 28.931507 | 92 | 0.514652 | false |
isotoma/alm.solrindex | alm/solrindex/tests/test_schema.py | 1 | 3153 |
import unittest
from zope.testing.cleanup import cleanUp
class SolrSchemaTests(unittest.TestCase):
def setUp(self):
cleanUp()
def tearDown(self):
cleanUp()
def _getTargetClass(self):
from alm.solrindex.schema import SolrSchema
return SolrSchema
def _makeOne(self, solr_uri=None):
return self._getTargetClass()(solr_uri=solr_uri)
def test_verifyImplements(self):
from zope.interface.verify import verifyClass
from alm.solrindex.interfaces import ISolrSchema
verifyClass(ISolrSchema, self._getTargetClass())
def test_verifyProvides(self):
from zope.interface.verify import verifyObject
from alm.solrindex.interfaces import ISolrSchema
verifyObject(ISolrSchema, self._makeOne())
def test_download_from(self):
import os
import shutil
import tempfile
schema = self._makeOne()
d = tempfile.mkdtemp()
try:
os.makedirs(os.path.join(d, 'admin', 'file'))
fn = os.path.join(d, 'admin', 'file', '?file=schema.xml')
f = open(fn, 'w')
f.write('<schema></schema>')
f.close()
solr_uri = 'file://%s' % d.replace(os.sep, '/')
f = schema.download_from(solr_uri)
content = f.read()
f.close()
finally:
shutil.rmtree(d)
self.assertEqual(content, '<schema></schema>')
def test_xml_init(self):
import os
from alm.solrindex.interfaces import ISolrFieldHandler
from zope.component import getGlobalSiteManager
getGlobalSiteManager().registerUtility(
DummyFieldHandler(), ISolrFieldHandler)
schema = self._makeOne()
fn = os.path.join(os.path.dirname(__file__), 'schema.xml')
f = open(fn, 'r')
schema.xml_init(f)
f.close()
self.assertEqual(schema.uniqueKey, 'docid')
self.assertEqual(schema.defaultSearchField, 'default')
field_names = [field.name for field in schema.fields]
self.assertEqual(field_names, [
'docid',
'Title',
'physicalPath',
'physicalDepth',
'parentPaths',
'default',
'Subject',
'Description',
'Creator',
'Date',
'SearchableText',
'Type',
'allowedRolesAndUsers',
'created',
'effective',
'expires',
'getIcon',
'getId',
'modified',
'portal_type',
'review_state',
'is_folderish',
])
self.assertEqual(schema.fields[0].java_class, 'solr.IntField')
self.assertEqual(schema.fields[0].required, True)
self.assertEqual(schema.fields[0].multiValued, False)
self.assertEqual(schema.fields[4].required, False)
self.assertEqual(schema.fields[4].multiValued, True)
class DummyFieldHandler:
pass
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(SolrSchemaTests))
return suite
| bsd-3-clause | -5,156,167,007,460,365,000 | 27.926606 | 70 | 0.575325 | false |
google-research/language | language/orqa/preprocessing/wiki_preprocessor.py | 1 | 3798 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Preprocessor class that extract creates a database of text blocks.
Each input line should have the following JSON format:
```
{
"title": "Document Tile",
"text": "This is a full document."
}
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import re
import six
import tensorflow.compat.v1 as tf
def add_int64_feature(key, values, example):
example.features.feature[key].int64_list.value.extend(values)
class Preprocessor(object):
"""Preprocessor."""
def __init__(self, sentence_splitter, max_block_length, tokenizer):
self._tokenizer = tokenizer
self._sentence_splitter = sentence_splitter
self._max_block_length = max_block_length
tf.logging.info("Max block length {}".format(self._max_block_length))
def generate_sentences(self, title, text):
"""Generate sentences in each block from text."""
title_length = len(self._tokenizer.tokenize(title))
current_token_count = 0
current_block_sentences = []
for sentence in self._sentence_splitter.tokenize(text):
num_tokens = len(self._tokenizer.tokenize(sentence))
# Hypothetical sequence [CLS] <title> [SEP] <current> <next> [SEP].
hypothetical_length = 3 + title_length + current_token_count + num_tokens
if hypothetical_length <= self._max_block_length:
current_token_count += num_tokens
current_block_sentences.append(sentence)
else:
yield current_block_sentences
current_token_count = num_tokens
current_block_sentences = []
current_block_sentences.append(sentence)
if current_block_sentences:
yield current_block_sentences
def create_example(self, title, sentences):
"""Create example."""
title_tokens = self._tokenizer.tokenize(title)
title_ids = self._tokenizer.convert_tokens_to_ids(title_tokens)
token_ids = []
sentence_starts = []
for sentence in sentences:
sentence_starts.append(len(token_ids))
sentence_tokens = self._tokenizer.tokenize(sentence)
token_ids.extend(self._tokenizer.convert_tokens_to_ids(sentence_tokens))
example = tf.train.Example()
add_int64_feature("title_ids", title_ids, example)
add_int64_feature("token_ids", token_ids, example)
add_int64_feature("sentence_starts", sentence_starts, example)
return example.SerializeToString()
def generate_block_info(self, title, text):
for sentences in self.generate_sentences(title, text):
if sentences:
block = " ".join(sentences)
example = self.create_example(title, sentences)
yield title, block, example
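# A minimal usage sketch (hypothetical splitter/tokenizer objects providing
# .tokenize(); they are not part of this module):
#   pre = Preprocessor(sentence_splitter, max_block_length=288,
#                      tokenizer=bert_tokenizer)
#   for title, block, serialized_example in pre.generate_block_info(title, text):
#     ...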
def remove_doc(title):
return re.match(r"(List of .+)|"
r"(Index of .+)|"
r"(Outline of .+)|"
r"(.*\(disambiguation\).*)", title)
def example_from_json_line(line, html_parser, preprocessor):
if not isinstance(line, six.text_type):
line = line.decode("utf-8")
data = json.loads(line)
title = data["title"]
if not remove_doc(title):
text = html_parser.unescape(data["text"])
for info in preprocessor.generate_block_info(title, text):
yield info
| apache-2.0 | -1,051,289,272,584,799,600 | 34.495327 | 79 | 0.688784 | false |
srfraser/services | lib/backend_common/backend_common/auth0.py | 1 | 5945 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''
Provide auth0 / OpenID Connect protection for API endpoints.
accept_token will take an oauth2 access_token provided by auth0 and
use the userinfo endpoint to validate it. This is because the token
info endpoint used by the Flask-OIDC accept_token wrapper has some
issues with validating tokens for certain application types.
'''
from __future__ import absolute_import
import cli_common.log
from urllib.parse import urlencode
import time
import requests
import flask
import flask_oidc
import functools
import json
import hmac
import base64
import os
logger = cli_common.log.get_logger(__name__)
auth0 = flask_oidc.OpenIDConnect()
SETTINGS_REQUIRED = (
'SECRET_KEY',
'AUTH_CLIENT_ID',
'AUTH_CLIENT_SECRET',
'AUTH_REDIRECT_URI',
'AUTH_DOMAIN',
)
def mozilla_accept_token(render_errors=True):
'''
Use this to decorate view functions that should accept OAuth2 tokens,
this will most likely apply to API functions.
Tokens are accepted as part of
* the query URL (access_token value)
* a POST form value (access_token)
* the header Authorization: Bearer <token value>
:param render_errors: Whether or not to eagerly render error objects
as JSON API responses. Set to False to pass the error object back
unmodified for later rendering.
:type render_errors: bool
Side effects: flask.g gets the 'userinfo' attribute containing the data
from the response
.. versionadded:: 1.0
'''
def wrapper(view_func):
@functools.wraps(view_func)
def decorated(*args, **kwargs):
token = None
if flask.request.headers.get('Authorization', '').startswith('Bearer'):
token = flask.request.headers['Authorization'].split(maxsplit=1)[
1].strip()
if 'access_token' in flask.request.form:
token = flask.request.form['access_token']
elif 'access_token' in flask.request.args:
token = flask.request.args['access_token']
url = auth0.client_secrets.get(
'userinfo_uri', 'https://auth.mozilla.auth0.com/userinfo')
payload = {'access_token': token}
response = requests.get(url, params=payload)
# Because auth0 returns http 200 even if the token is invalid.
if response.content == b'Unauthorized':
response_body = {'error': 'invalid_token',
'error_description': str(response.content, 'utf-8')}
if render_errors:
response_body = json.dumps(response_body)
return response_body, 401, {'WWW-Authenticate': 'Bearer'}
# store response.content for later
flask.g.userinfo = json.loads(str(response.content, 'utf-8'))
# g.oidc_id_token = token # requires a specific format
flask.g.access_token = token
return view_func(*args, **kwargs)
return decorated
return wrapper
def build_state(seed=None, size=8):
'''
Build a unique opaque value, used by Auth0
as XSRF protection, using HMAC algorithm
'''
if seed is None:
seed = os.urandom(size)
else:
assert isinstance(seed, bytes)
assert len(seed) == size
h = hmac.new(
msg=seed,
key=flask.current_app.config.get('SECRET_KEY'),
)
return base64.urlsafe_b64encode(b''.join([seed, h.digest()]))
def check_state(state, size=8):
'''
Check a previously created state value is valid
for this website
'''
data = base64.urlsafe_b64decode(state)
return hmac.compare_digest(
state.encode('utf-8'),
build_state(data[:size], size),
)
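# a hedged sanity check of the pair above (assuming a flask app context with a
# bytes SECRET_KEY configured, so hmac.new accepts it):
#   check_state(build_state().decode('utf-8'))  # -> True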
def auth0_login():
'''
    API Endpoint: Build URL to log in on the Auth0 server
'''
params = {
'audience': 'login.taskcluster.net',
'scope': 'full-user-credentials openid',
'response_type': 'code',
'client_id': flask.current_app.config.get('AUTH_CLIENT_ID'),
'redirect_uri': flask.current_app.config.get('AUTH_REDIRECT_URI'),
'state': build_state(),
}
return 'https://{}/authorize?{}'.format(
flask.current_app.config.get('AUTH_DOMAIN'),
urlencode(params),
)
def auth0_check():
'''
    Exchange auth0 login code for long-lasting tokens
access_token & id_token
'''
# Check state
state = flask.request.json.get('state')
assert state is not None, \
'Missing state in payload'
assert check_state(state), \
'Invalid state value'
code = flask.request.json.get('code')
assert code is not None, \
'Missing code in payload'
# Exchange code for tokens
url = 'https://{}/oauth/token'.format(
flask.current_app.config.get('AUTH_DOMAIN')
)
payload = {
'grant_type': 'authorization_code',
'code': code,
'client_id': flask.current_app.config.get('AUTH_CLIENT_ID'),
'client_secret': flask.current_app.config.get('AUTH_CLIENT_SECRET'),
'redirect_uri': flask.current_app.config.get('AUTH_REDIRECT_URI'),
}
auth = requests.post(url, payload)
if not auth.ok:
# Forward error
return auth.json(), auth.status_code
# Export values
data = auth.json()
return {
'expires': int(time.time()) + data['expires_in'],
'access_token': data['access_token'],
'id_token': data['id_token'],
}
def init_app(app):
for setting in SETTINGS_REQUIRED:
if app.config.get(setting) is None:
raise Exception('When using `auth0` extention you need to specify {}.'.format(setting)) # noqa
auth0.init_app(app)
return auth0
| mpl-2.0 | 6,326,112,499,239,969,000 | 30.125654 | 107 | 0.622876 | false |
JuBra/GEMEditor | GEMEditor/rw/test/test_units_rw.py | 1 | 1230 | from GEMEditor.rw.units import add_unit_definitions
from lxml.etree import Element
from GEMEditor.rw import *
class TestAddUnitsDefinition:
def test_node_addition(self):
root = Element("root")
add_unit_definitions(root)
list_of_unitdefinitions_node = root.find(sbml3_listOfUnitDefinitions)
assert list_of_unitdefinitions_node is not None
assert len(list_of_unitdefinitions_node) == 1
unit_definition_node = list_of_unitdefinitions_node.find(sbml3_unitDefinition)
assert unit_definition_node is not None
assert unit_definition_node.get("id") == "mmol_per_gDW_per_hr"
list_of_units_node = unit_definition_node.find(sbml3_listOfUnits)
assert list_of_units_node is not None
assert len(list_of_units_node) == 3
expected_values = set([("1", "mole", "1", "-3"),
("-1", "gram", "1", "0"),
("-1", "second", "3600", "0")])
found_set = set()
for child in list_of_units_node.iterfind(sbml3_unit):
found_set.add((child.get("exponent"), child.get("kind"), child.get("multiplier"), child.get("scale")))
assert expected_values == found_set | gpl-3.0 | 2,451,616,240,589,040,000 | 37.46875 | 114 | 0.615447 | false |
ceball/param | param/ipython.py | 1 | 12580 | """
Optional IPython extension for working with Parameters.
This extension offers extended but completely optional functionality
for IPython users. From within IPython, it may be loaded using:
%load_ext param.ipython
This will register the %params line magic to allow easy inspection of
all the parameters defined on a parameterized class or object:
%params <parameterized class or object>
All parameters of the class or object will be listed in the IPython
pager together with all their corresponding attributes and
docstrings. Note that the class or object to be inspected must already
exist in the active namespace.
"""
__author__ = "Jean-Luc Stevens"
import re
import textwrap
import param
# Whether to generate warnings when misformatted docstrings are found
WARN_MISFORMATTED_DOCSTRINGS = False
# ANSI color codes for the IPython pager
red = '\x1b[1;31m%s\x1b[0m'
blue = '\x1b[1;34m%s\x1b[0m'
green = '\x1b[1;32m%s\x1b[0m'
cyan = '\x1b[1;36m%s\x1b[0m'
class ParamPager(object):
"""
Callable class that displays information about the supplied
Parameterized object or class in the IPython pager.
"""
def __init__(self, metaclass=False):
"""
If metaclass is set to True, the checks for Parameterized
classes objects are disabled. This option is for use in
ParameterizedMetaclass for automatic docstring generation.
"""
# Order of the information to be listed in the table (left to right)
self.order = ['name', 'changed', 'value', 'type', 'bounds', 'mode']
self.metaclass = metaclass
def get_param_info(self, obj, include_super=True):
"""
        Get the parameter dictionary, the list of modified parameters
        and the dictionary of parameter values. If include_super is
        True, parameters are also collected from the super classes.
"""
params = dict(obj.param.objects('existing'))
if isinstance(obj,type):
changed = []
val_dict = dict((k,p.default) for (k,p) in params.items())
self_class = obj
else:
changed = [name for (name,_) in obj.param.get_param_values(onlychanged=True)]
val_dict = dict(obj.param.get_param_values())
self_class = obj.__class__
if not include_super:
params = dict((k,v) for (k,v) in params.items()
if k in self_class.__dict__)
params.pop('name') # Already displayed in the title.
return (params, val_dict, changed)
def param_docstrings(self, info, max_col_len=100, only_changed=False):
"""
        Build a string that presents all of the parameter
docstrings in a clean format (alternating red and blue for
readability).
"""
(params, val_dict, changed) = info
contents = []
displayed_params = {}
for name, p in params.items():
if only_changed and not (name in changed):
continue
displayed_params[name] = p
right_shift = max(len(name) for name in displayed_params.keys())+2
for i, name in enumerate(sorted(displayed_params)):
p = displayed_params[name]
heading = "%s: " % name
unindented = textwrap.dedent("< No docstring available >" if p.doc is None else p.doc)
if (WARN_MISFORMATTED_DOCSTRINGS
and not unindented.startswith("\n") and len(unindented.splitlines()) > 1):
param.main.warning("Multi-line docstring for %r is incorrectly formatted "
" (should start with newline)", name)
# Strip any starting newlines
while unindented.startswith("\n"):
unindented = unindented[1:]
lines = unindented.splitlines()
if len(lines) > 1:
tail = ['%s%s' % (' ' * right_shift, line) for line in lines[1:]]
all_lines = [ heading.ljust(right_shift) + lines[0]] + tail
elif len(lines) == 1:
all_lines = [ heading.ljust(right_shift) + lines[0]]
else:
all_lines = []
            if i % 2: # Alternate red and blue for docstrings
                contents.extend([red % el for el in all_lines])
            else:
                contents.extend([blue % el for el in all_lines])
return "\n".join(contents)
def _build_table(self, info, order, max_col_len=40, only_changed=False):
"""
Collect the information about parameters needed to build a
properly formatted table and then tabulate it.
"""
info_dict, bounds_dict = {}, {}
(params, val_dict, changed) = info
col_widths = dict((k,0) for k in order)
for name, p in params.items():
if only_changed and not (name in changed):
continue
constant = 'C' if p.constant else 'V'
readonly = 'RO' if p.readonly else 'RW'
allow_None = ' AN' if hasattr(p, 'allow_None') and p.allow_None else ''
mode = '%s %s%s' % (constant, readonly, allow_None)
info_dict[name] = {'name': name, 'type':p.__class__.__name__,
'mode':mode}
if hasattr(p, 'bounds'):
lbound, ubound = (None,None) if p.bounds is None else p.bounds
mark_lbound, mark_ubound = False, False
# Use soft_bounds when bounds not defined.
if hasattr(p, 'get_soft_bounds'):
soft_lbound, soft_ubound = p.get_soft_bounds()
if lbound is None and soft_lbound is not None:
lbound = soft_lbound
mark_lbound = True
if ubound is None and soft_ubound is not None:
ubound = soft_ubound
mark_ubound = True
if (lbound, ubound) != (None,None):
bounds_dict[name] = (mark_lbound, mark_ubound)
info_dict[name]['bounds'] = '(%s, %s)' % (lbound, ubound)
value = repr(val_dict[name])
if len(value) > (max_col_len - 3):
value = value[:max_col_len-3] + '...'
info_dict[name]['value'] = value
for col in info_dict[name]:
max_width = max([col_widths[col], len(info_dict[name][col])])
col_widths[col] = max_width
return self._tabulate(info_dict, col_widths, changed, order, bounds_dict)
def _tabulate(self, info_dict, col_widths, changed, order, bounds_dict):
"""
Returns the supplied information as a table suitable for
printing or paging.
info_dict: Dictionary of the parameters name, type and mode.
col_widths: Dictionary of column widths in characters
changed: List of parameters modified from their defaults.
order: The order of the table columns
        bounds_dict: Dictionary of appropriately formatted bounds
"""
contents, tail = [], []
column_set = set(k for row in info_dict.values() for k in row)
columns = [col for col in order if col in column_set]
title_row = []
# Generate the column headings
for i, col in enumerate(columns):
width = col_widths[col]+2
col = col.capitalize()
formatted = col.ljust(width) if i == 0 else col.center(width)
title_row.append(formatted)
contents.append(blue % ''.join(title_row)+"\n")
# Format the table rows
for row in sorted(info_dict):
row_list = []
info = info_dict[row]
for i,col in enumerate(columns):
width = col_widths[col]+2
val = info[col] if (col in info) else ''
formatted = val.ljust(width) if i==0 else val.center(width)
if col == 'bounds' and bounds_dict.get(row,False):
(mark_lbound, mark_ubound) = bounds_dict[row]
lval, uval = formatted.rsplit(',')
lspace, lstr = lval.rsplit('(')
ustr, uspace = uval.rsplit(')')
lbound = lspace + '('+(cyan % lstr) if mark_lbound else lval
ubound = (cyan % ustr)+')'+uspace if mark_ubound else uval
formatted = "%s,%s" % (lbound, ubound)
row_list.append(formatted)
row_text = ''.join(row_list)
if row in changed:
row_text = red % row_text
contents.append(row_text)
return '\n'.join(contents+tail)
def __call__(self, param_obj):
"""
Given a Parameterized object or class, display information
about the parameters in the IPython pager.
"""
title = None
if not self.metaclass:
parameterized_object = isinstance(param_obj, param.Parameterized)
parameterized_class = (isinstance(param_obj,type)
and issubclass(param_obj,param.Parameterized))
if not (parameterized_object or parameterized_class):
print("Object is not a Parameterized class or object.")
return
if parameterized_object:
# Only show the name if not autogenerated
class_name = param_obj.__class__.__name__
default_name = re.match('^'+class_name+'[0-9]+$', param_obj.name)
obj_name = '' if default_name else (' %r' % param_obj.name)
title = 'Parameters of %r instance%s' % (class_name, obj_name)
if title is None:
title = 'Parameters of %r' % param_obj.name
heading_line = '=' * len(title)
heading_text = "%s\n%s\n" % (title, heading_line)
param_info = self.get_param_info(param_obj, include_super=True)
if not param_info[0]:
return "%s\n%s" % ((green % heading_text), "Object has no parameters.")
table = self._build_table(param_info, self.order, max_col_len=40,
only_changed=False)
docstrings = self.param_docstrings(param_info, max_col_len=100, only_changed=False)
dflt_msg = "Parameters changed from their default values are marked in red."
top_heading = (green % heading_text)
top_heading += "\n%s" % (red % dflt_msg)
top_heading += "\n%s" % (cyan % "Soft bound values are marked in cyan.")
top_heading += '\nC/V= Constant/Variable, RO/RW = ReadOnly/ReadWrite, AN=Allow None'
heading_text = 'Parameter docstrings:'
heading_string = "%s\n%s" % (heading_text, '=' * len(heading_text))
docstring_heading = (green % heading_string)
return "%s\n\n%s\n\n%s\n\n%s" % (top_heading, table, docstring_heading, docstrings)
message = """Welcome to the param IPython extension! (https://param.holoviz.org/)"""
message += '\nAvailable magics: %params'
_loaded = False
def load_ipython_extension(ip, verbose=True):
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.core import page
@magics_class
class ParamMagics(Magics):
"""
Implements the %params line magic used to inspect the parameters
of a parameterized class or object.
"""
def __init__(self, *args, **kwargs):
super(ParamMagics, self).__init__(*args, **kwargs)
self.param_pager = ParamPager()
@line_magic
def params(self, parameter_s='', namespaces=None):
"""
The %params line magic accepts a single argument which is a
handle on the parameterized object to be inspected. If the
object can be found in the active namespace, information about
the object's parameters is displayed in the IPython pager.
Usage: %params <parameterized class or object>
"""
if parameter_s=='':
print("Please specify an object to inspect.")
return
# Beware! Uses IPython internals that may change in future...
obj = self.shell._object_find(parameter_s)
if obj.found is False:
print("Object %r not found in the namespace." % parameter_s)
return
page.page(self.param_pager(obj.obj))
if verbose: print(message)
global _loaded
if not _loaded:
_loaded = True
ip.register_magics(ParamMagics)
| bsd-3-clause | -6,385,748,612,119,651,000 | 36.891566 | 98 | 0.570032 | false |
nimbis/cmsplugin-forms-builder | tests/settings.py | 1 | 5042 | DEBUG = True
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'TEST_NAME': ':memory:',
},
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '6e-b#&0y4mbwu=)hx7a899p(k+i48(p)@e@^aal8^$pn1xqk$$'
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tests.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'cmsplugin_forms_builder.wsgi.application'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'debug': DEBUG,
'context_processors': [
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
},
},
]
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'menus',
'treebeard',
'cms',
'forms_builder.forms',
'cmsplugin_forms_builder',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
| bsd-3-clause | 4,226,599,500,157,421,000 | 30.710692 | 79 | 0.672154 | false |
Ikusaba-san/Chiaki-Nanami | cogs/utils/formats.py | 1 | 1363 | import functools
import re
from more_itertools import one
def pluralize(**thing):
name, value = one(thing.items())
if name.endswith('y') and name[-2] not in 'aeiou':
name = f'{name[:-1]}ies' if value != 1 else name
        return f'{value} {name}'
return f'{value} {name}{"s" * (value != 1)}'
def human_join(iterable, delim=', ', *, final='and'):
"""Joins an iterable in a human-readable way.
The items are joined such that the last two items will be joined with a
different delimiter than the rest.
"""
seq = tuple(iterable)
if not seq:
return ''
return f"{delim.join(seq[:-1])} {final} {seq[-1]}" if len(seq) != 1 else seq[0]
def multi_replace(string, replacements):
substrs = sorted(replacements, key=len, reverse=True)
pattern = re.compile("|".join(map(re.escape, substrs)))
return pattern.sub(lambda m: replacements[m.group(0)], string)
_markdown_replacements = {c: f'\\{c}' for c in ('*', '`', '_', '~', '\\')}
escape_markdown = functools.partial(multi_replace, replacements=_markdown_replacements)
del _markdown_replacements
def truncate(s, length, placeholder):
return (s[:length] + placeholder) if len(s) > length + len(placeholder) else s
def bold_name(thing, predicate):
name = str(thing)
return f'**{escape_markdown(name)}**' if predicate(thing) else name
| mit | -345,050,366,121,900,900 | 29.288889 | 87 | 0.641966 | false |
keras-team/keras-autodoc | keras_autodoc/gathering_members.py | 1 | 4181 | import inspect
from inspect import isclass, isfunction, isroutine
from typing import List
from .utils import import_object
def get_classes(module,
exclude: List[str] = None,
return_strings: bool = True):
"""Get all the classes of a module.
# Arguments
module: The module to fetch the classes from. If it's a
string, it should be in the dotted format. `'keras.layers'` for example.
exclude: The names which will be excluded from the returned list. For
example, `get_classes('keras.layers', exclude=['Dense', 'Conv2D'])`.
return_strings: If False, the actual classes will be returned. Note that
if you use aliases when building your docs, you should use strings.
This is because the computed signature uses
`__name__` and `__module__` if you don't provide a string as input.
# Returns
A list of strings or a list of classes.
"""
return _get_all_module_element(module, exclude, return_strings, True)
def get_functions(module,
exclude: List[str] = None,
return_strings: bool = True):
"""Get all the functions of a module.
# Arguments
module: The module to fetch the functions from. If it's a
string, it should be in the dotted format. `'keras.backend'` for example.
exclude: The names which will be excluded from the returned list. For
example, `get_functions('keras.backend', exclude=['max'])`.
return_strings: If False, the actual functions will be returned. Note that
if you use aliases when building your docs, you should use strings.
This is because the computed signature uses
`__name__` and `__module__` if you don't provide a string as input.
# Returns
A list of strings or a list of functions.
"""
return _get_all_module_element(module, exclude, return_strings, False)
def get_methods(cls, exclude=None, return_strings=True):
"""Get all the method of a class.
# Arguments
cls: The class to fetch the methods from. If it's a
string, it should be in the dotted format. `'keras.layers.Dense'`
for example.
exclude: The names which will be excluded from the returned list. For
example, `get_methods('keras.Model', exclude=['save'])`.
return_strings: If False, the actual methods will be returned. Note that
if you use aliases when building your docs, you should use strings.
This is because the computed signature uses
`__name__` and `__module__` if you don't provide a string as input.
# Returns
A list of strings or a list of methods.
"""
if isinstance(cls, str):
cls_str = cls
cls = import_object(cls)
else:
cls_str = f'{cls.__module__}.{cls.__name__}'
exclude = exclude or []
methods = []
for _, method in inspect.getmembers(cls, predicate=isroutine):
if method.__name__[0] == "_" or method.__name__ in exclude:
continue
if return_strings:
methods.append(f'{cls_str}.{method.__name__}')
else:
methods.append(method)
return methods
def _get_all_module_element(module, exclude, return_strings, class_):
if isinstance(module, str):
module = import_object(module)
exclude = exclude or []
module_data = []
for name in dir(module):
module_member = getattr(module, name)
if not (isfunction(module_member) or isclass(module_member)):
continue
if name[0] == "_" or name in exclude:
continue
if module.__name__ not in module_member.__module__:
continue
if module_member in module_data:
continue
if class_ and not isclass(module_member):
continue
if not class_ and not isfunction(module_member):
continue
if return_strings:
module_data.append(f'{module.__name__}.{name}')
else:
module_data.append(module_member)
module_data.sort(key=id)
return module_data
| apache-2.0 | -592,158,582,117,040,500 | 36 | 85 | 0.610859 | false |
ParsonsAMT/Myne | datamining/apps/profiles/migrations/0006_auto__add_courseimage.py | 1 | 20722 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CourseImage'
db.create_table('profiles_courseimage', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('created_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], blank=True)),
('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100)),
('course', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['profiles.Course'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
('author', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('profiles', ['CourseImage'])
def backwards(self, orm):
# Deleting model 'CourseImage'
db.delete_table('profiles_courseimage')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'profiles.course': {
'Meta': {'object_name': 'Course'},
'attributes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'coursenumber': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'credits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learning_outcomes': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'levels': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'prerequisite': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Course']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Subject']"}),
'tags': ('tagging.fields.TagField', [], {}),
'timeline': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.courseimage': {
'Meta': {'object_name': 'CourseImage'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Course']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'profiles.division': {
'Meta': {'object_name': 'Division'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.expertise': {
'Meta': {'object_name': 'Expertise'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.facultymember': {
'Meta': {'object_name': 'FacultyMember', '_ormbases': ['profiles.Person']},
'academic_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'admin_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'expertise': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Expertise']", 'null': 'True', 'blank': 'True'}),
'homeschool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.School']", 'null': 'True', 'blank': 'True'}),
'office': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'person_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['profiles.Person']", 'unique': 'True', 'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'profiles.person': {
'Meta': {'object_name': 'Person'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'cv': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'n_number': ('django.db.models.fields.CharField', [], {'max_length': '9'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Project']", 'null': 'True', 'blank': 'True'}),
'tags': ('tagging.fields.TagField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'use_which_cv': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'user_account': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'person_profile'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"})
},
'profiles.program': {
'Meta': {'object_name': 'Program'},
'abbreviation': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.School']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.project': {
'Meta': {'object_name': 'Project'},
'collaborators': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Course']", 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Person']", 'null': 'True', 'blank': 'True'}),
'creator_type': ('django.db.models.fields.CharField', [], {'default': "'I'", 'max_length': '2'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'format': ('tagging.fields.TagField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'participating_faculty': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['profiles.FacultyMember']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'ref_type': ('django.db.models.fields.CharField', [], {'default': "'I'", 'max_length': '2'}),
'scope_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'specifications': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('tagging.fields.TagField', [], {}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "'Untitled'", 'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'profiles.school': {
'Meta': {'object_name': 'School'},
'abbreviation': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'division': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Division']", 'null': 'True', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.section': {
'Meta': {'object_name': 'Section'},
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Course']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'crn': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.FacultyMember']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Project']", 'null': 'True', 'blank': 'True'}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Semester']"}),
'syllabus': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'syllabus_orig_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.semester': {
'Meta': {'object_name': 'Semester'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'profiles.student': {
'Meta': {'object_name': 'Student', '_ormbases': ['profiles.Person']},
'person_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['profiles.Person']", 'unique': 'True', 'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'profiles.subject': {
'Meta': {'object_name': 'Subject'},
'abbreviation': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'program': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Program']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'profiles.workimage': {
'Meta': {'object_name': 'WorkImage'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Person']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'profiles.workurl': {
'Meta': {'object_name': 'WorkURL'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Person']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['profiles']
| agpl-3.0 | 2,768,419,188,654,010,400 | 80.905138 | 196 | 0.545025 | false |
vgmoose/ssm | tests/unittests/test_lvm.py | 1 | 22526 | #!/usr/bin/env python
#
# (C)2011 Red Hat, Inc., Lukas Czerner <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Unittests for the system storage manager lvm backend
import unittest
from ssmlib import main
from ssmlib import problem
from ssmlib.backends import lvm
from tests.unittests.common import *
class LvmFunctionCheck(MockSystemDataSource):
def setUp(self):
super(LvmFunctionCheck, self).setUp()
self._addDevice('/dev/sda', 11489037516)
self._addDevice('/dev/sdb', 234566451)
self._addDevice('/dev/sdc', 2684354560)
self._addDevice('/dev/sdc1', 894784853, 1)
self._addDevice('/dev/sdc2', 29826161, 2)
self._addDevice('/dev/sdc3', 1042177280, 3)
self._addDevice('/dev/sdd', 11673)
self._addDevice('/dev/sde', 1042177280)
main.SSM_DEFAULT_BACKEND = 'lvm'
def mock_run(self, cmd, *args, **kwargs):
# Convert all parts of cmd into string
for i, item in enumerate(cmd):
if type(item) is not str:
cmd[i] = str(item)
self.run_data.append(" ".join(cmd))
output = ""
if cmd[1] == 'pvs':
for dev, data in self.dev_data.iteritems():
if 'pool_name' in data:
output += "{0}|{1}|{2}|{3}\n".format(dev, data['pool_name'],
data['dev_free'], data['dev_used'])
elif cmd[1] == 'vgs':
for pool, data in self.pool_data.iteritems():
output += "{0}|{1}|{2}|{3}|{4}\n".format(pool, data['dev_count'],
data['pool_size'], data['pool_free'], data['vol_count'])
elif cmd[1] == 'lvs':
for vol, data in self.vol_data.iteritems():
output += "{0}|{1}|{2}|{3}|{4}|{5}|{6}|{7}\n".format(data['pool_name'],
data['vol_size'], data['stripes'], data['stripesize'],
data['type'], data['dev_name'].split("/")[-1],
data['origin'], data['attr'])
if 'return_stdout' in kwargs and not kwargs['return_stdout']:
output = None
return (0, output)
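    # The mocked output mimics the pipe-separated reports ssm requests from
    # lvm (one record per line, fields as formatted above; values illustrative):
    #   pvs: /dev/sda|default_pool|<dev_free>|<dev_used>
    #   vgs: default_pool|<dev_count>|<pool_size>|<pool_free>|<vol_count>
    #   lvs: default_pool|<vol_size>|<stripes>|<stripesize>|<type>|lvol001|<origin>|<attr>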
def test_lvm_create(self):
default_pool = lvm.SSM_LVM_DEFAULT_POOL
        # Create volume using a single device from the non-existent default pool
self._checkCmd("ssm create", ['/dev/sda'],
"lvm lvcreate {0} -l 100%PVS -n lvol001 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
        # Specify backend
self._checkCmd("ssm -b lvm create", ['/dev/sda'],
"lvm lvcreate {0} -l 100%PVS -n lvol001 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
main.SSM_DEFAULT_BACKEND = "btrfs"
self._checkCmd("ssm --backend lvm create", ['/dev/sda'],
"lvm lvcreate {0} -l 100%PVS -n lvol001 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
main.SSM_DEFAULT_BACKEND = "lvm"
self._checkCmd("ssm create", ['--name myvolume', '--fstype ext4', '/dev/sda'])
self._cmdEq("mkfs.ext4 /dev/{0}/myvolume".format(default_pool))
self._cmdEq("lvm lvcreate {0} -l 100%PVS -n myvolume /dev/sda".format(default_pool), -2)
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -3)
self._checkCmd("ssm -f create", ['--fstype ext4', '/dev/sda'])
self._cmdEq("mkfs.ext4 -F /dev/{0}/lvol001".format(default_pool))
self._cmdEq("lvm lvcreate {0} -l 100%PVS -n lvol001 /dev/sda".format(default_pool), -2)
self._cmdEq("lvm vgcreate -f {0} /dev/sda".format(default_pool), -3)
self._checkCmd("ssm -v create", ['--name myvolume', '--fstype xfs', '/dev/sda'])
self._cmdEq("mkfs.xfs /dev/{0}/myvolume".format(default_pool))
self._cmdEq("lvm lvcreate -v {0} -l 100%PVS -n myvolume /dev/sda".format(default_pool), -2)
self._cmdEq("lvm vgcreate -v {0} /dev/sda".format(default_pool), -3)
self._checkCmd("ssm -v -f create", ['--name myvolume', '--fstype xfs', '/dev/sda'])
self._cmdEq("mkfs.xfs -f /dev/{0}/myvolume".format(default_pool))
self._cmdEq("lvm lvcreate -v {0} -l 100%PVS -n myvolume /dev/sda".format(default_pool), -2)
self._cmdEq("lvm vgcreate -v -f {0} /dev/sda".format(default_pool), -3)
self._checkCmd("ssm create", ['-s 2.6T', '/dev/sda'],
"lvm lvcreate {0} -L 2791728742.40K -n lvol001 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
self._checkCmd("ssm create", ['-r 0', '-s 2.6T', '-I 16', '/dev/sda'],
"lvm lvcreate {0} -L 2791728742.40K -n lvol001 -I 16 -i 1 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
self._checkCmd("ssm create", ['-r 0', '-s 2.6T', '-I 16', '/dev/sda'],
"lvm lvcreate {0} -L 2791728742.40K -n lvol001 -I 16 -i 1 /dev/sda".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda".format(default_pool), -2)
# Number of stripes must not exceed number of devices
self.assertRaises(problem.GeneralError, main.main, "ssm create -r 0 -s 2.6T -I 16 -i 4 /dev/sda")
        # Create volume using a single device from the non-existent my_pool
self._checkCmd("ssm create", ['--pool my_pool', '/dev/sda'],
"lvm lvcreate my_pool -l 100%PVS -n lvol001 /dev/sda")
self._cmdEq("lvm vgcreate my_pool /dev/sda", -2)
self._checkCmd("ssm create", ['-r 0', '-p my_pool', '-s 2.6T', '-I 16',
'-i 2', '/dev/sda /dev/sdb'],
"lvm lvcreate my_pool -L 2791728742.40K -n lvol001 -I 16 -i 2 /dev/sda /dev/sdb")
self._cmdEq("lvm vgcreate my_pool /dev/sda /dev/sdb", -2)
# Create volume using multiple devices
self._checkCmd("ssm create", ['/dev/sda /dev/sdc1'],
"lvm lvcreate {0} -l 100%PVS -n lvol001 /dev/sda /dev/sdc1".format(default_pool))
self._cmdEq("lvm vgcreate {0} /dev/sda /dev/sdc1".format(default_pool), -2)
        # Create volume using a single device from an existing pool
self._addPool(default_pool, ['/dev/sdb', '/dev/sdd'])
self._checkCmd("ssm create", ['-r 0', '-s 2.6T', '-I 16',
'-n myvolume', '/dev/sda'],
"lvm lvcreate {0} -L 2791728742.40K -n myvolume -I 16 -i 1 /dev/sda". format(default_pool))
self._cmdEq("lvm vgextend {0} /dev/sda".format(default_pool), -2)
self._addPool("my_pool", ['/dev/sdc2', '/dev/sdc3'])
self._checkCmd("ssm create", ['-r 0', '-p my_pool', '-s 2.6T', '-I 16',
'-n myvolume', '/dev/sda'],
"lvm lvcreate my_pool -L 2791728742.40K -n myvolume -I 16 -i 1 /dev/sda")
self._cmdEq("lvm vgextend my_pool /dev/sda", -2)
        # Create volume using multiple devices, one of which is already
        # in the pool
self._checkCmd("ssm create", ['-n myvolume', '/dev/sda /dev/sdb'],
"lvm lvcreate {0} -l 100%PVS -n myvolume /dev/sda /dev/sdb". format(default_pool))
self._cmdEq("lvm vgextend {0} /dev/sda".format(default_pool), -2)
self._addPool("my_pool", ['/dev/sdc2', '/dev/sdc3'])
self._checkCmd("ssm create", ['-p my_pool', '-n myvolume', '/dev/sdc2 /dev/sda'],
"lvm lvcreate my_pool -l 100%PVS -n myvolume /dev/sdc2 /dev/sda")
self._cmdEq("lvm vgextend my_pool /dev/sda", -2)
self._checkCmd("ssm create", ['-n myvolume', '/dev/sda /dev/sdb /dev/sde'],
"lvm lvcreate {0} -l 100%PVS -n myvolume /dev/sda /dev/sdb /dev/sde". format(default_pool))
self._cmdEq("lvm vgextend {0} /dev/sda /dev/sde".format(default_pool), -2)
def test_lvm_remove(self):
# Generate some storage data
self._addPool('default_pool', ['/dev/sda', '/dev/sdb'])
self._addPool('my_pool', ['/dev/sdc2', '/dev/sdc3', '/dev/sdc1'])
self._addVol('vol001', 117283225, 1, 'default_pool', ['/dev/sda'])
self._addVol('vol002', 237284225, 1, 'default_pool', ['/dev/sda'])
self._addVol('vol003', 1024, 1, 'default_pool', ['/dev/sdd'])
self._addVol('vol004', 209715200, 2, 'default_pool', ['/dev/sda',
'/dev/sdb'])
# remove volume
main.main("ssm remove /dev/default_pool/vol002")
self._cmdEq("lvm lvremove /dev/default_pool/vol002")
# remove multiple volumes
main.main("ssm remove /dev/default_pool/vol002 /dev/default_pool/vol003")
self.assertEqual(self.run_data[-2], "lvm lvremove /dev/default_pool/vol002")
self._cmdEq("lvm lvremove /dev/default_pool/vol003")
# remove pool
main.main("ssm remove my_pool")
self._cmdEq("lvm vgremove my_pool")
# remove multiple pools
main.main("ssm remove my_pool default_pool")
self.assertEqual(self.run_data[-2], "lvm vgremove my_pool")
self._cmdEq("lvm vgremove default_pool")
# remove device
main.main("ssm remove /dev/sdc1")
self._cmdEq("lvm vgreduce my_pool /dev/sdc1")
# remove multiple devices
main.main("ssm remove /dev/sdc1 /dev/sdb")
self.assertEqual(self.run_data[-2], "lvm vgreduce my_pool /dev/sdc1")
self._cmdEq("lvm vgreduce default_pool /dev/sdb")
# remove combination
main.main("ssm remove /dev/sdb my_pool /dev/default_pool/vol001")
self.assertEqual(self.run_data[-3], "lvm vgreduce default_pool /dev/sdb")
self.assertEqual(self.run_data[-2], "lvm vgremove my_pool")
self._cmdEq("lvm lvremove /dev/default_pool/vol001")
# remove all
main.main("ssm remove --all")
self.assertEqual(self.run_data[-2], "lvm vgremove default_pool")
self._cmdEq("lvm vgremove my_pool")
# remove force
main.main("ssm -f remove /dev/default_pool/vol002")
self._cmdEq("lvm lvremove -f /dev/default_pool/vol002")
# remove verbose
main.main("ssm -v remove /dev/default_pool/vol002")
self._cmdEq("lvm lvremove -v /dev/default_pool/vol002")
# remove verbose + force
main.main("ssm -v -f remove /dev/default_pool/vol002")
self._cmdEq("lvm lvremove -v -f /dev/default_pool/vol002")
def test_lvm_snapshot(self):
# Generate some storage data
self._addPool('default_pool', ['/dev/sda', '/dev/sdb'])
self._addPool('my_pool', ['/dev/sdc2', '/dev/sdc3', '/dev/sdc1'])
self._addVol('vol001', 117283225, 1, 'default_pool', ['/dev/sda'])
self._addVol('vol002', 237284225, 1, 'default_pool', ['/dev/sda'],
'/mnt/mount1')
self._addVol('vol003', 1024, 1, 'default_pool', ['/dev/sdd'])
self._addVol('vol004', 209715200, 2, 'default_pool', ['/dev/sda',
'/dev/sdb'], '/mnt/mount')
# Create snapshot
self._checkCmd("ssm snapshot --name new_snap", ['/dev/default_pool/vol001'],
"lvm lvcreate --size 23456645.0K --snapshot --name new_snap /dev/default_pool/vol001")
main.SSM_DEFAULT_BACKEND = "btrfs"
self._checkCmd("ssm snapshot --name new_snap", ['/dev/default_pool/vol001'],
"lvm lvcreate --size 23456645.0K --snapshot --name new_snap /dev/default_pool/vol001")
main.SSM_DEFAULT_BACKEND = "lvm"
# Create snapshot verbose
self._checkCmd("ssm -v snapshot --name new_snap", ['/dev/default_pool/vol001'],
"lvm lvcreate -v --size 23456645.0K --snapshot --name new_snap /dev/default_pool/vol001")
# Create snapshot force
self._checkCmd("ssm -f snapshot --name new_snap", ['/dev/default_pool/vol001'],
"lvm lvcreate -f --size 23456645.0K --snapshot --name new_snap /dev/default_pool/vol001")
# Create snapshot force verbose
self._checkCmd("ssm -f -v snapshot --name new_snap", ['/dev/default_pool/vol001'],
"lvm lvcreate -v -f --size 23456645.0K --snapshot --name new_snap /dev/default_pool/vol001")
# Create snapshot with size and name specified
self._checkCmd("ssm snapshot", ['--size 1G', '--name new_snap',
'/dev/default_pool/vol001'],
"lvm lvcreate --size 1048576.0K --snapshot --name new_snap /dev/default_pool/vol001")
def test_lvm_resize(self):
# Generate some storage data
self._addPool('default_pool', ['/dev/sda', '/dev/sdb'])
self._addPool('my_pool', ['/dev/sdc2', '/dev/sdc3'])
self._addVol('vol001', 2982616, 1, 'my_pool', ['/dev/sdc2'],
'/mnt/test1')
self._addVol('vol002', 237284225, 1, 'default_pool', ['/dev/sda'])
self._addVol('vol003', 1024, 1, 'default_pool', ['/dev/sdd'])
self._addDevice('/dev/sde', 11489037516)
# Extend Volume
self._checkCmd("ssm resize", ['--size +4m', '/dev/default_pool/vol003'],
"lvm lvresize -L 5120.0k /dev/default_pool/vol003")
# Specify backend
self._checkCmd("ssm --backend lvm resize", ['--size +4m', '/dev/default_pool/vol003'],
"lvm lvresize -L 5120.0k /dev/default_pool/vol003")
main.SSM_DEFAULT_BACKEND = "btrfs"
self._checkCmd("ssm resize", ['--size +4m', '/dev/default_pool/vol003'],
"lvm lvresize -L 5120.0k /dev/default_pool/vol003")
main.SSM_DEFAULT_BACKEND = "lvm"
# Shrink volume
self._checkCmd("ssm resize", ['-s-100G', '/dev/default_pool/vol002'],
"lvm lvresize -L 132426625.0k /dev/default_pool/vol002")
# Set volume size
self._checkCmd("ssm resize", ['-s 10M', '/dev/my_pool/vol001'],
"lvm lvresize -L 10240.0k /dev/my_pool/vol001")
# Set volume and add devices
self._checkCmd("ssm resize -s 12T /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 12884901888.0k /dev/default_pool/vol003")
self.assertEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
# Set volume size with sufficient amount of space
self._checkCmd("ssm resize -s 10G /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 10485760.0k /dev/default_pool/vol003")
self.assertNotEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
# Set volume size without sufficient amount of space
self._checkCmd("ssm resize -s 10T /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 10737418240.0k /dev/default_pool/vol003")
self.assertNotEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
# Extend volume and add devices
self._checkCmd("ssm resize -s +11T /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 11811161088.0k /dev/default_pool/vol003")
self.assertEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
        # Extend volume with enough space in the pool
self._checkCmd("ssm resize -s +10G /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 10486784.0k /dev/default_pool/vol003")
self.assertNotEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
        # Extend volume without enough space in the pool
self._checkCmd("ssm resize -s +20T /dev/default_pool/vol003 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 21474837504.0k /dev/default_pool/vol003")
self.assertEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
# Shrink volume with devices provided
self._checkCmd("ssm resize -s-10G /dev/default_pool/vol002 /dev/sdc1 /dev/sde",
[], "lvm lvresize -L 226798465.0k /dev/default_pool/vol002")
self.assertNotEqual(self.run_data[-2],
"lvm vgextend default_pool /dev/sdc1 /dev/sde")
        # Test that we do not use devices which are already used in a
        # different pool
self.assertRaises(Exception, main.main, "ssm resize -s +1.5T /dev/my_pool/vol001 /dev/sdb /dev/sda")
# If the device we are able to use can cover the size, then
# it will be resized successfully
self._checkCmd("ssm resize -s +1.5T /dev/my_pool/vol001 /dev/sdb /dev/sda /dev/sdc1",
[], "lvm lvresize -L 1613595352.0k /dev/my_pool/vol001")
# Test resize on inactive volume
self._addVol('vol004', 8192, 1, 'default_pool', ['/dev/sdd'], None, False)
self._checkCmd("ssm resize", ['--size +4m', '/dev/default_pool/vol004'],
"lvm lvresize -L 12288.0k /dev/default_pool/vol004")
self.assertRaises(Exception, main.main, "ssm resize -s-2m /dev/default_pool/vol004")
# We can force it though
self._checkCmd("ssm -f resize", ['-s-2m', '/dev/default_pool/vol004'],
"lvm lvresize -f -L 6144.0k /dev/default_pool/vol004")
def test_lvm_add(self):
default_pool = lvm.SSM_LVM_DEFAULT_POOL
# Adding to non existent pool
# Add device into default pool
self._checkCmd("ssm add", ['/dev/sda'],
"lvm vgcreate {0} /dev/sda".format(default_pool))
# Specify backend
self._checkCmd("ssm --backend lvm add", ['/dev/sda'],
"lvm vgcreate {0} /dev/sda".format(default_pool))
main.SSM_DEFAULT_BACKEND = "btrfs"
self._checkCmd("ssm --backend lvm add", ['/dev/sda'],
"lvm vgcreate {0} /dev/sda".format(default_pool))
main.SSM_DEFAULT_BACKEND = "lvm"
# Add more devices into default pool
self._checkCmd("ssm add", ['/dev/sda /dev/sdc1'],
"lvm vgcreate {0} /dev/sda /dev/sdc1".format(default_pool))
# Add device into defined pool
self._checkCmd("ssm add", ['-p my_pool', '/dev/sda'],
"lvm vgcreate my_pool /dev/sda")
self._checkCmd("ssm add", ['--pool my_pool', '/dev/sda'],
"lvm vgcreate my_pool /dev/sda")
# Add more devices into defined pool
self._checkCmd("ssm add", ['-p my_pool', '/dev/sda /dev/sdc1'],
"lvm vgcreate my_pool /dev/sda /dev/sdc1")
self._checkCmd("ssm add", ['--pool my_pool', '/dev/sda /dev/sdc1'],
"lvm vgcreate my_pool /dev/sda /dev/sdc1")
# Add force
self._checkCmd("ssm -f add", ['--pool my_pool', '/dev/sda'],
"lvm vgcreate -f my_pool /dev/sda")
# Add verbose
self._checkCmd("ssm -v add", ['--pool my_pool', '/dev/sda'],
"lvm vgcreate -v my_pool /dev/sda")
# Add force and verbose
self._checkCmd("ssm -v -f add", ['--pool my_pool', '/dev/sda'],
"lvm vgcreate -v -f my_pool /dev/sda")
# Adding to existing default pool
self._addPool(default_pool, ['/dev/sdb', '/dev/sdd'])
# Add device into default pool
self._checkCmd("ssm add", ['/dev/sda'],
"lvm vgextend {0} /dev/sda".format(default_pool))
# Add more devices into default pool
self._checkCmd("ssm add", ['/dev/sda /dev/sdc1'],
"lvm vgextend {0} /dev/sda /dev/sdc1".format(default_pool))
# Adding to existing defined pool
self._addPool("my_pool", ['/dev/sdc2', '/dev/sdc3'])
# Add device into defined pool
self._checkCmd("ssm add", ['-p my_pool', '/dev/sda'],
"lvm vgextend my_pool /dev/sda")
self._checkCmd("ssm add", ['--pool my_pool', '/dev/sda'],
"lvm vgextend my_pool /dev/sda")
# Add more devices into defined pool
self._checkCmd("ssm add", ['-p my_pool', '/dev/sda /dev/sdc1'],
"lvm vgextend my_pool /dev/sda /dev/sdc1")
self._checkCmd("ssm add", ['--pool my_pool', '/dev/sda /dev/sdc1'],
"lvm vgextend my_pool /dev/sda /dev/sdc1")
# Add force
self._checkCmd("ssm -f add", ['--pool my_pool', '/dev/sda'],
"lvm vgextend -f my_pool /dev/sda")
# Add verbose
self._checkCmd("ssm -v add", ['--pool my_pool', '/dev/sda'],
"lvm vgextend -v my_pool /dev/sda")
# Add force and verbose
self._checkCmd("ssm -v -f add", ['--pool my_pool', '/dev/sda'],
"lvm vgextend -v -f my_pool /dev/sda")
        # Add two devices into an existing pool (one of the devices is
        # already in the pool)
self._checkCmd("ssm add", ['--pool my_pool', '/dev/sdc2 /dev/sda'],
"lvm vgextend my_pool /dev/sda")
self._checkCmd("ssm add", ['/dev/sda /dev/sdb'],
"lvm vgextend {0} /dev/sda".format(default_pool))
def test_lvm_mount(self):
self._addDir("/mnt/test")
self._addDir("/mnt/test1")
self._addDir("/mnt/test2")
# Generate some storage data
self._addPool('default_pool', ['/dev/sda', '/dev/sdb'])
self._addPool('my_pool', ['/dev/sdc2', '/dev/sdc3', '/dev/sdc1'])
self._addVol('vol001', 117283225, 1, 'default_pool', ['/dev/sda'],
'/mnt/test1')
self._addVol('vol002', 237284225, 1, 'my_pool', ['/dev/sda'])
# Simple mount test
main.main("ssm mount /dev/default_pool/vol001 /mnt/test")
self._cmdEq("mount /dev/default_pool/vol001 /mnt/test")
main.main("ssm mount -o discard /dev/default_pool/vol001 /mnt/test")
self._cmdEq("mount -o discard /dev/default_pool/vol001 /mnt/test")
main.main("ssm mount --options rw,discard,neco=44 /dev/my_pool/vol002 /mnt/test1")
self._cmdEq("mount -o rw,discard,neco=44 /dev/my_pool/vol002 /mnt/test1")
# Non existing volume
main.main("ssm mount nonexisting /mnt/test1")
self._cmdEq("mount nonexisting /mnt/test1")
main.main("ssm mount -o discard,rw nonexisting /mnt/test1")
self._cmdEq("mount -o discard,rw nonexisting /mnt/test1")
| gpl-2.0 | -2,838,055,307,035,547,600 | 49.620225 | 108 | 0.585634 | false |
Paco1994/sportbot | sportbot/bot.py | 1 | 1953 | # -*- coding: utf-8 -*-
import telebot # Librería de la API del bot.
import random
from telebot import types # Tipos para la API del bot.
import time # Librería para hacer que el programa que controla el bot no se acabe.
import sys
from sportbot import bot
reload(sys)
sys.setdefaultencoding("utf-8")
local = True
gif = "https://lachatupdate.files.wordpress.com/2015/08/547b7a894bcc7.gif"
#usuarios = [line.rstrip('\n') for line in open('sources/usuarios.txt')] # load the user list
administrador = '-24696186'
commands = { # command description used in the "help" command
'start': 'Empieza a usar el bot. Recibirás notificaciones globales cuando se actualice el bot.',
'help': 'Muestra el menú de ayuda.'
}
def listener(messages):
for m in messages:
cid = m.chat.id
mensaje = ""
        if m.content_type == 'text': # Only text messages are written to the log
if cid > 0:
mensaje = str(m.chat.first_name) + "[" + str(cid) + "]: " + m.text
#f = open('sources/log.txt', 'a')
#f.write(mensaje + "\n")
#f.close()
print mensaje
else:
if m.text[0] == '/':
mensaje = str(m.from_user.first_name) + "[" + str(cid) + "]: " + m.text
#f = open('sources/log.txt', 'a')
#f.write(mensaje + "\n")
#f.close()
print mensaje
bot.set_update_listener(listener) # Tell the bot to use our 'listener' function declared above as its update listener.
bot.polling(none_stop=True) # Keep the bot running even if it hits an error.
while True: # Loop forever so the program never exits.
    #listadoURLs = ini2urls("sources/url.ini",0) # Read the URL list from the startup INI file
time.sleep(300)
| gpl-3.0 | -2,492,374,418,671,157,000 | 39.458333 | 140 | 0.616375 | false |
topfs2/heimdall | src/thegamesdb.py | 1 | 5883 | import heimdall
from heimdall import tasks
from heimdall import resources
from heimdall import supplies, demands
from heimdall.predicates import *
from game_item import comparePlatforms
import datetime
import difflib
import urllib
import xml.etree.ElementTree as ET
baseImageUrl = "http://thegamesdb.net/banners/"
def readTextElement(parent, elementName):
element = parent.find(elementName)
return element.text if (element != None and element.text != None) else ''
class GamePredicateObject(tasks.SubjectTask):
demand = [
demands.required(dc.identifier, "http://thegamesdb.net/game/"),
]
supply = [
supplies.emit(dc.title),
supplies.emit(dc.type),
supplies.emit(dc.description),
supplies.emit(dc.date),
supplies.emit(media.rating),
supplies.emit(swo.SWO_0000396), # Developer
supplies.emit(swo.SWO_0000397), # Publisher
supplies.emit(edamontology.data_3106), # Platform
supplies.emit("players"),
supplies.emit(foaf.thumbnail),
supplies.emit("fanart"),
supplies.emit("banner"),
supplies.emit("trailer"),
]
def require(self):
uri = self.subject[dc.identifier]
ID = uri[len("http://thegamesdb.net/game/"):]
return resources.SimpleResource("http://thegamesdb.net/api/GetGame.php?id=" + ID)
def run(self, resource):
root = ET.fromstring(resource)
gameRows = root.findall("Game")
if gameRows:
gameRow = gameRows[0]
gameTitle = readTextElement(gameRow, "GameTitle")
self.subject.emit(dc.title, gameTitle)
for genre in gameRow.findall("Genres/genre"):
self.subject.emit(dc.type, genre.text)
self.subject.emit(dc.description, readTextElement(gameRow, "Overview"))
try:
# Deserialize MM/DD/YYYY
dateobject = datetime.datetime.strptime(readTextElement(gameRow, "ReleaseDate"), "%m/%d/%Y")
self.subject.emit(dc.date, dateobject.strftime("%Y-%m-%d"))
except ValueError:
# can't be parsed by strptime()
pass
self.subject.emit(media.rating, readTextElement(gameRow, 'ESRB'))
self.subject.emit(swo.SWO_0000396, readTextElement(gameRow, 'Developer'))
self.subject.emit(swo.SWO_0000397, readTextElement(gameRow, 'Publisher'))
self.subject.emit(edamontology.data_3106, readTextElement(gameRow, 'Platform'))
self.subject.emit("players", readTextElement(gameRow, 'Players'))
for boxartRow in gameRow.findall('Images/boxart'):
side = boxartRow.attrib.get('side')
if side == 'front' and boxartRow.text:
self.subject.emit(foaf.thumbnail, baseImageUrl + boxartRow.text)
for fanartRow in gameRow.findall('Images/fanart'):
original = readTextElement(fanartRow, 'original')
if original:
thumb = readTextElement(fanartRow, 'thumb')
if thumb:
self.subject.emit("fanart", {"fanart": baseImageUrl + original, "thumbnail": baseImageUrl + thumb})
else:
self.subject.emit("fanart", baseImageUrl + original)
for bannerRow in gameRow.findall('Images/banner'):
self.subject.emit("banner", baseImageUrl + bannerRow.text)
self.subject.emit("trailer", readTextElement(gameRow, 'Youtube'))
def readTextElement(self, parent, elementName):
element = parent.find(elementName)
return element.text if (element != None and element.text != None) else ''
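# Sketch of the GetGame.php XML that GamePredicateObject.run() consumes
# (abridged and illustrative; structure inferred from the element lookups
# above, not from TheGamesDB documentation):
#   <Data><Game>
#     <GameTitle>...</GameTitle><ReleaseDate>MM/DD/YYYY</ReleaseDate>
#     <Genres><genre>...</genre></Genres>
#     <Images><boxart side="front">boxart/front/1-1.jpg</boxart></Images>
#   </Game></Data>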
class SearchGameCollector(tasks.SubjectTask):
demand = [
demands.required(dc.title),
demands.required(edamontology.data_3106), # Platform
demands.requiredClass("item.game", True),
demands.none(owl.sameAs, "http://thegamesdb.net/game/[0-9]*")
]
supply = [
supplies.emit(owl.sameAs, "http://thegamesdb.net/game/")
]
def require(self):
title = self.subject[dc.title]
platform = self.translatePlatform(self.subject[edamontology.data_3106])
if platform:
uri = "http://thegamesdb.net/api/GetGame.php?name=%s&platform=%s" % \
(urllib.quote_plus(title), urllib.quote_plus(platform))
return resources.SimpleResource(uri)
else:
return []
def run(self, resource):
root = ET.fromstring(resource)
gameRows = root.findall("Game")
# TheGamesDB has search ordering problems. Sucks for XML scrapers... not for difflib!
possibilities = [readTextElement(gameRow, "GameTitle") for gameRow in gameRows]
gameTitle = difflib.get_close_matches(self.subject[dc.title], possibilities, 1)
if gameTitle:
gameTitle = gameTitle[0]
for gameRow in gameRows:
if gameTitle == readTextElement(gameRow, "GameTitle"):
gameId = readTextElement(gameRow, "id")
if gameId:
self.subject.emit(owl.sameAs, "http://thegamesdb.net/game/" + gameId)
break
def translatePlatform(self, platform):
uri = "http://thegamesdb.net/api/GetPlatformsList.php"
resource = resources.CachedSimpleResource(uri)
platformXML = resource.run(resource.require())
root = ET.fromstring(platformXML)
for tgdb_platform in root.findall("Platforms/Platform"):
nametag = tgdb_platform.find("name")
if nametag == None or nametag.text == None:
continue
if comparePlatforms(nametag.text, platform):
return nametag.text
return None
module = [ GamePredicateObject, SearchGameCollector ]
| gpl-2.0 | 1,302,008,425,335,843,600 | 40.723404 | 123 | 0.617372 | false |
jszakmeister/rst2ctags | rst2ctags.py | 1 | 11006 | #!/usr/bin/env python
# Copyright (C) 2013-2018 John Szakmeister <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file LICENSE.txt, which
# you should have received as part of this distribution.
from __future__ import absolute_import
from __future__ import print_function
import codecs
import errno
import io
import locale
import pkg_resources
import sys
import re
def _version():
'''Get version.'''
try:
return pkg_resources.get_distribution('rst2ctags').version
except pkg_resources.DistributionNotFound:
return 'dev'
__version__ = _version()
class ScriptError(Exception):
pass
def detect_encoding(filename):
with open(filename, 'rb') as f:
raw = f.read(4096)
potential_bom = raw[:4]
bom_encodings = [('utf-8-sig', codecs.BOM_UTF8),
('utf-16', codecs.BOM_UTF16_LE),
('utf-16', codecs.BOM_UTF16_BE),
('utf-32', codecs.BOM_UTF32_LE),
('utf-32', codecs.BOM_UTF32_BE)]
for encoding, bom in bom_encodings:
if potential_bom.startswith(bom):
return encoding
# No BOM found, let's try to detect encoding
encoding = None
try:
import chardet
result = chardet.detect(raw)
# If we're not really confident about the encoding, then skip to
# UTF-8 detection.
if result['confidence'] >= 0.9:
encoding = result['encoding']
if encoding == 'ascii':
encoding = 'utf-8'
except ImportError:
pass
if encoding is None:
try:
raw.rsplit(b' ')[0].decode('utf-8')
encoding = 'utf-8'
except UnicodeDecodeError:
pass
return encoding or 'latin1'
def open_autoenc(filename, encoding=None):
if encoding is None:
encoding = detect_encoding(filename)
return io.open(filename, encoding=encoding, newline='')
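# Usage sketch (file name hypothetical): detect_encoding() checks for a BOM,
# then optionally consults chardet, then falls back to a UTF-8 probe:
#   with open_autoenc('README.rst') as f:   # encoding picked automatically
#       text = f.read()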
def ctag_name_escape(str):
str = re.sub('[\t\r\n]+', ' ', str)
str = re.sub(r'^\s*\\\((.)\)', r'(\1)', str)
return str
def ctag_search_escape(str):
str = str.replace('\\', r'\\')
str = str.replace('\t', r'\t')
str = str.replace('\r', r'\r')
str = str.replace('\n', r'\n')
for c in '[]*$.^':
str = str.replace(c, '\\' + c)
return str
class Tag(object):
def __init__(self, tag_name, tag_file, tag_address):
self.tag_name = tag_name
self.tag_file = tag_file
self.tag_address = tag_address
self.fields = []
def add_field(self, type, value=None):
if type == 'kind':
type = None
self.fields.append((type, value or ""))
def format_fields(self):
formattedFields = []
for name, value in self.fields:
if name:
s = '%s:%s' % (name, value or "")
else:
s = str(value)
formattedFields.append(s)
return '\t'.join(formattedFields)
def render(self):
return '%s\t%s\t%s;"\t%s' % (
self.tag_name, self.tag_file, self.tag_address, self.format_fields())
def __repr__(self):
return "<Tag name:%r file:%r: addr:%r %r>" % (
self.tag_name, self.tag_file, self.tag_address,
self.format_fields().replace('\t', ' '))
def _tuple(self):
return (self.tag_name, self.tag_file, self.tag_address,
self.format_fields())
def __eq__(self, other):
return self._tuple() == other._tuple()
def __ne__(self, other):
return self._tuple() != other._tuple()
def __lt__(self, other):
return self._tuple() < other._tuple()
def __le__(self, other):
return self._tuple() <= other._tuple()
def __gt__(self, other):
return self._tuple() > other._tuple()
def __ge__(self, other):
return self._tuple() >= other._tuple()
@staticmethod
def section(section, sro):
tag_name = ctag_name_escape(section.name)
tag_address = '/^%s$/' % ctag_search_escape(section.line)
t = Tag(tag_name, section.filename, tag_address)
t.add_field('kind', 's')
t.add_field('line', section.lineNumber)
parents = []
p = section.parent
while p is not None:
parents.append(ctag_name_escape(p.name))
p = p.parent
parents.reverse()
if parents:
t.add_field('section', sro.join(parents))
return t
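# A rendered section tag line looks like (values illustrative):
#   Intro\tdoc.rst\t/^Intro$/;"\ts\tline:3\tsection:Top|Middle
# i.e. tag name, file, ex-mode search address, kind 's', line number, and
# the parent chain joined by the SRO separator.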
class Section(object):
def __init__(self, level, name, line, lineNumber, filename, parent=None):
self.level = level
self.name = name
self.line = line
self.lineNumber = lineNumber
self.filename = filename
self.parent = parent
def __repr__(self):
return '<Section %s %d %d>' % (self.name, self.level, self.lineNumber)
def pop_sections(sections, level):
while sections:
s = sections.pop()
if s and s.level < level:
sections.append(s)
return
heading_re = re.compile(r'''^([-=~:^"#*._+`'])\1+$''')
subject_re = re.compile(r'^[^\s]+.*$')
def find_sections(filename, lines):
sections = []
previousSections = []
level_values = {}
level = 0
for i, line in enumerate(lines):
if i == 0:
continue
if heading_re.match(line) and subject_re.match(lines[i - 1]):
if i >= 2:
topLine = lines[i-2]
else:
topLine = ''
            # If the heading line is too short, then docutils doesn't consider
            # it a heading.
if len(line) < len(lines[i-1]):
continue
name = lines[i-1].strip()
key = line[0]
if heading_re.match(topLine):
# If there is an overline, it must match the bottom line.
if topLine != line:
# Not a heading.
continue
# We have an overline, so double up.
key = key + key
if key not in level_values:
level_values[key] = level + 1
level = level_values[key]
pop_sections(previousSections, level)
if previousSections:
parent = previousSections[-1]
else:
parent = None
lineNumber = i
s = Section(level, name, lines[i-1], lineNumber,
filename, parent)
previousSections.append(s)
sections.append(s)
# Blank lines to help correctly detect:
# foo
# ===
# bar
# ===
#
# as two underline style headings.
lines[i] = lines[i-1] = ''
if topLine:
lines[i-2] = ''
return sections
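# Detection sketch on a tiny document (illustrative input):
#   find_sections('doc.rst', ['Title', '=====', '', 'Sub', '---'])
#   # -> [<Section Title 1 1>, <Section Sub 2 4>]
# An underline must be at least as long as its subject, and an overline,
# when present, must match the underline exactly.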
def sections_to_tags(sections, sro):
tags = []
for section in sections:
tags.append(Tag.section(section, sro))
return tags
def gen_tags_header(output, sort):
if sort == "yes":
sortedLine = b'!_TAG_FILE_SORTED\t1\t//\n'
elif sort == "foldcase":
sortedLine = b'!_TAG_FILE_SORTED\t2\t//\n'
else:
sortedLine = b'!_TAG_FILE_SORTED\t0\t//\n'
output.write(b'!_TAG_FILE_ENCODING\tutf-8\t//\n')
output.write(b'!_TAG_FILE_FORMAT\t2\t//\n')
output.write(sortedLine)
def gen_tags_content(output, sort, tags):
if sort == "yes":
tags = sorted(tags)
elif sort == "foldcase":
tags = sorted(tags, key=lambda x: str(x).lower())
for t in tags:
output.write(t.render().encode('utf-8'))
output.write('\n'.encode('utf-8'))
def main():
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] file(s)",
version=__version__)
parser.add_option(
"-f", "--file", metavar="FILE", dest="tagfile",
default="tags",
help='Write tags into FILE (default: "tags"). Use "-" to write '
'tags to stdout.')
parser.add_option(
"", "--encoding", metavar="ENCODING", dest="encoding",
default=None,
help='Skips auto detection and uses the specified encoding for the '
'input files. Encoding name should be one that Python would '
'recognize.')
parser.add_option(
"", "--sort", metavar="[yes|foldcase|no]", dest="sort",
choices=["yes", "no", "foldcase"],
default="yes",
help='Produce sorted output. Acceptable values are "yes", '
'"no", and "foldcase". Default is "yes".')
parser.add_option(
"", "--sro", metavar="SEPARATOR", dest="sro",
default="|", action="store",
help=u'Use the specified string to scope nested headings. The '
'default is pipe symbol ("|"), but that can be an issue if your '
'headings contain the pipe symbol. It might be more useful to '
             'use a string such as the UTF-8 chevron ("\u00bb").')
options, args = parser.parse_args()
if not args:
raise ScriptError("No input files specified.")
if sys.version_info[0] == 2:
encoding = sys.stdin.encoding or locale.getpreferredencoding() or 'utf-8'
options.sro = options.sro.decode(encoding)
if options.tagfile == '-':
if sys.version_info[0] == 2:
output = sys.stdout
else:
output = sys.stdout.buffer
else:
output = open(options.tagfile, 'wb')
gen_tags_header(output, options.sort)
all_sections = []
try:
for filename in args:
if sys.version_info[0] == 2:
filename = filename.decode(sys.getfilesystemencoding())
try:
with open_autoenc(filename, encoding=options.encoding) as f:
buf = f.read()
except IOError as e:
if e.errno == errno.EPIPE:
raise
print_warning(e)
continue
lines = buf.splitlines()
del buf
sections = find_sections(filename, lines)
all_sections.extend(sections)
finally:
# We do this to match ctags behavior... even when a file is missing,
# it'll write out the tags it has.
gen_tags_content(output,
options.sort,
sections_to_tags(all_sections, options.sro))
output.flush()
output.close()
def print_warning(e):
print("WARNING: %s" % str(e), file=sys.stderr)
def print_error(e):
print("ERROR: %s" % str(e), file=sys.stderr)
def cli_main():
try:
main()
except IOError as e:
if e.errno == errno.EPIPE:
# Exit saying we got SIGPIPE.
sys.exit(141)
print_error(e)
sys.exit(1)
except ScriptError as e:
print_error(e)
sys.exit(1)
if __name__ == '__main__':
cli_main()
| bsd-3-clause | 7,713,842,127,701,343,000 | 26.58396 | 81 | 0.535344 | false |
furthz/colegio | src/APIs/urls.py | 1 | 5803 | from django.conf.urls import url, include
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
from .views import UserInfoListView, SnippetDetail, ApoderadoInfo, DocenteInfo
urlpatterns = [
url(r'^colegio_api/$', views.ColegioList.as_view()),
url(r'^colegio_api/(?P<pk>\d+)/$', views.ColegioDetail.as_view()),
url(r'^perfil_api/$', views.PerfilList.as_view()),
url(r'^perfil_api/(?P<pk>\d+)/$', views.PerfilDetail.as_view()),
####################
url(r'^apoderado_api/$', views.ApoderadoList.as_view()),
url(r'^apoderado_api/(?P<pk>\d+)/$', views.ApoderadoDetail.as_view()),
###################
url(r'^alumno_api/$', views.AlumnoList.as_view()),
url(r'^alumno_api/(?P<pk>\d+)/$', views.AlumnoDetail.as_view()),
###################
url(r'^apoderadoalumno_api/$', views.ApoderadoAlumnoList.as_view()),
url(r'^apoderadoalumno_api/(?P<pk>\d+)/$', views.ApoderadoAlumnoDetail.as_view()),
# http://127.0.0.1:8000/APIs/apoderadoalumno_api/?search=3
###################
url(r'^matricula_api/$', views.MatriculaList.as_view()),
url(r'^matricula_api/(?P<pk>\d+)/$', views.MatriculaDetail.as_view()),
###################
    # URLs for the Academic Module web service
url(r'^asistencia_api/$', views.AsistenciaList.as_view()),
url(r'^asistencia_api/(?P<pk>\d+)/$', views.AsistenciaDetail.as_view()),
url(r'^aula_api/$', views.AulaList.as_view()),
url(r'^aula_api/(?P<pk>\d+)/$', views.AulaDetail.as_view()),
# url(r'^curso_docente_api/$', views.CursoDocenteList.as_view()),
url(r'^user_info/$', UserInfoListView.as_view(), name='user_info'),
# url(r'^profesor_info/$', UserInfoListView.as_view(), name='profesor_info'),
url(r'^apoderado_info/$', ApoderadoInfo, name='apoderado_info'),
url(r'^docente_info/$', DocenteInfo, name='docente_info'),
url(r'^colegio/(?P<pk>[0-9]+)/(?P<nombre>\w+)/$', SnippetDetail.as_view()),
url(r'^curso_docente_api/(?P<pk>[0-9]+)/(?P<docente>\w+)/$', views.CursoDocenteList.as_view()),
    # Web service to get the STUDENTS assigned to a given classroom
    url(r'^aula_alumnos_api/(?P<pk>[0-9]+)/(?P<aula>\w+)/$', views.AulaAlumnosList.as_view()),
    # Web service to get the COURSES taught by a given teacher
    url(r'^docente_curso_api/(?P<pk>[0-9]+)/(?P<docente>\w+)/$', views.DocenteCursoList.as_view()),
    # Web service to get the CLASSROOMS where a given teacher teaches
    url(r'^docente_aula_api/(?P<pk>[0-9]+)/(?P<docente>\w+)/$', views.DocenteAulaList.as_view()),
    # Web service to view the MONTHLY ATTENDANCE of a given classroom
    url(r'^aula_asistencia_api/(?P<pk>[0-9]+)/(?P<aula>\w+)/(?P<mes>\w+)/$', views.AulaAsistenciaList.as_view()),
    # test web service
url(r'^relacionusuarioperfil/(?P<pk>[0-9]+)/(?P<docente>\w+)/$', views.RelacionUsuarioPerfilView.as_view()),
url(r'^relacionperfilalumno/(?P<pk>[0-9]+)/(?P<docente>\w+)/$', views.RelacionPerfilAlumnoView.as_view()),
#################################################################################################################
url(r'^personaemisor_api/$', views.PersonaEmisorList.as_view()),
url(r'^personaemisor_api/(?P<pk>\d+)/$', views.PersonaEmisorDetail.as_view()),
###################
url(r'^personareceptor_api/$', views.PersonaReceptorList.as_view()),
url(r'^personareceptor_api/(?P<pk>\d+)/$', views.PersonaReceptorDetail.as_view()),
###################
url(r'^tipoalerta_api/$', views.TipoAlertaList.as_view()),
url(r'^tipoalerta_api/(?P<pk>\d+)/$', views.TipoAlertaDetail.as_view()),
####################
url(r'^estadoalerta_api/$', views.EstadoAlertaList.as_view()),
url(r'^estadoalerta_api/(?P<pk>\d+)/$', views.EstadoAlertaDetail.as_view()),
###################
url(r'^contenidoalerta_api/$', views.ContenidoAlertaList.as_view()),
url(r'^contenidoalerta_api/(?P<pk>\d+)/$', views.ContenidoAlertaDetail.as_view()),
###################
url(r'^alerta_api/$', views.AlertaList.as_view()),
url(r'^alerta_api/(?P<pk>\d+)/$', views.AlertaDetail.as_view()),
###################
url(r'^token_f/$', views.TokenFirebaseList.as_view()),
url(r'^token_f/(?P<pk>\d+)/$', views.TokenFirebaseDetail.as_view()),
###################
url(r'^usuarios_permisos/$', views.GroupList.as_view()),
url(r'^usuarios_permisos/(?P<pk>\d+)/$', views.GroupDetail.as_view()),
###################
url(r'^colegio_personal/$', views.PersonalColegioList.as_view()),
url(r'^colegio_personal/(?P<pk>\d+)/$', views.PersonalColegioDetail.as_view()),
###################
url(r'^personal/$', views.PersonalList.as_view()),
url(r'^personal/(?P<pk>\d+)/$', views.PersonalDetail.as_view()),
###################
url(r'^alertadata_api/$', views.AlertaDataList.as_view()),
url(r'^alertadata_api/(?P<pk>\d+)/$', views.AlertaDataDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
| mit | 8,742,488,692,962,942,000 | 47.308333 | 117 | 0.594963 | false |
Starofall/RTX | rtxlib/executionstrategy/__init__.py | 1 | 2569 | from rtxlib.executionstrategy.ForeverStrategy import start_forever_strategy
from rtxlib.executionstrategy.StepStrategy import start_step_strategy
from rtxlib.executionstrategy.SelfOptimizerStrategy import start_self_optimizer_strategy
from rtxlib.executionstrategy.SequencialStrategy import start_sequential_strategy
from rtxlib import log_results, error, info
from rtxlib.executionstrategy.UncorrelatedSelfOptimizerStrategy import start_uncorrelated_self_optimizer_strategy
def run_execution_strategy(wf):
""" we run the correct execution strategy """
applyInitKnobs(wf)
try:
# start the right execution strategy
if wf.execution_strategy["type"] == "sequential":
log_results(wf.folder, wf.execution_strategy["knobs"][0].keys() + ["result"], append=False)
start_sequential_strategy(wf)
elif wf.execution_strategy["type"] == "self_optimizer":
log_results(wf.folder, wf.execution_strategy["knobs"].keys() + ["result"], append=False)
start_self_optimizer_strategy(wf)
elif wf.execution_strategy["type"] == "uncorrelated_self_optimizer":
log_results(wf.folder, wf.execution_strategy["knobs"].keys() + ["result"], append=False)
start_uncorrelated_self_optimizer_strategy(wf)
elif wf.execution_strategy["type"] == "step_explorer":
log_results(wf.folder, wf.execution_strategy["knobs"].keys() + ["result"], append=False)
start_step_strategy(wf)
elif wf.execution_strategy["type"] == "forever":
start_forever_strategy(wf)
except RuntimeError:
error("Stopped the whole workflow as requested by a RuntimeError")
# finished
info(">")
applyDefaultKnobs(wf)
def applyInitKnobs(wf):
""" we are done, so revert to default if given """
if "pre_workflow_knobs" in wf.execution_strategy:
try:
info("> Applied the pre_workflow_knobs")
wf.change_provider["instance"] \
.applyChange(wf.change_event_creator(wf.execution_strategy["pre_workflow_knobs"]))
except:
error("apply changes did not work")
def applyDefaultKnobs(wf):
""" we are done, so revert to default if given """
if "post_workflow_knobs" in wf.execution_strategy:
try:
info("> Applied the post_workflow_knobs")
wf.change_provider["instance"] \
.applyChange(wf.change_event_creator(wf.execution_strategy["post_workflow_knobs"]))
except:
error("apply changes did not work")
| mit | 7,610,358,323,314,089,000 | 41.816667 | 113 | 0.669132 | false |
prakashselvam/Apart | Maintenance/views.py | 1 | 16398 | from django.http import HttpResponse
from django.views.generic import View
import json
import urllib2
import config
import utils
import log_rotator
from Maintenance_utils.PeopleDataLoader import PeopleDataLoader
from Maintenance_utils.DueDataLoader import DueDataLoader
from Maintenance_utils.ApartmentAccountUtils import ApartmentAccountClass
from Maintenance_utils.ApartmentUserUtils import ApartUserUtil
# Create your views here.
def test(request):
"""
@summary: View method to check the server status.
@param request: HttpRequest.
@rtype: HttpResponse
@return: HttpResponse containing server status.
"""
result = {'status': 'It works'}
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
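# Quick smoke test for the status view above (the URL wiring is an
# assumption, check the project's urls.py):
#   curl http://localhost:8000/test/   ->   {"status": "It works"}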
class LoadPeopleFile(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
apartmentid = request.POST.get('apartmentid')
file_name = request.POST.get('file_name')
SheetNo = request.POST.get('SheetNo')
peopleDataLoader = PeopleDataLoader()
result = peopleDataLoader.read_people_data(apartmentid, file_name, SheetNo)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
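# Example request against the loader above; the form field names match the
# request.POST reads, while the URL prefix is an assumption (see urls.py):
#   curl -X POST http://localhost:8000/load_people/ \
#        -d apartmentid=12 -d file_name=/tmp/people.xlsx -d SheetNo=0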
class LoadMaintenanceDueFile(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
apartmentid = request.POST.get('apartmentid')
file_name = request.POST.get('file_name')
SheetNo = request.POST.get('SheetNo')
dueDataLoader = DueDataLoader()
result = dueDataLoader.read_due_data(apartmentid, file_name, SheetNo)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class CreateApartmentAccount(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
AppartmentName = request.POST.get('AppartmentName')
AppartmentEmail = request.POST.get('AppartmentEmail')
AppartmentAddress = request.POST.get('AppartmentAddress')
NoOfBlocks = request.POST.get('NoOfBlocks')
NumberOfFlats = request.POST.get('NumberOfFlats')
EmailAddress = request.POST.get('EmailAddress')
MobileNumber = request.POST.get('MobileNumber')
LandLine = request.POST.get('LandLine')
Password = request.POST.get('Password')
apartmentUserObj = ApartUserUtil()
result = apartmentUserObj.createAccount(AppartmentName,AppartmentEmail,AppartmentAddress,NoOfBlocks,
NumberOfFlats,EmailAddress,MobileNumber,LandLine,Password)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class UpdateApartmentAccount(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
AppartmentEmail = request.POST.get('AppartmentEmail')
AccountHolderName = request.POST.get('AccountHolderName')
AccountNumber = request.POST.get('AccountNumber')
IFSCCode = request.POST.get('IFSCCode')
apartmentAccountObj = ApartmentAccountClass()
result = apartmentAccountObj.UpdateBankDetails(AppartmentEmail,AccountHolderName, AccountNumber, IFSCCode)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class RegisterUserAccount(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
first_name = request.POST.get('first_name')
last_name = request.POST.get('last_name')
block_name = request.POST.get('block_name')
flat_number = request.POST.get('flat_number')
mobile_number = request.POST.get('mobile_number')
email_id = request.POST.get('email_id')
type_occupancy = request.POST.get('type_occupancy')
have_car = request.POST.get('have_car')
apartment_id = request.POST.get('apartment_id')
password = request.POST.get('password')
apartmentUserObj = ApartUserUtil()
result = apartmentUserObj.registerUserAccount(first_name, last_name, block_name, flat_number,
mobile_number, email_id, type_occupancy, have_car,
apartment_id, password)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class UnmatchedRegistrations(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
block_name = request.POST.get('block_name')
flat_number = request.POST.get('flat_number')
apartment_id = request.POST.get('apartment_id')
type_occupancy = request.POST.get('type_occupancy')
apartmentUserObj = ApartUserUtil()
result['unmatch'] = apartmentUserObj.getunmatchreg(block_name, flat_number, apartment_id,type_occupancy)
result['prereg'] = apartmentUserObj.getpreregistrations(block_name, flat_number, apartment_id, type_occupancy)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class UpdatePreRegUser(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
first_name = request.POST.get('first_name')
last_name = request.POST.get('last_name')
block_name = request.POST.get('block_name')
flat_number = request.POST.get('flat_number')
mobile_number = request.POST.get('mobile_number')
email_id = request.POST.get('email_id')
type_occupancy = request.POST.get('type_occupancy')
apartment_id = request.POST.get('apartment_id')
apartmentUserObj = ApartUserUtil()
result = apartmentUserObj.updatePreRegUser(first_name, last_name, block_name, flat_number,
mobile_number, email_id, type_occupancy, apartment_id)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
class VerifyOTP(View):
def get(self,request,url):
result = config.INVALID_REQUEST_METHOD_RESPONSE
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json")
def post(self,request,url):
"""
@summary: View method to handle file load requests.
@param request: file path
@rtype: HttpResponse
@return: HttpResponse containing load file status.
"""
viewslogger = log_rotator.views_logger()
result = {}
try:
mobile_number = request.POST.get('mobile_number')
apartment_id = request.POST.get('apartment_id')
otp = request.POST.get('otp')
apartmentUserObj = ApartUserUtil()
result = apartmentUserObj.verifyOTP(apartment_id, mobile_number, otp)
except urllib2.HTTPError, err:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
if err.code == 401:
result = config.INVALID_CREDENTIALS_RESPONSE
else:
result = config.UNKNOWN_ERROR_RESPONSE
except KeyError:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.MANDATORY_DATA_MISSING_RESPONSE
except:
error_logger = log_rotator.error_logger()
error_logger.debug("Exception::", exc_info=True)
result = config.UNKNOWN_ERROR_RESPONSE
viewslogger.debug("Response : %s" % result)
return HttpResponse(json.dumps(result, default=utils.json_default), content_type="application/json") | gpl-3.0 | 2,509,562,686,776,142,000 | 46.395954 | 122 | 0.621783 | false |
niteeshsood/LoktraTest | webcrawler.py | 1 | 1769 | import urllib2
from bs4 import BeautifulSoup
import getopt
import sys
import pdb
def makeint(s):
  s = s.strip()
  ans = 0
for i in xrange(len(s)):
if s[i].isdigit():
ans=10*ans+int(s[i])
return ans
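# e.g. makeint(" of 275 ") -> 275; note that *all* digits in the string are
# concatenated in order, so makeint("1 - 40") -> 140.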
def main(argv):
try:
opts, args = getopt.getopt(argv,'hp:k:', )
if len(opts) == 0:
print 'Use python webcrawler.py -h for help'
sys.exit(2)
except getopt.GetoptError:
print 'Use python webcrawler.py -h for help'
sys.exit(2)
for op,ar in opts:
if op == '-p':
try:
int(ar)
except ValueError:
print 'Error. Page number should be a number'
sys.exit(2)
pageno = ar
elif op == '-k':
keyword = ar
elif op == '-h':
print 'Use python webcrawler.py -p pagenumber -k keyword'
sys.exit(2)
else: assert False, 'unhandled option'
if 'keyword' not in locals():
print 'Keyword not specified try again'
sys.exit(2)
if 'pageno' in locals():
test = 'http://www.shopping.com/products~PG-'+str(pageno)+'?KW='+str(keyword)
else:
test = 'http://www.shopping.com/products?KW=' + str(keyword)
page = urllib2.urlopen(test).read()
soup = BeautifulSoup(page)
if soup.body['id'] == 'noResults':
print 'No results for this keyword'
sys.exit(1)
else:
alltext = soup.get_text()
res = alltext[alltext.find('Results '): alltext.find('Results ')+25]
if 'pageno' in locals():
firstno = makeint(res[res.find('Results ')+8: res.find('-')-1])
lastno = makeint(res[res.find('-')+2:res.find('of ')])
print 'Number of results on page', pageno, ':', lastno-firstno+1
else:
print 'Number of results found', res[res.find('of ')+3:res.find('\n')]
if __name__ == '__main__':
main(sys.argv[1:])
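# Usage examples (keyword and page number are illustrative):
#   python webcrawler.py -k laptop
#   python webcrawler.py -p 2 -k laptop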
| gpl-3.0 | -1,612,103,384,303,443,200 | 24.637681 | 81 | 0.594686 | false |
dkriegner/xrayutilities | lib/xrayutilities/io/helper.py | 1 | 3315 | # This file is part of xrayutilities.
#
# xrayutilities is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2013-2019 Dominik Kriegner <[email protected]>
"""
convenience functions to open files for various data file reader
these functions should be used in new parsers since they transparently allow to
open gzipped and bzipped files
"""
import bz2
import gzip
import lzma
import h5py
from .. import config
from ..exception import InputError
def xu_open(filename, mode='rb'):
"""
function to open a file no matter if zipped or not. Files with extension
'.gz', '.bz2', and '.xz' are assumed to be compressed and transparently
opened to read like usual files.
Parameters
----------
filename : str
filename of the file to open (full including path)
mode : str, optional
mode in which the file should be opened
Returns
-------
file-handle
handle of the opened file
Raises
------
IOError
If the file does not exist an IOError is raised by the open routine,
which is not caught within the function
"""
if config.VERBOSITY >= config.INFO_ALL:
print("XU:io: opening file %s" % filename)
if filename.endswith('.gz'):
fid = gzip.open(filename, mode)
elif filename.endswith('.bz2'):
fid = bz2.BZ2File(filename, mode)
elif filename.endswith('.xz'):
fid = lzma.open(filename, mode)
else:
fid = open(filename, mode)
return fid
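# A short usage sketch (the file name is illustrative); compressed files are
# opened transparently based on their extension:
#
#   with xu_open('scan_0001.xrdml.gz') as fh:
#       raw = fh.read()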
class xu_h5open(object):
"""
helper object to decide if a HDF5 file has to be opened/closed when
using with a 'with' statement.
"""
def __init__(self, f, mode='r'):
"""
Parameters
----------
f : str
filename or h5py.File instance
mode : str, optional
mode in which the file should be opened. ignored in case a file
handle is passed as f
"""
self.closeFile = True
self.fid = None
self.mode = mode
if isinstance(f, h5py.File):
self.fid = f
self.closeFile = False
self.filename = f.filename
elif isinstance(f, str):
self.filename = f
else:
raise InputError("f argument of wrong type was passed, "
"should be string or filename")
def __enter__(self):
if self.fid:
if not self.fid.id.valid:
self.fid = h5py.File(self.filename, self.mode)
else:
self.fid = h5py.File(self.filename, self.mode)
return self.fid
def __exit__(self, type, value, traceback):
if self.closeFile:
self.fid.close()
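# Typical parser-side use (the file name is illustrative); the helper closes
# the HDF5 file only if it opened the file itself:
#
#   with xu_h5open('data.h5') as h5:
#       print(list(h5.keys()))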
| gpl-2.0 | 4,533,434,051,202,780,700 | 28.336283 | 79 | 0.622624 | false |
jrief/django-websocket-redis | ws4redis/publisher.py | 2 | 2898 | #-*- coding: utf-8 -*-
from redis import ConnectionPool, StrictRedis
from ws4redis import settings
from ws4redis.redis_store import RedisStore
from ws4redis._compat import is_authenticated
from redis.connection import UnixDomainSocketConnection
if 'unix_socket_path' in settings.WS4REDIS_CONNECTION:
# rename 'unix_socket_path' to 'path' and pass as args
conn_args = dict(settings.WS4REDIS_CONNECTION,
path=settings.WS4REDIS_CONNECTION['unix_socket_path'])
del conn_args['unix_socket_path']
redis_connection_pool = ConnectionPool(connection_class=UnixDomainSocketConnection, **conn_args)
else:
redis_connection_pool = ConnectionPool(**settings.WS4REDIS_CONNECTION)
class RedisPublisher(RedisStore):
def __init__(self, **kwargs):
"""
Initialize the channels for publishing messages through the message queue.
"""
connection = StrictRedis(connection_pool=redis_connection_pool)
super(RedisPublisher, self).__init__(connection)
for key in self._get_message_channels(**kwargs):
self._publishers.add(key)
def fetch_message(self, request, facility, audience='any'):
"""
Fetch the first message available for the given ``facility`` and ``audience``, if it has
been persisted in the Redis datastore.
The current HTTP ``request`` is used to determine to whom the message belongs.
A unique string is used to identify the bucket's ``facility``.
Determines the ``audience`` to check for the message. Must be one of ``broadcast``,
``group``, ``user``, ``session`` or ``any``. The default is ``any``, which means to check
for all possible audiences.
"""
prefix = self.get_prefix()
channels = []
if audience in ('session', 'any',):
if request and request.session:
channels.append('{prefix}session:{0}:{facility}'.format(request.session.session_key, prefix=prefix, facility=facility))
if audience in ('user', 'any',):
if is_authenticated(request):
channels.append('{prefix}user:{0}:{facility}'.format(request.user.get_username(), prefix=prefix, facility=facility))
if audience in ('group', 'any',):
try:
if is_authenticated(request):
groups = request.session['ws4redis:memberof']
channels.extend('{prefix}group:{0}:{facility}'.format(g, prefix=prefix, facility=facility)
for g in groups)
except (KeyError, AttributeError):
pass
if audience in ('broadcast', 'any',):
channels.append('{prefix}broadcast:{facility}'.format(prefix=prefix, facility=facility))
for channel in channels:
message = self._connection.get(channel)
if message:
return message
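# Typical publish/fetch pattern, following the ws4redis documentation
# (the facility name 'foobar' is illustrative):
#
#   from ws4redis.redis_store import RedisMessage
#   redis_publisher = RedisPublisher(facility='foobar', broadcast=True)
#   redis_publisher.publish_message(RedisMessage('Hello World'))
#   # later, e.g. inside a Django view:
#   message = redis_publisher.fetch_message(request, 'foobar', 'broadcast')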
| mit | -6,769,983,965,469,551,000 | 48.965517 | 135 | 0.638026 | false |
annapowellsmith/openpresc | openprescribing/frontend/tests/functional/selenium_base.py | 1 | 4639 | import os
import subprocess
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.conf import settings
# Django 1.11 removes the ability to supply a port range for liveserver tests,
# so we replicate that here. See: https://code.djangoproject.com/ticket/28212
# and https://code.djangoproject.com/ticket/26011
available_test_ports = iter(range(6080, 6580))
def use_saucelabs():
return os.environ.get('TRAVIS') or os.environ.get('USE_SAUCELABS')
@unittest.skipIf(
os.environ.get('TEST_SUITE') == 'nonfunctional',
"nonfunctional tests specified in TEST_SUITE environment variable")
class SeleniumTestCase(StaticLiveServerTestCase):
host = '0.0.0.0'
display = None
@classmethod
def setUpClass(cls):
cls.port = next(available_test_ports)
try:
cls.browser = cls.get_browser()
except Exception:
if cls.display:
cls.display.stop()
raise
cls.browser.maximize_window()
cls.browser.implicitly_wait(1)
super(SeleniumTestCase, cls).setUpClass()
@classmethod
def get_browser(cls):
if use_saucelabs():
return cls.get_saucelabs_browser()
else:
if cls.use_xvfb():
from pyvirtualdisplay import Display
cls.display = Display(visible=0, size=(1200, 800))
cls.display.start()
return cls.get_firefox_driver()
@classmethod
def get_saucelabs_browser(cls):
browser, version, platform = os.environ['BROWSER'].split(":")
caps = {'browserName': browser}
caps['platform'] = platform
caps['version'] = version
caps['screenResolution'] = '1600x1200'
# Disable slow script warning in IE
caps['prerun'] = {
'executable': ('https://raw.githubusercontent.com/'
'ebmdatalab/openprescribing/'
'master/scripts/setup_ie_8.bat'),
'background': 'false'
}
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
if os.environ.get('TRAVIS'):
caps["tunnel-identifier"] = os.environ.get(
"TRAVIS_JOB_NUMBER", 'n/a')
caps["build"] = os.environ.get("TRAVIS_BUILD_NUMBER", 'n/a')
caps["tags"] = ["CI"]
else:
caps["tags"] = ["from-dev-sandbox"]
if os.environ.get('TRAVIS') or os.path.exists('/.dockerenv'):
hub_url = "%s:%s@saucehost:4445" % (username, access_key)
else:
hub_url = "%s:%s@localhost:4445" % (username, access_key)
return webdriver.Remote(
desired_capabilities=caps,
command_executor="http://%s/wd/hub" % hub_url)
@classmethod
def use_xvfb(cls):
if not os.environ.get('SHOW_BROWSER', False):
return subprocess.call(
"type xvfb-run", shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
else:
return False
@classmethod
def get_firefox_driver(cls):
return webdriver.Firefox(
log_path="%s/logs/webdriver.log" % settings.REPO_ROOT)
@classmethod
def tearDownClass(cls):
cls.browser.quit()
if cls.display:
cls.display.stop()
super(SeleniumTestCase, cls).tearDownClass()
def _find_and_wait(self, locator_type, locator, waiter):
if use_saucelabs():
wait = 60
else:
wait = 5
try:
element = WebDriverWait(self.browser, wait).until(
waiter((locator_type, locator))
)
return element
except TimeoutException:
raise AssertionError("Expected to find element %s" % locator)
def find_by_xpath(self, locator):
return self._find_and_wait(By.XPATH, locator, EC.presence_of_element_located)
def find_visible_by_xpath(self, locator):
return self._find_and_wait(By.XPATH, locator, EC.visibility_of_element_located)
def find_by_css(self, locator):
return self._find_and_wait(By.CSS_SELECTOR, locator, EC.presence_of_element_located)
def find_visible_by_css(self, locator):
return self._find_and_wait(By.CSS_SELECTOR, locator, EC.visibility_of_element_located)
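# A minimal concrete test built on this base class (test body illustrative):
#
#   class SmokeTest(SeleniumTestCase):
#       def test_homepage_loads(self):
#           self.browser.get(self.live_server_url)
#           self.find_visible_by_css('body')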
| mit | -4,036,378,167,837,115,400 | 34.143939 | 94 | 0.613063 | false |
tzewangdorje/SIPserv | sipServ/message.py | 1 | 6199 | # core
import traceback, random, re
from collections import OrderedDict
# sipServ
from header import HeaderField
from userAgent import UserAgentServer
class Message(object):
def __init__(self, data=None):
        self.identifier = None
self.request = None
self.final = None
self.header = OrderedDict()
self.body = ""
self.sent = False
if data:
self._parse(data)
if not self._validate():
raise Exception('Invalid Message format')
def __repr__(self):
ret = "{\n"
ret = ret+" 'StartLine': '"+self.start_line+"',\n"
ret = ret+" 'Header': {\n"
        for field in self.header.values():
            ret = ret+"        '"+field.name+"': {\n"
            for value in field.values:
                ret = ret+"            "+str(value)+",\n"
            ret = ret+"        },\n"
        ret = ret+"    },\n"
        ret = ret+"    'Body': '"+self.body+"'\n"
ret = ret+"}\n"
return ret
def write(self):
ret = self.start_line + "\r\n"
for key,field in self.header.iteritems():
ret = ret + field.write() + "\r\n"
ret = ret + "\r\n"
ret = ret + self.body
return ret
def _parse(self, data):
headerDone = False
start = True
lines = data.splitlines()
for line in lines:
if start:
self.start_line = line
start = False
elif line=="":
headerDone = True
elif headerDone:
self.body = self.body+line
else:
headerField = HeaderField(line)
try:
key = headerField.name.lower()
self.header[key] = headerField
except: # this header field already exists, so add values to the existing one, TO DO
# header[hf.name].append(hf)
print traceback.format_exc()
def _validate(self):
return True
def getUserAgent(self):
return UserAgentServer()
def isRequest(self):
return self.request
def isResponse(self):
return not self.request
def isProvisional(self):
return not self.request and not self.final
def isFinal(self):
return not self.request and self.final
def getId(self):
try:
return self.header["via"].values[0].params["branch"]
except:
return None
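# A tiny parsing sketch for the classes below (the raw REGISTER text is
# illustrative, not taken from a real capture):
#
#   raw = ("REGISTER sip:example.com SIP/2.0\r\n"
#          "Via: SIP/2.0/UDP host;branch=z9hG4bK776\r\n\r\n")
#   msg = MessageRequestRegister(raw)
#   msg.getId()   # -> "z9hG4bK776", the Via branch parameter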
class MessageRequest(Message):
def __init__(self, data):
Message.__init__(self, data)
self.request = True
self.requestUri = self.start_line.split(" ")[1]
@property
def addressOfRecord(self):
to = self.header["to"].values[0].value
m = re.search(r"<(.+)>", to)
return m.group(1)
def getReturnIp(self):
via = self.header["via"]
return via.values[0].value.split(" ")[1]
def getReturnPort(self):
via = self.header["via"]
if via.values[0].params["rport"].isdigit():
return via.values[0].params["rport"]
else:
return "5060"
def getReturnTransport(self):
via = self.header["via"]
return via.values[0].value.split(" ")[0].split("/")[2]
class MessageResponse(Message):
@property
def start_line(self):
return "SIP/2.0 "+self.code+" "+self.reasonPhrase
def __init__(self, data):
Message.__init__(self, data)
self.request = False
self.code = ""
self.reasonPhrase = ""
def configureByRequest(self, requestMessage):
self.returnIp = requestMessage.getReturnIp()
self.returnPort = requestMessage.getReturnPort()
self.returnTransport = requestMessage.getReturnTransport()
self.header["via"] = requestMessage.header["via"]
toField = requestMessage.header["to"]
try:
toField.values[0].params["tag"]
except KeyError:
# no dialog tag yet - add one
toField.values[0].params["tag"] = '%x' % random.randint(0,2**31)
self.header["to"] = toField
self.header["from"] = requestMessage.header["from"]
self.header["call-id"] = requestMessage.header["call-id"]
self.header["cseq"] = requestMessage.header["cseq"]
class MessageRequestRegister(MessageRequest):
def __init__(self, data=None):
MessageRequest.__init__(self, data)
self.identifier = "REGISTER"
class MessageRequestInvite(MessageRequest):
def __init__(self, data=None):
MessageRequest.__init__(self, data)
self.identifier = "INVITE"
class MessageResponseProvisional(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "PROVISIONAL"
self.final = False
class MessageResponseSuccess(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "SUCCESS"
self.final = True
class MessageResponseRedirect(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "REDIRECT"
self.final = True
class MessageResponseClientError(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "CLIENT_ERROR"
def configureByRequest(self, requestMessage):
MessageResponse.configureByRequest(self, requestMessage)
if self.code=="405":
            self.header["allow"] = HeaderField("Allow: INVITE,ACK,BYE,CANCEL,OPTIONS") # INVITE missing for testing!
class MessageResponseServerError(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "SERVER_ERROR"
class MessageResponseGlobalFailure(MessageResponse):
def __init__(self, data=None):
MessageResponse.__init__(self, data)
self.identifier = "GLOBAL_FAILURE"
| gpl-3.0 | 7,854,859,847,364,854,000 | 28.52381 | 115 | 0.556541 | false |
Pikecillo/genna | external/4Suite-XML-1.0.2/Ft/Xml/Xslt/SaxWriter.py | 1 | 7538 | ########################################################################
# $Header: /var/local/cvsroot/4Suite/Ft/Xml/Xslt/SaxWriter.py,v 1.9 2005/03/18 23:47:19 jkloth Exp $
"""
SAX2 event writer for XSLT output
Copyright 2005 Fourthought, Inc. (USA).
Detailed license and copyright information: http://4suite.org/COPYRIGHT
Project home, documentation, distributions: http://4suite.org/
"""
import os
from Ft.Xml import EMPTY_NAMESPACE, XMLNS_NAMESPACE
from Ft.Xml import CheckVersion
from Ft.Xml.Domlette import implementation
from Ft.Xml.Lib.XmlString import IsXmlSpace, SplitQName
from Ft.Xml.XPath import Util
from Ft.Xml.Xslt import XSL_NAMESPACE, TextSax
from Ft.Xml.Xslt import OutputParameters
class ElementData:
def __init__(self, name, attrs, extraNss=None):
self.name = name
self.attrs = attrs
self.extraNss = extraNss or {}
try:
from xml.dom.ext.reader import Sax, Sax2, HtmlSax
except ImportError:
#It will be caught if a SaxWriter is created
pass
class SaxWriter:
"""
Requires PyXML (will be checked at instantiation time).
"""
def __init__(self, outputParams, saxHandler=None, fragment=False):
CheckVersion(feature="4XSLT's SaxWriter")
self.__fragment = fragment
self.__firstElementFlag = True
self.__orphanedNodes = []
self.__saxHandlerClass = None
self.__saxHandler = None
self.__stateStack = []
self.__currElement = None
self.__currText = u''
        self.__outputParams = outputParams or OutputParameters.OutputParameters()
if saxHandler:
self.__saxHandler = saxHandler
elif self.__outputParams.method == 'xml':
self.__initSaxHandler(Sax2.XmlDomGenerator)
if self.__outputParams.omitXmlDeclaration in [None, 'no']:
self.__saxHandler.xmlDecl(
self.__outputParams.version,
self.__outputParams.encoding,
self.__outputParams.standalone
)
elif self.__outputParams.method == 'html':
self.__initSaxHandler(HtmlSax.HtmlDomGenerator)
elif self.__outputParams.method == 'text':
self.__initSaxHandler(TextSax.TextGenerator)
def startDocument(self):
return
def endDocument(self):
return
def complete(self):
return self.__saxHandler and self.__saxHandler.getRootNode() or None
def getResult(self):
self.__completeTextNode()
return self.__saxHandler.getRootNode()
def __initSaxHandler(self, saxHandlerClass):
self.__saxHandlerClass = saxHandlerClass
self.__saxHandler = saxHandlerClass(keepAllWs=1)
for o_node in self.__orphanedNodes:
if o_node[0] == 'pi':
self.__saxHandler.processingInstruction(o_node[1], o_node[2])
elif o_node[0] == 'comment':
self.__saxHandler.comment(o_node[1])
del self.__orphanedNodes
return
def __initSax2Doc(self, doctype):
self.__firstElementFlag = False
if not self.__fragment:
if not self.__saxHandler:
self.__initSaxHandler(Sax2.XmlDomGenerator)
if self.__outputParams.omitXmlDeclaration in [None, 'no']:
self.__saxHandler.xmlDecl(
self.__outputParams.version,
self.__outputParams.encoding,
self.__outputParams.standalone
)
self.__saxHandler.startDTD(doctype, self.__outputParams.doctypeSystem, self.__outputParams.doctypePublic)
self.__saxHandler.endDTD()
return
def __initHtmlSaxDoc(self, doctype):
self.__firstElementFlag = False
if not self.__saxHandler:
self.__initSaxHandler(HtmlSax.HtmlDomGenerator)
#self.__saxHandler._4dom_startDTD(doctype, self.__outputParams.doctypeSystem, self.__outputParams.doctypePublic)
#self.__saxHandler.endDTD()
def __completeTextNode(self):
#FIXME: This does not allow multiple root nodes, which is required to be supported
if self.__currText:
if IsXmlSpace(self.__currText):
self.__saxHandler.ignorableWhitespace(self.__currText)
else:
self.__saxHandler.characters(self.__currText)
self.__currText = u''
return
def startElement(self, name, namespace=EMPTY_NAMESPACE, extraNss=None):
extraNss = extraNss or {}
attrs = {}
if self.__firstElementFlag:
if not self.__outputParams.method:
if not namespace and name.upper() == 'HTML':
self.__outputParams.method = 'html'
else:
self.__outputParams.method = 'xml'
if self.__outputParams.method == 'xml':
self.__initSax2Doc(name)
else:
self.__initHtmlSaxDoc(name)
self.__firstElementFlag = False
self.__completeTextNode()
if self.__currElement:
self.__saxHandler.startElement(self.__currElement.name, self.__currElement.attrs)
self.__currElement = None
self.__currElement = ElementData(name, attrs, extraNss)
if self.__outputParams.method == 'xml':
if namespace:
(prefix, local) = SplitQName(name)
if prefix:
self.__currElement.attrs["xmlns:"+prefix] = namespace
else:
self.__currElement.attrs["xmlns"] = namespace
            # declare any extra namespace mappings on the pending element;
            # with a SAX handler there is no DOM element yet, so record them
            # as xmlns attributes
            for prefix in extraNss.keys():
                if prefix:
                    self.__currElement.attrs[u'xmlns:'+prefix] = extraNss[prefix]
                else:
                    self.__currElement.attrs[u'xmlns'] = extraNss['']
return
def endElement(self, name):
self.__completeTextNode()
if self.__currElement:
self.__saxHandler.startElement(
self.__currElement.name,
self.__currElement.attrs
)
self.__currElement = None
self.__saxHandler.endElement(name)
return
def text(self, text, escapeOutput=True):
if self.__currElement:
self.__saxHandler.startElement(
self.__currElement.name,
self.__currElement.attrs
)
self.__currElement = None
self.__saxHandler.characters(text)
return
def attribute(self, name, value, namespace=EMPTY_NAMESPACE):
self.__currElement.attrs[name] = value
if namespace:
(prefix, local) = SplitQName(name)
if prefix:
self.__currElement.attrs[u"xmlns:"+prefix] = namespace
return
def processingInstruction(self, target, data):
self.__completeTextNode()
if self.__saxHandler:
self.__saxHandler.processingInstruction(target, data)
else:
self.__orphanedNodes.append(('pi', target, data))
return
def comment(self, body):
self.__completeTextNode()
if self.__saxHandler:
self.__saxHandler.comment(body)
else:
self.__orphanedNodes.append(('comment', body))
return
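# Rough driver sketch; in 4XSLT the processor normally owns this wiring, and
# the OutputParameters attribute usage here is an assumption:
#
#   params = OutputParameters.OutputParameters()
#   params.method = 'xml'
#   w = SaxWriter(params)
#   w.startElement(u'doc')
#   w.text(u'hello world')
#   w.endElement(u'doc')
#   dom_root = w.getResult()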
| gpl-2.0 | -4,203,331,712,846,488,600 | 36.316832 | 120 | 0.568188 | false |
DCSR/Analysis | GraphsTab.py | 1 | 13519 | """
This file contains all the precedures called from the GraphsTab
There are several ways to graph stuff. Much of what is in this files draws to a ttk canvas,
in this case self.graphCanvas.
The other way is to use matplotlib.
Index: (alphabetical)
cocaineModel() OK
cumulativeRecord() OK
eventRecords() OK
eventRecordsIntA() OK
histogram() OK
   pumpDurationsIntA() OK
timeStamps() OK
"""
import GraphLib
import model
import ListLib
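# Typical call pattern from the Graphs tab; the canvas and record objects
# come from the host GUI, so the argument names here are assumptions:
#
#   cumulativeRecord(graphCanvas, aRecord, showBPVar, max_x_scale=120, max_y_scale=100)
#   histogram(graphCanvas, aRecord, max_x_scale=120)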
def cocaineModel(aCanvas,aRecord,max_x_scale,resolution = 60, aColor = "blue", clear = True, max_y_scale = 20):
if clear:
aCanvas.delete('all')
x_zero = 75
y_zero = 350
x_pixel_width = 500 #700
y_pixel_height = 150 #200
x_divisions = 12
y_divisions = 4
if (max_x_scale == 10) or (max_x_scale == 30): x_divisions = 10
GraphLib.eventRecord(aCanvas, x_zero+5, 185, x_pixel_width, max_x_scale, aRecord.datalist, ["P"], "")
GraphLib.drawXaxis(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, x_divisions, color = "black")
GraphLib.drawYaxis(aCanvas, x_zero, y_zero, y_pixel_height, max_y_scale, y_divisions, True, color = "black")
x_scaler = x_pixel_width / (max_x_scale*60*1000)
y_scaler = y_pixel_height / max_y_scale
cocConcXYList = model.calculateCocConc(aRecord.datalist,aRecord.cocConc,aRecord.pumpSpeed,resolution)
    # print(cocConcXYList)
x = x_zero
y = y_zero
totalConc = 0
totalRecords = 0
startAverageTime = 10 * 60000 # 10 min
endAverageTime = 180 * 60000 # 120 min
for pairs in cocConcXYList:
if pairs[0] >= startAverageTime:
if pairs[0] < endAverageTime:
totalRecords = totalRecords + 1
totalConc = totalConc + pairs[1]
concentration = round(pairs[1],2)
newX = x_zero + pairs[0] * x_scaler // 1
newY = y_zero - concentration * y_scaler // 1
aCanvas.create_line(x, y, newX, newY, fill= aColor)
# aCanvas.create_oval(newX-2, newY-2, newX+2, newY+2, fill=aColor)
x = newX
y = newY
aCanvas.create_text(300, 400, fill = "blue", text = aRecord.fileName)
"""
dose = 2.8*aRecord.cocConc * aRecord.pumpSpeed
tempStr = "Duration (2.8 sec) * Pump Speed ("+str(aRecord.pumpSpeed)+" ml/sec) * cocConc ("+str(aRecord.cocConc)+" mg/ml) = Unit Dose "+ str(round(dose,3))+" mg/inj"
aCanvas.create_text(300, 450, fill = "blue", text = tempStr)
"""
averageConc = round((totalConc/totalRecords),3)
# draw average line
X1 = x_zero + (startAverageTime * x_scaler) // 1
Y = y_zero-((averageConc) * y_scaler) // 1
X2 = x_zero + (endAverageTime * x_scaler) // 1
# aCanvas.create_line(X1, Y, X2, Y, fill= "red")
# tempStr = "Average Conc (10-180 min): "+str(averageConc)
# aCanvas.create_text(500, Y, fill = "red", text = tempStr)
def cumulativeRecord(aCanvas,aRecord,showBPVar,max_x_scale,max_y_scale):
aCanvas.delete('all')
# graphCanvas is 800 x 600
x_zero = 50
y_zero = 550
x_pixel_width = 700
y_pixel_height = 500
x_divisions = 12
if (max_x_scale == 10) or (max_x_scale == 30): x_divisions = 10
y_divisions = 10
aTitle = aRecord.fileName
GraphLib.drawXaxis(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, x_divisions)
GraphLib.drawYaxis(aCanvas, x_zero, y_zero, y_pixel_height, max_y_scale, y_divisions, True)
GraphLib.cumRecord(aCanvas, x_zero, y_zero, x_pixel_width, y_pixel_height, max_x_scale, max_y_scale, \
aRecord.datalist,showBPVar, aTitle)
def eventRecords(aCanvas,aRecordList,max_x_scale):
# graphCanvas is 800 x 600
aCanvas.delete('all')
x_zero = 50
x_pixel_width = 700
x_divisions = 12
if (max_x_scale == 10) or (max_x_scale == 30): x_divisions = 10
GraphLib.drawXaxis(aCanvas, x_zero, 550, x_pixel_width, max_x_scale, x_divisions)
y_zero = 30
box = 0
# eventRecord(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, datalist, charList, aLabel)
# aTitle = aRecord.fileName
for record in aRecordList:
y_zero = y_zero + 40
box = box + 1
aTitle = "Box "+str(box)
GraphLib.eventRecord(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, record.datalist, ["P"], aTitle)
def eventRecordsIntA(aCanvas,aRecord):
# graphCanvas is 800 x 600
aCanvas.delete('all')
x_zero = 75
x_pixel_width = 600
x_divisions = 12
max_x_scale = 5
x_divisions = 5
GraphLib.drawXaxis(aCanvas, x_zero, 550, x_pixel_width, max_x_scale, x_divisions)
y_zero = 50
for block in range(12):
aTitle = str(block+1)
pump_timestamps = ListLib.get_pump_timestamps(aRecord.datalist,block)
GraphLib.eventRecord(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, pump_timestamps, ["P","p"], aTitle)
y_zero = y_zero + 45
def histogram(aCanvas, aRecord,max_x_scale,clear = True):
"""
Draws a histogram using the datalist from aRecord.
To Do: There is another histogram procedure in GraphLib. Should be merged.
"""
def drawBar(aCanvas,x,y, pixelHeight, width, color = "black"):
aCanvas.create_line(x, y, x, y-pixelHeight, fill=color)
aCanvas.create_line(x, y-pixelHeight, x+width, y-pixelHeight, fill=color)
aCanvas.create_line(x+width, y-pixelHeight, x+width, y, fill=color)
if clear:
aCanvas.delete('all')
# Draw Event Record
x_zero = 75
y_zero = 100
x_pixel_width = 700
y_pixel_height = 200
x_divisions = 12
y_divisions = 5
if (max_x_scale == 10) or (max_x_scale == 30): x_divisions = 10
aCanvas.create_text(200, y_zero-50 , fill = "blue", text = aRecord.fileName)
GraphLib.eventRecord(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, aRecord.datalist, ["P"], "")
# Populate bin array
binSize = 1 # in minutes
intervals = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
T1 = 0
numInj = 0
numIntervals = 0
outOfRange = 0
totalIntervals = 0
for pairs in aRecord.datalist:
if pairs[1] == "P":
numInj = numInj + 1
T2 = pairs[0]
if T1 > 0:
numIntervals = numIntervals + 1
                interval = round((T2-T1)/(binSize*60000),3) # interval in minutes, rounded to three decimal places
totalIntervals = totalIntervals + interval
index = int(interval)
if index < len(intervals)-1:
intervals[index] = intervals[index]+1
else:
outOfRange = outOfRange+1
T1 = T2
tempStr = "Number of Injections = "+str(numInj)
aCanvas.create_text(450, y_zero-50, fill = "blue", text = tempStr)
# print("Number of Inter-injection Intervals =",numIntervals)
# print("Inter-injection Intervals = ",intervals)
meanInterval = round(totalIntervals / numIntervals,3)
x_zero = 75
y_zero = 450
x_pixel_width = 400
y_pixel_height = 300
max_x_scale = 20
max_y_scale = 20
x_divisions = 20
y_divisions = max_y_scale
labelLeft = True
GraphLib.drawXaxis(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, x_divisions, color = "black")
GraphLib.drawYaxis(aCanvas, x_zero, y_zero, y_pixel_height, max_y_scale, y_divisions, labelLeft, color = "black")
# intervals = [0,1,2,3,4,5,6,5,4,3,2,1,0,0,0,0,0,0,0,1] #Used for test without loading a file
unitPixelHeight = int(y_pixel_height/y_divisions)
width = int(x_pixel_width/len(intervals))
for i in range(len(intervals)):
x = x_zero + (i*width)
drawBar(aCanvas,x,y_zero,intervals[i]*unitPixelHeight,width)
#Draw OutOfRange Bar
x = x_zero + (len(intervals)*width) + 20
drawBar(aCanvas,x,y_zero,outOfRange*unitPixelHeight,width)
tempStr = "Mean interval (min) = "+ str(meanInterval)
aCanvas.create_text(200, y_zero-y_pixel_height, fill = "red", text = tempStr)
rate = round(60/meanInterval,3)
tempStr = "Rate (inj/hr) = "+str(rate)
aCanvas.create_text(450, y_zero-y_pixel_height, fill = "blue", text = tempStr)
aCanvas.create_line(x_zero+int(width*meanInterval), y_zero, x_zero+int(width*meanInterval), y_zero-y_pixel_height+20, fill="red")
tempStr = "Each Bin = "+str(binSize)+" minute"
aCanvas.create_text(250, y_zero+50, fill = "blue", text = tempStr)
def pumpDurationsIntA(aCanvas,aRecord):
aCanvas.delete('all')
pump_timelist = ListLib.get_pump_duration_list(aRecord.datalist, -1)
duration_list = []
for data in pump_timelist:
duration_list.append(data[2])
x_zero = 75
y_zero = 50
x_pixel_width = 600
x_divisions = 12
max_x_scale = 5
x_divisions = 5
GraphLib.drawXaxis(aCanvas, x_zero, 550, x_pixel_width, max_x_scale, x_divisions)
x_scaler = x_pixel_width / (max_x_scale*60*1000)
y_zero = 50
block = 0
for block in range(12):
x = x_zero
y = y_zero
aLabel = str(block+1)
pump_timelist = ListLib.get_pump_duration_list(aRecord.datalist,block)
aCanvas.create_text(x_zero-30, y_zero-5, fill="blue", text = aLabel)
for data in pump_timelist:
newX = (x_zero + data[1] * x_scaler // 1)
aCanvas.create_line(x, y, newX, y)
height = int(data[2]/40)
aCanvas.create_line(newX, y, newX, y-height)
x = newX
y_zero = y_zero + 45
def timeStamps(aCanvas,aRecord,max_x_scale):
# graphCanvas is 800 x 600
aCanvas.delete('all')
x_zero = 100
y_zero = 550
x_pixel_width = 650
x_divisions = 12
if (max_x_scale == 10) or (max_x_scale == 30): x_divisions = 10
# Axis at (100,550)
GraphLib.drawXaxis(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, x_divisions, color = "black")
startTime = 0;
if len(aRecord.datalist) > 0:
firstEntry=(aRecord.datalist[0])
if (firstEntry[1] == 'M'):
startTime = firstEntry[0]
#print("StartTime =",startTime)
topRow = 40
spacing = 18
GraphLib.drawXaxis(aCanvas, x_zero, y_zero, x_pixel_width, max_x_scale, x_divisions)
GraphLib.eventRecord(aCanvas, x_zero, topRow, x_pixel_width, max_x_scale, aRecord.datalist, ["T","t"], "L1 Trial")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing), x_pixel_width, max_x_scale, aRecord.datalist, ["=","."], "Lever 1")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*2), x_pixel_width, max_x_scale, aRecord.datalist, ["L"], "L1 Resp")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*3), x_pixel_width, max_x_scale, aRecord.datalist, [">"], "L1 inactive")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*4), x_pixel_width, max_x_scale, aRecord.datalist, ["J"], "L2 active")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*5), x_pixel_width, max_x_scale, aRecord.datalist, ["<"], "L2 inactive")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*6), x_pixel_width, max_x_scale, aRecord.datalist, ["P","p"], "Pump")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*7), x_pixel_width, max_x_scale, aRecord.datalist, ["S","s"], "Stim")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*8), x_pixel_width, max_x_scale, aRecord.datalist, ["C","c"], "Stim 2")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*9), x_pixel_width, max_x_scale, aRecord.datalist, ["O","o"], "TimeOut")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*10), x_pixel_width, max_x_scale, aRecord.datalist, ["Z","z"], "HD Trial")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*11), x_pixel_width, max_x_scale, aRecord.datalist, ["~",","], "Lever 2")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*12), x_pixel_width, max_x_scale, aRecord.datalist, ["H","h"], "HD Resp")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*13), x_pixel_width, max_x_scale, aRecord.datalist, ["B","b"], "Block")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*14), x_pixel_width, max_x_scale, aRecord.datalist, ["I","i"], "IBI")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*15), x_pixel_width, max_x_scale, aRecord.datalist, ["G","E"], "Session")
aCanvas.create_text(15, topRow+(spacing*16)+4, fill="red", text="Errors", anchor = "w")
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*18), x_pixel_width, max_x_scale, aRecord.datalist, ["@"], "@ Input", t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*19), x_pixel_width, max_x_scale, aRecord.datalist, ["#"], "# Recover", t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*20), x_pixel_width, max_x_scale, aRecord.datalist, ["$"], "$ Output", t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*21), x_pixel_width, max_x_scale, aRecord.datalist, ["%"], "% Recover", t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*22), x_pixel_width, max_x_scale, aRecord.datalist, ["&"], "& Reset", t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*23), x_pixel_width, max_x_scale, aRecord.datalist, ["!"], "! Abort",t_zero = startTime)
GraphLib.eventRecord(aCanvas, x_zero, topRow+(spacing*24), x_pixel_width, max_x_scale, aRecord.datalist, ["("], "( Safe",t_zero = startTime)
"""
def drawCumulativeRecord(aRecord,aCanvas):
print("drawCumulativeRecord called")
"""
| gpl-3.0 | 3,189,540,916,714,370,600 | 44.063333 | 169 | 0.629558 | false |
punchagan/zulip | zerver/views/realm.py | 2 | 12881 | from typing import Any, Dict, Optional, Union
from django.core.exceptions import ValidationError
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.utils.translation import gettext as _
from django.views.decorators.http import require_safe
from confirmation.models import Confirmation, ConfirmationKeyException, get_object_from_key
from zerver.decorator import require_realm_admin, require_realm_owner
from zerver.forms import check_subdomain_available as check_subdomain
from zerver.lib.actions import (
do_deactivate_realm,
do_reactivate_realm,
do_set_realm_authentication_methods,
do_set_realm_message_editing,
do_set_realm_notifications_stream,
do_set_realm_property,
do_set_realm_signup_notifications_stream,
)
from zerver.lib.exceptions import OrganizationOwnerRequired
from zerver.lib.i18n import get_available_language_codes
from zerver.lib.request import REQ, JsonableError, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.retention import parse_message_retention_days
from zerver.lib.streams import access_stream_by_id
from zerver.lib.validator import (
check_bool,
check_capped_string,
check_dict,
check_int,
check_int_in,
check_string_or_int,
to_non_negative_int,
)
from zerver.models import Realm, UserProfile
@require_realm_admin
@has_request_variables
def update_realm(
request: HttpRequest,
user_profile: UserProfile,
name: Optional[str] = REQ(
str_validator=check_capped_string(Realm.MAX_REALM_NAME_LENGTH), default=None
),
description: Optional[str] = REQ(
str_validator=check_capped_string(Realm.MAX_REALM_DESCRIPTION_LENGTH), default=None
),
emails_restricted_to_domains: Optional[bool] = REQ(json_validator=check_bool, default=None),
disallow_disposable_email_addresses: Optional[bool] = REQ(
json_validator=check_bool, default=None
),
invite_required: Optional[bool] = REQ(json_validator=check_bool, default=None),
invite_to_realm_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.COMMON_POLICY_TYPES), default=None
),
name_changes_disabled: Optional[bool] = REQ(json_validator=check_bool, default=None),
email_changes_disabled: Optional[bool] = REQ(json_validator=check_bool, default=None),
avatar_changes_disabled: Optional[bool] = REQ(json_validator=check_bool, default=None),
inline_image_preview: Optional[bool] = REQ(json_validator=check_bool, default=None),
inline_url_embed_preview: Optional[bool] = REQ(json_validator=check_bool, default=None),
add_emoji_by_admins_only: Optional[bool] = REQ(json_validator=check_bool, default=None),
allow_message_deleting: Optional[bool] = REQ(json_validator=check_bool, default=None),
message_content_delete_limit_seconds: Optional[int] = REQ(
converter=to_non_negative_int, default=None
),
allow_message_editing: Optional[bool] = REQ(json_validator=check_bool, default=None),
allow_community_topic_editing: Optional[bool] = REQ(json_validator=check_bool, default=None),
mandatory_topics: Optional[bool] = REQ(json_validator=check_bool, default=None),
message_content_edit_limit_seconds: Optional[int] = REQ(
converter=to_non_negative_int, default=None
),
allow_edit_history: Optional[bool] = REQ(json_validator=check_bool, default=None),
default_language: Optional[str] = REQ(default=None),
waiting_period_threshold: Optional[int] = REQ(converter=to_non_negative_int, default=None),
authentication_methods: Optional[Dict[str, Any]] = REQ(
json_validator=check_dict([]), default=None
),
notifications_stream_id: Optional[int] = REQ(json_validator=check_int, default=None),
signup_notifications_stream_id: Optional[int] = REQ(json_validator=check_int, default=None),
message_retention_days_raw: Optional[Union[int, str]] = REQ(
"message_retention_days", json_validator=check_string_or_int, default=None
),
send_welcome_emails: Optional[bool] = REQ(json_validator=check_bool, default=None),
digest_emails_enabled: Optional[bool] = REQ(json_validator=check_bool, default=None),
message_content_allowed_in_email_notifications: Optional[bool] = REQ(
json_validator=check_bool, default=None
),
bot_creation_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.BOT_CREATION_POLICY_TYPES), default=None
),
create_stream_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.COMMON_POLICY_TYPES), default=None
),
invite_to_stream_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.COMMON_POLICY_TYPES), default=None
),
move_messages_between_streams_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.COMMON_POLICY_TYPES), default=None
),
user_group_edit_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.USER_GROUP_EDIT_POLICY_TYPES), default=None
),
private_message_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.PRIVATE_MESSAGE_POLICY_TYPES), default=None
),
wildcard_mention_policy: Optional[int] = REQ(
json_validator=check_int_in(Realm.WILDCARD_MENTION_POLICY_TYPES), default=None
),
email_address_visibility: Optional[int] = REQ(
json_validator=check_int_in(Realm.EMAIL_ADDRESS_VISIBILITY_TYPES), default=None
),
default_twenty_four_hour_time: Optional[bool] = REQ(json_validator=check_bool, default=None),
video_chat_provider: Optional[int] = REQ(json_validator=check_int, default=None),
giphy_rating: Optional[int] = REQ(json_validator=check_int, default=None),
default_code_block_language: Optional[str] = REQ(default=None),
digest_weekday: Optional[int] = REQ(
json_validator=check_int_in(Realm.DIGEST_WEEKDAY_VALUES), default=None
),
) -> HttpResponse:
realm = user_profile.realm
# Additional validation/error checking beyond types go here, so
# the entire request can succeed or fail atomically.
if default_language is not None and default_language not in get_available_language_codes():
raise JsonableError(_("Invalid language '{}'").format(default_language))
if authentication_methods is not None:
if not user_profile.is_realm_owner:
raise OrganizationOwnerRequired()
if True not in list(authentication_methods.values()):
return json_error(_("At least one authentication method must be enabled."))
if video_chat_provider is not None and video_chat_provider not in {
p["id"] for p in Realm.VIDEO_CHAT_PROVIDERS.values()
}:
return json_error(_("Invalid video_chat_provider {}").format(video_chat_provider))
if giphy_rating is not None and giphy_rating not in {
p["id"] for p in Realm.GIPHY_RATING_OPTIONS.values()
}:
return json_error(_("Invalid giphy_rating {}").format(giphy_rating))
message_retention_days: Optional[int] = None
if message_retention_days_raw is not None:
if not user_profile.is_realm_owner:
raise OrganizationOwnerRequired()
realm.ensure_not_on_limited_plan()
message_retention_days = parse_message_retention_days(
message_retention_days_raw, Realm.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
)
    # The use of `locals()` here is a bit of a code smell, but it's
# restricted to the elements present in realm.property_types.
#
# TODO: It should be possible to deduplicate this function up
# further by some more advanced usage of the
# `REQ/has_request_variables` extraction.
req_vars = {k: v for k, v in list(locals().items()) if k in realm.property_types}
data: Dict[str, Any] = {}
for k, v in list(req_vars.items()):
if v is not None and getattr(realm, k) != v:
do_set_realm_property(realm, k, v, acting_user=user_profile)
if isinstance(v, str):
data[k] = "updated"
else:
data[k] = v
# The following realm properties do not fit the pattern above
# authentication_methods is not supported by the do_set_realm_property
# framework because of its bitfield.
if authentication_methods is not None and (
realm.authentication_methods_dict() != authentication_methods
):
do_set_realm_authentication_methods(realm, authentication_methods, acting_user=user_profile)
data["authentication_methods"] = authentication_methods
# The message_editing settings are coupled to each other, and thus don't fit
# into the do_set_realm_property framework.
if (
(allow_message_editing is not None and realm.allow_message_editing != allow_message_editing)
or (
message_content_edit_limit_seconds is not None
and realm.message_content_edit_limit_seconds != message_content_edit_limit_seconds
)
or (
allow_community_topic_editing is not None
and realm.allow_community_topic_editing != allow_community_topic_editing
)
):
if allow_message_editing is None:
allow_message_editing = realm.allow_message_editing
if message_content_edit_limit_seconds is None:
message_content_edit_limit_seconds = realm.message_content_edit_limit_seconds
if allow_community_topic_editing is None:
allow_community_topic_editing = realm.allow_community_topic_editing
do_set_realm_message_editing(
realm,
allow_message_editing,
message_content_edit_limit_seconds,
allow_community_topic_editing,
acting_user=user_profile,
)
data["allow_message_editing"] = allow_message_editing
data["message_content_edit_limit_seconds"] = message_content_edit_limit_seconds
data["allow_community_topic_editing"] = allow_community_topic_editing
    # Realm.notifications_stream and Realm.signup_notifications_stream are not boolean,
    # str or integer fields, and thus don't fit into the do_set_realm_property framework.
if notifications_stream_id is not None:
if realm.notifications_stream is None or (
realm.notifications_stream.id != notifications_stream_id
):
new_notifications_stream = None
if notifications_stream_id >= 0:
(new_notifications_stream, sub) = access_stream_by_id(
user_profile, notifications_stream_id
)
do_set_realm_notifications_stream(
realm, new_notifications_stream, notifications_stream_id, acting_user=user_profile
)
data["notifications_stream_id"] = notifications_stream_id
if signup_notifications_stream_id is not None:
if realm.signup_notifications_stream is None or (
realm.signup_notifications_stream.id != signup_notifications_stream_id
):
new_signup_notifications_stream = None
if signup_notifications_stream_id >= 0:
(new_signup_notifications_stream, sub) = access_stream_by_id(
user_profile, signup_notifications_stream_id
)
do_set_realm_signup_notifications_stream(
realm,
new_signup_notifications_stream,
signup_notifications_stream_id,
acting_user=user_profile,
)
data["signup_notifications_stream_id"] = signup_notifications_stream_id
if default_code_block_language is not None:
        # '' is used in the API to encode the default/None behavior of this feature.
if default_code_block_language == "":
data["default_code_block_language"] = None
else:
data["default_code_block_language"] = default_code_block_language
return json_success(data)
@require_realm_owner
@has_request_variables
def deactivate_realm(request: HttpRequest, user: UserProfile) -> HttpResponse:
realm = user.realm
do_deactivate_realm(realm, acting_user=user)
return json_success()
@require_safe
def check_subdomain_available(request: HttpRequest, subdomain: str) -> HttpResponse:
try:
check_subdomain(subdomain)
return json_success({"msg": "available"})
except ValidationError as e:
return json_success({"msg": e.message})
def realm_reactivation(request: HttpRequest, confirmation_key: str) -> HttpResponse:
try:
realm = get_object_from_key(confirmation_key, Confirmation.REALM_REACTIVATION)
except ConfirmationKeyException:
return render(request, "zerver/realm_reactivation_link_error.html")
do_reactivate_realm(realm)
context = {"realm": realm}
return render(request, "zerver/realm_reactivation.html", context)
| apache-2.0 | -8,273,240,410,386,260,000 | 46.18315 | 100 | 0.68799 | false |
google/megalista | megalista_dataflow/main.py | 1 | 11372 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import warnings
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from mappers.ads_ssd_hashing_mapper import AdsSSDHashingMapper
from mappers.ads_user_list_pii_hashing_mapper import AdsUserListPIIHashingMapper
from sources.primary_execution_source import PrimaryExecutionSource
from sources.batches_from_executions import BatchesFromExecutions
from uploaders.appsflyer.appsflyer_s2s_uploader_async import AppsFlyerS2SUploaderDoFn
from uploaders.campaign_manager.campaign_manager_conversion_uploader import CampaignManagerConversionUploaderDoFn
from uploaders.google_ads.customer_match.contact_info_uploader import GoogleAdsCustomerMatchContactInfoUploaderDoFn
from uploaders.google_ads.customer_match.mobile_uploader import GoogleAdsCustomerMatchMobileUploaderDoFn
from uploaders.google_ads.customer_match.user_id_uploader import GoogleAdsCustomerMatchUserIdUploaderDoFn
from uploaders.google_ads.conversions.google_ads_offline_conversions_uploader import GoogleAdsOfflineUploaderDoFn
from uploaders.google_ads.conversions.google_ads_ssd_uploader import GoogleAdsSSDUploaderDoFn
from uploaders.google_analytics.google_analytics_data_import_uploader import GoogleAnalyticsDataImportUploaderDoFn
from uploaders.google_analytics.google_analytics_measurement_protocol import GoogleAnalyticsMeasurementProtocolUploaderDoFn
from uploaders.google_analytics.google_analytics_user_list_uploader import GoogleAnalyticsUserListUploaderDoFn
from uploaders.google_analytics.google_analytics_4_measurement_protocol import GoogleAnalytics4MeasurementProtocolUploaderDoFn
from uploaders.google_analytics.google_analytics_data_import_eraser import GoogleAnalyticsDataImportEraser
from uploaders.big_query.transactional_events_results_writer import TransactionalEventsResultsWriter
from models.execution import DestinationType
from models.execution import Execution
from models.oauth_credentials import OAuthCredentials
from models.options import DataflowOptions
from models.json_config import JsonConfig
from models.sheets_config import SheetsConfig
warnings.filterwarnings(
"ignore", "Your application has authenticated using end user credentials")
def filter_by_action(execution: Execution, destination_type: DestinationType):
return execution.destination.destination_type is destination_type
class MegalistaStep(beam.PTransform):
def __init__(self, oauth_credentials, dataflow_options=None, hasher=None):
self._oauth_credentials = oauth_credentials
self._dataflow_options = dataflow_options
self._hasher = hasher
def expand(self, executions):
pass
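# Sketch (illustrative only, not part of the pipeline): every concrete step
# below follows the same shape -- batch executions for one DestinationType,
# then apply an uploader DoFn. A hypothetical new destination would look like:
#
#   class MyDestinationStep(MegalistaStep):
#       def expand(self, executions):
#           return (
#               executions
#               | 'Load Data - MyDestination' >> BatchesFromExecutions(DestinationType.GA_DATA_IMPORT)
#               | 'Upload - MyDestination' >> beam.ParDo(MyUploaderDoFn())
#           )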
class GoogleAdsSSDStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - Google Ads SSD' >> BatchesFromExecutions(DestinationType.ADS_SSD_UPLOAD, 50)
| 'Hash Users - Google Ads SSD' >> beam.Map(self._hasher.map_batch)
| 'Upload - Google Ads SSD' >> beam.ParDo(GoogleAdsSSDUploaderDoFn(self._oauth_credentials,
self._dataflow_options.developer_token))
)
class GoogleAdsCustomerMatchMobileDeviceIdStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - Google Ads Customer Match Mobile Device Id' >> BatchesFromExecutions(DestinationType.ADS_CUSTOMER_MATCH_MOBILE_DEVICE_ID_UPLOAD)
            | 'Hash Users - Google Ads Customer Match Mobile Device Id' >> beam.Map(self._hasher.hash_users)
| 'Upload - Google Ads Customer Match Mobile Device Id' >> beam.ParDo(
GoogleAdsCustomerMatchMobileUploaderDoFn(self._oauth_credentials, self._dataflow_options.developer_token))
)
class GoogleAdsCustomerMatchContactInfoStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - Google Ads Customer Match Contact Info' >> BatchesFromExecutions(DestinationType.ADS_CUSTOMER_MATCH_CONTACT_INFO_UPLOAD)
| 'Hash Users - Google Ads Customer Match Contact Info' >> beam.Map(self._hasher.hash_users)
| 'Upload - Google Ads Customer Match Contact Info' >> beam.ParDo(
GoogleAdsCustomerMatchContactInfoUploaderDoFn(self._oauth_credentials, self._dataflow_options.developer_token))
)
class GoogleAdsCustomerMatchUserIdStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - Google Ads Customer Match User Id' >> BatchesFromExecutions(DestinationType.ADS_CUSTOMER_MATCH_USER_ID_UPLOAD)
            | 'Hash Users - Google Ads Customer Match User Id' >> beam.Map(self._hasher.hash_users)
            | 'Upload - Google Ads Customer Match User Id' >> beam.ParDo(
GoogleAdsCustomerMatchUserIdUploaderDoFn(self._oauth_credentials, self._dataflow_options.developer_token))
)
class GoogleAdsOfflineConversionsStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - GoogleAdsOfflineConversions' >> BatchesFromExecutions(DestinationType.ADS_OFFLINE_CONVERSION, 2000)
| 'Upload - GoogleAdsOfflineConversions' >> beam.ParDo(GoogleAdsOfflineUploaderDoFn(self._oauth_credentials,
self._dataflow_options.developer_token))
)
class GoogleAnalyticsUserListStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - GA user list' >> BatchesFromExecutions(DestinationType.GA_USER_LIST_UPLOAD, 5000000)
| 'Upload - GA user list' >> beam.ParDo(GoogleAnalyticsUserListUploaderDoFn(self._oauth_credentials))
)
class GoogleAnalyticsDataImportStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - GA data import' >> BatchesFromExecutions(DestinationType.GA_DATA_IMPORT, 1000000)
| 'Delete Data - GA data import' >> beam.ParDo(GoogleAnalyticsDataImportEraser(self._oauth_credentials))
| 'Upload - GA data import' >> beam.ParDo(GoogleAnalyticsDataImportUploaderDoFn(self._oauth_credentials))
)
class GoogleAnalyticsMeasurementProtocolStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - GA measurement protocol' >>
BatchesFromExecutions(DestinationType.GA_MEASUREMENT_PROTOCOL, 20, transactional=True)
| 'Upload - GA measurement protocol' >>
beam.ParDo(GoogleAnalyticsMeasurementProtocolUploaderDoFn())
| 'Persist results - GA measurement protocol' >> beam.ParDo(TransactionalEventsResultsWriter(self._dataflow_options.bq_ops_dataset))
)
class GoogleAnalytics4MeasurementProtocolStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - GA 4 measurement protocol' >> BatchesFromExecutions(DestinationType.GA_4_MEASUREMENT_PROTOCOL, 20,
transactional=True)
| 'Upload - GA 4 measurement protocol' >>
beam.ParDo(GoogleAnalytics4MeasurementProtocolUploaderDoFn())
| 'Persist results - GA 4 measurement protocol' >> beam.ParDo(TransactionalEventsResultsWriter(self._dataflow_options.bq_ops_dataset))
)
class CampaignManagerConversionStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - CM conversion' >> BatchesFromExecutions(DestinationType.CM_OFFLINE_CONVERSION, 1000, transactional=True)
| 'Upload - CM conversion' >> beam.ParDo(CampaignManagerConversionUploaderDoFn(self._oauth_credentials))
| 'Persist results - CM conversion' >> beam.ParDo(
TransactionalEventsResultsWriter(self._dataflow_options.bq_ops_dataset))
)
class AppsFlyerEventsStep(MegalistaStep):
def expand(self, executions):
return (
executions
| 'Load Data - AppsFlyer S2S events' >>
BatchesFromExecutions(DestinationType.APPSFLYER_S2S_EVENTS, 1000, transactional=True)
| 'Upload - AppsFlyer S2S events' >>
beam.ParDo(AppsFlyerS2SUploaderDoFn(self._dataflow_options.appsflyer_dev_key))
| 'Persist results - AppsFlyer S2S events' >> beam.ParDo(TransactionalEventsResultsWriter(self._dataflow_options.bq_ops_dataset))
)
def run(argv=None):
pipeline_options = PipelineOptions()
dataflow_options = pipeline_options.view_as(DataflowOptions)
oauth_credentials = OAuthCredentials(
dataflow_options.client_id,
dataflow_options.client_secret,
dataflow_options.access_token,
dataflow_options.refresh_token)
sheets_config = SheetsConfig(oauth_credentials)
json_config = JsonConfig()
execution_source = PrimaryExecutionSource(sheets_config,
json_config,
dataflow_options.setup_sheet_id,
dataflow_options.setup_json_url,
dataflow_options.setup_firestore_collection)
with beam.Pipeline(options=pipeline_options) as pipeline:
executions = (pipeline | 'Load executions' >> beam.io.Read(execution_source))
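        # Each step below consumes the same executions PCollection; inside each
        # step, BatchesFromExecutions filters down to that step's
        # DestinationType, so every uploader only sees its own executions.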
executions | GoogleAdsSSDStep(
oauth_credentials, dataflow_options, AdsSSDHashingMapper())
executions | GoogleAdsCustomerMatchMobileDeviceIdStep(
oauth_credentials, dataflow_options, AdsUserListPIIHashingMapper())
executions | GoogleAdsCustomerMatchContactInfoStep(
oauth_credentials, dataflow_options, AdsUserListPIIHashingMapper())
executions | GoogleAdsCustomerMatchUserIdStep(
oauth_credentials, dataflow_options, AdsUserListPIIHashingMapper())
executions | GoogleAdsOfflineConversionsStep(
oauth_credentials, dataflow_options)
executions | GoogleAnalyticsUserListStep(oauth_credentials)
executions | GoogleAnalyticsDataImportStep(oauth_credentials)
executions | GoogleAnalyticsMeasurementProtocolStep(
oauth_credentials, dataflow_options)
executions | GoogleAnalytics4MeasurementProtocolStep(
oauth_credentials, dataflow_options)
executions | CampaignManagerConversionStep(oauth_credentials, dataflow_options)
executions | AppsFlyerEventsStep(oauth_credentials, dataflow_options)
# todo: update trix at the end
if __name__ == '__main__':
logging.getLogger().setLevel(logging.ERROR)
logging.getLogger("megalista").setLevel(logging.INFO)
run()
logging.getLogger("megalista").info("Completed successfully!") | apache-2.0 | -8,721,198,801,614,752,000 | 49.323009 | 155 | 0.721861 | false |
volpino/Yeps-EURAC | tools/solid_tools/maq_cs_wrapper.py | 1 | 12127 | #! /usr/bin/python
#Guruprasad Ananda
#MAQ mapper for SOLiD colourspace-reads
import sys, os, zipfile, tempfile, subprocess
def stop_err( msg ):
sys.stderr.write( "%s\n" % msg )
sys.exit()
def __main__():
out_fname = sys.argv[1].strip()
out_f2 = open(sys.argv[2].strip(),'r+')
ref_fname = sys.argv[3].strip()
f3_read_fname = sys.argv[4].strip()
f3_qual_fname = sys.argv[5].strip()
paired = sys.argv[6]
if paired == 'yes':
r3_read_fname = sys.argv[7].strip()
r3_qual_fname = sys.argv[8].strip()
min_mapqual = int(sys.argv[9].strip())
max_mismatch = int(sys.argv[10].strip())
out_f3name = sys.argv[11].strip()
subprocess_dict = {}
ref_csfa = tempfile.NamedTemporaryFile()
ref_bfa = tempfile.NamedTemporaryFile()
ref_csbfa = tempfile.NamedTemporaryFile()
cmd2_1 = 'maq fasta2csfa %s > %s 2>&1' %(ref_fname,ref_csfa.name)
cmd2_2 = 'maq fasta2bfa %s %s 2>&1' %(ref_csfa.name,ref_csbfa.name)
cmd2_3 = 'maq fasta2bfa %s %s 2>&1' %(ref_fname,ref_bfa.name)
try:
os.system(cmd2_1)
os.system(cmd2_2)
os.system(cmd2_3)
except Exception, erf:
        stop_err("Error processing reference sequence: " + str(erf))
if paired == 'yes': #paired end reads
tmpf = tempfile.NamedTemporaryFile() #forward reads
tmpr = tempfile.NamedTemporaryFile() #reverse reads
tmps = tempfile.NamedTemporaryFile() #single reads
tmpffastq = tempfile.NamedTemporaryFile()
tmprfastq = tempfile.NamedTemporaryFile()
tmpsfastq = tempfile.NamedTemporaryFile()
cmd1 = "solid2fastq_modified.pl 'yes' %s %s %s %s %s %s %s 2>&1" %(tmpf.name,tmpr.name,tmps.name,f3_read_fname,f3_qual_fname,r3_read_fname,r3_qual_fname)
try:
os.system(cmd1)
os.system('gunzip -c %s >> %s' %(tmpf.name,tmpffastq.name))
os.system('gunzip -c %s >> %s' %(tmpr.name,tmprfastq.name))
os.system('gunzip -c %s >> %s' %(tmps.name,tmpsfastq.name))
except Exception, eq:
stop_err("Error converting data to fastq format." + str(eq))
#make a temp directory where the split fastq files will be stored
try:
split_dir = tempfile.mkdtemp()
split_file_prefix_f = tempfile.mktemp(dir=split_dir)
split_file_prefix_r = tempfile.mktemp(dir=split_dir)
splitcmd_f = 'split -a 2 -l %d %s %s' %(32000000,tmpffastq.name,split_file_prefix_f) #32M lines correspond to 8M reads
splitcmd_r = 'split -a 2 -l %d %s %s' %(32000000,tmprfastq.name,split_file_prefix_r) #32M lines correspond to 8M reads
os.system(splitcmd_f)
os.system(splitcmd_r)
os.chdir(split_dir)
ii = 0
for fastq in os.listdir(split_dir):
if not fastq.startswith(split_file_prefix_f.split("/")[-1]):
continue
                fastq_r = split_file_prefix_r + fastq.split(split_file_prefix_f.split("/")[-1])[1] #find the reverse-strand fastq corresponding to the forward-strand fastq
tmpbfq_f = tempfile.NamedTemporaryFile()
tmpbfq_r = tempfile.NamedTemporaryFile()
cmd3 = 'maq fastq2bfq %s %s 2>&1; maq fastq2bfq %s %s 2>&1; maq map -c %s.csmap %s %s %s 1>/dev/null 2>&1; maq mapview %s.csmap > %s.txt' %(fastq,tmpbfq_f.name,fastq_r,tmpbfq_r.name,fastq,ref_csbfa.name,tmpbfq_f.name,tmpbfq_r.name,fastq,fastq)
subprocess_dict['sp'+str(ii+1)] = subprocess.Popen([cmd3],shell=True,stdout=subprocess.PIPE)
ii += 1
while True:
all_done = True
for j,k in enumerate(subprocess_dict.keys()):
if subprocess_dict['sp'+str(j+1)].wait() != 0:
err = subprocess_dict['sp'+str(j+1)].communicate()[1]
if err != None:
stop_err("Mapping error: %s" %err)
all_done = False
if all_done:
break
cmdout = "for map in *.txt; do cat $map >> %s; done" %(out_fname)
os.system(cmdout)
tmpcsmap = tempfile.NamedTemporaryFile()
cmd_cat_csmap = "for csmap in *.csmap; do cat $csmap >> %s; done" %(tmpcsmap.name)
os.system(cmd_cat_csmap)
tmppileup = tempfile.NamedTemporaryFile()
cmdpileup = "maq pileup -m %s -q %s %s %s > %s" %(max_mismatch,min_mapqual,ref_bfa.name,tmpcsmap.name,tmppileup.name)
os.system(cmdpileup)
tmppileup.seek(0)
print >> out_f2, "#chr\tposition\tref_nt\tcoverage\tSNP_count\tA_count\tT_count\tG_count\tC_count"
for line in file(tmppileup.name):
elems = line.strip().split()
ref_nt = elems[2].capitalize()
read_nt = elems[4]
coverage = int(elems[3])
a,t,g,c = 0,0,0,0
ref_nt_count = 0
for ch in read_nt:
ch = ch.capitalize()
if ch not in ['A','T','G','C',',','.']:
continue
if ch in [',','.']:
ch = ref_nt
ref_nt_count += 1
try:
nt_ind = ['A','T','G','C'].index(ch)
if nt_ind == 0:
a+=1
elif nt_ind == 1:
t+=1
elif nt_ind == 2:
g+=1
else:
c+=1
except ValueError, we:
print >>sys.stderr, we
print >> out_f2, "%s\t%s\t%s\t%s\t%s\t%s" %("\t".join(elems[:4]),coverage-ref_nt_count,a,t,g,c)
except Exception, er2:
stop_err("Encountered error while mapping: %s" %(str(er2)))
else: #single end reads
tmpf = tempfile.NamedTemporaryFile()
tmpfastq = tempfile.NamedTemporaryFile()
cmd1 = "solid2fastq_modified.pl 'no' %s %s %s %s %s %s %s 2>&1" %(tmpf.name,None,None,f3_read_fname,f3_qual_fname,None,None)
try:
os.system(cmd1)
os.system('gunzip -c %s >> %s' %(tmpf.name,tmpfastq.name))
tmpf.close()
except:
stop_err("Error converting data to fastq format.")
#make a temp directory where the split fastq files will be stored
try:
split_dir = tempfile.mkdtemp()
split_file_prefix = tempfile.mktemp(dir=split_dir)
splitcmd = 'split -a 2 -l %d %s %s' %(32000000,tmpfastq.name,split_file_prefix) #32M lines correspond to 8M reads
os.system(splitcmd)
os.chdir(split_dir)
for i,fastq in enumerate(os.listdir(split_dir)):
tmpbfq = tempfile.NamedTemporaryFile()
cmd3 = 'maq fastq2bfq %s %s 2>&1; maq map -c %s.csmap %s %s 1>/dev/null 2>&1; maq mapview %s.csmap > %s.txt' %(fastq,tmpbfq.name,fastq,ref_csbfa.name,tmpbfq.name,fastq,fastq)
subprocess_dict['sp'+str(i+1)] = subprocess.Popen([cmd3],shell=True,stdout=subprocess.PIPE)
while True:
all_done = True
for j,k in enumerate(subprocess_dict.keys()):
if subprocess_dict['sp'+str(j+1)].wait() != 0:
err = subprocess_dict['sp'+str(j+1)].communicate()[1]
if err != None:
stop_err("Mapping error: %s" %err)
all_done = False
if all_done:
break
cmdout = "for map in *.txt; do cat $map >> %s; done" %(out_fname)
os.system(cmdout)
tmpcsmap = tempfile.NamedTemporaryFile()
cmd_cat_csmap = "for csmap in *.csmap; do cat $csmap >> %s; done" %(tmpcsmap.name)
os.system(cmd_cat_csmap)
tmppileup = tempfile.NamedTemporaryFile()
cmdpileup = "maq pileup -m %s -q %s %s %s > %s" %(max_mismatch,min_mapqual,ref_bfa.name,tmpcsmap.name,tmppileup.name)
os.system(cmdpileup)
tmppileup.seek(0)
print >> out_f2, "#chr\tposition\tref_nt\tcoverage\tSNP_count\tA_count\tT_count\tG_count\tC_count"
for line in file(tmppileup.name):
elems = line.strip().split()
ref_nt = elems[2].capitalize()
read_nt = elems[4]
coverage = int(elems[3])
a,t,g,c = 0,0,0,0
ref_nt_count = 0
for ch in read_nt:
ch = ch.capitalize()
if ch not in ['A','T','G','C',',','.']:
continue
if ch in [',','.']:
ch = ref_nt
ref_nt_count += 1
try:
nt_ind = ['A','T','G','C'].index(ch)
if nt_ind == 0:
a+=1
elif nt_ind == 1:
t+=1
elif nt_ind == 2:
g+=1
else:
c+=1
                    except ValueError:
                        # ref_nt may be a non-ACGT base such as 'N',
                        # which is deliberately not counted.
                        pass
print >> out_f2, "%s\t%s\t%s\t%s\t%s\t%s" %("\t".join(elems[:4]),coverage-ref_nt_count,a,t,g,c)
except Exception, er2:
stop_err("Encountered error while mapping: %s" %(str(er2)))
#Build custom track from pileup
chr_list=[]
out_f2.seek(0)
fcov = tempfile.NamedTemporaryFile()
fout_a = tempfile.NamedTemporaryFile()
fout_t = tempfile.NamedTemporaryFile()
fout_g = tempfile.NamedTemporaryFile()
fout_c = tempfile.NamedTemporaryFile()
fcov.write('''track type=wiggle_0 name="Coverage track" description="Coverage track (from Galaxy)" color=0,0,0 visibility=2\n''')
fout_a.write('''track type=wiggle_0 name="Track A" description="Track A (from Galaxy)" color=255,0,0 visibility=2\n''')
fout_t.write('''track type=wiggle_0 name="Track T" description="Track T (from Galaxy)" color=0,255,0 visibility=2\n''')
fout_g.write('''track type=wiggle_0 name="Track G" description="Track G (from Galaxy)" color=0,0,255 visibility=2\n''')
fout_c.write('''track type=wiggle_0 name="Track C" description="Track C (from Galaxy)" color=255,0,255 visibility=2\n''')
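# Each track body is UCSC "variableStep" wiggle data: a
# "variableStep chrom=<name>" header followed by "<position>\t<value>" lines,
# written by the loop below.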
for line in out_f2:
if line.startswith("#"):
continue
elems = line.split()
chr = elems[0]
if chr not in chr_list:
chr_list.append(chr)
    if not (chr.startswith('chr') or chr.startswith('scaffold')):
        chr = 'chr' + chr   # prefix bare chromosome names so browsers accept the wiggle header
header = "variableStep chrom=%s" %(chr)
fcov.write("%s\n" %(header))
fout_a.write("%s\n" %(header))
fout_t.write("%s\n" %(header))
fout_g.write("%s\n" %(header))
fout_c.write("%s\n" %(header))
try:
pos = int(elems[1])
cov = int(elems[3])
a = int(elems[5])
t = int(elems[6])
g = int(elems[7])
c = int(elems[8])
except:
continue
fcov.write("%s\t%s\n" %(pos,cov))
try:
a_freq = a*100./cov
t_freq = t*100./cov
g_freq = g*100./cov
c_freq = c*100./cov
except ZeroDivisionError:
a_freq=t_freq=g_freq=c_freq=0
fout_a.write("%s\t%s\n" %(pos,a_freq))
fout_t.write("%s\t%s\n" %(pos,t_freq))
fout_g.write("%s\t%s\n" %(pos,g_freq))
fout_c.write("%s\t%s\n" %(pos,c_freq))
fcov.seek(0)
fout_a.seek(0)
fout_g.seek(0)
fout_t.seek(0)
fout_c.seek(0)
os.system("cat %s %s %s %s %s | cat > %s" %(fcov.name,fout_a.name,fout_t.name,fout_g.name,fout_c.name,out_f3name))
if __name__=="__main__":
__main__()
| mit | 8,546,402,300,464,318,000 | 43.914815 | 259 | 0.504247 | false |
SimonSapin/snippets | event_loop/event_loop.py | 1 | 11983 | """
If an application needs to wait for various events and polling is not
possible or desirable, one solution is to use a blocking threads for each
events. However, multi-threading comes with its pitfalls and problems.
This event loop is a framework that allows an application to wait for
various events without using threads. Currently supported events are
files being ready for reading and timers (repeating or not).
The heart of the loop is basically `select.select()` with a well-chosen
timeout.
See http://exyr.org/2011/event-loop/
Author: Simon Sapin
License: BSD
"""
import sys
import os
import time
import itertools
import select
import decimal
# float('inf') is only officially supported from Python 2.6, while decimal
# has been there since 2.4.
Infinity = decimal.Decimal('Infinity')
class Timer(object):
"""
Create a new timer.
    If its `run()` method is called often enough, `callback` will be called
(without parameters) `interval` seconds from now (may be a floating point
number) and, if `repeat` is true, every `interval` seconds after that.
There is no thread or other form of preemption: the callback won't be
called if `run()` is not.
    A repeating timer may miss a few beats if `run()` is not called for more
    than one interval, but it is still scheduled on whole numbers of intervals
    after it was created or reset. See the tests for examples.
"""
@classmethod
def decorate(cls, *args, **kwargs):
"""
Decorator factory:
@Timer.decorate(1, repeat=True)
def every_second():
# ...
The decorated function is replaced by the Timer object so you can
write eg.
every_second.cancel()
"""
def decorator(callback):
return cls(callback, *args, **kwargs)
return decorator
def __init__(self, callback, interval, repeat=False,
_time_function=time.time):
# `_time_function` is meant as a dependency injection for testing.
assert interval > 0
self._callback = callback
self._interval = interval
self._repeat = repeat
self._now = _time_function
self.reset()
def reset(self):
"""
Cancel currently scheduled expiry and start again as if the timer
was created just now.
"""
self._expiry = self._now() + self._interval
def cancel(self):
"""Cancel the timer. The same timer object should not be used again."""
self._expiry = None
def __call__(self):
"""Decorated callbacks can still be called at any time."""
self._callback()
def run(self):
"""
Return whether the timer will trigger again. (Repeating or not expired
yet.)
"""
if self._expiry is None:
return False
if self._now() < self._expiry:
return True
if self._repeat:
# Would have expired that many times since last run().
times = (self._now() - self._expiry) // self._interval + 1
self._expiry += times * self._interval
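            # Example: interval=1s and run() called 3.5s after expiry ->
            # times == 4, so the next expiry lands back on the original
            # whole-interval schedule instead of drifting.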
else:
self._expiry = None
# Leave a chance to the callback to call `reset()`.
self()
return self._expiry is not None
def sleep_time(self):
"""
Return the amount of time before `run()` does anything, or
Decimal('Infinity') for a canceled or expired non-repeating timer.
"""
if self._expiry is None:
return Infinity
else:
return max(self._expiry - self._now(), 0)
class TimerManager(object):
"""
TimerManager handle multiple timers.
Not thread-safe, but the point is to avoid threads anyway.
"""
def __init__(self, _time_function=time.time):
"""
`_time_function` is meant as a dependency injection for testing.
"""
self._timers = []
self._time_function = _time_function
def add_timer(self, timeout, callback, repeat=False):
"""
Add a timer with `callback`, expiring `timeout` seconds from now and,
if `repeat` is true, every `timeout` seconds after that.
"""
timer = Timer(callback, timeout, repeat=repeat,
_time_function= self._time_function)
self._timers.append(timer)
return timer
def run(self):
"""
Call without arguments the callback of every expired timer.
Each callback is called at most once, even if a repeating timer
expired several times since last time `run()` was called.
"""
# Run all timers and remove those who won't trigger again.
self._timers = [timer for timer in self._timers if timer.run()]
def sleep_time(self):
"""
How much time you can wait before `run()` does something.
        Return Decimal('Infinity') if no timer is registered.
"""
return min(itertools.chain(
# Have at least one element. min() raises on empty sequences.
[Infinity],
(timer.sleep_time() for timer in self._timers)
))
class EventLoop(object):
"""
Manage callback functions to be called on certain events.
Currently supported events are:
* Timers (same as TimerManager)
* File descriptors ready for reading. (Waited for using `select.select()`)
"""
def __init__(self):
self._timers = TimerManager()
self._readers = {}
def add_timer(self, timeout, repeat=False):
"""
Decorator factory for adding a timer:
@loop.add_timer(1)
def one_second_from_now():
# callback code
"""
def decorator(callback):
return self._timers.add_timer(timeout, callback, repeat)
return decorator
def watch_for_reading(self, file_descriptor):
"""
Decorator factory for watching a file descriptor. When the file
        descriptor is ready for reading, it is passed as a parameter to
the decorated callback.
Takes either a file descriptor (integer) or a file object with a
`fileno()` method that returns one.
@loop.watch_for_reading(sys.stdin)
def one_second_from_now():
data = os.read(sys.stdin.fileno(), 255)
# ...
Use `os.read()` instead of `some_file.read()` to read just what is
available and avoid blocking, without the file actually being in
non-blocking mode.
"""
if not isinstance(file_descriptor, (int, long)):
file_descriptor = file_descriptor.fileno()
def decorator(callback):
self._readers[file_descriptor] = callback
return callback
return decorator
def block_reader(self, file_descriptor, max_block_size=8 * 1024):
"""
Decorator factory. As soon as some data is available for reading on
the file descriptor, the decorated callback is called with a block
of up to `max_block_size` bytes.
If data comes slowly, blocks will be smaller than max_block_size and
contain just what can be read without blocking. In that case, the value
of max_block_size does not matter.
"""
def decorator(callback):
@self.watch_for_reading(file_descriptor)
def reader(fd):
# According to `select.select()` there is some data,
# so os.read() won't block.
data = os.read(fd, max_block_size)
callback(data)
return callback
return decorator
def push_back_reader(self, file_descriptor, max_block_size=8 * 1024):
"""
        Just like block_reader, but allows you to push data "back into the file".
Callbacks get a `push_back` function as a second parameter. You can
push back the data you don't want to use yet.
Example use case: you get some data in a block, but you need more
before it is useful or meaningful. You can push it back instead of
keeping track of it yourself.
On the next call, the data you pushed back will be prepended to the
next block, in the order it was pushed.
"""
def decorator(callback):
pushed_back = []
@self.block_reader(file_descriptor, max_block_size)
def reader(data):
if pushed_back:
pushed_back.append(data)
data = ''.join(pushed_back)
pushed_back[:] = []
callback(data, pushed_back.append)
return callback
return decorator
def line_reader(self, file_descriptor, max_block_size=8 * 1024):
r"""
Decorator factory. The decorated callback is called once with
every line (terminated by '\n') as they become available.
Just like with `some_file.readline()`, the trailing newline character
is included.
The `max_block_size` paramater is just passed to `block_reader()`.
"""
        # line_reader could be implemented with push_back_reader, but not doing
        # so allows us to only search new data for the newline character.
def decorator(callback):
partial_line_fragments = []
@self.block_reader(file_descriptor, max_block_size)
def reader(data):
# Loop since there could be more than one line in one block.
while 1:
try:
end = data.index('\n')
except ValueError:
# no newline here
break
else:
end += 1 # include the newline char
partial_line_fragments.append(data[:end])
line = ''.join(partial_line_fragments)
partial_line_fragments[:] = []
callback(line)
data = data[end:]
if data:
partial_line_fragments.append(data)
return callback
return decorator
def run(self):
"""
Run the event loop. Wait for events, call callbacks when events happen,
and only return when the `stop()` is called.
"""
self._running = True
while self._running:
timeout = self._timers.sleep_time()
if timeout == Infinity:
timeout = None
if self._readers:
ready, _, _ = select.select(
self._readers.keys(), [], [], timeout)
else:
assert timeout is not None, 'Running without any event'
# Some systems do not like 3 empty lists for select()
time.sleep(timeout)
ready = []
self._timers.run()
for fd in ready:
self._readers[fd](fd)
def stop(self):
"""
Signal the event loop to stop before doing another iteration.
Since the point of the event loop is to avoid threads, this will
probably be called from an event callback.
"""
self._running = False
if __name__ == '__main__':
loop = EventLoop()
@loop.add_timer(5, repeat=True)
def timeout():
print 'No new line in 5 seconds. Stopping now.'
loop.stop()
@loop.line_reader(sys.stdin)
def new_line(line):
timeout.reset()
print 'Echo:', line.strip()
print 'Echoing lines.'
loop.run()
print 'Exit.'
| bsd-3-clause | -3,009,963,854,136,994,300 | 33.632948 | 80 | 0.562881 | false |
ElementsProject/elements | contrib/assets_tutorial/assets_tutorial.py | 1 | 17042 | #!/usr/bin/env python3
from test_framework.authproxy import AuthServiceProxy, JSONRPCException
import os
import random
import sys
import time
import subprocess
import shutil
from decimal import Decimal
ELEMENTSPATH=""
BITCOINPATH=""
if len(sys.argv) == 3:
    # sys.argv[0] is the script name; real arguments start at index 1.
    ELEMENTSPATH=sys.argv[1]
    BITCOINPATH=sys.argv[2]
else:
ELEMENTSPATH="./src"
BITCOINPATH="./../bitcoin/src"
def startbitcoind(datadir, conf, args=""):
subprocess.Popen((BITCOINPATH+"/bitcoind -datadir="+datadir+" "+args).split(), stdout=subprocess.PIPE)
return AuthServiceProxy("http://"+conf["rpcuser"]+":"+conf["rpcpassword"]+"@127.0.0.1:"+conf["regtest.rpcport"])
def startelementsd(datadir, conf, args=""):
subprocess.Popen((ELEMENTSPATH+"/elementsd -datadir="+datadir+" "+args).split(), stdout=subprocess.PIPE)
return AuthServiceProxy("http://"+conf["rpcuser"]+":"+conf["rpcpassword"]+"@127.0.0.1:"+conf["elementsregtest.rpcport"])
def loadConfig(filename):
conf = {}
with open(filename, encoding="utf8") as f:
for line in f:
if len(line) == 0 or line[0] == "#" or len(line.split("=")) != 2:
continue
conf[line.split("=")[0]] = line.split("=")[1].strip()
conf["filename"] = filename
return conf
def sync_all(e1, e2):
totalWait = 10
while e1.getblockcount() != e2.getblockcount() or len(e1.getrawmempool()) != len(e2.getrawmempool()):
totalWait -= 1
if totalWait == 0:
raise Exception("Nodes cannot sync blocks or mempool!")
time.sleep(1)
return
# Preparations
# Make data directories for each daemon
b_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
e1_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
e2_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
os.makedirs(b_datadir)
os.makedirs(e1_datadir)
os.makedirs(e2_datadir)
print("Bitcoin datadir: "+b_datadir)
print("Elements1 datadir: "+e1_datadir)
print("Elements2 datadir: "+e2_datadir)
# Also configure the nodes by copying the configuration files from
# this directory (and read them back for arguments):
shutil.copyfile("contrib/assets_tutorial/bitcoin.conf", b_datadir+"/bitcoin.conf")
shutil.copyfile("contrib/assets_tutorial/elements1.conf", e1_datadir+"/elements.conf")
shutil.copyfile("contrib/assets_tutorial/elements2.conf", e2_datadir+"/elements.conf")
bconf = loadConfig("contrib/assets_tutorial/bitcoin.conf")
e1conf = loadConfig("contrib/assets_tutorial/elements1.conf")
e2conf = loadConfig("contrib/assets_tutorial/elements2.conf")
# Startup
# Can not start since bitcoind isn't running and validatepegin is set
# elementsd attempts to connect to bitcoind to check if peg-in transactions
# are confirmed in the Bitcoin chain.
e1 = startelementsd(e1_datadir, e1conf)
time.sleep(2)
try:
e1.getinfo()
raise AssertionError("This should fail unless working bitcoind can be reached via JSON RPC")
except:
pass
# Start bitcoind, then elementsd. As long as bitcoind is in RPC warmup, elementsd will connect
bitcoin = startbitcoind(b_datadir, bconf)
e1 = startelementsd(e1_datadir, e1conf)
e2 = startelementsd(e2_datadir, e2conf)
time.sleep(3)
# Alternatively, you can set validatepegin=0 in their configs and not
# run the bitcoin node, but it is necessary for fully validating the two way peg.
# Regtest chain starts with 21M bitcoins as OP_TRUE which the wallet
# understands. This is useful for testing basic functionality and for
# blockchains that have no pegging functionality. A fee currency is required
# for anti-DoS purposes as well as asset issuance, which consumes inputs for entropy.
# In Elements there is no block subsidy. In a production sidechain it can
# be configured to start with no outputs, necessitating peg-in functionality
# for asset issuance.
e1.getwalletinfo()
# In regtest mining "target" is OP_TRUE since we have not set `-signblockscript` argument
# Generate simply works.
e1.generatetoaddress(101, e1.getnewaddress())
sync_all(e1, e2)
# WALLET
# First, send all anyone-can-spend coins to e1 then split so balances are even
e1.sendtoaddress(e1.getnewaddress(), 21000000, "", "", True)
e1.generatetoaddress(101, e1.getnewaddress())
sync_all(e1, e2)
e1.sendtoaddress(e2.getnewaddress(), 10500000, "", "", False)
e1.generatetoaddress(101, e1.getnewaddress())
sync_all(e1, e2)
# Funds should now be evenly split between the two wallets
e1.getwalletinfo()
e2.getwalletinfo()
# Have e2 send coins to itself using a blinded Elements address
# Blinded addresses start with `CTE`, unblinded `2`
addr = e2.getnewaddress()
# How do we know it's blinded? Check for blinding key, unblinded address.
e2.getaddressinfo(addr)
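# Among the returned fields: "unconfidential" (the unblinded form of the
# address) and "confidential_key" (the blinding pubkey); both are used
# further down in this tutorial.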
# Basic blinded send
txid = e2.sendtoaddress(addr, 1)
e2.generatetoaddress(1, e1.getnewaddress())
sync_all(e1, e2)
# Now let's examine the transaction, both in wallet and without
# In-wallet, take a look at blinding information
e2.gettransaction(txid)
# e1 doesn't have in wallet since it's unrelated
try:
e1.gettransaction(txid)
raise Exception("Transaction should not be in wallet")
except JSONRPCException:
pass
# Get public info, see blinded ranges, etc
e1.getrawtransaction(txid, 1)
# Now let's import the private key to attempt a spend
e1.importprivkey(e2.dumpprivkey(addr))
# We can't see output value info though
# and can not send.
e1.gettransaction(txid)
# And it won't show in balance or known outputs
e1.getwalletinfo()
# Amount for transaction is unknown, so it is not shown in listunspent.
e1.listunspent(1, 1)
# Solution: Import blinding key
e1.importblindingkey(addr, e2.dumpblindingkey(addr))
# Check again, funds should show
e1.getwalletinfo()
e1.listunspent(1, 1)
e1.gettransaction(txid)
# Let's build a blinded 2-of-2 multisig p2sh address
# 1) Get unblinded addresses from each participant
addr1 = e1.getaddressinfo(e1.getnewaddress())["unconfidential"]
addr2 = e2.getaddressinfo(e2.getnewaddress())["unconfidential"]
# 2) Get blinding keys, private and public
addrinfo1 = e1.getaddressinfo(e1.getnewaddress())
addrinfo2 = e2.getaddressinfo(addr2)
blindingkey = e1.dumpblindingkey(addrinfo1["address"])
blindingpubkey = addrinfo1["confidential_key"]
# 3) Make multisig address like usual
multisig = e1.createmultisig(2, [addrinfo1["pubkey"], addrinfo2["pubkey"]])
# 4) Blind the address using the blinding pubkey
blinded_addr = e1.createblindedaddress(multisig["address"], blindingpubkey)
e1.importaddress(multisig["redeemScript"], "", True, True) # Make sure p2sh addr is added
e2.importaddress(multisig["redeemScript"], "", True, True)
e1.importaddress(blinded_addr)
e2.importaddress(blinded_addr)
# 5) Now the address can be funded, though e2 will not be able to see values
txid = e1.sendtoaddress(blinded_addr, 1)
sync_all(e1, e2)
e2.gettransaction(txid, True)
# 6) Import the blinding privkey and decode the values
e2.importblindingkey(blinded_addr, blindingkey)
e2.gettransaction(txid, True)
# ASSETS
# Many of the RPC calls have added asset type or label
# arguments and reveal alternative asset information. With no argument all are listed:
e1.getwalletinfo()
# Notice we now see "bitcoin" as an asset. This is the asset label for the hex for "bitcoin" which can be discovered:
e1.dumpassetlabels()
# We can also issue our own assets, 1 asset and 1 reissuance token in this case
issue = e1.issueasset(1, 1)
asset = issue["asset"]
# From there you can look at the issuances you have in your wallet
e1.listissuances()
# If you gave `issueasset` a reissuance token argument greater than 0
# you can also reissue the base asset
e1.reissueasset(asset, 1)
# or make another different unblinded asset issuance, with only reissuance tokens initially
e1.issueasset(0, 1, False)
# Then two issuances for that particular asset will show
e1.listissuances(asset)
# To label any asset add a new argument like this to your elements.conf file
# then restart your daemon:
assetentry = "-assetdir="+asset+":namedasset"
# Wallet labels have no consensus meaning, only local node/wallet meaning
sync_all(e1, e2)
e1.stop()
time.sleep(5)
# Restart with a new asset label
e1 = startelementsd(e1_datadir, e1conf, assetentry)
time.sleep(5)
e1.getwalletinfo()
# To send issued assets, add an additional argument to sendtoaddress using the hex or label
e1.sendtoaddress(address=e2.getnewaddress(), amount=1, assetlabel="namedasset")
# Reissuance tokens can also be sent like any other asset
e1.sendtoaddress(address=e2.getnewaddress(), amount=1, assetlabel=issue["token"])
sync_all(e1, e2)
# e2 wallet doesn't know about label, just an unnamed asset
e2.getwalletinfo()["unconfirmed_balance"][asset]
e2.generatetoaddress(1, e2.getnewaddress())
sync_all(e1, e2)
# e2 may not know about the issuance from the transaction sending it the new asset
e2.listissuances()
# let's import an associated address (so the wallet captures the issuance transaction) and rescan
txid = issue["txid"]
addr = e1.gettransaction(txid)["details"][0]["address"]
e2.importaddress(addr)
# e2 now sees issuance, but doesn't know amounts as they are blinded
e2.listissuances()
# We need to import the issuance blinding key. We refer to issuances by their txid/vin pair
# as there is only one per input
vin = issue["vin"]
issuekey = e1.dumpissuanceblindingkey(txid, vin)
e2.importissuanceblindingkey(txid, vin, issuekey)
# Now e2 can see issuance amounts and blinds
e2.listissuances()
# Since it was also sent a reissuance token, it can reissue the base asset
e2.reissueasset(issue["asset"], 5)
# Reissuing reissuance tokens is currently not supported
try:
e2.reissueasset(issue["token"], 1)
except JSONRPCException:
pass
# For de-issuance, we can send assets or issuance tokens to an OP_RETURN output, provably burning them
e2.destroyamount(issue["asset"], 5)
# BLOCKSIGNING
# Recall blocksigning is OP_TRUE
e1.generatetoaddress(1, e1.getnewaddress())
sync_all(e1, e2)
# Let's set it to something more interesting... 2-of-2 multisig
# First lets get some keys from both clients to make our block "challenge"
addr1 = e1.getnewaddress()
addr2 = e2.getnewaddress()
valid1 = e1.getaddressinfo(addr1)
pubkey1 = valid1["pubkey"]
valid2 = e2.getaddressinfo(addr2)
pubkey2 = valid2["pubkey"]
key1 = e1.dumpprivkey(addr1)
key2 = e2.dumpprivkey(addr2)
e1.stop()
e2.stop()
time.sleep(5)
# Now filled with the pubkeys as 2-of-2 checkmultisig
signblockarg="-signblockscript=5221"+pubkey1+"21"+pubkey2+"52ae"
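# Opcode breakdown of the challenge script above (assuming 33-byte compressed
# pubkeys): 52 = OP_2, 21 = push 33 bytes, ae = OP_CHECKMULTISIG.
# Purely illustrative helper, unused by the tutorial flow:
def make_2of2_script(pk1, pk2):
    return "5221" + pk1 + "21" + pk2 + "52ae"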
# Anti-DoS argument, custom chain default is ~1 sig so let's make it at least 2 sigs
blocksign_max_size="-con_max_block_sig_size=150"
dyna_deploy_start="-con_dyna_deploy_start=0"
extra_args=signblockarg+" "+blocksign_max_size+" "+dyna_deploy_start
# Wipe out datadirs, start over
shutil.rmtree(e1_datadir)
shutil.rmtree(e2_datadir)
os.makedirs(e1_datadir)
os.makedirs(e2_datadir)
# Copy back config files
shutil.copyfile("contrib/assets_tutorial/elements1.conf", e1_datadir+"/elements.conf")
shutil.copyfile("contrib/assets_tutorial/elements2.conf", e2_datadir+"/elements.conf")
e1 = startelementsd(e1_datadir, e1conf, extra_args)
e2 = startelementsd(e2_datadir, e2conf, extra_args)
time.sleep(5)
sync_all(e1, e2)
# Now import signing keys
e1.importprivkey(key1)
e2.importprivkey(key2)
# Generate no longer works, even if keys are in wallet
try:
e1.generatetoaddress(1, e1.getnewaddress())
raise Exception("Generate shouldn't work")
except JSONRPCException:
pass
try:
    e2.generatetoaddress(1, e2.getnewaddress())
raise Exception("Generate shouldn't work")
except JSONRPCException:
pass
# Let's propose and accept some blocks, e1 is master!
blockhex = e1.getnewblockhex()
# Unsigned is no good
# 0 before, 0 after
e1.getblockcount() == 0
e1.submitblock(blockhex)
# Still 0
e1.getblockcount() == 0
# Signblock tests validity except block signatures
# This signing step can be outsourced to an HSM to enforce business logic of any sort
# See Strong Federations paper
sign1 = e1.signblock(blockhex)
sign2 = e2.signblock(blockhex)
# We can now gather signatures any way we want and combine them into a fully signed block
blockresult = e1.combineblocksigs(blockhex, [sign1[0], sign2[0]])
blockresult["complete"] == True
signedblock = blockresult["hex"]
# Now submit the block, doesn't matter who
e2.submitblock(signedblock)
sync_all(e1, e2)
# We now have moved forward one block!
e1.getblockcount() == 1
e2.getblockcount() == 1
e1.stop()
e2.stop()
time.sleep(5)
# Further Exercises:
# - Make a python script that does round-robin consensus
# Pegging
# Everything pegging related can be done inside the Elements daemon directly, except for
# pegging out. This is due to the multisig pool aka Watchmen that controls the bitcoin
# on the Bitcoin blockchain. That is the easiest part to get wrong, and by far the most
# important as there is no going back if you lose the funds.
# Wipe out datadirs, start over
shutil.rmtree(e1_datadir)
shutil.rmtree(e2_datadir)
os.makedirs(e1_datadir)
os.makedirs(e2_datadir)
# Copy back config files
shutil.copyfile("contrib/assets_tutorial/elements1.conf", e1_datadir+"/elements.conf")
shutil.copyfile("contrib/assets_tutorial/elements2.conf", e2_datadir+"/elements.conf")
fedpegarg="-fedpegscript=5221"+pubkey1+"21"+pubkey2+"52ae"
# Back to OP_TRUE blocks, re-using pubkeys for pegin pool instead
# Keys can be the same or different, doesn't matter
e1 = startelementsd(e1_datadir, e1conf, fedpegarg)
e2 = startelementsd(e2_datadir, e2conf, fedpegarg)
time.sleep(5)
# Mature some outputs on each side
e1.generatetoaddress(101, e1.getnewaddress())
bitcoin.generatetoaddress(101, bitcoin.getnewaddress())
sync_all(e1, e2)
# Now we can actually start pegging in. Examine the pegin address fields
e1.getpeginaddress()
# Changes each time as it's a new sidechain address as well as new "tweak" for the watchmen keys
# mainchain_address : where you send your bitcoin from Bitcoin network
# sidechain_address : where the bitcoin will end up on the sidechain after pegging in
# Each call of this takes the pubkeys defined in the config file, adds a random number to them
# that is essentially the hash of the sidechain_address and other information,
# then creates a new P2SH Bitcoin address from that. We reveal that "tweak" to the functionaries
# during `claimpegin`, then they are able to calculate the necessary private key and control
# funds.
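# getpeginaddress returns at least "mainchain_address" and "claim_script";
# both fields are consumed below when sending and claiming the peg-in.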
addrs = e1.getpeginaddress()
#Send funds to unique watchmen P2SH address
txid = bitcoin.sendtoaddress(addrs["mainchain_address"], 1)
# Confirmations in Bitcoin are what protect the
# sidechain from becoming fractional reserve during reorgs.
bitcoin.generatetoaddress(101, bitcoin.getnewaddress())
proof = bitcoin.gettxoutproof([txid])
raw = bitcoin.getrawtransaction(txid)
# Attempt claim!
claimtxid = e1.claimpegin(raw, proof, addrs["claim_script"])
sync_all(e1, e2)
# Other node should accept to mempool and mine
e2.generatetoaddress(1, e1.getnewaddress())
sync_all(e1, e2)
# Should see confirmations
"confirmations" in e1.getrawtransaction(claimtxid, 1)
# Pegging Out
# This command would trigger watchmen to send payment to Bitcoin address on mainchain
# The Bitcoin-side functionality is not supported directly in Elements.
# The watchmen will notice this transaction and send the funds from their collective
# wallet.
e1.sendtomainchain(bitcoin.getnewaddress(), 10)
#Exercise(s)
#1. Implement really dumb/unsafe watchmen to allow pegouts for learning purposes
# Recover tweak from pegin, add to privkey, combined tweaked pubkeys into a redeemscript, add to Core wallet
# RAW API
# Let's create a basic transaction using the raw api, blind it, sign, and send
# Create a transaction with a single destination output to other wallet
rawtx = e1.createrawtransaction([], {e2.getnewaddress():100})
# Biggest difference compared to Bitcoin is that we have explicit fee outputs
rawtx2 = e1.createrawtransaction([], {e2.getnewaddress():100, e1.getnewaddress():5, "fee":Decimal("0.1")})
# Fee outputs are unblinded and carry an empty scriptPubKey ("");
# empty scriptPubKeys are unspendable
# Next we can fund the transaction (and replaces fee with something more appropriate)
fundedtx = e1.fundrawtransaction(rawtx2)
# Blind
blindedtx = e1.blindrawtransaction(fundedtx["hex"])
# *Warning*: Raw blinding logic can be quite complicated, requiring the use of `ignoreblindfails`
# to avoid having calls fail without manually inspecting transactions in great detail.
# In general any transaction with 2 or more outputs to blind should succeed, so adding additional
# is one strategy to resolve this.
# Sign
signedtx = e1.signrawtransactionwithwallet(blindedtx)
# And send
txid = e1.sendrawtransaction(signedtx["hex"])
sync_all(e1, e2)
e2.gettransaction(txid)
# ADVANCED OPTIONS
# rawblindrawtransaction : blind a raw transaction with no access to a wallet
# -policyasset=<hex> : set network fee asset type to something other than BTC
bitcoin.stop()
e1.stop()
e2.stop()
time.sleep(2)
shutil.rmtree(e1_datadir)
shutil.rmtree(e2_datadir)
| mit | -7,510,705,342,911,157,000 | 32.547244 | 124 | 0.756132 | false |
ericblau/ipf-xsede | ipf/glue2/accelerator_environment.py | 1 | 26893 |
###############################################################################
# Copyright 2011-2014 The University of Texas at Austin #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
###############################################################################
import subprocess
import datetime
import json
import os
import re
import socket
import time
import copy
from xml.dom.minidom import getDOMImplementation
from ipf.data import Data, Representation
from ipf.dt import *
from ipf.error import StepError
from ipf.sysinfo import ResourceName
from ipf.sysinfo import Platform
from .resource import *
from .step import GlueStep
#######################################################################################################################
class AcceleratorEnvironmentsStep(GlueStep):
def __init__(self):
GlueStep.__init__(self)
        self.description = "Produces a document containing one or more GLUE 2 AcceleratorEnvironment. For a batch scheduled system, an ExecutionEnvironment is typically a compute node."
self.time_out = 30
self.requires = [ResourceName, Platform]
self.produces = [AcceleratorEnvironments]
self._acceptParameter("queues",
"An expression describing the queues to include (optional). The syntax is a series of +<queue> and -<queue> where <queue> is either a queue name or a '*'. '+' means include '-' means exclude. The expression is processed in order and the value for a queue at the end determines if it is shown.",
False)
self.resource_name = None
def run(self):
self.resource_name = self._getInput(ResourceName).resource_name
host_groups = self._run()
if host_groups:
for host_group in host_groups:
host_group.id = "%s.%s" % (host_group.Name, self.resource_name)
host_group.ID = "urn:glue2:AcceleratorEnvironment:%s.%s" % (
host_group.Name, self.resource_name)
host_group.ManagerID = "urn:glue2:ComputingManager:%s" % (
self.resource_name)
self.debug("host_group.id "+host_group.id)
self.debug("host_group.uas " +
str(host_group.UsedAcceleratorSlots))
self._output(AcceleratorEnvironments(self.resource_name, host_groups))
def _shouldUseName(self, hosts):
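        # Group hosts by name only when names are informative: either every
        # host shares one name, or names repeat across hosts (fewer unique
        # names than hosts).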
names = set()
for host in hosts:
names.add(host.Name)
if len(names) == 1 or len(names) < len(hosts):
return True
else:
return False
def _groupHosts(self, hosts):
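        # Merge hosts with identical hardware/OS into host groups: instance
        # counts add up, while load averages are combined below as
        # instance-weighted means.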
use_name = self._shouldUseName(hosts)
host_groups = []
for host in hosts:
for host_group in host_groups:
if host.sameHostGroup(host_group, use_name):
if "UsedAverageLoad" in host.Extension:
host_load = host.Extension["UsedAverageLoad"]
if "UsedAverageLoad" not in host_group.Extension:
host_group.Extension["UsedAverageLoad"] = host_load
else:
host_group_load = host_group.Extension["UsedAverageLoad"]
host_group_load = (host_group_load * host_group.UsedInstances +
host_load * host.UsedInstances) / \
(host_group.UsedInstances + host.UsedInstances)
host_group.Extension["UsedAverageLoad"] = host_group_load
if "AvailableAverageLoad" in host.Extension:
host_load = host.Extension["AvailableAverageLoad"]
if "AvailableAverageLoad" not in host_group.Extension:
host_group.Extension["AvailableAverageLoad"] = host_load
else:
host_group_load = host_group.Extension["AvailableAverageLoad"]
host_group_avail = host_group.TotalInstances - host_group.UsedInstances - \
host_group.UnavailableInstances
host_avail = host.TotalInstances - host.UsedInstances - host.UnavailableInstances
host_group_load = (host_group_load * host_group_avail + host_load * host_avail) / \
(host_group_avail + host_group_avail)
host_group.Extension["AvailableAverageLoad"] = host_group_load
if "PartiallyUsedInstances" in host.Extension:
if "PartiallyUsedInstances" not in host_group.Extension:
host_group.Extension["PartiallyUsedInstances"] = host.Extension["PartiallyUsedInstances"]
else:
host_group.Extension["PartiallyUsedInstances"] = \
host_group.Extension["PartiallyUsedInstances"] + \
host.Extension["PartiallyUsedInstances"]
host_group.TotalInstances += host.TotalInstances
host_group.UsedInstances += host.UsedInstances
host_group.UnavailableInstances += host.UnavailableInstances
# if host_group.UsedAcceleratorSlots is None:
# host_group.UsedAcceleratorSlots = 0
# if host.UsedAcceleratorSlots is None:
# host.UsedAcceleratorSlots = 0
host_group.UsedAcceleratorSlots += host.UsedAcceleratorSlots
if host_group.TotalAcceleratorSlots is None:
host_group.TotalAcceleratorSlots = 0
host_group.TotalAcceleratorSlots += host.PhysicalAccelerators
host = None
break
if host is not None:
host_groups.append(host)
if not use_name:
host.Name = "NodeType%d" % len(host_groups)
return host_groups
def _run(self):
raise StepError("AcceleratorEnvironmentsStep._run not overriden")
def _goodHost(self, host):
# check that it has gpu information
if host.PhysicalAccelerators == None:
return False
# if the host is associated with a queue, check that it is a good one
if len(host.ShareID) == 0:
return True
for share in host.ShareID:
            m = re.search("urn:glue2:ComputingShare:(\S+)\.%s" %
self.resource_name, share)
if self._includeQueue(m.group(1)):
return True
# if the host is associated with a partition, check that it is a good one
if len(host.Partitions) == 0:
return True
partition_list = host.Partitions.split(',')
for share in partition_list:
if self._includeQueue(share):
return True
return False
#######################################################################################################################
class AcceleratorEnvironment(Resource):
def __init__(self):
Resource.__init__(self)
self.Platform = "unknown" # string (Platform_t)
self.VirtualMachine = None # boolean (ExtendedBoolean)
self.TotalInstances = None # integer
self.UsedInstances = None # integer
self.UnavailableInstances = None # integer
self.PhysicalCPUs = None # integer
self.LogicalCPUs = None # integer
self.CPUMultiplicity = None # integer (CPUMultiplicity)
self.CPUVendor = None # string
self.CPUModel = None # string
self.CPUVersion = None # string
self.CPUClockSpeed = None # integer (MHz)
self.CPUTimeScalingFactor = None # float
self.WallTimeScalingFactor = None # float
self.MainMemorySize = 0 # integer (MB)
self.VirtualMemorySize = None # integer (MB)
self.OSFamily = "unknown" # string (OSFamily)
self.OSName = None # string (OSName)
self.OSVersion = None # string
self.ConnectivityIn = None # boolean (ExtendedBoolean)
self.ConnectivityOut = None # boolean (ExtendedBoolean)
self.NetworkInfo = None # string (NetworkInfo)
# use Manager, Share, Activity from Resource, not ComputingManager, ComputingShare, ComputingActivity
self.ApplicationEnvironmentID = [] # list of string (ID)
self.BenchmarkID = [] # list of string (ID)
# For AcceleratorEnvironment, but kludging here for node purposes
self.Type = "unknown" # string (AccType_t)
self.PhysicalAccelerators = None # integer
self.UsedAcceleratorSlots = None # integer
self.TotalAcceleratorSlots = None # integer
self.LogicalAccelerators = None # integer
self.Vendor = None # string
self.Model = None # string
self.Version = None # string
self.ClockSpeed = None # integer (MHz)
self.Memory = 0 # integer (MB)
self.ComputeCapability = None # string (describes CUDA features)
# set defaults to be the same as the host where this runs
(sysName, nodeName, release, version, machine) = os.uname()
self.Platform = machine
self.OSFamily = sysName.lower()
self.OSName = sysName.lower()
self.OSVersion = release
def __str__(self):
return json.dumps(AcceleratorEnvironmentOgfJson(self).toJson(), sort_keys=True, indent=4)
def sameHostGroup(self, accel_env, useName):
if useName and self.Name != accel_env.Name:
return False
if self.Platform != accel_env.Platform:
return False
if self.PhysicalCPUs != accel_env.PhysicalCPUs:
return False
if self.LogicalCPUs != accel_env.LogicalCPUs:
return False
if self.CPUVendor != accel_env.CPUVendor:
return False
if self.CPUModel != accel_env.CPUModel:
return False
if self.CPUVersion != accel_env.CPUVersion:
return False
if self.CPUClockSpeed != accel_env.CPUClockSpeed:
return False
if self.MainMemorySize != accel_env.MainMemorySize:
return False
# if self.VirtualMemorySize != accel_env.VirtualMemorySize:
# return False
if self.OSFamily != accel_env.OSFamily:
return False
if self.OSName != accel_env.OSName:
return False
if self.OSVersion != accel_env.OSVersion:
return False
if len(self.ShareID) != len(accel_env.ShareID):
return False
for share in self.ShareID:
if not share in accel_env.ShareID:
return False
return True
#######################################################################################################################
class AcceleratorEnvironmentTeraGridXml(ResourceTeraGridXml):
data_cls = AcceleratorEnvironment
def __init__(self, data):
ResourceTeraGridXml.__init__(self, data)
def get(self):
return self.toDom().toxml()
def toDom(self):
doc = getDOMImplementation().createDocument("http://info.teragrid.org/glue/2009/02/spec_2.0_r02",
"Entities", None)
root = doc.createElement("AcceleratorEnvironment")
doc.documentElement.appendChild(root)
self.addToDomElement(doc, root)
return doc
def addToDomElement(self, doc, element):
ResourceTeraGridXml.addToDomElement(self, doc, element)
if self.data.Platform is not None:
e = doc.createElement("Platform")
e.appendChild(doc.createTextNode(self.data.Platform))
element.appendChild(e)
if self.data.VirtualMachine is not None:
e = doc.createElement("VirtualMachine")
if self.data.VirtualMachine:
e.appendChild(doc.createTextNode("true"))
else:
e.appendChild(doc.createTextNode("false"))
element.appendChild(e)
if self.data.TotalInstances is not None:
e = doc.createElement("TotalInstances")
e.appendChild(doc.createTextNode(str(self.data.TotalInstances)))
element.appendChild(e)
if self.data.UsedInstances is not None:
e = doc.createElement("UsedInstances")
e.appendChild(doc.createTextNode(str(self.data.UsedInstances)))
element.appendChild(e)
if self.data.UnavailableInstances is not None:
e = doc.createElement("UnavailableInstances")
e.appendChild(doc.createTextNode(
str(self.data.UnavailableInstances)))
element.appendChild(e)
if self.data.PhysicalCPUs is not None:
e = doc.createElement("PhysicalCPUs")
e.appendChild(doc.createTextNode(str(self.data.PhysicalCPUs)))
element.appendChild(e)
if self.data.PhysicalAccelerators is not None:
e = doc.createElement("PhysicalAccelerators")
e.appendChild(doc.createTextNode(
str(self.data.PhysicalAccelerators)))
element.appendChild(e)
if self.data.UsedAcceleratorSlots is not None:
e = doc.createElement("UsedAcceleratorSlots")
e.appendChild(doc.createTextNode(
str(self.data.UsedAcceleratorSlots)))
element.appendChild(e)
if self.data.LogicalCPUs is not None:
e = doc.createElement("LogicalCPUs")
e.appendChild(doc.createTextNode(str(self.data.LogicalCPUs)))
element.appendChild(e)
if self.data.CPUMultiplicity is not None:
e = doc.createElement("CPUMultiplicity")
e.appendChild(doc.createTextNode(self.data.CPUMultiplicity))
element.appendChild(e)
if self.data.CPUVendor is not None:
e = doc.createElement("CPUVendor")
e.appendChild(doc.createTextNode(self.data.CPUVendor))
element.appendChild(e)
if self.data.CPUModel is not None:
e = doc.createElement("CPUModel")
e.appendChild(doc.createTextNode(self.data.CPUModel))
element.appendChild(e)
if self.data.CPUVersion is not None:
e = doc.createElement("CPUVersion")
e.appendChild(doc.createTextNode(self.data.CPUVersion))
element.appendChild(e)
if self.data.CPUClockSpeed is not None:
e = doc.createElement("CPUClockSpeed")
e.appendChild(doc.createTextNode(str(self.data.CPUClockSpeed)))
element.appendChild(e)
if self.data.CPUTimeScalingFactor is not None:
e = doc.createElement("CPUTimeScalingFactor")
e.appendChild(doc.createTextNode(
str(self.data.CPUTimeScalingFactor)))
element.appendChild(e)
if self.data.WallTimeScalingFactor is not None:
e = doc.createElement("WallTimeScalingFactor")
e.appendChild(doc.createTextNode(
str(self.data.WallTimeScalingFactor)))
element.appendChild(e)
if self.data.MainMemorySize is not None:
e = doc.createElement("MainMemorySize")
e.appendChild(doc.createTextNode(str(self.data.MainMemorySize)))
element.appendChild(e)
if self.data.VirtualMemorySize is not None:
e = doc.createElement("VirtualMemorySize")
e.appendChild(doc.createTextNode(str(self.data.VirtualMemorySize)))
element.appendChild(e)
if self.data.OSFamily is not None:
e = doc.createElement("OSFamily")
e.appendChild(doc.createTextNode(self.data.OSFamily))
element.appendChild(e)
if self.data.OSName is not None:
e = doc.createElement("OSName")
e.appendChild(doc.createTextNode(self.data.OSName))
element.appendChild(e)
if self.data.OSVersion is not None:
e = doc.createElement("OSVersion")
e.appendChild(doc.createTextNode(self.data.OSVersion))
element.appendChild(e)
        if self.data.ConnectivityIn is None:
e = doc.createElement("ConnectivityIn")
e.appendChild(doc.createTextNode("undefined"))
element.appendChild(e)
elif self.data.ConnectivityIn:
e = doc.createElement("ConnectivityIn")
e.appendChild(doc.createTextNode("true"))
element.appendChild(e)
else:
e = doc.createElement("ConnectivityIn")
e.appendChild(doc.createTextNode("false"))
element.appendChild(e)
        if self.data.ConnectivityOut is None:
e = doc.createElement("ConnectivityOut")
e.appendChild(doc.createTextNode("undefined"))
element.appendChild(e)
elif self.data.ConnectivityOut:
e = doc.createElement("ConnectivityOut")
e.appendChild(doc.createTextNode("true"))
element.appendChild(e)
else:
e = doc.createElement("ConnectivityOut")
e.appendChild(doc.createTextNode("false"))
element.appendChild(e)
if self.data.NetworkInfo is not None:
e = doc.createElement("NetworkInfo")
e.appendChild(doc.createTextNode(self.data.NetworkInfo))
element.appendChild(e)
if self.data.ManagerID is not None:
e = doc.createElement("ComputingManager")
e.appendChild(doc.createTextNode(self.data.ManagerID))
element.appendChild(e)
for share in self.data.ShareID:
e = doc.createElement("ComputingShare")
e.appendChild(doc.createTextNode(share))
element.appendChild(e)
for activity in self.data.ActivityID:
e = doc.createElement("ComputingActivity")
e.appendChild(doc.createTextNode(activity))
element.appendChild(e)
for appEnv in self.data.ApplicationEnvironmentID:
e = doc.createElement("ApplicationEnvironment")
e.appendChild(doc.createTextNode(appEnv))
element.appendChild(e)
for benchmark in self.data.BenchmarkID:
e = doc.createElement("Benchmark")
e.appendChild(doc.createTextNode(benchmark))
element.appendChild(e)
#######################################################################################################################
class AcceleratorEnvironmentOgfJson(ResourceOgfJson):
data_cls = AcceleratorEnvironment
def __init__(self, data):
ResourceOgfJson.__init__(self, data)
def get(self):
return json.dumps(self.toJson(), sort_keys=True, indent=4)
def toJson(self):
doc = ResourceOgfJson.toJson(self)
doc["Platform"] = self.data.Platform
if self.data.VirtualMachine is not None:
doc["VirtualMachine"] = self.data.VirtualMachine
if self.data.TotalInstances is not None:
doc["TotalInstances"] = self.data.TotalInstances
if self.data.UsedInstances is not None:
doc["UsedInstances"] = self.data.UsedInstances
if self.data.UnavailableInstances is not None:
doc["UnavailableInstances"] = self.data.UnavailableInstances
if self.data.PhysicalCPUs is not None:
doc["PhysicalCPUs"] = self.data.PhysicalCPUs
if self.data.PhysicalAccelerators is not None:
doc["PhysicalAccelerators"] = self.data.PhysicalAccelerators
if self.data.UsedAcceleratorSlots is not None:
doc["UsedAcceleratorSlots"] = self.data.UsedAcceleratorSlots
if self.data.LogicalCPUs is not None:
doc["LogicalCPUs"] = self.data.LogicalCPUs
if self.data.CPUMultiplicity is not None:
doc["CPUMultiplicity"] = self.data.CPUMultiplicity
if self.data.CPUVendor is not None:
doc["CPUVendor"] = self.data.CPUVendor
if self.data.CPUModel is not None:
doc["CPUModel"] = self.data.CPUModel
if self.data.CPUVersion is not None:
doc["CPUVersion"] = self.data.CPUersion
if self.data.CPUClockSpeed is not None:
doc["CPUClockSpeed"] = self.data.CPUClockSpeed
if self.data.CPUTimeScalingFactor is not None:
doc["CPUTimeScalingFactor"] = self.data.CPUTimeScalingFactor
if self.data.WallTimeScalingFactor is not None:
doc["WallTimeScalingFactor"] = self.data.WallTimeScalingFactor
doc["MainMemorySize"] = self.data.MainMemorySize
if self.data.VirtualMemorySize is not None:
doc["VirtualMemorySize"] = self.data.VirtualMemorySize
doc["OSFamily"] = self.data.OSFamily
if self.data.OSName is not None:
doc["OSName"] = self.data.OSName
if self.data.OSVersion is not None:
doc["OSVersion"] = self.data.OSVersion
doc["ConnectivityIn"] = self.data.ConnectivityIn
doc["ConnectivityOut"] = self.data.ConnectivityOut
if self.data.NetworkInfo is not None:
doc["NetworkInfo"] = self.data.NetworkInfo
if len(self.data.ApplicationEnvironmentID) > 0:
doc["ApplicationEnvironmentID"] = self.data.ApplicationEnvironmentID
if len(self.data.BenchmarkID) > 0:
doc["BenchmarkID"] = self.BenchmarkID
return doc
#######################################################################################################################
# class AcceleratorEnvironmentOgfJson(ResourceOgfJson):
# data_cls = AcceleratorEnvironment
#
# def __init__(self, data):
# ResourceOgfJson.__init__(self,data)
#
# def get(self):
# return json.dumps(self.toJson(),sort_keys=True,indent=4)
#
# def toJson(self):
# doc = ResourceOgfJson.toJson(self)
#
# doc["Platform"] = self.data.Platform
# if self.data.PhysicalAccelerators is not None:
# doc["PhysicalAccelerators"] = self.data.PhysicalAccelerators
# if self.data.LogicalAccelerators is not None:
# doc["LogicalAccelerators"] = self.data.LogicalAccelerators
# if self.data.Vendor is not None:
# doc["Vendor"] = self.data.Vendor
# if self.data.Model is not None:
# doc["Model"] = self.data.Model
# if self.data.Version is not None:
# doc["Version"] = self.data.Version
# if self.data.ClockSpeed is not None:
# doc["ClockSpeed"] = self.data.ClockSpeed
# if self.data.Memory is not None:
# doc["Memory"] = self.data.Memory
# if self.data.ComputeCapability is not None:
# doc["ComputeCapability"] = self.data.ComputeCapability
#
# return doc
#######################################################################################################################
class AcceleratorEnvironments(Data):
    def __init__(self, id, accel_envs=None):
        Data.__init__(self, id)
        # avoid the shared mutable default-argument pitfall
        self.accel_envs = accel_envs if accel_envs is not None else []
#######################################################################################################################
class AcceleratorEnvironmentsOgfJson(Representation):
data_cls = AcceleratorEnvironments
def __init__(self, data):
Representation.__init__(
self, Representation.MIME_APPLICATION_JSON, data)
def get(self):
return json.dumps(self.toJson(), sort_keys=True, indent=4)
def toJson(self):
eedoc = []
for accel_env in self.data.accel_envs:
eedoc.append(AcceleratorEnvironmentOgfJson(accel_env).toJson())
return eedoc
#######################################################################################################################
class AcceleratorEnvironmentsTeraGridXml(Representation):
data_cls = AcceleratorEnvironments
def __init__(self, data):
Representation.__init__(self, Representation.MIME_TEXT_XML, data)
def get(self):
return self.toDom().toprettyxml()
def toDom(self):
doc = getDOMImplementation().createDocument("http://info.teragrid.org/glue/2009/02/spec_2.0_r02",
"Entities", None)
for accel_env in self.data.accel_envs:
            eedoc = AcceleratorEnvironmentTeraGridXml(accel_env).toDom()
doc.documentElement.appendChild(eedoc.documentElement.firstChild)
return doc
#######################################################################################################################
| apache-2.0 | 4,658,216,852,875,104,000 | 44.581356 | 324 | 0.562451 | false |
danirus/django-comments-xtd | django_comments_xtd/management/commands/populate_xtdcomments.py | 1 | 1794 | import sys
from django.db import connections
from django.db.utils import ConnectionDoesNotExist, IntegrityError
from django.core.management.base import BaseCommand
from django_comments.models import Comment
from django_comments_xtd.models import XtdComment
__all__ = ['Command']
class Command(BaseCommand):
help = "Load the xtdcomment table with valid data from django_comments."
def add_arguments(self, parser):
parser.add_argument('using', nargs='*', type=str)
def populate_db(self, cursor):
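        # Each existing django_comments row is seeded as its own thread
        # root: thread_id and parent_id point back at the comment itself,
        # at nesting level 0, order 1, with follow-up notifications off.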
for comment in Comment.objects.all():
sql = ("INSERT INTO %(table)s "
" ('comment_ptr_id', 'thread_id', 'parent_id',"
" 'level', 'order', 'followup') "
"VALUES (%(id)d, %(id)d, %(id)d, 0, 1, FALSE)")
cursor.execute(sql % {'table': XtdComment._meta.db_table,
'id': comment.id})
def handle(self, *args, **options):
total = 0
using = options['using'] or ['default']
for db_conn in using:
try:
self.populate_db(connections[db_conn].cursor())
total += XtdComment.objects.using(db_conn).count()
except ConnectionDoesNotExist:
print("DB connection '%s' does not exist." % db_conn)
continue
except IntegrityError:
if db_conn != 'default':
print("Table '%s' (in '%s' DB connection) must be empty."
% (XtdComment._meta.db_table, db_conn))
else:
print("Table '%s' must be empty."
% XtdComment._meta.db_table)
sys.exit(1)
print("Added %d XtdComment object(s)." % total)
| bsd-2-clause | 8,801,720,226,856,467,000 | 36.375 | 77 | 0.541806 | false |
bradleygolden/userapi | tests/test_api.py | 1 | 5577 | from app import verify_password, validate_token, User
import json
from base64 import b64encode
def auth_headers(username, password):
username_password = "%s:%s" % (username, password)
headers = {
'Authorization': 'Basic %s' % b64encode(username_password.encode()).decode("ascii")
}
return headers
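# e.g. auth_headers('foo', 'password') yields
#   {'Authorization': 'Basic Zm9vOnBhc3N3b3Jk'}
# since base64('foo:password') == 'Zm9vOnBhc3N3b3Jk'.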
def test_verify_password_callback(test_client, app, user):
username = user.username
password = 'password'
token = user.generate_auth_token()
# test username, password, token as auth headers
with app.test_request_context():
assert verify_password(username, password) is True
assert verify_password(token) is True
assert verify_password('blah', 'blah') is False
assert verify_password('12345') is False
assert verify_password() is False
# test token as parameter
uri = "/api/v1/users?token={}".format(token.decode('utf-8'))
with app.test_request_context(uri):
assert verify_password() is True
def test_get_auth_token(test_client, user):
uri = '/api/v1/token'
headers = auth_headers(user.username, 'password')
resp = test_client.get(uri, headers=headers, follow_redirects=True)
data = json.loads(resp.data.decode('utf-8'))
assert 'token' in data
def test_validate_token(user, app):
token = user.generate_auth_token()
with app.test_request_context():
resp = validate_token(token)
data = json.loads(resp.data.decode('utf-8'))
assert 'is_valid' in data
assert data.get('is_valid') is True
class TestUserAPI:
def create_user(self, test_client, user, new_user,
auth_password='password', password='password'):
uri = '/api/v1/users/'
headers = auth_headers(user.username, auth_password)
resp = test_client.post(uri,
query_string=dict(username=new_user.username,
password=password,
email=new_user.email),
headers=headers,
follow_redirects=True)
return resp
def get_user(self, test_client, username, auth_username='foo',
auth_password='password'):
uri = '/api/v1/users/%s' % username
headers = auth_headers(auth_username, auth_password)
resp = test_client.get(uri, headers=headers, follow_redirects=True)
return resp
def test_getting_a_user(self, test_client, user):
resp = self.get_user(test_client, user.username)
assert resp.status_code == 200
data = json.loads(resp.data.decode('utf-8'))
assert data['email'] == user.email
assert data['username'] == user.username
assert data['id'] == user.id
def test_getting_users(self, test_client, users):
uri = '/api/v1/users/'
headers = auth_headers(users[0].username, 'password')
resp = test_client.get(uri, headers=headers, follow_redirects=True)
assert resp.status_code == 200
data = json.loads(resp.data.decode('utf-8'))
assert len(data) == len(users)
for i, user in enumerate(data):
assert data[i]['email'] == users[i].email
assert data[i]['username'] == users[i].username
assert data[i]['id'] == users[i].id
def test_creating_a_user(self, test_client, user):
new_user = User(username='new',
email='[email protected]')
new_password = 'password'
uri = '/api/v1/users/'
headers = auth_headers(user.username, new_password)
resp = test_client.post(uri,
query_string=dict(username=new_user.username,
password=new_password,
email=new_user.email),
headers=headers,
follow_redirects=True)
assert resp.status_code == 201
data = json.loads(resp.data.decode('utf-8'))
assert data['email'] == new_user.email
assert data['username'] == new_user.username
def test_updating_a_user(self, test_client, user):
username = 'new' # created from previous test
uri = '/api/v1/users/%s' % username
headers = auth_headers(user.username, 'password')
new_username = 'updated'
new_email = '[email protected]'
new_password = 'new_password'
resp = test_client.put(uri,
query_string=dict(new_username=new_username,
new_email=new_email,
new_password=new_password),
headers=headers, follow_redirects=True)
assert resp.status_code == 200
resp = self.get_user(test_client, new_username)
data = json.loads(resp.data.decode('utf-8'))
assert data['email'] == new_email
assert data['username'] == new_username
def test_deleting_a_user(self, test_client, user):
username = 'updated' # from previous test
uri = '/api/v1/users/%s' % username
headers = auth_headers(user.username, 'password')
# delete the user
resp = test_client.delete(uri, headers=headers)
assert resp.status_code == 200
# test that the user is actually deleted
resp = self.get_user(test_client, username)
assert resp.status_code == 404
| mit | 2,100,060,883,032,430,600 | 36.18 | 91 | 0.57002 | false |
OpenDrift/opendrift | examples/example_current_from_drifter.py | 1 | 3114 | #!/usr/bin/env python
"""
Current from drifter
====================
"""
from datetime import datetime, timedelta
from opendrift.readers import reader_current_from_drifter
from opendrift.models.oceandrift import OceanDrift
o = OceanDrift(loglevel=20)
o.set_config('environment:fallback:land_binary_mask', 0)
#%%
# We make a reader which reconstructs the ocean current from
# observed time series of a drifter
# This is actual data of SLDMB/Code drifter as used in this study:
# Jones, C.E., Dagestad, K.-F., Breivik, O., Holt, B., Rohrs, J., Christensen, K.H., Espeseth, M.M., Brekke, C., Skrunes, S. (2016): Measurement and modeling of oil slick transport. Journal of Geophysical Research - Oceans, Volume 121, Issue 10, October 2016, Pages 7759-7775. DOI: 10.1002/2016JC012113.
drifterlons = [2.407376, 2.405140, 2.403248, 2.401872, 2.400152, 2.398518, 2.397056, 2.395766, 2.394476, 2.393358, 2.392584, 2.391810, 2.390606, 2.389316, 2.388628, 2.388370, 2.387940, 2.387510, 2.387338, 2.387166, 2.387252, 2.387338, 2.387682, 2.387854, 2.388284, 2.388628, 2.389230, 2.390004, 2.390434, 2.390692, 2.391380, 2.391896, 2.392068, 2.392154, 2.392068, 2.391896, 2.391896, 2.391896, 2.391638, 2.391380, 2.391208, 2.391036, 2.390692, 2.390090, 2.389660, 2.389058, 2.388628]
drifterlats = [60.034740, 60.033880, 60.033106, 60.032246, 60.031300, 60.030182, 60.028892, 60.027602, 60.026656, 60.025538, 60.024420, 60.023388, 60.022442, 60.021496, 60.020378, 60.019346, 60.018572, 60.017626, 60.016852, 60.016164, 60.015734, 60.015304, 60.014616, 60.014100, 60.013670, 60.013412, 60.013240, 60.013068, 60.013154, 60.013412, 60.013584, 60.013842, 60.014186, 60.014616, 60.015218, 60.015820, 60.016594, 60.017454, 60.018400, 60.019346, 60.020464, 60.021410, 60.022442, 60.023474, 60.024678, 60.025882, 60.026914]
drifterlats = drifterlats[::-1]
drifterlons = drifterlons[::-1]
driftertimes = [datetime(2015, 6, 10, 5, 50) +
timedelta(minutes=10)*i for i in range(len(drifterlons))]
r = reader_current_from_drifter.Reader(
lons=drifterlons, lats=drifterlats, times=driftertimes)
o.add_reader(r)
#%%
# We seed elements within polygon, as could have been extracted
# from remote sensing imagery
lons = [2.39, 2.391, 2.392, 2.393, 2.394, 2.393, 2.392, 2.391, 2.39]
lats = [60.02, 60.02, 60.019, 60.02, 60.021, 60.022, 60.021, 60.021, 60.02]
o.seed_within_polygon(lons=lons, lats=lats,
number=2000, time=r.start_time)
#%%
# Finally running simulation
o.run(end_time=r.end_time, time_step=r.time_step)
o.animation(buffer=.01, fast=True, drifter={'time': driftertimes, 'lon': drifterlons, 'lat': drifterlats,
'label': 'CODE Drifter', 'color': 'b', 'linewidth': 2, 'markersize': 40})
#%%
# .. image:: /gallery/animations/example_current_from_drifter_0.gif
#%%
# Drifter track is shown in red, and simulated trajectories are shown in gray. Oil spill is displaced relative to drifter, but drifter current is assumed to be spatially homogeneous.
o.plot(buffer=.01, fast=True, trajectory_dict={
'lon': drifterlons, 'lat': drifterlats,
'time': driftertimes, 'linestyle': 'r-'})
| gpl-2.0 | -4,592,492,746,717,793,300 | 55.618182 | 531 | 0.703597 | false |
DrewsephA/Celeb_Username_Bot | config.py | 1 | 48634 | ''' ---------------------------------------------------------------------------------------------------------------- '''
''' These below are the membership groups and their membership customized replies. Edit this area to expand the bot. '''
''' ---------------------------------------------------------------------------------------------------------------- '''
SelenaGomez = ("selena", "gomez") #Will trigger if these two words BOTH are in the title.
SelenaGomezReply = """
[Instagram](https://instagram.com/SelenaGomez/)\n
[Twitter](https://twitter.com/SelenaGomez)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AlexisRen = ("alexis", "ren") #Every ("firstname", "nickname", "lastname") in the ()'s below must be lower case.
AlexisRenReply = """
[Instagram](https://instagram.com/alexisren)\n
[Twitter](https://twitter.com/alexisreng)\n
[Tumblr](http://alexisreneg.tumblr.com)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
TaylorSwift = ("taylor", "swift")
TaylorSwiftReply = """
[Instagram](https://instagram.com/taylorswift)\n
[Twitter](https://twitter.com/Taylorswift13)\n
[Facebook](https://www.facebook.com/TaylorSwift)\n
[Website](http://www.taylorswift.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
McKaylaMaroney = ("mckayla", "maroney")
McKaylaMaroneyReply = """
[Instagram](https://instagram.com/McKaylaMaroney)\n
[Twitter](https://twitter.com/mckaylamaroney)\n
[YouTube Channel](https://www.youtube.com/channel/UC0HJyx19LKRmuHxfiqp9E8w)\n
[Keek](https://www.keek.com/profile/McKaylaMaroney)\n
[Vine](https://vine.co/u/920773070459904000)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SarahHyland = ("sarah", "hyland")
SarahHylandReply = """
[Instagram](https://instagram.com/therealsarahhyland)\n
[Twitter](https://twitter.com/sarah_hyland)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ArielWinter = ("ariel", "winter")
ArielWinterReply = """
[Instagram](https://instagram.com/arielwinter)\n
[Twitter](https://twitter.com/arielwinter1)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KarlieKloss = ("karlie", "kloss")
KarlieKlossReply = """
[Instagram](https://instagram.com/karliekloss/)\n
[Twitter](https://twitter.com/karliekloss/)\n
[YouTube](https://www.youtube.com/c/karliekloss)\n
[Facebook](https://www.facebook.com/KarlieKloss)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KendallJenner = ("kendall", "jenner")
KendallJennerReply = """
[Instagram](https://instagram.com/kendalljenner)\n
[Twitter](https://twitter.com/kendalljenner)\n
[Kendall + Kylie^TM](https://instagram.com/kendallandkylie/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KylieJenner = ("kylie", "jenner")
KylieJennerReply = """
Snapchat: KylizzleMyNizzl \n
[Instagram](https://instagram.com/kyliejenner)\n
[Twitter](https://twitter.com/kyliejenner)\n
[Kendall + Kylie^TM](https://instagram.com/kendallandkylie/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ChloeBennet = ("chloe", "bennet")
ChloeBennetReply = """
[Instagram](https://instagram.com/chloebennet4/)\n
[Twitter](https://twitter.com/ChloeBennet4)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
HayleyAtwell = ("hayley", "atwell")
HayleyAtwellReply = """
[Instagram](https://instagram.com/realhayleyatwell)\n
[Twitter](https://twitter.com/HayleyAtwell)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AnnaKendrick = ("anna", "kendrick")
AnnaKendrickReply = """
[Instagram](https://instagram.com/annakendrick47/)\n
[Twitter](https://twitter.com/annakendrick47)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
DaniThorne = ("dani", "thorne")
DaniThorneReply = """
[Instagram](https://instagram.com/dani_thorne/)\n
[Twitter](https://twitter.com/Dani_Thorne)\n
[Tumblr](http://danithornesworld.com/)\n
[Youtube](https://www.youtube.com/user/danithornesworld)\n
[IMDb](http://www.imdb.com/name/nm2374574/)\n
[Facebook](https://www.facebook.com/official.danimthorne)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
BellaThorne = ("bella", "thorne")
BellaThorneReply = """
[Instagram](https://instagram.com/bellathorne)\n
[Twitter](https://twitter.com/BELLATHORNE)\n
[IMDb](http://www.imdb.com/name/nm2254074/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
EmiliaClarke = ("emilia", "clarke")
EmiliaClarkeReply = """
[Instagram](https://instagram.com/emilia_clarke/)\n
[Twitter](https://twitter.com/Emilia_Clarke)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
JessicaBiel = ("jessica", "biel")
JessicaBielReply = """
[Instagram](https://instagram.com/jessicabiel)\n
[Twitter](https://twitter.com/JessicaBiel)\n
[WhoSay](http://www.whosay.com/jessicabiel)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AshleyBenson = ("ashley", "benson")
AshleyBensonReply = """
[Instagram](https://instagram.com/itsashbenzo)\n
[Twitter](https://twitter.com/AshBenzo)\n
[Website](http://ashleybenson.net/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MelissaBenoist = ("melissa", "benoist")
MelissaBenoistReply = """
[Instagram](https://instagram.com/melissabenoist/)\n
[Twitter](https://twitter.com/MelissaBenoist)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MilanaVayntrub = ("milana", "vayntrub")
MilanaVayntrubReply = """
[Instagram](https://instagram.com/mintmilana)\n
[Twitter](https://twitter.com/MintMilana)\n
[YouTube: Live Prude Girls](https://www.youtube.com/user/LivePrudeGirls)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
HeidiKlum = ("heidi", "klum")
HeidiKlumReply = """
[Instagram](https://instagram.com/HeidiKlum)\n
[Twitter](https://twitter.com/heidiklum/)\n
[Website](http://www.heidiklum.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
StanaKatic = ("stana", "katic")
StanaKaticReply = """
[Instagram](https://instagram.com/drstanakatic)\n
[Twitter](https://twitter.com/stana_katic)\n
[Website](http://www.stanakatic.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
BlakeLively = ("blake", "lively")
BlakeLivelyReply = """
[Instagram](https://instagram.com/blakelively/)\n
[Twitter](https://twitter.com/blakelively)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MelissaDebling = ("melissa", "debling")
MelissaDeblingReply = """
[Instagram](https://instagram.com/melissadebling/)\n
[Twitter](https://www.twitter.com/MelissaDebling)\n
[Website](http://melissad.co.uk/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SarahShahi = ("sarah", "shahi")
SarahShahiReply = """
[Instagram](https://instagram.com/theonlysarahshahi/)\n
[Twitter](https://twitter.com/onlysarahshahi)\n
[WhoSay](http://www.whosay.com/sarahshahi)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
IrinaShayk = ("irina", "shayk")
IrinaShaykReply = """
[Instagram](https://instagram.com/irinashayk/)\n
[Twitter](https://twitter.com/theirishayk/)\n
[Facebook](https://www.facebook.com/IrinaShayk)\n
[Website](http://irinashaykofficial.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MarinaNery = ("marina", "nery")
MarinaNeryReply = """
[Instagram](https://instagram.com/marinadnery/)\n
[Twitter](https://twitter.com/marinadnery)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SandraRadav = ("sandra", "radav")
SandraRadavReply = """
[Instagram](https://instagram.com/sandraradav)\n
[Twitter](https://twitter.com/SandraRadav)\n
[YouTube channel](https://www.youtube.com/user/TheLovezumba)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
VanessaHudgens = ("vanessa", "hudgens")
VanessaHudgensReply = """
[Instagram](https://instagram.com/vanessahudgens)\n
[Twitter](https://twitter.com/vanessahudgens)\n
[Tumblr](http://vanessahudgens.tumblr.com/)\n
[YouTube channel](https://www.youtube.com/vanessahudgens)\n
[Facebook](https://www.facebook.com/VanessaHudgens)\n
[Pinterest](https://www.pinterest.com/vanessahudgens/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KellyBrook = ("kelly", "brook")
KellyBrookReply = """
[Instagram](https://instagram.com/iamkb)\n
[Twitter](https://twitter.com/IAMKELLYBROOK)\n
[YouTube channel](https://www.youtube.com/user/kellybrookofficial)\n
[Facebook](https://www.facebook.com/kellybrookofficial)\n
[Website](http://kellybrook.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MandyMoore = ("mandy", "moore")
MandyMooreReply = """
[Instagram](https://instagram.com/mandymooremm/)\n
[Twitter](https://twitter.com/TheMandyMoore)\n
[Facebook](https://www.facebook.com/mandymooreofficial)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AnnaFaith = ("anna", "faith")
AnnaFaithReply = """
Snapchat: AnnaFaithBae \n
[Instagram](https://instagram.com/annafaithxoxo/)\n
[Twitter](https://twitter.com/TheAnnaFaith)\n
[YouTube channel](https://www.youtube.com/channel/UCTcBaZEehmQeydOl1LTM_5Q/)\n
^Frost ^Sisters ^[instagram](https://instagram.com/frostsisters/) ^& ^[twitter](https://twitter.com/frostsisters)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
LexieGrace = ("lexie", "grace")
LexieGraceReply = """
[Instagram](https://instagram.com/Lexiegracelove/)\n
[Twitter](https://twitter.com/lexiegracelove)\n
^Frost ^Sisters ^[instagram](https://instagram.com/frostsisters/) ^& ^[twitter](https://twitter.com/frostsisters)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
FrostSisters = ("frost", "sisters")
FrostSistersReply = """
[Instagram](https://instagram.com/frostsisters/)\n
[Twitter](https://twitter.com/frostsisters)\n
\n-\n
**Anna Faith** \n
Snapchat: AnnaFaithBae \n
[Instagram](https://instagram.com/annafaithxoxo/)\n
[Twitter](https://twitter.com/TheAnnaFaith)\n
[YouTube channel](https://www.youtube.com/channel/UCTcBaZEehmQeydOl1LTM_5Q/)\n
\n-\n
**Lexie Grace**\n
[Instagram](https://instagram.com/Lexiegracelove/)\n
[Twitter](https://twitter.com/lexiegracelove)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
TaylorHannum = ("taylor", "hannum")
TaylorHannumReply = """
[Instagram](https://instagram.com/taylorhannum_)\n
[Twitter](https://twitter.com/TaylorHannum)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CaraDelevingne = ("cara", "delevingne")
CaraDelevingneReply = """
[Instagram](https://instagram.com/caradelevingne/)\n
[Twitter](https://twitter.com/Caradelevingne)\n
[Tumblr](http://iamcaradelevingne.tumblr.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
LenaGercke = ("lena", "gercke")
LenaGerckeReply = """
[Instagram](https://instagram.com/lenagercke/)\n
[Facebook](https://www.facebook.com/pages/Lena-Gercke/439297919435120)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
JenniferMorrison = ("jen", "jennifer", "jenny", "morrison")
JenniferMorrisonReply = """
[Instagram](https://instagram.com/jenmorrisonlive/)\n
[Twitter](https://twitter.com/jenmorrisonlive/)\n
[Facebook](https://www.facebook.com/JenniferMorrisonOfficial)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MargotRobbie = ("margot", "robbie")
MargotRobbieReply = """
[Instagram](https://instagram.com/margotrobbieofficial)\n
[Twitter](https://twitter.com/MargotRobbie)\n
[Website](http://www.margotrobbie.com.au/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AlyssaArce = ("alyssa", "arce")
AlyssaArceReply = """
[Instagram](https://instagram.com/miss_alyssaarce/)\n
[Twitter](https://twitter.com/missalyssaarce)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MirandaKerr = ("miranda", "kerr")
MirandaKerrReply = """
[Instagram](https://instagram.com/mirandakerr/)\n
[Twitter](https://twitter.com/mirandakerr)\n
[Facebook](https://www.facebook.com/MirandaKerr)\n
[Website](http://www.mirandakerr.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KarlieKloss = ("karlie", "kloss")
KarlieKlossReply = """
[Instagram](https://instagram.com/karliekloss/)\n
[Twitter](https://twitter.com/karliekloss/)\n
[YouTube channel: Klossy](https://www.youtube.com/c/karliekloss)\n
[Facebook](https://www.facebook.com/KarlieKloss)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ElsaHosk = ("elsa", "hosk")
ElsaHoskReply = """
[Instagram](https://instagram.com/hoskelsa/)\n
[Twitter](https://twitter.com/elsahosk)\n
[Facebook](https://www.facebook.com/hoskelsa)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CandiceSwanepoel = ("candice", "swanepoel")
CandiceSwanepoelReply = """
[Instagram](https://instagram.com/angelcandices)\n
[Twitter](https://twitter.com/angelcandice/)\n
[Facebook](https://www.facebook.com/angelcandices)\n
[Website](http://www.candiceswanepoel.com/home.php)\n
[Pinterest](https://www.pinterest.com/angelcandice/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MeganFox = ("megan", "fox")
MeganFoxReply = """
[Instagram](https://instagram.com/the_native_tiger/)\n
[Twitter](https://twitter.com/meganfox)\n
[Facebook](https://www.facebook.com/MeganFox)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
IzzyMarshall = ("izzy", "marshall")
IzzyMarshallReply = """
[Instagram](https://instagram.com/_izzymarshall/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ArianaGrande = ("ariana", "grande")
ArianaGrandeReply = """
Snapchat: moonlightbae \n
[Instagram](https://instagram.com/arianagrande)\n
[Twitter](https://twitter.com/arianagrande)\n
[YouTube channel(personal)](https://www.youtube.com/user/osnapitzari)\n
[Facebook](https://www.facebook.com/arianagrande)\n
[YouTubeVEVO](https://www.youtube.com/user/ArianaGrandeVevo)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
NathalieEmmanuel = ("nathalie", "emmanuel")
NathalieEmmanuelReply = """
[Instagram](https://instagram.com/nathalieemmanuel/)\n
[Twitter](https://twitter.com/missnemmanuel)\n
[Tumblr tag (nsfw)](https://www.tumblr.com/search/nathalie+emmanuel)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
HannahFerguson = ("hannah", "ferguson")
HannahFergusonReply = """
[Instagram](https://instagram.com/hannahfergusonofficial/)\n
[Twitter](https://twitter.com/thehannahferg)\n
[Facebook](https://www.facebook.com/TheHannahFerg)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KateHudson = ("kate", "hudson")
KateHudsonReply = """
[Instagram](https://instagram.com/katehudson/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
NinaDobrev = ("nina", "dobrev")
NinaDobrevReply = """
[Instagram](https://instagram.com/ninadobrev)\n
[Twitter](https://twitter.com/ninadobrev/)\n
[Tumblr](https://ninadobrev.tumblr.com/)\n
[Facebook](https://www.facebook.com/NinaDobrev)\n
[Website/whosay](http://www.ninadobrev.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
DaphneJoy = ("daphne", "joy")
DaphneJoyReply = """
Snapchat: DaphneJoyLove \n
[Instagram](https://instagram.com/daphnejoy/)\n
[Twitter](https://twitter.com/DaphneJoy)\n
[Website](http://www.daphnejoy.com/site/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
EmilyRudd = ("emily", "rudd")
EmilyRuddReply = """
Snapchat: emilysteaparty \n
[Instagram](https://instagram.com/emilysteaparty/)\n
[Twitter](https://www.twitter.com/emilysteaparty)\n
[Tumblr](https://emilysteaparty.tumblr.com)\n
[YouTube channel](https://www.youtube.com/user/emilysteaparty)\n
[Facebook](https://www.facebook.com/emilyruddofficial)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
OliviaCulpo = ("olivia", "culpo")
OliviaCulpoReply = """
[Instagram](https://instagram.com/oliviaculpo)\n
[Twitter](https://twitter.com/oliviaculpo)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
OdetteAnnable = ("odette", "annable")
OdetteAnnableReply = """
[Instagram](https://instagram.com/odetteannable)\n
[Twitter](https://twitter.com/OdetteAnnable)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
OlgaKurylenko = ("olga", "kurylenko")
OlgaKurylenkoReply = """
[Instagram](https://instagram.com/olgakurylenkoofficial/)\n
[Twitter](https://twitter.com/OlyaKurylenko)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
BrendaSong = ("brenda", "song")
BrendaSongReply = """
/r/BrendaSong \n
[Instagram](https://instagram.com/brendasong)\n
[Twitter](https://twitter.com/BrendaSong)\n
[Facebook](https://www.facebook.com/BrendaSong)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CarolSeleme = ("carol", "seleme")
CarolSelemeReply = """
[Instagram](https://instagram.com/cadeque/)\n
[Tumblr](http://moorslorac.tumblr.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AlessandraAmbrosio = ("alessandra", "ambrosio")
AlessandraAmbrosioReply = """
[Instagram](https://instagram.com/alessandraambrosio)\n
[Twitter](https://twitter.com/angelalessandra)\n
[Facebook](https://www.facebook.com/Alessandra)\n
[Website](http://www.alessandraambrosio.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AlexSchmidt = ("alex", "schmidt")
AlexSchmidtReply = """
[Instagram](https://instagram.com/alxxschmidt/)\n
[Tumblr](http://alxxschmidt.tumblr.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
RachelHilbert = ("rachel", "hilbert")
RachelHilbertReply = """
Snapchat: rachelhilbert \n
[Instagram](https://instagram.com/rachelhilbert/)\n
[Twitter](https://twitter.com/rachel_hil)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
DevonWindsor = ("devon", "windsor")
DevonWindsorReply = """
[Instagram](https://instagram.com/devwindsor/)\n
[Twitter](https://twitter.com/devwindsor/)\n
[Facebook](https://www.facebook.com/devwindsor)\n
[Website](http://www.devonwindsor.com/home/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
TaylorMarieHill = ("taylor", "marie", "hill")
TaylorMarieHillReply = """
Snapchat: taylor_hill \n
[Instagram](https://instagram.com/taylor_hill/)\n
[Twitter](https://twitter.com/TaylorMarieHill)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KateMara = ("kate", "mara")
KateMaraReply = """
[Twitter](https://twitter.com/katemara)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ChloeGraceMortz = ("chloe", "grace", "mortz")
ChloeGraceMortzReply = """
[Instagram](https://instagram.com/ChloeGMoretz)\n
[Twitter](https://twitter.com/chloegmoretz)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CamilleRowe = ("camille", "camilla", "rowe")
CamilleRoweReply = """
[Instagram](https://instagram.com/fingermonkey/)\n
[Twitter](https://twitter.com/CamilleRowe)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
PeytonList = ("peyton", "list")
PeytonListReply = """
[Instagram](https://instagram.com/peytonlist)\n
[Twitter](https://twitter.com/peytonlist)\n
[Facebook](https://www.facebook.com/peyton.list)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SammiHanratty = ("sammi", "hanratty")
SammiHanrattyReply = """
Snapchat: SammiHanratty1 \n
[Instagram](https://instagram.com/sammihanratty143/)\n
[Twitter](https://twitter.com/SammiHanratty1)\n
[Facebook](https://www.facebook.com/TheOfficialSammiHanratty)\n
[YouTube channel](https://www.youtube.com/channel/UCJkIBX-nVKat9C-1PU7FiZg)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MargotRobbie = ("margot", "robbie")
MargotRobbieReply = """
[Instagram](https://instagram.com/margotrobbie/)\n
[Twitter](https://twitter.com/MargotRobbie)\n
[Whosay](http://www.whosay.com/margotrobbie)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
LaurenCohan = ("lauren", "cohan")
LaurenCohanReply = """
[Instagram](https://instagram.com/LaurenCohan)\n
[Twitter](https://twitter.com/LaurenCohan)\n
[Website](http://laurencohan.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CamilaCabello = ("camila", "cabello")
CamilaCabelloReply = """
[Instagram](https://instagram.com/camila_cabello/)\n
[Twitter](https://twitter.com/CamilaCabello97)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
YvonneStrahovski = ("yvonne", "strahovski")
YvonneStrahovskiReply = """
[Instagram](https://instagram.com/therealyvonnestrahovski/)\n
[Twitter](https://twitter.com/Y_Strahovski/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
OliviaMunn = ("olivia", "munn")
OliviaMunnReply = """
[Instagram](https://instagram.com/oliviamunn/)\n
[Twitter](https://twitter.com/oliviamunn)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KatharineMcphee = ("katharine", "mcphee")
KatharineMcpheeReply = """
[Instagram](https://instagram.com/katharinemcphee/)\n
[Twitter](https://twitter.com/KatharineMcPhee)\n
[Website](http://www.katharinemcphee.net/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
EmmaRoberts = ("emma", "roberts")
EmmaRobertsReply = """
[Instagram](https://instagram.com/emmaroberts/)\n
[Twitter](https://twitter.com/robertsemma)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SalmaHayek = ("salma", "hayek")
SalmaHayekReply = """
[Instagram](https://instagram.com/salmahayek/)\n
[Twitter](https://twitter.com/salmahayek)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KatyaEliseHenry = ("katya", "elise", "henry")
KatyaEliseHenryReply = """
Snapchat: katyahenry \n
[Instagram](https://instagram.com/katyaelisehenry/)\n
[Twitter](https://twitter.com/katyaelisehenry)\n
[Facebook](https://www.facebook.com/katyaehenry)\n
[Website](http://www.katyaelisehenry.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ElizabethGillies = ("elizabeth", "liz", "gillies")
ElizabethGilliesReply = """
/r/lizgillies \n
[Twitter](https://twitter.com/lizgillies)\n
[Facebook](https://www.facebook.com/ElizabethGilliesOfficial/timeline)\n
[YouTube channel](https://www.youtube.com/user/LizGilliesOfficial)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
MichelleJenneke = ("michelle", "jenneke")
MichelleJennekeReply = """
[Instagram](https://instagram.com/mjenneke93/)\n
[Twitter](https://twitter.com/MJenneke93)\n
[YouTube channel](https://www.youtube.com/channel/UCOiLtIb9UcXKkulRfMQem1g)\n
[Website](http://www.michellejenneke.com.au/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
GwenCox = ("gwen", "cox")
GwenCoxReply = """
Snapchat: gw3nnyy \n
[Instagram](https://instagram.com/hologrvphic/)\n
[Tumblr](http://hologrvphicx.tumblr.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
DakotaJohnson = ("dakota", "johnson")
DakotaJohnsonReply = """
[Instagram](https://instagram.com/dakotajohnson/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CamillaLuddington = ("camilla", "luddington")
CamillaLuddingtonReply = """
[Instagram](https://instagram.com/officialcamillaluddington/)\n
[Twitter](https://twitter.com/camilluddington)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
JennaHopkins = ("jenna", "hopkins")
JennaHopkinsReply = """
[Instagram](https://instagram.com/jhopkins_/)\n
[Twitter](https://twitter.com/_jennahop)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
PriscillaQuintana = ("priscilla", "quintana")
PriscillaQuintanaReply = """
[Instagram](https://instagram.com/priscilla_quintana/)\n
[Twitter](https://twitter.com/_paq)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
RachaelFinch = ("rachael", "finch")
RachaelFinchReply = """
[Instagram](https://instagram.com/rachael_finch/)\n
[Twitter](https://twitter.com/RachaelFinch)\n
[YouTube channel](https://www.youtube.com/user/rachaelfinch)\n
[Facebook](https://www.facebook.com/rachaelfinchfanpage)\n
[Website](http://rachaelfinch.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
RachaelTaylor = ("rachael", "taylor")
RachaelTaylorReply = """
[Instagram](https://instagram.com/rachaelmaytaylor/)\n
[Twitter](https://twitter.com/_Rachael_Taylor)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
ElisabettaCanalis = ("elisabetta", "canalis")
ElisabettaCanalisReply = """
[Instagram](https://instagram.com/littlecrumb_)\n
[Twitter](https://twitter.com/justelisabetta)\n
[Website](http://www.elisabettacanalis.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SolveigMorkHansen = ("solveig", "mork", "hansen")
SolveigMorkHansenReply = """
[Instagram](https://instagram.com/notsolveig)\n
[Twitter](https://twitter.com/mhsolveig)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
AlyssaMilano = ("alyssa", "milano")
AlyssaMilanoReply = """
[Instagram](https://instagram.com/milano_Alyssa/)\n
[Twitter: personal](https://twitter.com/Alyssa_Milano)\n
[Facebook](https://www.facebook.com/AlyssaMilano)\n
[Twitter: AlyssaDotCom](https://twitter.com/AlyssaDotCom)\n
[Twitter: TouchByAM](https://twitter.com/TouchByAM)\n
[Website](http://alyssa.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
FrancoiseBoufhal = ("francoise", "boufhal")
FrancoiseBoufhalReply = """
[Instagram](https://instagram.com/francoisebouf/)\n
[Twitter](https://twitter.com/francoisebouf)\n
[Facebook](https://www.facebook.com/officialfrancoise)\n
[YouTube channel](https://www.youtube.com/user/officialfrancoise)\n
[Website](http://www.officialfrancoise.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KimberleyGarner = ("kimberley", "garner")
KimberleyGarnerReply = """
[Instagram](https://instagram.com/kimberleylondon)\n
[Twitter](https://twitter.com/KimberleyLondon)\n
[Website](http://www.kimberleylondon.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
CarlyCraig = ("carly", "craig")
CarlyCraigReply = """
[Instagram](https://instagram.com/carlyccraig/)\n
[Twitter](https://twitter.com/carly_craig)\n
[Facebook](https://www.facebook.com/CarlyCraigFB)\n
[YouTube channel](https://www.youtube.com/user/carlycraig)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KarolinaKurkova = ("karolina", "kurkova")
KarolinaKurkovaReply = """
[Instagram](https://instagram.com/karolinakurkova)\n
[Twitter](https://twitter.com/KarolinaKurkova)\n
[Facebook](https://www.facebook.com/KarolinaKurkova)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
LindsayHeyser = ("lindsay", "heyser")
LindsayHeyserReply = """
[Instagram](https://instagram.com/lheyser/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
RachelHurdWood = ("rachel", "hurdwood")
RachelHurdWoodReply = """
[Instagram](https://instagram.com/1rachelhurdwood/)\n
[Twitter](https://twitter.com/rachelhurdwood)\n
[Facebook](https://www.facebook.com/rachelhurdwood/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
TiannaGregory = ("tianna", "gregory")
TiannaGregoryReply = """
Snapchat: TiannaGregory\n
[Instagram](https://instagram.com/_tiannag/)\n
[Twitter](https://twitter.com/_TiannaG)\n
[Tumblr](http://tnutty.tumblr.com/)\n
[Facebook](https://www.facebook.com/pages/Tianna-Gregory/585670294906217)\n
[Website](http://www.tiannagregory.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
PaigeSpiranac = ("paige", "spiranac")
PaigeSpiranacReply = """
[Instagram](https://instagram.com/_paige.renee/)\n
[Twitter](https://twitter.com/PaigeSpiranac)\n
[Facebook](https://www.facebook.com/paigereneespiranac)\n
[Website](http://paigespiranac.com/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
GeorgiaMayJagger = ("georgia", "may", "jagger")
GeorgiaMayJaggerReply = """
[Instagram](https://instagram.com/georgiamayjagger/)\n
[Twitter](https://twitter.com/GeorgiaMJagger)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
BrookeBurke = ("brooke", "burke")
BrookeBurkeReply = """
[Instagram](https://instagram.com/brookeburke/)\n
[Twitter](https://twitter.com/BrookeBurke)\n
[Facebook](https://www.facebook.com/pages/Brooke-Burke/261925180496418)\n
[WhoSay](http://www.whosay.com/brookeburke)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
SydneyFullerMarr = ("sydney", "fuller", "marr")
SydneyFullerMarrReply = """
[Instagram](https://instagram.com/misssydneyfuller/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
JennaJenovich = ("jenna", "jenovich")
JennaJenovichReply = """
Snapchat: jennajenovich\n
[Instagram](https://instagram.com/jennajenovich/)\n
[Twitter](https://twitter.com/JennaJenovich)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
OliviaJordan = ("olivia", "jordan")
OliviaJordanReply = """
[Instagram](https://instagram.com/theoliviajordan/)\n
[Twitter - personal](https://twitter.com/theOliviaJordan)\n
[Twitter - @MissUSA](https://twitter.com/missusa)\n
[Facebook](https://www.facebook.com/theoliviajordan)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
HannahPolites = ("hannah", "polites")
HannahPolitesReply = """
[Instagram](https://instagram.com/hannahpolites)\n
[Facebook](https://www.facebook.com/hannah.polites)\n
[Website](http://hannahpolites.com.au/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
DeniseMilani = ("denise", "milani")
DeniseMilaniReply = """
[Instagram](https://instagram.com/denisemilaniofficial/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
KatherineTimokhina = ("catherine", "katherine", "timokhina")
KatherineTimokhinaReply = """
[Instagram](https://instagram.com/katrintim93/)\n
\n-\n ^| ^v0.1 ^| ^I ^am ^a ^bot, ^accuracy ^not ^guaranteed ^| ^/r/Celeb_Username_Bot ^for ^questions, ^help, ^and ^bug ^reporting. ^| ^[Source](https://github.com/DrewsephA/Celeb_Username_Bot) ^|\n
"""
celebs = ({SelenaGomez: SelenaGomezReply}, {AlexisRen: AlexisRenReply}, {TaylorSwift: TaylorSwiftReply}, {McKaylaMaroney: McKaylaMaroneyReply},\
        {SarahHyland: SarahHylandReply}, {ArielWinter: ArielWinterReply}, {KarlieKloss: KarlieKlossReply}, {KendallJenner: KendallJennerReply},\
        {KylieJenner: KylieJennerReply}, {ChloeBennet: ChloeBennetReply}, {HayleyAtwell: HayleyAtwellReply}, {AnnaKendrick: AnnaKendrickReply},\
        {DaniThorne: DaniThorneReply}, {BellaThorne: BellaThorneReply}, {EmiliaClarke: EmiliaClarkeReply}, {JessicaBiel: JessicaBielReply},\
        {AshleyBenson: AshleyBensonReply}, {MelissaBenoist: MelissaBenoistReply}, {MilanaVayntrub: MilanaVayntrubReply}, {HeidiKlum: HeidiKlumReply},\
        {StanaKatic: StanaKaticReply}, {BlakeLively: BlakeLivelyReply}, {MelissaDebling: MelissaDeblingReply}, {SarahShahi: SarahShahiReply},\
        {IrinaShayk: IrinaShaykReply}, {MarinaNery: MarinaNeryReply}, {SandraRadav: SandraRadavReply}, {VanessaHudgens: VanessaHudgensReply},\
        {KellyBrook: KellyBrookReply}, {MandyMoore: MandyMooreReply}, {AnnaFaith: AnnaFaithReply}, {LexieGrace: LexieGraceReply},\
        {FrostSisters: FrostSistersReply}, {TaylorHannum: TaylorHannumReply}, {CaraDelevingne: CaraDelevingneReply}, {LenaGercke: LenaGerckeReply},\
        {JenniferMorrison: JenniferMorrisonReply}, {MargotRobbie: MargotRobbieReply}, {AlyssaArce: AlyssaArceReply}, {MirandaKerr: MirandaKerrReply},\
        {ElsaHosk: ElsaHoskReply}, {CandiceSwanepoel: CandiceSwanepoelReply}, {MeganFox: MeganFoxReply},\
        {IzzyMarshall: IzzyMarshallReply}, {ArianaGrande: ArianaGrandeReply}, {NathalieEmmanuel: NathalieEmmanuelReply}, {HannahFerguson: HannahFergusonReply},\
        {KateHudson: KateHudsonReply}, {NinaDobrev: NinaDobrevReply}, {DaphneJoy: DaphneJoyReply}, {EmilyRudd: EmilyRuddReply}, {OliviaCulpo: OliviaCulpoReply},\
        {OdetteAnnable: OdetteAnnableReply}, {OlgaKurylenko: OlgaKurylenkoReply}, {CarolSeleme: CarolSelemeReply}, {AlessandraAmbrosio: AlessandraAmbrosioReply},\
        {AlexSchmidt: AlexSchmidtReply}, {RachelHilbert: RachelHilbertReply}, {DevonWindsor: DevonWindsorReply}, {TaylorMarieHill: TaylorMarieHillReply},\
        {KateMara: KateMaraReply}, {ChloeGraceMortz: ChloeGraceMortzReply}, {CamilleRowe: CamilleRoweReply}, {SammiHanratty: SammiHanrattyReply},\
        {LaurenCohan: LaurenCohanReply}, {CamilaCabello: CamilaCabelloReply}, {YvonneStrahovski: YvonneStrahovskiReply},\
        {OliviaMunn: OliviaMunnReply}, {KatharineMcphee: KatharineMcpheeReply}, {EmmaRoberts: EmmaRobertsReply}, {SalmaHayek: SalmaHayekReply},\
        {KatyaEliseHenry: KatyaEliseHenryReply}, {ElizabethGillies: ElizabethGilliesReply}, {MichelleJenneke: MichelleJennekeReply}, {GwenCox: GwenCoxReply},\
        {DakotaJohnson: DakotaJohnsonReply}, {CamillaLuddington: CamillaLuddingtonReply}, {JennaHopkins: JennaHopkinsReply}, {PriscillaQuintana: PriscillaQuintanaReply},\
        {RachaelFinch: RachaelFinchReply}, {RachaelTaylor: RachaelTaylorReply}, {ElisabettaCanalis: ElisabettaCanalisReply}, {SolveigMorkHansen: SolveigMorkHansenReply},\
        {AlyssaMilano: AlyssaMilanoReply}, {FrancoiseBoufhal: FrancoiseBoufhalReply}, {KimberleyGarner: KimberleyGarnerReply}, {CarlyCraig: CarlyCraigReply},\
        {KarolinaKurkova: KarolinaKurkovaReply}, {LindsayHeyser: LindsayHeyserReply}, {RachelHurdWood: RachelHurdWoodReply}, {TiannaGregory: TiannaGregoryReply},\
        {PaigeSpiranac: PaigeSpiranacReply}, {GeorgiaMayJagger: GeorgiaMayJaggerReply}, {BrookeBurke: BrookeBurkeReply}, {SydneyFullerMarr: SydneyFullerMarrReply},\
        {JennaJenovich: JennaJenovichReply}, {OliviaJordan: OliviaJordanReply}, {HannahPolites: HannahPolitesReply}, {DeniseMilani: DeniseMilaniReply}, {KatherineTimokhina: KatherineTimokhinaReply})
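# Illustrative helper (an assumption, not part of the original script): one way
# the keyword tuples above could be matched against a comment. Each entry in
# `celebs` is a one-item dict mapping a tuple of lowercase keywords to a reply;
# this sketch fires a reply only when every keyword appears in the comment body.
def match_celeb(comment_body):
    body = comment_body.lower()
    for entry in celebs:
        for keywords, reply in entry.items():
            if all(word in body for word in keywords):
                return reply
    return None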
# In this string, put all your target subreddits, joined with "+":
subreddits_string = "celeb_username_bot+Celeb_Bot_Test+DrewsephA"
# When the script is ready, delete the two lines above and remove the "#" from the line below:
#subreddits_string = "Celebs+CelebGfys+celebgifs+CelebsWallpaper+goddesses+VSModels+vsangels+Models+PrettyGirls+GirlswithGlasses+GirlswithGreenEyes+GirlswithWetHair+VictoriaSecret+VictoriasSecret+VSfans+WtSSTaDaMiT+starlets+girlsinyogapants+girlsinyogashorts+BeautifulFemales"
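# Illustrative scanning loop (an assumption, not code from the original bot):
# wires subreddits_string to the match_celeb sketch above, assuming a
# PRAW 3.x-style session object that exposes get_comments(<multireddit string>).
def scan_subreddits(reddit_session, limit=100):
    for comment in reddit_session.get_comments(subreddits_string, limit=limit):
        reply = match_celeb(comment.body)
        if reply is not None:
            comment.reply(reply)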
''' ---------------------------------------------------------------------------------------------------------------- '''
| gpl-2.0 | 5,112,249,427,415,677,000 | 53.891648 | 276 | 0.689333 | false |
mascot6699/Hackapi-Demo | src/core/utils.py | 1 | 3280 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pprint import pprint
import requests
from django.conf import settings
from PyDictionary import PyDictionary
import wikipedia
import sendgrid
sid = settings.EXOTEL_SID
token = settings.EXOTEL_TOKEN
api = settings.SENDGRID_API_TOKEN
def send_message(sid, token, sms_from, sms_to, sms_body):
    # Send an SMS through Exotel's HTTP API; returns the requests.Response.
return requests.post('https://twilix.exotel.in/v1/Accounts/{sid}/Sms/send.json'.format(sid=sid),
auth=(sid, token),
data={
'From': sms_from,
'To': sms_to,
'Body': sms_body
})
if __name__ == '__main__':
r = send_message(sid, token,
sms_from='8050248326', # sms_from='8808891988',
sms_to='8050248326', # sms_to='9052161119',
sms_body='Some message is sent')
print r.status_code
pprint(r.json())
def process_wiki(word):
return wikipedia.summary(word)
def process_dictionary(word):
    meaning = "You searched for the word {}. "
    dictionary = PyDictionary(word)
    our_meaning = dictionary.getMeanings()  # {word: {part_of_speech: [definitions]}}
    meaning = meaning.format(our_meaning.keys()[0])
    for part_of_speech, definitions in our_meaning.values()[0].items():
        # List each part of speech together with all of its own definitions.
        meaning += part_of_speech + ": " + "; ".join(definitions) + ", "
    return meaning[:-2]  # drop the trailing ", "
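# Illustrative call (exact wording depends on PyDictionary's WordNet data):
# process_dictionary("snake") -> "You searched for the word snake. Noun: ..."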
def custom_send_email(msg):
    # Expected format: "<from_email> <to_email> <body words ...>"
    parts = msg.split(' ')
    from_email = parts[0]
    to_email = parts[1]
    body = " ".join(parts[2:])
    sg = sendgrid.SendGridClient(api)
    message = sendgrid.Mail(to=to_email, subject="Urgent Emails", text=body, from_email=from_email)
    status, response = sg.send(message)
    print "status", status
    print "response", response
    if status == 200:
        return "Email has been sent!"
    else:
        return "Email sending is delayed; we are on it!"
def connect_customer(sid, token, customer_no, exotel_no, callerid, url, timelimit=None, timeout=None, calltype="trans",
                     callback_url=None):
    # Bridge a call from customer_no to exotel_no through Exotel's Connect API;
    # returns the requests.Response.
return requests.post('https://twilix.exotel.in/v1/Accounts/{sid}/Calls/connect.json'.format(sid=sid),
auth=(sid, token),
data={
'From': customer_no,
'To': exotel_no,
'CallerId': callerid,
'Url': url,
'TimeLimit': timelimit,
'TimeOut': timeout,
'CallType': calltype,
'StatusCallback': callback_url
})
if __name__ == '__main__':
r = connect_customer(
sid, token,
customer_no="<Your-Customer's-Number>",
exotel_no="<Your-Exotel-Landline-or-Mobile>",
callerid="<Your-Exotel-virtual-number>",
url="http://my.exotel.in/exoml/start/<flow_id>",
timelimit="<time-in-seconds>", # This is optional
timeout="<time-in-seconds>", # This is also optional
calltype="trans", # Can be "trans" for transactional and "promo" for promotional content
callback_url="<http//: your company URL>" # This is also also optional
)
print r.status_code
pprint(r.json())
def get_help():
message = "8050248326 email from_email to_email body \n" \
"8050248326 dictionary term_to_search \n" \
"8050248326 wiki thing_to_search_in_wiki \n" \
"8050248326 song requested_song \n"
print message
return message | mit | -5,266,229,043,800,101,000 | 30.854369 | 119 | 0.601524 | false |