repo_name
stringlengths 6
61
| path
stringlengths 4
230
| copies
stringlengths 1
3
| size
stringlengths 4
6
| text
stringlengths 1.01k
850k
| license
stringclasses 15
values | hash
int64 -9,220,477,234,079,998,000
9,219,060,020B
| line_mean
float64 11.6
96.6
| line_max
int64 32
939
| alpha_frac
float64 0.26
0.9
| autogenerated
bool 1
class | ratio
float64 1.62
6.1
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
sleibrock/discord-bots | bots/dumb-bot.py | 1 | 8132 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
from random import randint, choice
from Bot import ChatBot
from requests import get
from bs4 import BeautifulSoup as BS
from json import dump as jdump
class DumbBot(ChatBot):
    """
    Dumb Bot is a basic toy bot integration
    He has some built in functionality as well as webhook-bot
    integrations to provide a connection to webhooks

    WebHook data is stored in the 'shared' folder, so we
    allow Dumb Bot to access the shared pool
    """
    STATUS = "I'm a bot, Beep Bloop!"
    GIT_URL = "https://github.com/sleibrock/discord-bots"

    # Other strings
    YOUTUBE_URL = "https://www.youtube.com"

    # Used to convert chars to emojis for self.roll()
    # Maps "0".."9" -> ":zero:"..":nine:".
    emojis = {f"{i}":x for i, x in enumerate([f":{x}:" for x in
              ("zero", "one", "two", "three", "four",
               "five", "six", "seven", "eight", "nine")])}

    def __init__(self, name):
        super(DumbBot, self).__init__(name)
        # File generator rooted at the shared webhook folder.
        self.filegen = self._create_filegen("shared")

    @ChatBot.action('<Command>')
    async def help(self, args, mobj):
        """
        Show the documentation string for a given command
        If you came here from '!help help', hi and thanks for using me!
        Example: !help roll
        """
        if args:
            # Normalize so both '!help roll' and '!help !roll' resolve.
            key = args[0]
            if not args[0].startswith(ChatBot.PREFIX):
                key = f'{ChatBot.PREFIX}{args[0]}'
            if key in self.ACTIONS:
                t = self.pre_text(f'Help for \'{key}\':{self.ACTIONS[key].__doc__}')
                return await self.message(mobj.channel, t)
        # No (known) command given: print the full command index instead.
        keys = [f'{k}' for k in self.ACTIONS.keys()]
        longest = max((len(s) for s in keys)) + 2
        output = 'Thank you for choosing Dumb Bot™ for your channel\n'
        output += 'Here are the available commands\n\n'
        for c in keys:
            output += f'* {c.ljust(longest)} {self.HELPMSGS.get(c, "")}\n'
        output += f'\nFor more info on each command, use \'{ChatBot.PREFIX}help <command>\''
        output += f'\nVisit {self.GIT_URL} for more info'
        return await self.message(mobj.channel, self.pre_text(output))

    @ChatBot.action('<Status String>')
    async def status(self, args, mobj):
        """
        Change the bot's status to a given string
        Example: !status haha ur dumb
        """
        return await self.set_status(" ".join(args))

    @ChatBot.action('<Dota ID String>')
    async def dota(self, args, mobj):
        """
        Register a Dota ID (enable your profile as Public!)
        Example: !dota 40281889
        """
        # Per-user registration file keyed by the Discord author id.
        p = self.filegen(f"{mobj.author.id}.dota")
        if not args:
            if p.is_file():
                return await self.message(mobj.channel, f"ID: {p.read_text()}")
            return await self.message(mobj.channel, "No Dota ID supplied")
        # Get the first argument in the list and check if it's valid.
        # isdecimal() (not isnumeric()) guarantees int() would accept it;
        # isnumeric() also matches characters like '²' that int() rejects.
        u = args[0].strip().strip("\n")
        if len(u) > 30 or not u.isdecimal():
            return await self.message(mobj.channel, "Invalid ID given")
        # Write to file and finish
        with open(p, 'w') as f:
            jdump({'dota_id': u}, f)
        return await self.message(mobj.channel, f"Registered ID {u}")

    @ChatBot.action()
    async def coin(self, args, mobj):
        """
        Do a coin flip
        Example: !coin
        """
        return await self.message(mobj.channel, choice([":monkey:", ":snake:"]))

    @ChatBot.action('<Number>')
    async def roll(self, args, mobj):
        """
        Make a roll (similar to Dota 2's /roll) between [1..1000]
        Example: !roll 100
        """
        if not args or len(args) > 1:
            return await self.message(mobj.channel, "Invalid arg count")
        x, = args
        # isdecimal() ensures int(x) below cannot raise (isnumeric() would
        # accept e.g. '²' and crash on int()).
        if not x.isdecimal():
            return await self.message(mobj.channel, "Non-numeric arg given")
        num = int(x)
        if num < 1 or num > 1000:
            return await self.message(mobj.channel, "Invalid range given")
        # Zero-pad the result to the width of the request and emoji-fy it.
        res = [self.emojis[x] for x in str(randint(1, num)).zfill(len(x))]
        return await self.message(mobj.channel, "".join(res))

    @ChatBot.action('[Search terms]')
    async def yt(self, args, mobj):
        """
        Get the first Youtube search result video
        Example: !yt how do I take a screenshot
        """
        if not args:
            return await self.message(mobj.channel, "Empty search terms")
        resp = get(f"{self.YOUTUBE_URL}/results?search_query={self.replace(' '.join(args))}")
        if resp.status_code != 200:
            return await self.message(mobj.channel, "Failed to retrieve search")
        # Build a BS parser and find all Youtube links on the page
        bs = BS(resp.text, "html.parser")
        main_d = bs.find('div', id='results')
        if not main_d:
            return await self.message(mobj.channel, 'Failed to find results')
        items = main_d.find_all("div", class_="yt-lockup-content")
        if not items:
            return await self.message(mobj.channel, "No videos found")
        # Loop until we find a valid non-advertisement link
        for container in items:
            href = container.find('a', class_='yt-uix-sessionlink')['href']
            if href.startswith('/watch'):
                return await self.message(mobj.channel, f'{self.YOUTUBE_URL}{href}')
        return await self.message(mobj.channel, "No YouTube video found")

    @ChatBot.action('[String]')
    async def spam(self, args, mobj):
        """
        Spam a channel with dumb things
        Example: !spam :ok_hand:
        """
        if not args or len(args) > 10:
            return await self.message(mobj.channel, "Invalid spam input")
        y = args * randint(5, 20)
        return await self.message(mobj.channel, f"{' '.join(y)}")

    @ChatBot.action('<Poll Query>')
    async def poll(self, args, mobj):
        """
        Turn a message into a 'poll' with up/down thumbs
        Example: !poll should polling be a feature?
        """
        await self.client.add_reaction(mobj, '👍')
        await self.client.add_reaction(mobj, '👎')
        return

    @ChatBot.action('[Users]')
    async def ban(self, args, mobj):
        """
        Ban a user from using bot commands (admin required)
        Example: !ban @Username @Username2 ...
        """
        if not self.is_admin(mobj):
            return await self.message(mobj.channel, "Admin permissions needed")
        bancount = 0
        # convert_user_tag returns False for args that are not user tags.
        ids = [self.convert_user_tag(x) for x in args]
        for uid in ids:
            if uid is not False:
                r = self.add_ban(uid)
                bancount += 1 if r else 0
        return await self.message(mobj.channel, f"{bancount} users banned")

    @ChatBot.action('[Users]')
    async def unban(self, args, mobj):
        """
        Unban a user from the bot commands (admin required)
        Example: !unban @Username @Username2 ...
        """
        if not self.is_admin(mobj):
            return await self.message(mobj.channel, "Admin permissions needed")
        unbanc = 0
        ids = [self.convert_user_tag(x) for x in args]
        for uid in ids:
            if uid is not False:
                r = self.del_ban(uid)
                unbanc += 1 if r else 0
        return await self.message(mobj.channel, f"{unbanc} users unbanned")

    @ChatBot.action()
    async def botinfo(self, args, mobj):
        """
        Print out debug information about the bot
        Example: !botinfo
        """
        # NOTE(review): assumes the bot runs from a git checkout with an
        # unpacked master ref; this open() raises otherwise — confirm
        # deployment always provides it.
        with open('.git/refs/heads/master') as f:
            head_hash = f.read()[:-1]
        # All the lines used in the output.
        # Fixed: a missing comma after the 'Commit version' entry used to
        # implicitly concatenate it with the 'Commit URL' f-string.
        lines = [
            f'Name: {self.name}',
            f'Actions loaded: {len(self.ACTIONS)}',
            f'Ban count: {len(self.BANS)}',
            f'Commit version: #{head_hash[:7]}',
            f'Commit URL: {self.GIT_URL}/commit/{head_hash}'
        ]
        return await self.message(mobj.channel, '```{}```'.format('\n'.join(lines)))
if __name__ == '__main__':
    # Removed a stray no-op `pass` statement that followed the run() call.
    DumbBot('dumb-bot').run()
# end
| mit | -1,982,356,323,345,148,200 | 33.134454 | 93 | 0.566716 | false | 3.69105 | false | false | false |
deadRaccoons/MameAirlines | tabo/Admi.py | 3 | 6552 | from Conexion import *
import hashlib
import cherrypy
class Administrador(object):
    """CherryPy application exposing the admin pages: signup, login,
    plane listing and the flight-creation form.

    SECURITY NOTE (review): every query in this class is built by string
    concatenation from request parameters, so all handlers are open to
    SQL injection, and passwords are hashed with unsalted SHA-1.  Fixing
    this properly requires parameterized-query support in `Conexion`,
    so it is only flagged here rather than changed.
    """

    @cherrypy.expose
    def index(self):
        """Landing page linking to the plane list and the signup form."""
        return """<html>
        <head></head>
        <body>
        <a href="aviones"> Aviones </a> </br>
        <a href="registrarse"> Registrarse </a>
        </body>
        </html> """

    @cherrypy.expose
    def registrarse(self):
        """Signup form posting to createlogin.

        NOTE(review): the "Confirma Contrasena" label has no matching
        input field, so the confirmation is never actually collected.
        """
        return """<html>
        <head></head>
        <body>
        <form action="createlogin">
        Nombres:
        <input type="text" name="nombre"><br>
        Apellidos:
        <input type="text" name="apellido"><br>
        Correo:
        <input type="text" name="correo"><br>
        Contrasena:
        <input type="text" name="secreto"><br>
        Confirma Contrasena:
        <button type="submit">Give it now!</button>
        </form>
        </body>
        </html>"""

    @cherrypy.expose
    def createlogin(self, nombre, apellido, correo, secreto):
        """Create a login row plus its administrador row.

        Returns "se creo" only when both inserts succeed.
        """
        con = Conexion()
        secret = hashlib.sha1()
        secret.update(secreto)
        if con.actualiza("insert into logins values('"+correo+"', '"+secret.hexdigest()+"', 'y');") == 1:
            if con.actualiza("insert into administrador values('"+correo+"', '"+nombre+"', '"+apellido+"');") == 1:
                return "se creo"
            return "no se creo"
        else:
            return "no se creo"

    @cherrypy.expose
    def login(self):
        """Login form posting to inicia.

        NOTE(review): method="get" leaks the password into the URL and
        server logs; should be POST.
        """
        # Fixed: the head contained a misspelled '<scrip>' opening tag.
        return """<html>
        <head>
        <script>
        </script>
        </head>
        <body>
        <form method="get" action="inicia">
        Correo:
        <input type="text" value="" name="correo"><br>
        Contrasena:
        <input type="password" value="" name="secreto"><br>
        <button type="submit">Give it now!</button>
        </form>
        </body>
        </html>"""

    @cherrypy.expose
    def inicia(self, correo, secreto):
        """Check the SHA-1 of the supplied password against the DB and
        show the admin page on success, the login form otherwise."""
        con = Conexion()
        secret = hashlib.sha1()
        secret.update(secreto)
        # Fetch once instead of issuing the identical query twice.
        fila = con.consulta("select secreto from logins where correo = '"+correo+"';")
        if fila is None:
            return self.login()
        if fila[0][0] == secret.hexdigest():
            return self.admin(correo)
        return self.login()

    @cherrypy.expose
    def admin(self, correo):
        """Greet the administrator identified by `correo`."""
        con = Conexion()
        usuario = con.consulta("select * from administrador where correo = '"+ correo+ "';")
        return "Bienvenido "+ usuario[0][1]

    @cherrypy.expose
    def promociones(self):
        """Placeholder for the promotions management page."""
        return "Aqui se manejaran las promociones"

    @cherrypy.expose
    def estadisticas(self):
        """Placeholder for the statistics page."""
        return "Aqui se pondra todo lo de las estadisticas"

    @cherrypy.expose
    def vuelos(self):
        """Placeholder for the flights page."""
        return "vuelos lalalala"

    @cherrypy.expose
    def aviones(self):
        """List every plane as 'name model' lines."""
        con = Conexion()
        cuerpo = """<html>
        <head></head>
        <body>"""
        aviones = con.consulta("select * from avion")
        for avion in aviones:
            # Tidied: dropped a dead empty-string concatenation; output is
            # identical ('name model </br>').
            cuerpo = cuerpo + avion[1] + " " + avion[2] + " </br>"
        cuerpo = cuerpo + """
        </body>
        </html>"""
        return cuerpo

    @cherrypy.expose
    def viajes(self):
        """Flight-creation form: origin/destination city selects driven by
        inline JavaScript, date/time selects, and a plane select."""
        con = Conexion()
        cuerpo = """<html>
        <head>
        <script>
        function quita(){
        var x = document.getElementById("viaje_origen");
        var s = x.value;
        var y = "Todos menos seleccionado "+ s;
        var n = document.getElementById("viaje_destino");
        var i;
        var j;
        for(j = 0; j < x.length; j++){
        n.remove(0);
        }
        if(s == 0){
        var o = document.createElement("option");
        o.text = "no ha seleccionado un pais";
        o.value = 0;
        fecha(0);
        } else{
        for(i = 1; i < x.length; i++){
        if(s != x.options[i].value){
        var o = document.createElement("option");
        o.text = x.options[i].text;
        o.value = x.options[i].value;
        n.add(o);
        }
        }
        fecha(1);
        }
        }
        function fecha(p){
        var m = document.getElementById("viaje_mes");
        var v = m.value;
        var i;
        var meses = ["Ene", "Feb", "Mar", "Abr", "May", "Jun", "Jul", "Ago", "Sept", "Oct", "Nov", "Dic"];
        if(p == 1){
        for(i = 0; i < 12; i++){
        var o = document.createElement("option");
        o.text = meses[i];
        m.add(o);
        }
        } else {
        for(i = 0; i < 12; i++){
        m.remove(0);
        }
        }
        }
        </script>
        </head>
        <body >
        <form method="post" action="createlogin">
        Origen:
        <select id="viaje_origen" name="origen" onchange="quita()">
        <option value="0">--Selecciona</option>"""
        ciudades = con.consulta("select nombre from ciudads")
        for ciudad in ciudades:
            cuerpo = cuerpo + """<option value=\""""+ ciudad[0] +"""">"""+ ciudad[0]+"""</option>"""
        cuerpo = cuerpo + """</select><br>Destino: <select id="viaje_destino" name="destino">"""
        cuerpo = cuerpo + """</select></br> Fecha:<select id="viaje_anio" name="anio">
        <option value="2014">2014</option><option value="2015">2015</option></select>
        <select id="viaje_mes" name="mes">
        </select>
        <select id="viaje_dia" name="dia"></select></br>
        Hora Salida: <select id="viaje_hora" name="hora"></select>
        <select id="viaje_minuto" name="minuto"></select></br>
        Distancia<input id="viaje_distancia" type="text" name="distancia"/></br>
        Avion:<select id="viaje_avion" name="idavion">
        """
        aviones = con.consulta("select * from avion")
        for avion in aviones:
            # Fixed: broken quoting used to turn `+avion[0]+` into part of
            # the string literal, emitting `<option value+avion[0]+>`; now
            # the plane id is emitted as a proper value="..." attribute.
            cuerpo = cuerpo + '<option value="' + str(avion[0]) + '">' + avion[1] + ", capacidad " + str(avion[3] + avion[4]) + "</option>"
        cuerpo = cuerpo + """</select></br><button type="submit">Crea Viaje</button>
        </form>
        </body>
        </html>"""
        return cuerpo
if __name__ == '__main__':
    # Start the CherryPy server with this application mounted at the root.
    cherrypy.quickstart(Administrador())
| gpl-2.0 | -3,377,978,494,086,084,600 | 32.428571 | 131 | 0.475733 | false | 3.588171 | false | false | false |
shifter/rekall | rekall-core/rekall/plugins/addrspaces/amd64.py | 3 | 16516 | # Rekall Memory Forensics
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Authors:
# Mike Auty
# Michael Cohen
# Jordi Sanchez
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
""" This is based on Jesse Kornblum's patch to clean up the standard AS's.
"""
# pylint: disable=protected-access
import struct
from rekall import addrspace
from rekall import config
from rekall import obj
from rekall.plugins.addrspaces import intel
from rekall.plugins.addrspaces import standard
# Registers the --ept command line option; VTxPagedMemory below reads it
# via session.GetParameter("ept") to locate the guest's EPT table(s).
config.DeclareOption("ept", group="Virtualization support",
                     type="ArrayIntParser",
                     help="The EPT physical address.")
class AMD64PagedMemory(intel.IA32PagedMemoryPae):
    """Standard AMD 64-bit address space.

    Provides an address space for AMD64 paged memory, aka the x86_64
    architecture, which is laid out similarly to Physical Address
    Extensions (PAE). Allows callers to map virtual address to
    offsets in physical memory.

    Create a new AMD64 address space to sit on top of the base address
    space and a Directory Table Base (CR3 value) of 'dtb'.

    Comments in this class mostly come from the Intel(R) 64 and IA-32
    Architectures Software Developer's Manual Volume 3A: System Programming
    Guide, Part 1, revision 031, pages 4-8 to 4-15. This book is available
    for free at http://www.intel.com/products/processor/manuals/index.htm.
    Similar information is also available from Advanced Micro Devices (AMD)
    at http://support.amd.com/us/Processor_TechDocs/24593.pdf.
    """
    order = 60

    def describe_vtop(self, vaddr, collection=None):
        """Describe the resolution process of a Virtual Address.

        See base method for docs.
        """
        if collection is None:
            collection = intel.DescriptorCollection(self.session)
        # Bits 51:12 are from CR3
        # Bits 11:3 are bits 47:39 of the linear address
        pml4e_addr = ((self.dtb & 0xffffffffff000) |
                      ((vaddr & 0xff8000000000) >> 36))
        pml4e_value = self.read_pte(pml4e_addr)
        collection.add(intel.AddressTranslationDescriptor,
                       object_name="pml4e", object_value=pml4e_value,
                       object_address=pml4e_addr)
        if not pml4e_value & self.valid_mask:
            collection.add(intel.InvalidAddress, "Invalid PML4E\n")
            return collection
        # Bits 51:12 are from the PML4E
        # Bits 11:3 are bits 38:30 of the linear address
        pdpte_addr = ((pml4e_value & 0xffffffffff000) |
                      ((vaddr & 0x7FC0000000) >> 27))
        pdpte_value = self.read_pte(pdpte_addr)
        collection.add(intel.AddressTranslationDescriptor,
                       object_name="pdpte", object_value=pdpte_value,
                       object_address=pdpte_addr)
        if not pdpte_value & self.valid_mask:
            collection.add(intel.InvalidAddress, "Invalid PDPTE\n")
            # NOTE(review): unlike the invalid-PML4E branch above, this does
            # not return early, so the walk continues with an invalid PDPTE
            # value — confirm whether a `return collection` is missing here.
        # Large page mapping.
        if pdpte_value & self.page_size_mask:
            # Bits 51:30 are from the PDE
            # Bits 29:0 are from the original linear address
            physical_address = ((pdpte_value & 0xfffffc0000000) |
                                (vaddr & 0x3fffffff))
            collection.add(intel.CommentDescriptor, "One Gig page\n")
            collection.add(intel.PhysicalAddressDescriptor,
                           address=physical_address)
            return collection
        # Bits 51:12 are from the PDPTE
        # Bits 11:3 are bits 29:21 of the linear address
        pde_addr = ((pdpte_value & 0xffffffffff000) |
                    ((vaddr & 0x3fe00000) >> 18))
        self._describe_pde(collection, pde_addr, vaddr)
        return collection

    def get_available_addresses(self, start=0):
        """Enumerate all available ranges.

        Yields tuples of (vaddr, physical address, length) for all available
        ranges in the virtual address space.
        """
        # Pages that hold PDEs and PTEs are 0x1000 bytes each.
        # Each PDE and PTE is eight bytes. Thus there are 0x1000 / 8 = 0x200
        # PDEs and PTEs we must test.
        for pml4e_index in range(0, 0x200):
            vaddr = pml4e_index << 39
            next_vaddr = (pml4e_index + 1) << 39
            # Skip any 512GB region that ends before the requested start.
            if start >= next_vaddr:
                continue
            pml4e_addr = ((self.dtb & 0xffffffffff000) |
                          ((vaddr & 0xff8000000000) >> 36))
            pml4e_value = self.read_pte(pml4e_addr)
            if not pml4e_value & self.valid_mask:
                continue
            tmp1 = vaddr
            for pdpte_index in range(0, 0x200):
                vaddr = tmp1 | (pdpte_index << 30)
                next_vaddr = tmp1 | ((pdpte_index + 1) << 30)
                if start >= next_vaddr:
                    continue
                # Bits 51:12 are from the PML4E
                # Bits 11:3 are bits 38:30 of the linear address
                pdpte_addr = ((pml4e_value & 0xffffffffff000) |
                              ((vaddr & 0x7FC0000000) >> 27))
                pdpte_value = self.read_pte(pdpte_addr)
                if not pdpte_value & self.valid_mask:
                    continue
                # 1 gig page.
                if pdpte_value & self.page_size_mask:
                    yield (vaddr,
                           ((pdpte_value & 0xfffffc0000000) |
                            (vaddr & 0x3fffffff)),
                           0x40000000)
                    continue
                for x in self._get_available_PDEs(vaddr, pdpte_value, start):
                    yield x

    def _get_pte_addr(self, vaddr, pde_value):
        # Physical address of the PTE describing vaddr, or None (implicit)
        # when the PDE is not valid.
        if pde_value & self.valid_mask:
            return (pde_value & 0xffffffffff000) | ((vaddr & 0x1ff000) >> 9)

    def _get_pde_addr(self, pdpte_value, vaddr):
        # Physical address of the PDE describing vaddr, or None (implicit)
        # when the PDPTE is not valid.
        if pdpte_value & self.valid_mask:
            return ((pdpte_value & 0xffffffffff000) |
                    ((vaddr & 0x3fe00000) >> 18))

    def _get_available_PDEs(self, vaddr, pdpte_value, start):
        # Walk the 512 PDEs below one PDPTE, yielding 2MB large pages
        # directly and recursing into PTE tables otherwise.
        tmp2 = vaddr
        for pde_index in range(0, 0x200):
            vaddr = tmp2 | (pde_index << 21)
            next_vaddr = tmp2 | ((pde_index + 1) << 21)
            if start >= next_vaddr:
                continue
            pde_addr = self._get_pde_addr(pdpte_value, vaddr)
            if pde_addr is None:
                continue
            pde_value = self.read_pte(pde_addr)
            # 2MB large page: mapped directly from the PDE.
            if pde_value & self.valid_mask and pde_value & self.page_size_mask:
                yield (vaddr,
                       (pde_value & 0xfffffffe00000) | (vaddr & 0x1fffff),
                       0x200000)
                continue
            # This reads the entire PTE table at once - On
            # windows where IO is extremely expensive, its
            # about 10 times more efficient than reading it
            # one value at the time - and this loop is HOT!
            pte_table_addr = self._get_pte_addr(vaddr, pde_value)
            # Invalid PTEs.
            if pte_table_addr is None:
                continue
            data = self.base.read(pte_table_addr, 8 * 0x200)
            pte_table = struct.unpack("<" + "Q" * 0x200, data)
            for x in self._get_available_PTEs(
                    pte_table, vaddr, start=start):
                yield x

    def _get_available_PTEs(self, pte_table, vaddr, start=0):
        # Yield one 4KB page range per valid PTE in an unpacked PTE table.
        tmp3 = vaddr
        for i, pte_value in enumerate(pte_table):
            if not pte_value & self.valid_mask:
                continue
            vaddr = tmp3 | i << 12
            next_vaddr = tmp3 | ((i + 1) << 12)
            if start >= next_vaddr:
                continue
            yield (vaddr,
                   (pte_value & 0xffffffffff000) | (vaddr & 0xfff),
                   0x1000)

    def end(self):
        # Highest addressable byte of the 64 bit virtual address space.
        return (2 ** 64) - 1
class VTxPagedMemory(AMD64PagedMemory):
    """Intel VT-x address space.

    Provides an address space that does EPT page translation to provide access
    to the guest physical address space, thus allowing plugins to operate on a
    virtual machine running on a host operating system.

    This is described in the Intel(R) 64 and IA-32 Architectures Software
    Developer's Manual Volume 3C: System Programming Guide, Part 3, pages 28-1
    to 28-12. This book is available for free at
    http://www.intel.com/products/processor/manuals/index.htm.

    This address space depends on the "ept" parameter. You can use the vmscan
    plugin to find valid ept values on a physical memory image.

    Note that support for AMD's AMD-V address space is untested at the moment.
    """
    # Virtualization is always the last AS since it has to overlay any form of
    # image AS.
    order = standard.FileAddressSpace.order + 10
    __image = True
    _ept = None

    # A page entry being present depends only on bits 2:0 for EPT translation.
    valid_mask = 7

    def __init__(self, ept=None, **kwargs):
        # A dummy DTB is passed to the base class so the DTB checks on
        # IA32PagedMemory don't bail out. We require the DTB to never be used
        # for page translation outside of get_pml4e.
        try:
            super(VTxPagedMemory, self).__init__(dtb=0xFFFFFFFF, **kwargs)
        except TypeError:
            raise addrspace.ASAssertionError()
        # Reset the DTB, in case a plugin or AS relies on us providing one.
        self.dtb = None
        ept_list = ept or self.session.GetParameter("ept")
        if not isinstance(ept_list, (list, tuple)):
            ept_list = [ept_list]
        self.as_assert(ept_list, "No EPT specified")
        this_ept = None
        if isinstance(self.base, VTxPagedMemory):
            # Stacked on another VTx AS (nested virtualization): find our
            # EPT, which will be the next one after the base one.
            base_idx = ept_list.index(self.base._ept)
            try:
                this_ept = ept_list[base_idx + 1]
            except IndexError:
                pass
        else:
            this_ept = ept_list[0]
        self.as_assert(this_ept != None, "No more EPTs specified")
        self._ept = this_ept
        self.name = "VTxPagedMemory@%#x" % self._ept

    def get_pml4e(self, vaddr):
        # PML4 for VT-x is in the EPT, not the DTB as AMD64PagedMemory does.
        ept_pml4e_paddr = ((self._ept & 0xffffffffff000) |
                           ((vaddr & 0xff8000000000) >> 36))
        return self.read_long_long_phys(ept_pml4e_paddr)

    def __str__(self):
        return "%s@0x%08X" % (self.__class__.__name__, self._ept)
class XenParaVirtAMD64PagedMemory(AMD64PagedMemory):
    """XEN ParaVirtualized guest address space."""

    PAGE_SIZE = 0x1000
    # Number of 8-byte entries per page at each level of the p2m tree.
    # NOTE: this module is Python-2 era code (see xrange below), so `/` here
    # is integer division.
    P2M_PER_PAGE = P2M_TOP_PER_PAGE = P2M_MID_PER_PAGE = PAGE_SIZE / 8

    def __init__(self, **kwargs):
        super(XenParaVirtAMD64PagedMemory, self).__init__(**kwargs)
        self.page_offset = self.session.GetParameter("page_offset")
        # machine-frame-number -> physical-frame-number map; filled lazily.
        self.m2p_mapping = {}
        # Guard so vtop() does not recurse into mapping construction.
        self.rebuilding_map = False
        if self.page_offset:
            self._RebuildM2PMapping()

    def _ReadP2M(self, offset, p2m_size):
        """Helper function to return p2m entries at offset.

        This function is used to speed up reading the p2m tree, because
        traversal via the Array struct is slow.

        Yields tuples of (index, p2m) for each p2m, up to a number of
        p2m_size.
        """
        for index, mfn in zip(
                xrange(0, p2m_size),
                struct.unpack(
                    "<" + "Q" * p2m_size,
                    self.read(offset, 0x1000))):
            yield (index, mfn)

    def _RebuildM2PMapping(self):
        """Rebuilds the machine to physical mapping.

        A XEN ParaVirtualized kernel (the guest) maintains a special set of
        page tables. Each entry is to machine (host) memory instead of
        physical (guest) memory.

        XEN maintains a mapping of machine to physical and mapping of physical
        to machine mapping in a set of trees. We need to use the former to
        translate the machine addresses in the page tables, but only the later
        tree is available (mapped in memory) on the guest.

        When rekall is run against the memory of a paravirtualized Linux kernel
        we traverse the physical to machine mapping and invert it so we can
        quickly translate from machine (host) addresses to guest physical
        addresses.

        See: http://lxr.free-electrons.com/source/arch/x86/xen/p2m.c?v=3.0 for
        reference.
        """
        self.session.logging.debug(
            "Rebuilding the machine to physical mapping...")
        self.rebuilding_map = True
        try:
            p2m_top_location = self.session.profile.get_constant_object(
                "p2m_top", "Pointer", vm=self).deref()
            # Entries equal to __bss_stop mark unpopulated branches of the
            # p2m tree (per the kernel source referenced above).
            end_value = self.session.profile.get_constant("__bss_stop", False)
            new_mapping = {}
            # Three-level walk: top -> mid -> leaf pages of mfn entries.
            for p2m_top in self._ReadP2M(
                    p2m_top_location, self.P2M_TOP_PER_PAGE):
                p2m_top_idx, p2m_top_entry = p2m_top
                self.session.report_progress(
                    "Building m2p map %.02f%%" % (
                        100 * (float(p2m_top_idx) / self.P2M_TOP_PER_PAGE)))
                if p2m_top_entry == end_value:
                    continue
                for p2m_mid in self._ReadP2M(
                        p2m_top_entry, self.P2M_MID_PER_PAGE):
                    p2m_mid_idx, p2m_mid_entry = p2m_mid
                    if p2m_mid_entry == end_value:
                        continue
                    for p2m in self._ReadP2M(p2m_mid_entry, self.P2M_PER_PAGE):
                        p2m_idx, mfn = p2m
                        # Recover the pfn from the position in the tree and
                        # invert the relation: mfn -> pfn.
                        pfn = (p2m_top_idx * self.P2M_MID_PER_PAGE
                               * self.P2M_PER_PAGE
                               + p2m_mid_idx * self.P2M_PER_PAGE
                               + p2m_idx)
                        new_mapping[mfn] = pfn
            self.m2p_mapping = new_mapping
            self.session.SetCache("mapping", self.m2p_mapping)
        finally:
            self.rebuilding_map = False

    def m2p(self, machine_address):
        """Translates from a machine address to a physical address.

        This translates host physical addresses to guest physical.
        Requires a machine to physical mapping to have been calculated.
        """
        machine_address = obj.Pointer.integer_to_address(machine_address)
        mfn = machine_address / 0x1000
        pfn = self.m2p_mapping.get(mfn)
        # Unknown machine frames translate to 0 (i.e. unmapped).
        if pfn is None:
            return 0
        return (pfn * 0x1000) | (0xFFF & machine_address)

    def get_pml4e(self, vaddr):
        # Each table entry read by the AMD64 walk holds a machine address;
        # route it through m2p() to get a guest-physical one.
        return self.m2p(
            super(XenParaVirtAMD64PagedMemory, self).get_pml4e(vaddr))

    def get_pdpte(self, vaddr, pml4e):
        return self.m2p(
            super(XenParaVirtAMD64PagedMemory, self).get_pdpte(vaddr, pml4e))

    def get_pde(self, vaddr, pml4e):
        return self.m2p(
            super(XenParaVirtAMD64PagedMemory, self).get_pde(vaddr, pml4e))

    def get_pte(self, vaddr, pml4e):
        return self.m2p(
            super(XenParaVirtAMD64PagedMemory, self).get_pte(vaddr, pml4e))

    def vtop(self, vaddr):
        vaddr = obj.Pointer.integer_to_address(vaddr)
        if not self.session.GetParameter("mapping"):
            # Simple shortcut for linux. This is required for the first set
            # of virtual to physical resolutions while we're building the
            # mapping.
            page_offset = obj.Pointer.integer_to_address(
                self.profile.GetPageOffset())
            if vaddr > page_offset:
                return self.profile.phys_addr(vaddr)
            # Try to update the mapping
            if not self.rebuilding_map:
                self._RebuildM2PMapping()
        return super(XenParaVirtAMD64PagedMemory, self).vtop(vaddr)
| gpl-2.0 | 4,323,372,041,946,845,000 | 36.967816 | 79 | 0.585917 | false | 3.696509 | false | false | false |
tvgdb/pianissimo | backend/models/audio/smart_collage.py | 1 | 1994 | from models import db, ma
from models.audio import Album
from models.audio.meta import WritableField, SmartCollageField
from webargs import Arg
# Many-to-many join table between albums and smart collages.
smart_collage_album_association_table = db.Table(
    "smart_collage_albums",
    db.Column('album_id', db.Integer,
              db.ForeignKey("album.id", ondelete='cascade')),
    db.Column('collage_id', db.Integer,
              db.ForeignKey("smart_collage.id", ondelete='cascade'))
)

# Many-to-many join table between smart rules and smart collages.
smart_collage_smart_rules_association_table = db.Table(
    "smart_collage_rules",
    db.Column('smart_collage_id', db.Integer,
              db.ForeignKey("smart_collage.id", ondelete='cascade')),
    db.Column('smart_rule_id', db.Integer,
              db.ForeignKey("smart_rule.id", ondelete='cascade'))
)
class SmartCollage(db.Model):
    """A user-owned, rule-driven collection of albums.

    Albums and rules are attached through the association tables defined
    above; `connector` and `sort_by` hold how rules are combined and how
    results are ordered (stored as plain strings).
    """
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    # When True the collage is only visible to its owner (the default).
    private = db.Column(db.Boolean)
    owner = db.relationship("User", uselist=False, backref="smart_collages")
    owner_id = db.Column(db.Integer, db.ForeignKey("user.id", ondelete="cascade"))
    albums = db.relationship("Album", secondary=smart_collage_album_association_table)
    rules = db.relationship("SmartRule", secondary=smart_collage_smart_rules_association_table)
    connector = db.Column(db.String)
    sort_by = db.Column(db.String)

    def __init__(self, owner, name, rules=None):
        # `rules=None` (not `[]`) avoids the shared-mutable-default pitfall.
        self.owner = owner
        self.name = name
        self.albums = []
        self.private = True
        self.rules = rules if rules is not None else []
class SmartCollageSchema(ma.Schema):
    """Marshmallow serialization schema for SmartCollage."""
    id = ma.Integer()
    name = ma.Str()
    private = ma.Bool()
    # Only expose a minimal view of the related owner / albums / rules.
    owner = ma.Nested("UserSchema", only=["id", "username"])
    albums = ma.Nested("AlbumSchema", many=True, only=["id", "title", "albumartist", "albumart", "tracks"])
    writable = WritableField(attribute="owner_id")
    connector = ma.Str()
    rules = ma.Nested("SmartRuleSchema", many=True)
    sort_by = ma.Str()
    smart = SmartCollageField(attribute="id")
| gpl-3.0 | -5,952,614,232,860,148,000 | 35.254545 | 107 | 0.662487 | false | 3.385399 | false | false | false |
ScriptRock/content | code/python/snippets/template.py | 1 | 2684 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import http.client
from snippets import getBrowser, APICall, getNodes, getNodeGroups, getConnectionManagerGroups, getNodesInCMGroups, getPolicies, getEvents, scan
from datetime import date, timedelta
# Demo script exercising the UpGuard API helpers from `snippets`.
parser = argparse.ArgumentParser(description='')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate checks')
args = parser.parse_args()

# Fixed: initialize before the try block so the finally clause cannot hit
# a NameError when getBrowser() itself raises.
browser = None
try:
    browser = getBrowser(target_url=args.target_url, insecure=args.insecure)
    token = "{}{}".format(args.api_key, args.secret_key)

    # Nodes
    nodes = getNodes(browser=browser, token=token, details=True)
    print("\n\nNodes\n-----")
    for node in nodes:
        print("{}\n{}\n\n".format(node["name"], node))

    # Node Groups
    node_groups = getNodeGroups(browser=browser, token=token, details=True)
    print("\n\nNode Groups\n-----")
    for group in node_groups:
        print("{}\n{}\n\n".format(group["name"], group))

    # CM Groups
    cm_groups = getConnectionManagerGroups(browser=browser, token=token)
    print("\n\nConnection Manager Groups\n-------------------------")
    for group in cm_groups:
        print("{}\n{}\n\n".format(group["name"], group))

    # CM Groups with Nodes
    cm_groups = getConnectionManagerGroups(browser=browser, token=token)
    cm_groups_with_nodes = getNodesInCMGroups(browser=browser, token=token)
    print("\n\nCM Groups Node Count\n--------------------")
    for id, nodes in cm_groups_with_nodes.iteritems():
        group_name = next((g["name"] for g in cm_groups if g["id"] == id), None)
        print("{}: {}".format(group_name, len(nodes)))

    # Policies
    policies = getPolicies(browser=browser, token=token, details=True)
    print("\n\nPolicies\n--------")
    for policy in policies:
        print("{}\n{}\n\n".format(policy["name"], policy))

    # Events
    events = getEvents(browser=browser, token=token, view="User Logins", since=(date.today() - timedelta(1)))
    print("\n\nEvents\n-----")
    for event in events:
        print("{}\n{}\n\n".format(event["id"], event))
    print("Total Events: {}".format(len(events)))

    # Scan
    result = scan(browser=browser, token=token, node="dev", wait=True)
    print("Node scanned, result:\n{}".format(str(result)))
except http.client.HTTPException as h:
    # Fixed: HTTPException has no `.message` attribute on Python 3; print
    # the exception itself instead.
    print(h)
finally:
    if browser:
        browser.close()
| mit | -6,655,439,311,549,170,000 | 38.470588 | 143 | 0.659091 | false | 3.622132 | false | false | false |
benzkji/django-layout | apps/project_name/models.py | 1 | 1467 | # coding: utf-8
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from cms.models.fields import PageField
from django.db import models
from django.conf import settings
from filer.fields.file import FilerFileField
from ckeditor_link.link_model.models import CMSFilerLinkBase
class Link(CMSFilerLinkBase):
    # Concrete ckeditor_link model; all fields and behaviour come unchanged
    # from CMSFilerLinkBase.
    pass
class PublishedBase(models.Model):
    """Abstract base adding a `published` flag (defaults to unpublished)."""
    published = models.BooleanField(default=False)

    class Meta:
        abstract = True
class ModifiedBase(models.Model):
    """Abstract base adding automatic creation/modification timestamps."""
    # Set once on insert.
    created_date = models.DateTimeField(auto_now_add=True)
    # Updated on every save.
    modified_date = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True
class SEOBase(models.Model):
    """Abstract base adding SEO fields with sensible fallbacks.

    `get_seo_title` / `get_seo_description` fall back to other common
    model attributes when the explicit SEO fields are empty.
    """
    seo_title = models.CharField(
        max_length=255,
        default='',
        blank=True,
    )
    meta_description = models.CharField(
        max_length=255,
        default='',
        blank=True,
    )

    class Meta:
        abstract = True

    def get_seo_title(self):
        # Prefer the explicit SEO title, then a name, then a title.
        for attr in ('seo_title', 'name', 'title'):
            value = getattr(self, attr, None)
            if value:
                return value
        return ''

    def get_seo_description(self):
        # Prefer the explicit meta description, then any description field.
        for attr in ('meta_description', 'description'):
            value = getattr(self, attr, None)
            if value:
                return value
        return ''
| mit | 5,993,031,699,955,553,000 | 23.45 | 60 | 0.644853 | false | 3.986413 | false | false | false |
mekkablue/Glyphs-Scripts | Compare Frontmost Fonts/Compare Glyph Info.py | 1 | 8188 | #MenuTitle: Compare Glyph Info
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Compares open fonts and builds a lits of differing glyph info, including Unicode values and categorisation.
"""
import vanilla
# CONSTANTS:
thingsToCompare = (
"Unicodes",
"Category",
"Subcategory",
"Production Name",
"Script",
"Export Status",
"Left Kerning Group",
"Right Kerning Group",
"Color",
"Left Metrics Key",
"Right Metrics Key",
"Width Metrics Key",
"Is Aligned",
"Has Aligned Width",
"Has Annotations",
"Has Components",
"Has Corners",
"Has Custom Glyph Info",
"Has Hints",
"Has PostScript Hints",
"Has TrueType Hints",
"Has Special Layers",
"Is Color Glyph",
"Is Apple Color Glyph",
"Is SVG Color Glyph",
"Is Hangul Key Glyph",
"Masters Are Compatible",
"Glyph Note",
)
predefinedColors = (
"red",
"orange",
"brown",
"yellow",
"lightgreen",
"green",
"lightblue",
"blue",
"purple",
"pink",
"lightgrey",
"grey",
)
missingGlyphValue = "(missing glyph)"
missingValue = "–"
class CompareGlyphInfo( object ):
def __init__( self ):
# Window 'self.w':
windowWidth = 400
windowHeight = 160
windowWidthResize = 1000 # user can resize width by this value
windowHeightResize = 1000 # user can resize height by this value
self.w = vanilla.FloatingWindow(
( windowWidth, windowHeight ), # default window size
"Compare Glyph Info", # window title
minSize = ( windowWidth, windowHeight ), # minimum size (for resizing)
maxSize = ( windowWidth + windowWidthResize, windowHeight + windowHeightResize ), # maximum size (for resizing)
autosaveName = "com.mekkablue.CompareGlyphInfo.mainwindow" # stores last window position and size
)
# UI elements:
self.linePos, inset, lineHeight = 5, 6, 22
self.w.descriptionText = vanilla.TextBox( (inset, self.linePos+2, 140, 14), "Compare between fonts:", sizeStyle='small', selectable=True )
self.w.whatToCompare = vanilla.PopUpButton( (inset+140, self.linePos, -160-inset-10, 17), thingsToCompare, sizeStyle='small', callback=self.Reload )
self.w.whatToCompare.getNSPopUpButton().setToolTip_("Choose which glyph info to compare between all open fonts.")
self.w.ignoreMissingGlyphs = vanilla.CheckBox( (-160-inset, self.linePos, -inset-25, 17), "Ignore missing glyphs", value=False, callback=self.Reload, sizeStyle='small' )
self.w.ignoreMissingGlyphs.getNSButton().setToolTip_("If activated, will only list glyphs that are present in ALL open fonts.")
self.w.updateButton = vanilla.SquareButton( (-inset-20, self.linePos, -inset, 18), "↺", sizeStyle='small', callback=self.Reload )
self.w.updateButton.getNSButton().setToolTip_("Reload with currently opened fonts. Useful if you just opened or closed a font, or brought another font forward.")
self.linePos += lineHeight
self.Reload()
# Open window and focus on it:
self.w.open()
self.w.makeKey()
def getColumnHeaders(self, sender=None):
# {"title":xxx, "key":xxx, "editable":False, "width":xxx} for every font
headers = [
{"title":"Glyph Name", "key":"glyphName", "editable":False, "width":200, "maxWidth":400},
]
for i,thisFont in enumerate(Glyphs.fonts):
if thisFont.filepath:
fileName = thisFont.filepath.lastPathComponent()
else:
fileName = "%s (unsaved)" % thisFont.familyName
columnHeader = {"title":fileName, "key":"font-%i"%i, "editable":False, "width":150, "maxWidth":300}
headers.append(columnHeader)
return headers
def returnValue(self, value):
if value:
return value
else:
return missingValue
def returnBool(self, value):
if value:
return "✅"
return "🚫"
def getInfoItemForGlyph(self, glyph):
if glyph is None:
return missingGlyphValue
index = self.w.whatToCompare.get()
if index==0:
if not glyph.unicodes:
return missingValue
else:
return ", ".join(glyph.unicodes)
elif index==1:
return self.returnValue( glyph.category )
elif index==2:
return self.returnValue( glyph.subCategory )
elif index==3:
return self.returnValue( glyph.productionName )
elif index==4:
return self.returnValue( glyph.script )
elif index==5:
return self.returnBool( glyph.export )
elif index==6:
return self.returnValue( glyph.leftKerningGroup )
elif index==7:
return self.returnValue( glyph.rightKerningGroup )
elif index==8:
if glyph.color is None:
return missingValue
else:
return predefinedColors[glyph.color]
elif index==9:
return self.returnValue( glyph.leftMetricsKey )
elif index==10:
return self.returnValue( glyph.rightMetricsKey )
elif index==11:
return self.returnValue( glyph.widthMetricsKey )
elif index==12:
return self.returnBool( glyph.isAligned() )
elif index==13:
return self.returnBool( glyph.hasAlignedWidth() )
elif index==14:
return self.returnBool( glyph.hasAnnotations() )
elif index==15:
return self.returnBool( glyph.hasComponents() )
elif index==16:
return self.returnBool( glyph.hasCorners() )
elif index==17:
return self.returnBool( glyph.hasCustomGlyphInfo() )
elif index==18:
return self.returnBool( glyph.hasHints() )
elif index==19:
return self.returnBool( glyph.hasPostScriptHints() )
elif index==20:
return self.returnBool( glyph.hasTrueTypeHints() )
elif index==21:
return self.returnBool( glyph.hasSpecialLayers() )
elif index==22:
return self.returnBool( glyph.isColorGlyph() )
elif index==23:
return self.returnBool( glyph.isAppleColorGlyph() )
elif index==24:
return self.returnBool( glyph.isSVGColorGlyph() )
elif index==25:
return self.returnBool( glyph.isHangulKeyGlyph() )
elif index==26:
return self.returnBool( glyph.mastersCompatible )
elif index==27:
return self.returnValue( glyph.note )
return "⚠️ Error"
def listContent( self ):
try:
ignoreMissingGlyphs = self.w.ignoreMissingGlyphs.get()
allNames = []
for thisFont in Glyphs.fonts:
allNames.extend([g.name for g in thisFont.glyphs])
allNames = sorted(set(allNames))
displayedLines = []
for glyphName in allNames:
line = {"glyphName":glyphName}
checkList = []
for i, thisFont in enumerate(Glyphs.fonts):
column = "font-%i"%i
glyph = thisFont.glyphs[glyphName]
cell = self.getInfoItemForGlyph(glyph)
line[column] = cell
checkList.append(cell)
# check if line contains differences:
countOfDistinctItems = len(set(checkList))
if checkList and countOfDistinctItems>1:
if ignoreMissingGlyphs and countOfDistinctItems==2:
if not all([item!=missingGlyphValue for item in checkList]):
continue
displayedLines.append(line)
return displayedLines
except Exception as e:
print("listContent Error: %s\n" % e)
import traceback
print(traceback.format_exc())
return None
def Reload( self, sender=None ):
try:
try:
del self.w.List
except:
pass
self.w.List = vanilla.List(
( 0, self.linePos, -0, -0 ),
self.listContent(),
columnDescriptions = self.getColumnHeaders(),
drawVerticalLines = True,
enableDelete = True,
drawFocusRing = True,
# selectionCallback = self.selectedAction,
doubleClickCallback = self.openGlyphInFont,
# editCallback = self.editAction,
)
self.w.List.getNSTableView().setToolTip_("Double click to open the selected glyphs in all fonts. You can select more than one line.")
except Exception as e:
print("Reload Error: %s\n" % e)
import traceback
print(traceback.format_exc())
return None
def openGlyphInFont(self, sender=None):
if sender:
selectedIndexes = sender.getSelection()
if selectedIndexes:
tabText = ""
for index in selectedIndexes:
item = self.w.List.get()[index]
tabText += "/%s" % item["glyphName"]
if tabText:
for i, thisFont in enumerate(Glyphs.fonts):
tab = thisFont.currentTab
if not tab:
tab = thisFont.newTab()
tab.text = tabText
tab.textCursor = 0
tab.scale = 0.15
if i>0:
tab.viewPort = Glyphs.fonts[0].currentTab.viewPort
CompareGlyphInfo() | apache-2.0 | 8,742,762,920,542,435,000 | 28.730909 | 171 | 0.689297 | false | 3.160031 | false | false | false |
joyxu/kernelci-backend | app/utils/tests/test_log_parser.py | 1 | 5410 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import mock
import os
import shutil
import tempfile
import types
import unittest
import utils.log_parser as lparser
class TestBuildLogParser(unittest.TestCase):
def setUp(self):
logging.disable(logging.CRITICAL)
def tearDown(self):
logging.disable(logging.NOTSET)
@mock.patch("utils.log_parser._traverse_dir_and_parse")
def test_log_parser_correct(self, mock_parse):
mock_parse.return_value = (200, {})
job_id = "job_id"
job = "job"
kernel = "kernel"
json_obj = {
"job": job,
"kernel": kernel
}
status, errors = lparser.parse_build_log(job_id, json_obj, {})
self.assertEqual(200, status)
self.assertDictEqual({}, errors)
def test_log_parser_no_job_id(self):
job_id = None
job = "job"
kernel = "kernel"
json_obj = {
"job": job,
"kernel": kernel
}
status, errors = lparser.parse_build_log(job_id, json_obj, {})
self.assertEqual(500, status)
self.assertEqual(1, len(errors.keys()))
self.assertEqual([500], errors.keys())
def test_log_parser_hidden_dir(self):
job_id = "job_id"
job = ".job"
kernel = "kernel"
json_obj = {
"job": job,
"kernel": kernel
}
status, errors = lparser.parse_build_log(job_id, json_obj, {})
self.assertEqual(500, status)
self.assertEqual(1, len(errors.keys()))
self.assertEqual([500], errors.keys())
@mock.patch("os.path.isdir")
def test_log_parser_not_dir(self, mock_isdir):
mock_isdir.return_value = False
job_id = "job_id"
job = "job"
kernel = "kernel"
json_obj = {
"job": job,
"kernel": kernel
}
status, errors = lparser.parse_build_log(job_id, json_obj, {})
self.assertEqual(500, status)
self.assertEqual(1, len(errors.keys()))
self.assertEqual([500], errors.keys())
def test_parse_build_log(self):
build_dir = None
try:
build_dir = tempfile.mkdtemp()
log_file = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
"assets", "build_log_0.log")
status, errors, e_l, w_l, m_l = lparser._parse_log(
"job", "kernel", "defconfig", log_file, build_dir)
self.assertEqual(200, status)
self.assertIsInstance(errors, types.ListType)
self.assertIsInstance(e_l, types.ListType)
self.assertIsInstance(w_l, types.ListType)
self.assertIsInstance(m_l, types.ListType)
self.assertEqual(0, len(errors))
self.assertEqual(9, len(e_l))
self.assertEqual(2, len(w_l))
self.assertEqual(0, len(m_l))
finally:
shutil.rmtree(build_dir, ignore_errors=True)
def test_parse_build_log_no_file(self):
build_dir = None
try:
build_dir = tempfile.mkdtemp()
status, errors, e_l, w_l, m_l = lparser._parse_log(
"job", "kernel", "defconfig", build_dir, build_dir)
self.assertEqual(500, status)
self.assertIsInstance(errors, types.ListType)
self.assertIsInstance(e_l, types.ListType)
self.assertIsInstance(w_l, types.ListType)
self.assertIsInstance(m_l, types.ListType)
self.assertEqual(1, len(errors))
self.assertEqual(0, len(e_l))
self.assertEqual(0, len(w_l))
self.assertEqual(0, len(m_l))
finally:
shutil.rmtree(build_dir, ignore_errors=True)
@mock.patch("io.open", create=True)
def test_parse_build_log_error_opening(self, mock_open):
mock_open.side_effect = IOError
build_dir = None
try:
build_dir = tempfile.mkdtemp()
log_file = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
"assets", "build_log_0.log")
status, errors, e_l, w_l, m_l = lparser._parse_log(
"job", "kernel", "defconfig", log_file, build_dir)
self.assertEqual(500, status)
self.assertIsInstance(errors, types.ListType)
self.assertIsInstance(e_l, types.ListType)
self.assertIsInstance(w_l, types.ListType)
self.assertIsInstance(m_l, types.ListType)
self.assertEqual(1, len(errors))
self.assertEqual(0, len(e_l))
self.assertEqual(0, len(w_l))
self.assertEqual(0, len(m_l))
finally:
shutil.rmtree(build_dir, ignore_errors=True)
| agpl-3.0 | -1,274,055,998,561,299,700 | 30.823529 | 74 | 0.580591 | false | 3.796491 | true | false | false |
Micronaet/micronaet-mx8 | vector_not_registred/vector.py | 1 | 2813 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class SaleOrder(orm.Model):
''' Model name: SaleOrder
'''
_inherit = 'sale.order'
_columns = {
'force_vector': fields.text('Force vector'),
#'fast_vector': fields.boolean('Fast Vector',
# help='Add fast vector for report and not the one in carrier'),
#'vector_name': fields.char('Vector name'),
#'vector_street': fields.char('Street'),
#'vector_street2': fields.char('Street2'),
#'vector_zip': fields.char('Zip', size=24),
#'vector_city': fields.char('City'),
#'vector_state_id': fields.many2one('res.country.state', 'State',
# ondelete='restrict'),
#'vector_country_id': fields.many2one('res.country', 'Country',
# ondelete='restrict'),
}
class AccountInvoice(orm.Model):
''' Model name: AccountInvoice
'''
_inherit = 'account.invoice'
_columns = {
'force_vector': fields.text('Force vector'),
}
class StockDdt(orm.Model):
''' Model name: Stock DDT
'''
_inherit = 'stock.ddt'
_columns = {
'force_vector': fields.text('Force vector'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,158,299,057,235,871,500 | 34.607595 | 79 | 0.621756 | false | 4.118594 | false | false | false |
trondhindenes/ansible | lib/ansible/modules/network/f5/bigip_partition.py | 9 | 9992 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_partition
short_description: Manage BIG-IP partitions
description:
- Manage BIG-IP partitions.
version_added: 2.5
options:
name:
description:
- Name of the partition
required: True
description:
description:
- The description to attach to the Partition.
route_domain:
description:
- The default Route Domain to assign to the Partition. If no route domain
is specified, then the default route domain for the system (typically
zero) will be used only when creating a new partition.
state:
description:
- Whether the partition should exist or not.
default: present
choices:
- present
- absent
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create partition "foo" using the default route domain
bigip_partition:
name: foo
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create partition "bar" using a custom route domain
bigip_partition:
name: bar
route_domain: 3
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Change route domain of partition "foo"
bigip_partition:
name: foo
route_domain: 8
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Set a description for partition "foo"
bigip_partition:
name: foo
description: Tenant CompanyA
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Delete the "foo" partition
bigip_partition:
name: foo
password: secret
server: lb.mydomain.com
user: admin
state: absent
delegate_to: localhost
'''
RETURN = r'''
route_domain:
description: Name of the route domain associated with the partition.
returned: changed and success
type: int
sample: 0
description:
description: The description of the partition.
returned: changed and success
type: string
sample: Example partition
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_map = {
'defaultRouteDomain': 'route_domain',
}
api_attributes = [
'description', 'defaultRouteDomain'
]
returnables = [
'description', 'route_domain'
]
updatables = [
'description', 'route_domain'
]
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
except Exception:
return result
@property
def partition(self):
# Cannot create a partition in a partition, so nullify this
return None
@property
def route_domain(self):
if self._values['route_domain'] is None:
return None
return int(self._values['route_domain'])
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Parameters(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
changed[k] = change
if changed:
self.changes = Parameters(params=changed)
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
if self.module.check_mode:
return True
self.create_on_device()
if not self.exists():
raise F5ModuleError("Failed to create the partition.")
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the partition.")
return True
def read_current_from_device(self):
resource = self.client.api.tm.auth.partitions.partition.load(
name=self.want.name
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.auth.partitions.partition.exists(
name=self.want.name
)
return result
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.auth.partitions.partition.load(
name=self.want.name
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.auth.partitions.partition.create(
name=self.want.name,
**params
)
def remove_from_device(self):
result = self.client.api.tm.auth.partitions.partition.load(
name=self.want.name
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
route_domain=dict(type='int'),
state=dict(
choices=['absent', 'present'],
default='present'
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
client = None
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
if client:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | 8,998,525,193,449,595,000 | 26.226158 | 91 | 0.614692 | false | 4.152951 | false | false | false |
ihatevim/aetherbot | plugins/steam_user.py | 33 | 2667 | import requests
from lxml import etree
from cloudbot import hook
from cloudbot.util import formatting
# security
parser = etree.XMLParser(resolve_entities=False, no_network=True)
API_URL = "http://steamcommunity.com/id/{}/"
ID_BASE = 76561197960265728
headers = {}
class SteamError(Exception):
pass
def convert_id32(id_64):
"""
Takes a Steam ID_64 formatted ID and returns a ID_32 formatted ID
:type id_64: int
:return: str
"""
out = ["STEAM_0:"]
final = id_64 - ID_BASE
if final % 2 == 0:
out.append("0:")
else:
out.append("1:")
out.append(str(final // 2))
return "".join(out)
def convert_id3(id_64):
"""
Takes a Steam ID_64 formatted ID and returns a ID_3 formatted ID
:typetype id_64: int
:return: str
"""
_id = (id_64 - ID_BASE) * 2
if _id % 2 == 0:
_id += 0
else:
_id += 1
actual = str(_id // 2)
return "U:1:{}".format(actual)
def get_data(user):
"""
Takes a Steam Community ID of a Steam user and returns a dict of data about that user
:type user: str
:return: dict
"""
data = {}
# form the request
params = {'xml': 1}
# get the page
try:
request = requests.get(API_URL.format(user), params=params, headers=headers)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
raise SteamError("Could not get user info: {}".format(e))
profile = etree.fromstring(request.content, parser=parser)
try:
data["name"] = profile.find('steamID').text
data["id_64"] = int(profile.find('steamID64').text)
online_state = profile.find('stateMessage').text
except AttributeError:
raise SteamError("Could not get data for this user.")
online_state = online_state.replace("<br/>", ": ") # will make this pretty later
data["state"] = formatting.strip_html(online_state)
data["id_32"] = convert_id32(data["id_64"])
data["id_3"] = convert_id3(data["id_64"])
return data
@hook.on_start
def set_headers(bot):
""" Runs on initial plugin load and sets the HTTP headers for this plugin. """
global headers
headers = {
'User-Agent': bot.user_agent
}
@hook.command("steamid", "sid", "steamuser", "su")
def steamid(text):
"""steamid <username> -- gets the steam ID of <username>. Uses steamcommunity.com/id/<nickname>. """
try:
data = get_data(text)
except SteamError as e:
return "{}".format(e)
return "{name} ({state}): \x02ID64:\x02 {id_64}, \x02ID32:\x02 {id_32}, \x02ID3:\x02 {id_3}".format(**data)
| gpl-3.0 | -2,445,590,810,197,459,500 | 24.160377 | 111 | 0.610049 | false | 3.304833 | false | false | false |
sostenibilidad-unam/SHV | create_run.py | 1 | 5127 | import argparse
import random
import os
import setup
import numpy as np
parser = argparse.ArgumentParser(description='Create meta-simulation package at workdir')
parser.add_argument('--netlogo', required=True, help='absolute path to netlogo directory')
parser.add_argument('--workdir', required=True, help='absolute path to working directory where meta-simulation will be setup.')
parser.add_argument('--threads', type=int, default=12)
args = parser.parse_args()
# create working directory
if not os.path.isdir(args.workdir):
os.makedirs(args.workdir)
# create symlink to netlogo
netlogo_jar = os.path.join( args.netlogo, "app/netlogo-6.0.2.jar")
assert os.path.exists(netlogo_jar)
os.symlink(netlogo_jar, os.path.join(args.workdir, "NetLogo.jar"))
# symlink extensions
extensions = ['csv', 'matrix', 'gis', 'bitmap', 'profiler']
for extension in extensions:
ext_path = os.path.join( args.netlogo, "app/extensions/%s" % extension)
assert os.path.exists(ext_path)
os.symlink(ext_path, os.path.join(args.workdir, extension))
# create symlinks to model, argumentspace and run script
this_dir = os.path.dirname(os.path.realpath(__file__))
os.symlink(os.path.join(this_dir, "ABM-Empirical-MexicoCity_V6.nlogo"),
os.path.join(args.workdir, "ABM-Empirical-MexicoCity_V6.nlogo"))
os.symlink(os.path.join(this_dir, "setup.nls"),
os.path.join(args.workdir, "setup.nls"))
os.symlink(os.path.join(this_dir,"value_functions.nls"),
os.path.join(args.workdir,"value_functions.nls"))
os.symlink(os.path.join(this_dir, "run.sh"), os.path.join(args.workdir, "run.sh"))
os.symlink(os.path.join(this_dir, "data"), os.path.join(args.workdir, "data"))
# read setup and submit templates
setup_template = open('setup_template_empirical.xml').read()
condor_template= open('submit_template.condor').read()
# create setup XML files and condor files
with open('%s/submit_all.condor' % args.workdir, 'w') as condorfile:
for eficiencia_nuevainfra in setup.eficiencia_nuevainfra:
for eficiencia_mantenimiento in setup.eficiencia_mantenimiento:
for Lambda in setup.Lambda:
for factor_subsidencia in setup.factor_subsidencia:
for recursos_para_distribucion in setup.recursos_para_distribucion:
for recursos_para_mantenimiento in setup.recursos_para_mantenimiento:
for recursos_nuevainfrastructura in setup.recursos_nuevainfrastructura:
for requerimiento_deagua in setup.requerimiento_deagua:
for n_runs in setup.n_runs:
run_id = "r_%s_%s_%s_%s_%s_%s_%s_%s_%s" % (eficiencia_nuevainfra,
eficiencia_mantenimiento,
Lambda,
factor_subsidencia,
recursos_para_mantenimiento,
recursos_para_distribucion,
recursos_nuevainfrastructura,
requerimiento_deagua,
n_runs)
condorfile.write(condor_template.format(run_id=run_id,
threads=args.threads))
with open('%s/setup_%s.xml' % (args.workdir, run_id), 'w') as setupfile:
e = {"time_limit" : setup.years * 365,
"eficiencia_nuevainfra": eficiencia_nuevainfra,
"eficiencia_mantenimiento": eficiencia_mantenimiento,
"lambda": Lambda,
"escala": setup.escala,
"factor_subsidencia": factor_subsidencia,
"recursos_para_distribucion": recursos_para_distribucion,
"recursos_para_mantenimiento": recursos_para_mantenimiento,
"recursos_nuevainfrastructura": recursos_nuevainfrastructura,
"ANP": setup.ANP,
"requerimiento_deagua": requerimiento_deagua,
"n_runs": n_runs}
setupfile.write(
setup_template.format(**e)
)
| gpl-3.0 | 6,282,423,725,192,038,000 | 57.261364 | 127 | 0.497367 | false | 4.164907 | false | false | false |
RedHatInsights/insights-core | insights/combiners/virt_what.py | 1 | 3545 | """
VirtWhat
========
Combiner to check if the host is running on a virtual or physical machine. It
uses the results of the ``DMIDecode`` and ``VirtWhat`` parsers. Prefer
``VirtWhat`` to ``DMIDecode``.
Examples:
>>> vw = shared[VirtWhat]
>>> vw.is_virtual
True
>>> vw.is_physical
False
>>> vw.generic
'kvm'
>>> vw.amended_generic
'rhev'
>>> 'aws' in vw
False
"""
from insights.core.plugins import combiner
from insights.parsers.dmidecode import DMIDecode
from insights.parsers.virt_what import VirtWhat as VW, BAREMETAL
# Below 2 Maps are only For DMIDecode
GENERIC_MAP = {
'vmware': ['VMware'],
'kvm': ['Red Hat', 'KVM'],
'xen': ['Xen', 'domU'],
'virtualpc': ['Microsoft Corporation', 'Virtual Machine'],
'virtualbox': ['innotek GmbH'],
'parallels': ['Parallels'],
'qemu': ['QEMU'],
}
SPECIFIC_MAP = {
'aws': ['amazon'],
'xen-hvm': ['HVM'],
}
OVIRT = 'ovirt'
RHEV = 'rhev'
@combiner([DMIDecode, VW])
class VirtWhat(object):
"""
A combiner for checking if this machine is virtual or physical by checking
``virt-what`` or ``dmidecode`` command.
Prefer ``virt-what`` to ``dmidecode``
Attributes:
is_virtual (bool): It's running in a virtual machine?
is_physical (bool): It's running in a physical machine?
generic (str): The type of the virtual machine. 'baremetal' if physical machine.
specifics (list): List of the specific information.
amended_generic (str):The type of the virtual machine. 'baremetal' if physical machine.
Added to address an issue with virt_what/dmidecode when identifying 'ovirt' vs 'rhev'.
Will match the generic attribute in all other cases.
"""
def __init__(self, dmi, vw):
self.is_virtual = self.is_physical = None
self.generic = ''
self.specifics = []
if vw and not vw.errors:
self.is_physical = vw.is_physical
self.is_virtual = vw.is_virtual
self.generic = vw.generic
self.specifics = vw.specifics
# Error occurred in ``virt-what``, try ``dmidecode``
if (vw is None or vw.errors) and dmi:
sys_info = dmi.get("system_information", [{}])[0]
bios_info = dmi.get("bios_information", [{}])[0]
dmi_info = list(sys_info.values()) + list(bios_info.values())
if dmi_info:
for dmi_v in dmi_info:
if not self.generic:
generic = [g for g, val in GENERIC_MAP.items() if any(v in dmi_v for v in val)]
self.generic = generic[0] if generic else ''
self.specifics.extend([g for g, val in SPECIFIC_MAP.items() if any(v in dmi_v for v in val)])
self.is_virtual = True
self.is_physical = False
if self.generic == '':
self.generic = BAREMETAL
self.is_virtual = False
self.is_physical = True
sys_info = dmi.get("system_information", [{}])[0] if dmi else None
self.amended_generic = (RHEV if sys_info['product_name'].lower() == 'rhev hypervisor' else
OVIRT if sys_info['product_name'].lower() == 'ovirt node' else
self.generic) if sys_info else self.generic
def __contains__(self, name):
"""bool: Is this ``name`` found in the specific list?"""
return name in [self.generic] + self.specifics
| apache-2.0 | -1,563,430,414,772,870,000 | 33.086538 | 113 | 0.573484 | false | 3.580808 | false | false | false |
pjsier/rainapp | events/views.py | 1 | 4334 | import pandas as pd
from django.http import HttpResponse
from django.shortcuts import render
from csos.models import RiverCso, RiverOutfall
from events.analyzer import rainfall_graph, find_n_years, build_flooding_data
from events.models import HourlyPrecip, NYearEvent
from flooding.models import BasementFloodingEvent
def index(request):
_default_start = '07/22/2011 08:00'
_default_end = '07/23/2011 06:00'
return show_date(request, _default_start, _default_end)
def show_date(request, start_stamp, end_stamp):
ret_val = {}
try:
start = pd.to_datetime(start_stamp)
end = pd.to_datetime(end_stamp)
ret_val['start_date'] = start.strftime("%m/%d/%Y %H:%M")
ret_val['end_date'] = end.strftime("%m/%d/%Y %H:%M")
hourly_precip_dict = list(
HourlyPrecip.objects.filter(
start_time__gte=start,
end_time__lte=end
).values()
)
hourly_precip_df = pd.DataFrame(hourly_precip_dict)
ret_val['total_rainfall'] = "%s inches" % hourly_precip_df['precip'].sum()
high_intensity = find_n_years(hourly_precip_df)
if high_intensity is None:
ret_val['high_intensity'] = 'No'
else:
ret_val['high_intensity'] = "%s inches in %s hours!<br> A %s-year storm" % (
high_intensity['inches'], high_intensity['duration_hrs'], high_intensity['n'])
graph_data = {'total_rainfall_data': rainfall_graph(hourly_precip_df)}
csos_db = RiverCso.objects.filter(open_time__range=(start, end)).values() | RiverCso.objects.filter(
close_time__range=(start, end)).values()
csos_df = pd.DataFrame(list(csos_db))
csos = []
ret_val['sewage_river'] = 'None'
if len(csos_df) > 0:
csos_df['duration'] = (csos_df['close_time'] - csos_df['open_time'])
ret_val['sewage_river'] = "%s minutes" % int(csos_df['duration'].sum().seconds / 60)
river_outfall_ids = csos_df['river_outfall_id'].unique()
river_outfall_ids = list(river_outfall_ids)
river_outfalls = RiverOutfall.objects.filter(
id__in=river_outfall_ids,
lat__isnull=False
)
for river_outfall in river_outfalls:
csos.append({'lat': river_outfall.lat, 'lon': river_outfall.lon})
cso_map = {'cso_points': csos}
graph_data['cso_map'] = cso_map
flooding_df = pd.DataFrame(
list(BasementFloodingEvent.objects.filter(date__gte=start).filter(date__lte=end).values()))
if len(flooding_df) > 0:
graph_data['flooding_data'] = build_flooding_data(flooding_df)
ret_val['basement_flooding'] = flooding_df[flooding_df['unit_type'] == 'ward']['count'].sum()
else:
graph_data['flooding_data'] = {}
ret_val['basement_flooding'] = 0
ret_val['graph_data'] = graph_data
except ValueError as e:
return HttpResponse("Not valid dates")
ret_val['hourly_precip'] = str(hourly_precip_df.head())
return render(request, 'show_event.html', ret_val)
def nyear(request, recurrence):
recurrence = int(recurrence)
ret_val = {'recurrence': recurrence, 'likelihood': str(int(1 / int(recurrence) * 100)) + '%'}
events = []
events_db = NYearEvent.objects.filter(n=recurrence)
for event in events_db:
date_formatted = event.start_time.strftime("%m/%d/%Y") + "-" + event.end_time.strftime("%m/%d/%Y")
duration = str(event.duration_hours) + ' hours' if event.duration_hours <= 24 else str(
int(event.duration_hours / 24)) + ' days'
events.append({'date_formatted': date_formatted, 'inches': "%.2f" % event.inches,
'duration_formatted': duration,
'event_url': '/date/%s/%s' % (event.start_time, event.end_time)})
ret_val['events'] = events
ret_val['num_occurrences'] = len(events)
return render(request, 'nyear.html', ret_val)
def viz_animation(request):
return render(request, 'viz.html')
def basement_flooding(request):
return render(request, 'flooding.html')
def viz_splash(request):
return render(request, 'viz-splash.html')
def about(request):
return render(request, 'about.html')
| mit | -7,805,179,108,453,252,000 | 35.728814 | 108 | 0.603138 | false | 3.222305 | false | false | false |
Niektory/fifengine | tests/swig_tests/timer_tests.py | 5 | 1770 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from swig_test_utils import *
import time
class MyTimeEvent(fife.TimeEvent):
def __init__(self, period):
fife.TimeEvent.__init__(self, period)
self.counter = 0
def updateEvent(self, curtime):
print "testing timer event... %d, %d" % (curtime, self.counter)
self.counter += 1
class TestTimer(unittest.TestCase):
def setUp(self):
self.engine = getEngine(True)
self.timemanager = self.engine.getTimeManager()
def tearDown(self):
self.engine.destroy()
def testEvents(self):
e = MyTimeEvent(100)
self.timemanager.registerEvent(e)
for i in xrange(10):
time.sleep(0.1)
self.timemanager.update()
self.timemanager.unregisterEvent(e)
TEST_CLASSES = [TestTimer]
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 7,787,208,797,495,950,000 | 29 | 70 | 0.657062 | false | 3.532934 | true | false | false |
adviti/melange | app/soc/modules/gsoc/views/helper/request_data.py | 1 | 15071 | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the RequestData object that will be created for each
request in the GSoC module.
"""
from google.appengine.ext import db
from soc.logic.exceptions import NotFound
from soc.views.helper.access_checker import isSet
from soc.views.helper import request_data
from soc.modules.gsoc.models.profile import GSoCProfile
from soc.modules.gsoc.models.organization import GSoCOrganization
from soc.modules.gsoc.models.timeline import GSoCTimeline
class TimelineHelper(request_data.TimelineHelper):
"""Helper class for the determination of the currently active period.
Methods ending with "On", "Start", or "End" return a date.
Methods ending with "Between" return a tuple with two dates.
Methods ending with neither return a Boolean.
"""
def currentPeriod(self):
"""Return where we are currently on the timeline.
"""
if not self.programActive():
return 'offseason'
if self.beforeOrgSignupStart():
return 'kickoff_period'
if self.afterStudentSignupStart():
return 'student_signup_period'
if self.afterOrgSignupStart():
return 'org_signup_period'
if self.studentsAnnounced():
return 'coding_period'
return 'offseason'
def nextDeadline(self):
"""Determines the next deadline on the timeline.
Returns:
A two-tuple containing deadline text and the datetime object for
the next deadline
"""
if self.beforeOrgSignupStart():
return ("Org Application Starts", self.orgSignupStart())
# we do not have deadlines for any of those programs that are not active
if not self.programActive():
return ("", None)
if self.orgSignup():
return ("Org Application Deadline", self.orgSignupEnd())
if request_data.isBetween(self.orgSignupEnd(), self.orgsAnnouncedOn()):
return ("Accepted Orgs Announced In", self.orgsAnnouncedOn())
if self.orgsAnnounced() and self.beforeStudentSignupStart():
return ("Student Application Opens", self.studentSignupStart())
if self.studentSignup():
return ("Student Application Deadline", self.studentSignupEnd())
if request_data.isBetween(self.studentSignupEnd(), self.applicationMatchedOn()):
return ("Proposal Matched Deadline", self.applicationMatchedOn())
if request_data.isBetween(self.applicationMatchedOn(), self.applicationReviewEndOn()):
return ("Proposal Scoring Deadline", self.applicationReviewEndOn())
if request_data.isBetween(self.applicationReviewEndOn(), self.studentsAnnouncedOn()):
return ("Accepted Students Announced", self.studentsAnnouncedOn())
return ('', None)
def studentsAnnouncedOn(self):
return self.timeline.accepted_students_announced_deadline
def studentsAnnounced(self):
return request_data.isAfter(self.studentsAnnouncedOn())
def beforeStudentsAnnounced(self):
return request_data.isBefore(self.studentsAnnouncedOn())
def applicationReviewEndOn(self):
return self.timeline.application_review_deadline
def applicationMatchedOn(self):
return self.timeline.student_application_matched_deadline
def mentorSignup(self):
return self.programActiveBetween() and self.orgsAnnounced()
def afterFirstSurveyStart(self, surveys):
"""Returns True if we are past at least one survey has start date.
Args:
surveys: List of survey entities for which we need to determine if
at least one of them have started
"""
first_survey_start = min([s.survey_start for s in surveys])
return request_data.isAfter(first_survey_start)
class RequestData(request_data.RequestData):
"""Object containing data we query for each request in the GSoC module.
The only view that will be exempt is the one that creates the program.
Fields:
site: The Site entity
user: The user entity (if logged in)
css_path: a part of the css to fetch the GSoC specific CSS resources
program: The GSoC program entity that the request is pointing to
programs: All GSoC programs.
program_timeline: The GSoCTimeline entity
timeline: A TimelineHelper entity
is_host: is the current user a host of the program
is_mentor: is the current user a mentor in the program
is_student: is the current user a student in the program
is_org_admin: is the current user an org admin in the program
org_map: map of retrieved organizations
org_admin_for: the organizations the current user is an admin for
mentor_for: the organizations the current user is a mentor for
student_info: the StudentInfo for the current user and program
organization: the GSoCOrganization for the current url
Raises:
out_of_band: 404 when the program does not exist
"""
def __init__(self):
"""Constructs an empty RequestData object.
"""
super(RequestData, self).__init__()
# module wide fields
self.css_path = 'gsoc'
# program wide fields
self._programs = None
self.program = None
self.program_timeline = None
self.org_app = None
# user profile specific fields
self.profile = None
self.is_host = False
self.is_mentor = False
self.is_student = False
self.is_org_admin = False
self.org_map = {}
self.mentor_for = []
self.org_admin_for = []
self.student_info = None
self.organization = None
@property
def programs(self):
"""Memoizes and returns a list of all programs.
"""
from soc.modules.gsoc.models.program import GSoCProgram
if not self._programs:
self._programs = list(GSoCProgram.all())
return self._programs
def getOrganization(self, org_key):
"""Retrieves the specified organization.
"""
if org_key not in self.org_map:
org = db.get(org_key)
self.org_map[org_key] = org
return self.org_map[org_key]
def orgAdminFor(self, organization):
"""Returns true iff the user is admin for the specified organization.
Organization may either be a key or an organization instance.
"""
if self.is_host:
return True
if isinstance(organization, db.Model):
organization = organization.key()
return organization in [i.key() for i in self.org_admin_for]
def mentorFor(self, organization):
"""Returns true iff the user is mentor for the specified organization.
Organization may either be a key or an organization instance.
"""
if self.is_host:
return True
if isinstance(organization, db.Model):
organization = organization.key()
return organization in [i.key() for i in self.mentor_for]
def isPossibleMentorForProposal(self, mentor_profile=None):
"""Checks if the user is a possible mentor for the proposal in the data.
"""
assert isSet(self.profile)
assert isSet(self.proposal)
profile = mentor_profile if mentor_profile else self.profile
return profile.key() in self.proposal.possible_mentors
def populate(self, redirect, request, args, kwargs):
"""Populates the fields in the RequestData object.
Args:
request: Django HTTPRequest object.
args & kwargs: The args and kwargs django sends along.
"""
super(RequestData, self).populate(redirect, request, args, kwargs)
if kwargs.get('sponsor') and kwargs.get('program'):
program_key_name = "%s/%s" % (kwargs['sponsor'], kwargs['program'])
program_key = db.Key.from_path('GSoCProgram', program_key_name)
else:
from soc.models.site import Site
program_key = Site.active_program.get_value_for_datastore(self.site)
program_key_name = program_key.name()
import logging
logging.error("No program specified")
timeline_key = db.Key.from_path('GSoCTimeline', program_key_name)
org_app_key_name = 'gsoc_program/%s/orgapp' % program_key_name
org_app_key = db.Key.from_path('OrgAppSurvey', org_app_key_name)
keys = [program_key, timeline_key, org_app_key]
self.program, self.program_timeline, self.org_app = db.get(keys)
if not self.program:
raise NotFound("There is no program for url '%s'" % program_key_name)
self.timeline = TimelineHelper(self.program_timeline, self.org_app)
if kwargs.get('organization'):
fields = [self.program.key().id_or_name(), kwargs.get('organization')]
org_key_name = '/'.join(fields)
self.organization = GSoCOrganization.get_by_key_name(org_key_name)
if not self.organization:
raise NotFound("There is no organization for url '%s'" % org_key_name)
if self.user:
key_name = '%s/%s' % (self.program.key().name(), self.user.link_id)
self.profile = GSoCProfile.get_by_key_name(
key_name, parent=self.user)
from soc.modules.gsoc.models.program import GSoCProgram
host_key = GSoCProgram.scope.get_value_for_datastore(self.program)
self.is_host = host_key in self.user.host_for
if self.profile:
org_keys = set(self.profile.mentor_for + self.profile.org_admin_for)
prop = GSoCProfile.student_info
student_info_key = prop.get_value_for_datastore(self.profile)
if student_info_key:
self.student_info = db.get(student_info_key)
self.is_student = True
else:
orgs = db.get(org_keys)
org_map = self.org_map = dict((i.key(), i) for i in orgs)
self.mentor_for = org_map.values()
self.org_admin_for = [org_map[i] for i in self.profile.org_admin_for]
self.is_org_admin = self.is_host or bool(self.org_admin_for)
self.is_mentor = self.is_org_admin or bool(self.mentor_for)
class RedirectHelper(request_data.RedirectHelper):
"""Helper for constructing redirects.
"""
def review(self, id=None, student=None):
"""Sets the kwargs for an url_patterns.REVIEW redirect.
"""
if not student:
assert 'user' in self._data.kwargs
student = self._data.kwargs['user']
self.id(id)
self.kwargs['user'] = student
return self
def invite(self, role=None):
"""Sets args for an url_patterns.INVITE redirect.
"""
if not role:
assert 'role' in self._data.kwargs
role = self._data.kwargs['role']
self.organization()
self.kwargs['role'] = role
return self
def orgApp(self, survey=None):
"""Sets kwargs for an url_patterns.SURVEY redirect for org application.
"""
if not survey:
assert 'survey' in self._data.kwargs
survey = self._data.kwargs['survey']
self.organization()
self.kwargs['survey'] = survey
def document(self, document):
"""Override this method to set GSoC specific _url_name.
"""
super(RedirectHelper, self).document(document)
self._url_name = 'show_gsoc_document'
return self
def acceptedOrgs(self):
"""Sets the _url_name to the list all the accepted orgs.
"""
super(RedirectHelper, self).acceptedOrgs()
self._url_name = 'gsoc_accepted_orgs'
return self
def allProjects(self):
"""Sets the _url_name to list all GSoC projects.
"""
self.program()
self._url_name = 'gsoc_accepted_projects'
return self
def homepage(self):
"""Sets the _url_name for the homepage of the current GSOC program.
"""
super(RedirectHelper, self).homepage()
self._url_name = 'gsoc_homepage'
return self
def searchpage(self):
"""Sets the _url_name for the searchpage of the current GSOC program.
"""
super(RedirectHelper, self).searchpage()
self._url_name = 'search_gsoc'
return self
def orgHomepage(self, link_id):
"""Sets the _url_name for the specified org homepage
"""
super(RedirectHelper, self).orgHomepage(link_id)
self._url_name = 'gsoc_org_home'
return self
def dashboard(self):
"""Sets the _url_name for dashboard page of the current GSOC program.
"""
super(RedirectHelper, self).dashboard()
self._url_name = 'gsoc_dashboard'
return self
def events(self):
"""Sets the _url_name for the events page, if it is set.
"""
from soc.modules.gsoc.models.program import GSoCProgram
key = GSoCProgram.events_page.get_value_for_datastore(self._data.program)
if not key:
self._clear()
self._no_url = True
self.program()
self._url_name = 'gsoc_events'
return self
def request(self, request):
"""Sets the _url_name for a request.
"""
assert request
self.id(request.key().id())
if request.type == 'Request':
self._url_name = 'show_gsoc_request'
else:
self._url_name = 'gsoc_invitation'
return self
def comment(self, comment, full=False, secure=False):
"""Creates a direct link to a comment.
"""
review = comment.parent()
self.review(review.key().id_or_name(), review.parent().link_id)
url = self.urlOf('review_gsoc_proposal', full=full, secure=secure)
return "%s#c%s" % (url, comment.key().id())
def project(self, id=None, student=None):
"""Returns the URL to the Student Project.
Args:
student: entity which represents the user for the student
"""
if not student:
assert 'user' in self._data.kwargs
student = self._data.kwargs['user']
self.id(id)
self.kwargs['user'] = student
return self
def survey(self, survey=None):
"""Sets kwargs for an url_patterns.SURVEY redirect.
Args:
survey: the survey's link_id
"""
self.program()
if not survey:
assert 'survey' in self._data.kwargs
survey = self._data.kwargs['survey']
self.kwargs['survey'] = survey
return self
def survey_record(self, survey=None, id=None, student=None):
"""Returns the redirector object with the arguments for survey record
Args:
survey: the survey's link_id
"""
self.program()
self.project(id, student)
if not survey:
assert 'survey' in self._data.kwargs
survey = self._data.kwargs['survey']
self.kwargs['survey'] = survey
return self
def grading_record(self, record):
"""Returns the redirector object with the arguments for grading record
Args:
record: the grading record entity
"""
self.program()
project = record.parent()
self.project(project.key().id(), project.parent().link_id)
self.kwargs['group'] = record.grading_survey_group.key().id_or_name()
self.kwargs['record'] = record.key().id()
return self
def editProfile(self, profile):
"""Returns the URL for the edit profile page for the given profile.
"""
self.program()
self._url_name = 'edit_gsoc_profile'
return self
| apache-2.0 | -116,678,680,622,381,740 | 30.33264 | 90 | 0.681574 | false | 3.690255 | false | false | false |
rosswhitfield/mantid | qt/python/mantidqt/widgets/workspacedisplay/table/view.py | 3 | 10194 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantidqt package.
import sys
from functools import partial
from qtpy import QtGui
from qtpy.QtCore import QVariant, Qt, Signal, Slot
from qtpy.QtGui import QKeySequence, QStandardItemModel
from qtpy.QtWidgets import (QAction, QHeaderView, QItemEditorFactory, QMenu, QMessageBox,
QStyledItemDelegate, QTableView)
import mantidqt.icons
from mantidqt.widgets.workspacedisplay.table.plot_type import PlotType
class PreciseDoubleFactory(QItemEditorFactory):
def __init__(self):
QItemEditorFactory.__init__(self)
def createEditor(self, user_type, parent):
widget = super(PreciseDoubleFactory, self).createEditor(user_type, parent)
if user_type == QVariant.Double:
widget.setFrame(True)
widget.setDecimals(16)
widget.setRange(-sys.float_info.max, sys.float_info.max)
return widget
class TableWorkspaceDisplayView(QTableView):
repaint_signal = Signal()
def __init__(self, presenter=None, parent=None, window_flags=Qt.Window, table_model=None):
super().__init__(parent)
self.table_model = table_model if table_model else QStandardItemModel(parent)
self.setModel(self.table_model)
self.presenter = presenter
self.COPY_ICON = mantidqt.icons.get_icon("mdi.content-copy")
self.DELETE_ROW = mantidqt.icons.get_icon("mdi.minus-box-outline")
item_delegate = QStyledItemDelegate(self)
item_delegate.setItemEditorFactory(PreciseDoubleFactory())
self.setItemDelegate(item_delegate)
self.setAttribute(Qt.WA_DeleteOnClose, True)
self.repaint_signal.connect(self._run_repaint)
header = self.horizontalHeader()
header.sectionDoubleClicked.connect(self.handle_double_click)
self.setWindowFlags(window_flags)
def columnCount(self):
return self.table_model.columnCount()
def rowCount(self):
return self.table_model.rowCount()
def subscribe(self, presenter):
"""
:param presenter: A reference to the controlling presenter
"""
self.presenter = presenter
def resizeEvent(self, event):
super().resizeEvent(event)
header = self.horizontalHeader()
# resizes the column headers to fit the contents,
# currently this overwrites any manual changes to the widths of the columns
header.resizeSections(QHeaderView.ResizeToContents)
# then allows the users to resize the headers manually
header.setSectionResizeMode(QHeaderView.Interactive)
def emit_repaint(self):
self.repaint_signal.emit()
@Slot()
def _run_repaint(self):
self.viewport().update()
def handle_double_click(self, section):
header = self.horizontalHeader()
header.resizeSection(section, header.defaultSectionSize())
def keyPressEvent(self, event):
if event.matches(QKeySequence.Copy):
self.presenter.action_keypress_copy()
return
elif event.key() in (Qt.Key_F2, Qt.Key_Return, Qt.Key_Enter):
self.edit(self.currentIndex())
return
def set_context_menu_actions(self, table):
"""
Sets up the context menu actions for the table
:type table: QTableView
:param table: The table whose context menu actions will be set up.
:param ws_read_function: The read function used to efficiently retrieve data directly from the workspace
"""
copy_action = QAction(self.COPY_ICON, "Copy", table)
copy_action.triggered.connect(self.presenter.action_copy_cells)
table.setContextMenuPolicy(Qt.ActionsContextMenu)
table.addAction(copy_action)
horizontalHeader = table.horizontalHeader()
horizontalHeader.setContextMenuPolicy(Qt.CustomContextMenu)
horizontalHeader.customContextMenuRequested.connect(self.custom_context_menu)
verticalHeader = table.verticalHeader()
verticalHeader.setContextMenuPolicy(Qt.ActionsContextMenu)
verticalHeader.setSectionResizeMode(QHeaderView.Fixed)
copy_spectrum_values = QAction(self.COPY_ICON, "Copy", verticalHeader)
copy_spectrum_values.triggered.connect(self.presenter.action_copy_spectrum_values)
delete_row = QAction(self.DELETE_ROW, "Delete Row", verticalHeader)
delete_row.triggered.connect(self.presenter.action_delete_row)
separator2 = self.make_separator(verticalHeader)
verticalHeader.addAction(copy_spectrum_values)
verticalHeader.addAction(separator2)
verticalHeader.addAction(delete_row)
def custom_context_menu(self, position):
menu_main = QMenu()
plot = QMenu("Plot...", menu_main)
plot_line = QAction("Line", plot)
plot_line.triggered.connect(partial(self.presenter.action_plot, PlotType.LINEAR))
plot_line_with_yerr = QAction("Line with Y Errors", plot)
plot_line_with_yerr.triggered.connect(
partial(self.presenter.action_plot, PlotType.LINEAR_WITH_ERR))
plot_scatter = QAction("Scatter", plot)
plot_scatter.triggered.connect(partial(self.presenter.action_plot, PlotType.SCATTER))
plot_scatter_with_yerr = QAction("Scatter with Y Errors", plot)
plot_scatter_with_yerr.triggered.connect(
partial(self.presenter.action_plot, PlotType.SCATTER_WITH_ERR))
plot_line_and_points = QAction("Line + Symbol", plot)
plot_line_and_points.triggered.connect(
partial(self.presenter.action_plot, PlotType.LINE_AND_SYMBOL))
plot.addAction(plot_line)
plot.addAction(plot_line_with_yerr)
plot.addAction(plot_scatter)
plot.addAction(plot_scatter_with_yerr)
plot.addAction(plot_line_and_points)
menu_main.addMenu(plot)
copy_bin_values = QAction(self.COPY_ICON, "Copy", menu_main)
copy_bin_values.triggered.connect(self.presenter.action_copy_bin_values)
set_as_x = QAction("Set as X", menu_main)
set_as_x.triggered.connect(self.presenter.action_set_as_x)
set_as_y = QAction("Set as Y", menu_main)
set_as_y.triggered.connect(self.presenter.action_set_as_y)
set_as_none = QAction("Set as None", menu_main)
set_as_none.triggered.connect(self.presenter.action_set_as_none)
statistics_on_columns = QAction("Statistics on Columns", menu_main)
statistics_on_columns.triggered.connect(self.presenter.action_statistics_on_columns)
hide_selected = QAction("Hide Selected", menu_main)
hide_selected.triggered.connect(self.presenter.action_hide_selected)
show_all_columns = QAction("Show All Columns", menu_main)
show_all_columns.triggered.connect(self.presenter.action_show_all_columns)
sort_ascending = QAction("Sort Ascending", menu_main)
sort_ascending.triggered.connect(partial(self.presenter.action_sort, True))
sort_descending = QAction("Sort Descending", menu_main)
sort_descending.triggered.connect(partial(self.presenter.action_sort, False))
menu_main.addAction(copy_bin_values)
menu_main.addAction(self.make_separator(menu_main))
menu_main.addAction(set_as_x)
menu_main.addAction(set_as_y)
marked_y_cols = self.presenter.get_columns_marked_as_y()
num_y_cols = len(marked_y_cols)
# If any columns are marked as Y then generate the set error menu
if num_y_cols > 0:
menu_set_as_y_err = QMenu("Set error for Y...")
for label_index in range(num_y_cols):
set_as_y_err = QAction("Y{}".format(label_index), menu_main)
# This is the column index of the Y column for which a YERR column is being added.
# The column index is relative to the whole table, this is necessary
# so that later the data of the column marked as error can be retrieved
related_y_column = marked_y_cols[label_index]
# label_index here holds the index in the LABEL (multiple Y columns have labels Y0, Y1, YN...)
# this is NOT the same as the column relative to the WHOLE table
set_as_y_err.triggered.connect(
partial(self.presenter.action_set_as_y_err, related_y_column))
menu_set_as_y_err.addAction(set_as_y_err)
menu_main.addMenu(menu_set_as_y_err)
menu_main.addAction(set_as_none)
menu_main.addAction(self.make_separator(menu_main))
menu_main.addAction(statistics_on_columns)
menu_main.addAction(self.make_separator(menu_main))
menu_main.addAction(hide_selected)
menu_main.addAction(show_all_columns)
menu_main.addAction(self.make_separator(menu_main))
menu_main.addAction(sort_ascending)
menu_main.addAction(sort_descending)
menu_main.exec_(self.mapToGlobal(position))
def make_separator(self, horizontalHeader):
separator1 = QAction(horizontalHeader)
separator1.setSeparator(True)
return separator1
@staticmethod
def copy_to_clipboard(data):
"""
Uses the QGuiApplication to copy to the system clipboard.
:type data: str
:param data: The data that will be copied to the clipboard
:return:
"""
cb = QtGui.QGuiApplication.clipboard()
cb.setText(data, mode=cb.Clipboard)
def ask_confirmation(self, message, title="Mantid Workbench"):
"""
:param message:
:return:
"""
reply = QMessageBox.question(self, title, message, QMessageBox.Yes, QMessageBox.No)
return True if reply == QMessageBox.Yes else False
def show_warning(self, message, title="Mantid Workbench"):
QMessageBox.warning(self, title, message)
| gpl-3.0 | -8,458,622,923,449,505,000 | 39.452381 | 112 | 0.671768 | false | 3.829452 | false | false | false |
OiNutter/microbit-scripts | rainman/rainman.py | 1 | 2395 | from microbit import *
import random
letters = ['A', 'B']
def get_letter():
return random.choice(letters)
display.scroll("RAINMAN")
display.scroll('Press any button to play', wait=False, loop=True)
while True:
if button_a.is_pressed() or button_b.is_pressed():
print("start playing")
break
display.clear()
seq_length = 3
round = 1
pause = 500
correct = True
def init():
global seq_length, round, pause, correct
seq_length = 3
round = 1
pause = 500
correct = True
display.clear()
init()
while True:
# Draw seperator
for y in range(0, 5):
display.set_pixel(2, y, 5)
# Get sequence
sequence = []
for i in range(0, seq_length):
# Clear previous
for x in range(0, 5):
if x != 2:
for y in range(0, 5):
display.set_pixel(x, y, 0)
sleep(pause)
letter = get_letter()
sequence.append(letter)
print(letter)
if letter == 'A':
for x in range(0, 2):
for y in range(0, 5):
display.set_pixel(x, y, 9)
elif letter == 'B':
for x in range(3, 5):
for y in range(0, 5):
display.set_pixel(x, y, 9)
sleep(500)
display.clear()
# Await input
correct = True
reset = False
print("ENTERED:");
for letter in sequence:
while correct:
entered = ""
if button_a.is_pressed() or button_b.is_pressed():
if button_a.is_pressed():
while button_a.is_pressed():
continue
entered = "A"
else:
while button_b.is_pressed():
continue
entered = "B"
print ("%s:%s" % (letter, entered))
if entered != letter:
correct = False
else:
break
if not correct:
display.scroll("X")
display.scroll("You reached level: %d" % round, wait=False, loop=True)
while True:
if button_a.is_pressed() or button_b.is_pressed():
init()
reset = True
break
if reset:
break
round += 1
seq_length += 1
| mit | 8,623,448,568,692,925,000 | 21.809524 | 82 | 0.469729 | false | 4.087031 | false | false | false |
rienafairefr/nYNABapi | pynYNAB/utils.py | 2 | 2524 | import random
import time
import json
from datetime import datetime
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
from pynYNAB.schema import AccountTypes
from pynYNAB.schema.budget import Account, Payee
def rate_limited(maxpersecond):
minInterval = 1.0 / float(maxpersecond)
def decorate(func):
lastTimeCalled = [None]
def rateLimitedFunction(*args, **kargs):
if lastTimeCalled[0] is not None:
elapsed = time.clock() - lastTimeCalled[0]
leftToWait = minInterval - elapsed
if leftToWait > 0:
print('rate limiting, waiting %g...' % leftToWait)
time.sleep(leftToWait)
ret = func(*args, **kargs)
lastTimeCalled[0] = time.clock()
return ret
return rateLimitedFunction
return decorate
def get_or_create_account(client, name):
accounts = {a.account_name: a for a in client.budget.be_accounts if
a.account_name == name}
if name in accounts:
return accounts[name]
account = Account(
account_type=AccountTypes.Checking,
account_name=name
)
client.add_account(account, balance=random.randint(-10, 10), balance_date=datetime.now())
return account
def get_or_create_payee(client, name):
payees = {p.name: p for p in client.budget.be_payees if
p.name == name}
if name in payees:
return payees[name]
payee = Payee(
name=name
)
client.budget.be_payees.append(payee)
client.push(1)
return payee
def get_one_or_create(session,
model,
create_method='',
create_method_kwargs=None,
**kwargs):
try:
return session.query(model).filter_by(**kwargs).one(), False
except NoResultFound:
kwargs.update(create_method_kwargs or {})
created = getattr(model, create_method, model)(**kwargs)
try:
session.add(created)
session.flush()
return created, True
except IntegrityError:
session.rollback()
return session.query(model).filter_by(**kwargs).one(), True
# https://stackoverflow.com/a/37757378/1685379
def pp_json(json_thing, sort=True, indents=4):
def d(t):
return json.dumps(t, sort_keys=sort, indent=indents)
return d(json.loads(json_thing)) if type(json_thing) is str else d(json_thing)
| mit | -704,101,776,097,413,900 | 28.694118 | 93 | 0.608954 | false | 3.895062 | false | false | false |
nedlowe/amaas-core-sdk-python | amaascore/assets/interface.py | 1 | 8700 | from __future__ import absolute_import, division, print_function, unicode_literals
import json
import logging
from amaascore.assets.utils import json_to_asset
from amaascore.config import ENVIRONMENT
from amaascore.core.interface import Interface
from amaascore.core.amaas_model import json_handler
class AssetsInterface(Interface):
def __init__(self, environment=ENVIRONMENT, endpoint=None, logger=None, username=None, password=None):
self.logger = logger or logging.getLogger(__name__)
super(AssetsInterface, self).__init__(endpoint=endpoint, endpoint_type='assets',
environment=environment, username=username, password=password)
def new(self, asset):
self.logger.info('New Asset - Asset Manager: %s - Asset ID: %s', asset.asset_manager_id, asset.asset_id)
url = '%s/assets/%s' % (self.endpoint, asset.asset_manager_id)
response = self.session.post(url, json=asset.to_interface())
if response.ok:
self.logger.info('Successfully Created Asset - Asset Manager: %s - Asset ID: %s', asset.asset_manager_id,
asset.asset_id)
asset = json_to_asset(response.json())
return asset
else:
self.logger.error(response.text)
response.raise_for_status()
def create_many(self, assets):
if not assets or not isinstance(assets, list):
raise ValueError('Invalid argument. Argument must be a non-empty list.')
self.logger.info('New Assets - Asset Manager: %s', assets[0].asset_manager_id)
url = '%s/assets/%s' % (self.endpoint, assets[0].asset_manager_id)
json_body = [asset.to_interface() for asset in assets]
response = self.session.post(url, json=json_body)
if response.ok:
self.logger.info('Successfully Created Assets - Asset Manager: %s', assets[0].asset_manager_id)
t = response.json()
assets = [asset for asset in response.json()]
return assets
else:
self.logger.error(response.text)
response.raise_for_status()
def amend(self, asset):
self.logger.info('Amend Asset - Asset Manager: %s - Asset ID: %s', asset.asset_manager_id, asset.asset_id)
url = '%s/assets/%s/%s' % (self.endpoint, asset.asset_manager_id, asset.asset_id)
response = self.session.put(url, json=asset.to_interface())
if response.ok:
self.logger.info('Successfully Amended Asset - Asset Manager: %s - Asset ID: %s', asset.asset_manager_id,
asset.asset_id)
asset = json_to_asset(response.json())
return asset
else:
self.logger.error(response.text)
response.raise_for_status()
def partial(self, asset_manager_id, asset_id, updates):
self.logger.info('Partial Amend Asset - Asset Manager: %s - Asset ID: %s', asset_manager_id,
asset_id)
url = '%s/assets/%s/%s' % (self.endpoint, asset_manager_id, asset_id)
# Setting handler ourselves so we can be sure Decimals work
response = self.session.patch(url, data=json.dumps(updates, default=json_handler), headers=self.json_header)
if response.ok:
asset = json_to_asset(response.json())
return asset
else:
self.logger.error(response.text)
response.raise_for_status()
def retrieve(self, asset_manager_id, asset_id, version=None):
self.logger.info('Retrieve Asset - Asset Manager: %s - Asset ID: %s', asset_manager_id, asset_id)
url = '%s/assets/%s/%s' % (self.endpoint, asset_manager_id, asset_id)
if version:
url += '?version=%d' % int(version)
response = self.session.get(url)
if response.ok:
self.logger.info('Successfully Retrieved Asset - Asset Manager: %s - Asset ID: %s', asset_manager_id,
asset_id)
return json_to_asset(response.json())
else:
self.logger.error(response.text)
response.raise_for_status()
def deactivate(self, asset_manager_id, asset_id):
self.logger.info('Deactivate Asset - Asset Manager: %s - Asset ID: %s', asset_manager_id, asset_id)
url = '%s/assets/%s/%s' % (self.endpoint, asset_manager_id, asset_id)
json = {'asset_status': 'Inactive'}
response = self.session.patch(url, json=json)
if response.ok:
self.logger.info('Successfully Deactivated Asset - Asset Manager: %s - Asset ID: %s', asset_manager_id,
asset_id)
return json_to_asset(response.json())
else:
self.logger.error(response.text)
response.raise_for_status()
def search(self, asset_manager_ids=None, asset_ids=None, asset_classes=None, asset_types=None,
page_no=None, page_size=None):
self.logger.info('Search for Assets - Asset Manager(s): %s', asset_manager_ids)
search_params = {}
# Potentially roll this into a loop through args rather than explicitly named - depends on additional validation
if asset_manager_ids:
search_params['asset_manager_ids'] = ','.join([str(amid) for amid in asset_manager_ids])
if asset_ids:
search_params['asset_ids'] = ','.join(asset_ids)
if asset_classes:
search_params['asset_classes'] = ','.join(asset_classes)
if asset_types:
search_params['asset_types'] = ','.join(asset_types)
if page_no is not None:
search_params['page_no'] = page_no
if page_size:
search_params['page_size'] = page_size
url = self.endpoint + '/assets'
response = self.session.get(url, params=search_params)
if response.ok:
assets = [json_to_asset(json_asset) for json_asset in response.json()]
self.logger.info('Returned %s Assets.', len(assets))
return assets
else:
self.logger.error(response.text)
response.raise_for_status()
def fields_search(self, asset_manager_ids=None, asset_ids=None, asset_classes=None, asset_types=None,
fields=None, page_no=None, page_size=None):
self.logger.info('Search for Assets - Asset Manager(s): %s', asset_manager_ids)
search_params = {}
if asset_manager_ids:
search_params['asset_manager_ids'] = ','.join([str(amid) for amid in asset_manager_ids])
if asset_ids:
search_params['asset_ids'] = ','.join(asset_ids)
if asset_classes:
search_params['asset_classes'] = ','.join(asset_classes)
if asset_types:
search_params['asset_types'] = ','.join(asset_types)
if fields:
search_params['fields'] = ','.join(fields)
if page_no is not None:
search_params['page_no'] = page_no
if page_size:
search_params['page_size'] = page_size
url = self.endpoint + '/assets'
response = self.session.get(url, params=search_params)
if response.ok:
asset_dicts = response.json()
self.logger.info('Returned %s Assets.', len(asset_dicts))
return asset_dicts
else:
self.logger.error(response.text)
response.raise_for_status()
def assets_by_asset_manager(self, asset_manager_id):
self.logger.info('Retrieve Assets By Asset Manager: %s', asset_manager_id)
url = '%s/assets/%s' % (self.endpoint, asset_manager_id)
response = self.session.get(url)
if response.ok:
assets = [json_to_asset(json_asset) for json_asset in response.json()]
self.logger.info('Returned %s Assets.', len(assets))
return assets
else:
self.logger.error(response.text)
response.raise_for_status()
def clear(self, asset_manager_id):
""" This method deletes all the data for an asset_manager_id.
It should be used with extreme caution. In production it
is almost always better to Inactivate rather than delete. """
self.logger.info('Clear Assets - Asset Manager: %s', asset_manager_id)
url = '%s/clear/%s' % (self.endpoint, asset_manager_id)
response = self.session.delete(url)
if response.ok:
count = response.json().get('count', 'Unknown')
self.logger.info('Deleted %s Assets.', count)
return count
else:
self.logger.error(response.text)
response.raise_for_status()
| apache-2.0 | 1,866,251,523,994,682,400 | 46.027027 | 120 | 0.599195 | false | 3.847855 | false | false | false |
minesense/VisTrails | vistrails/gui/pipeline_view.py | 1 | 168552 | ###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
""" This is a QGraphicsView for pipeline view, it also holds different
types of graphics items that are only available in the pipeline
view. It only handles GUI-related actions, the rest of the
functionalities are implemented at somewhere else,
e.g. core.vistrails
QGraphicsConnectionItem
QGraphicsPortItem
QGraphicsConfigureItem
QGraphicsModuleItem
QPipelineScene
QPipelineView
"""
from __future__ import division
from PyQt4 import QtCore, QtGui
from vistrails.core.configuration import get_vistrails_configuration
from vistrails.core import debug
from vistrails.core.data_structures.graph import GraphContainsCycles
from vistrails.core.db.action import create_action
from vistrails.core.system import systemType
from vistrails.core.modules.module_registry import get_module_registry, \
ModuleRegistryException, MissingPackage
from vistrails.core.system import get_vistrails_basic_pkg_id
from vistrails.core.vistrail.location import Location
from vistrails.core.vistrail.module import Module
from vistrails.core.vistrail.module_function import ModuleFunction
from vistrails.core.vistrail.module_param import ModuleParam
from vistrails.core.vistrail.port import PortEndPoint
from vistrails.core.vistrail.port_spec import PortSpec
from vistrails.core.interpreter.base import AbortExecution
from vistrails.core.interpreter.default import get_default_interpreter
from vistrails.core.utils import VistrailsDeprecation
from vistrails.gui.base_view import BaseView
from vistrails.gui.controlflow_assist import QControlFlowAssistDialog
from vistrails.gui.graphics_view import (QInteractiveGraphicsScene,
QInteractiveGraphicsView,
QGraphicsItemInterface)
from vistrails.gui.module_info import QModuleInfo
from vistrails.gui.module_palette import QModuleTreeWidget
from vistrails.gui.modules.utils import get_widget_class
from vistrails.gui.ports_pane import Parameter
from vistrails.gui.theme import CurrentTheme
from vistrails.gui.utils import getBuilderWindow
from vistrails.gui.variable_dropbox import QDragVariableLabel
import copy
import math
import operator
import string
import warnings
import vistrails.api
import vistrails.gui.utils
##############################################################################
# 2008-06-24 cscheid
#
# - Profiling has shown that calling setPen and setBrush takes a longer
# time than we expected. Watch out for that in the future.
##############################################################################
# QAbstractGraphicsPortItem
class QAbstractGraphicsPortItem(QtGui.QAbstractGraphicsShapeItem):
    """
    QAbstractGraphicsPortItem represents a port shape drawing on top
    (a child) of QGraphicsModuleItem, it must be implemented by a
    specific qgraphicsitem type.
    """
    def __init__(self, port, x, y, ghosted=False, parent=None, union_group=None):
        """ QAbstractGraphicsPortItem(port: PortSpec,
                                      x: float,
                                      y: float,
                                      ghosted: bool,
                                      union: [PortSpec],
                                      parent: QGraphicsItem)
                                      -> QAbstractGraphicsPortItem
        Create the shape, initialize its pen and brush accordingly
        """
        # local lookups are faster than global lookups..
        self._rect = CurrentTheme.PORT_RECT.translated(x,y)
        QtGui.QAbstractGraphicsShapeItem.__init__(self, parent)
        self.setZValue(1)
        self.setFlags(QtGui.QGraphicsItem.ItemIsSelectable)
        self.controller = None
        self.port = port
        self.union_group = union_group
        self.dragging = False
        self.tmp_connection_item = None
        # uuid -> name of the vistrail variables connected to this port
        self.vistrail_vars = {}
        self.removeVarActions = []
        if port is not None:
            self._min_conns = port.min_conns
            self._max_conns = port.max_conns
            self.optional = port.optional
        else:
            self._min_conns = 0
            self._max_conns = -1
            self.optional = False
        self._connected = 0
        self._selected = False
        self.ghosted = ghosted
        self.invalid = False
        self.setPainterState()
        self.updateToolTip()
        self.updateActions()

    def getRect(self):
        """ getRect() -> QRectF
        Return the local rectangle this port shape is drawn in
        """
        return self._rect

    def boundingRect(self):
        """ boundingRect() -> QRectF
        Return the cached bounding rect (rect plus half the pen width)
        """
        return self._boundingRect

    def computeBoundingRect(self):
        """ computeBoundingRect() -> None
        Recompute the bounding rect; must be called when the pen changes
        """
        halfpw = self.pen().widthF() / 2
        self._boundingRect = self.getRect().adjusted(-halfpw, -halfpw,
                                                      halfpw, halfpw)

    def getPosition(self):
        """ getPosition() -> QPointF
        Return the center of this port in scene coordinates
        """
        return self.sceneBoundingRect().center()

    def setPainterState(self):
        """ setPainterState() -> None
        Choose pen and brush from the current selected/ghosted/invalid/
        connection-count state, then refresh the bounding rect
        """
        if self._selected:
            self._pen_color = CurrentTheme.PORT_PEN_COLOR_SELECTED
        elif self.ghosted:
            self._pen_color = CurrentTheme.PORT_PEN_COLOR_GHOSTED
            # self.setPen(CurrentTheme.GHOSTED_PORT_PEN)
            self.setBrush(CurrentTheme.GHOSTED_PORT_BRUSH)
        elif self.invalid:
            self._pen_color = CurrentTheme.PORT_PEN_COLOR_INVALID
            # self.setPen(CurrentTheme.INVALID_PORT_PEN)
            self.setBrush(CurrentTheme.INVALID_PORT_BRUSH)
        elif self._max_conns >= 0 and self._connected >= self._max_conns:
            self._pen_color = CurrentTheme.PORT_PEN_COLOR_FULL
            self.setBrush(CurrentTheme.PORT_BRUSH)
        else:
            self._pen_color = CurrentTheme.PORT_PEN_COLOR_NORMAL
            # self.setPen(CurrentTheme.PORT_PEN)
            self.setBrush(CurrentTheme.PORT_BRUSH)
        if self.brush() == CurrentTheme.PORT_BRUSH:
            if self._connected > 0:
                self.setBrush(CurrentTheme.PORT_CONNECTED_BRUSH)
            elif self._connected < self._min_conns:
                self.setBrush(CurrentTheme.PORT_MANDATORY_BRUSH)
        if self._selected:
            self._pen_width = CurrentTheme.PORT_PEN_WIDTH_SELECTED
        elif self._min_conns > 0 and self._connected < self._min_conns:
            self._pen_width = CurrentTheme.PORT_PEN_WIDTH_MANDATORY
        else:
            self._pen_width = CurrentTheme.PORT_PEN_WIDTH_NORMAL
        self.setPen(CurrentTheme.PORT_PENS[(self._pen_color,
                                            self._pen_width)])
        self.computeBoundingRect()

    def setGhosted(self, ghosted):
        """ setGhosted(ghosted: True) -> None
        Set this link to be ghosted or not
        """
        # was 'self.ghosted <> ghosted': '<>' is the obsolete Python 2
        # alias for '!='; use '!=' consistently with setInvalid/setOptional
        if self.ghosted != ghosted:
            self.ghosted = ghosted
            self.setPainterState()

    def setInvalid(self, invalid):
        """ setInvalid(invalid: bool) -> None
        Mark this port as invalid (e.g. missing vistrail variable)
        """
        if self.invalid != invalid:
            self.invalid = invalid
            self.setPainterState()

    def setOptional(self, optional):
        """ setOptional(optional: bool) -> None
        Mark this port as optional
        """
        if self.optional != optional:
            self.optional = optional
            self.setPainterState()

    def setSelected(self, selected):
        """ setSelected(selected: bool) -> None
        Track selection locally (QGraphicsItem selection is disabled,
        see itemChange) and repaint on change
        """
        # QtGui.QAbstractGraphicsShapeItem.setSelected(self, selected)
        if self._selected != selected:
            self._selected = selected
            self.setPainterState()

    def disconnect(self):
        """ disconnect() -> None
        Decrement the connection count, repainting when the count
        crosses a threshold that changes the port's appearance
        """
        self._connected -= 1
        # print "disconnecting", self._connected, self._min_conns, self._max_conns
        if self._connected == 0 or self._connected+1 == self._min_conns or \
                (self._max_conns >= 0 and self._connected+1 == self._max_conns):
            self.setPainterState()

    def connect(self):
        """ connect() -> None
        Increment the connection count, repainting when the count
        crosses a threshold that changes the port's appearance
        """
        self._connected += 1
        # print "connecting", self._connected, self._min_conns, self._max_conns
        if self._connected == 1 or self._connected == self._min_conns or \
                (self._max_conns >= 0 and self._connected == self._max_conns):
            self.setPainterState()

    def draw(self, painter, option, widget=None):
        """ draw(painter, option, widget) -> None
        Subclasses draw the actual shape here; pen/brush are preset
        """
        raise NotImplementedError("Must implement draw method")

    def paint(self, painter, option, widget=None):
        """ paint(painter, option, widget) -> None
        Apply the current pen/brush and delegate to draw()
        """
        painter.setPen(self.pen())
        painter.setBrush(self.brush())
        self.draw(painter, option, widget)

    def addVistrailVar(self, uuid, name=None):
        """ addVistrailVar(uuid: str, name: str) -> None
        Register a vistrail variable as connected to this port; the
        port is flagged invalid if the variable no longer exists
        """
        if name is None:
            name = self.getVistrailVarName(uuid)
        self.vistrail_vars[uuid] = name
        if not self.controller.has_vistrail_variable_with_uuid(uuid):
            self.setInvalid(True)
        self.updateActions()
        self.updateToolTip()

    def deleteVistrailVar(self, var_uuid):
        """ deleteVistrailVar(var_uuid: str) -> None
        Forget a single connected vistrail variable
        """
        del self.vistrail_vars[var_uuid]
        self.updateActions()
        self.updateToolTip()

    def deleteAllVistrailVars(self):
        """ deleteAllVistrailVars() -> None
        Forget all connected vistrail variables
        """
        self.vistrail_vars = {}
        self.updateActions()
        self.updateToolTip()

    def getVistrailVarName(self, uuid):
        """ getVistrailVarName(uuid: str) -> str
        Return the variable's current name, or '<missing>' if deleted
        """
        if self.controller.has_vistrail_variable_with_uuid(uuid):
            return self.controller.get_vistrail_variable_by_uuid(uuid).name
        return '<missing>'

    def updateToolTip(self):
        """ updateToolTip() -> None
        Rebuild the tooltip from the port spec and connected variables
        """
        tooltip = ""
        if (self.port is not None and self.port.is_valid and
            hasattr(self.port, 'toolTip')):
            tooltip = self.port.toolTip(self.union_group)
        for vistrail_var in self.vistrail_vars.itervalues():
            tooltip += '\nConnected to vistrail var "%s"' % vistrail_var
        self.setToolTip(tooltip)

    def contextMenuEvent(self, event):
        """ contextMenuEvent(event) -> None
        Show the disconnect-variable menu when variables are connected
        """
        if len(self.removeVarActions) > 0:
            menu = QtGui.QMenu()
            for (action, _) in self.removeVarActions:
                menu.addAction(action)
            menu.exec_(event.screenPos())
        event.accept()

    def updateActions(self):
        """ updateActions() -> None
        Rebuild the context-menu actions for disconnecting variables,
        dropping the previously connected signal handlers first
        """
        def gen_action(var_uuid):
            def remove_action():
                self.removeVar(var_uuid)
            return remove_action

        for (action, callback) in self.removeVarActions:
            action.disconnect(action, QtCore.SIGNAL("triggered()"), callback)
        self.removeVarActions = []
        if len(self.vistrail_vars) > 1:
            removeAllVarsAct = \
                QtGui.QAction("Disconnect all vistrail variables",
                              self.scene())
            removeAllVarsAct.setStatusTip("Disconnects all vistrail"
                                          " variables from the port")
            QtCore.QObject.connect(removeAllVarsAct,
                                   QtCore.SIGNAL("triggered()"),
                                   self.removeAllVars)
            self.removeVarActions.append((removeAllVarsAct,
                                          self.removeAllVars))
        for vistrail_var_uuid in sorted(self.vistrail_vars,
                                key=lambda x: self.getVistrailVarName(x)):
            vistrail_var_name = self.getVistrailVarName(vistrail_var_uuid)
            removeVarAction = QtGui.QAction('Disconnect vistrail var "%s"' % \
                                                vistrail_var_name, self.scene())
            removeVarAction.setStatusTip('Disconnects vistrail variable "%s"'
                                         ' from the port' % vistrail_var_name)
            callback = gen_action(vistrail_var_uuid)
            QtCore.QObject.connect(removeVarAction,
                                   QtCore.SIGNAL("triggered()"),
                                   callback)
            self.removeVarActions.append((removeVarAction, callback))

    def removeVar(self, var_uuid):
        """ removeVar(var_uuid: str) -> None
        Disconnect one vistrail variable: remove the hidden variable
        modules/connections from the scene and the controller
        """
        (to_delete_modules, to_delete_conns) = \
            self.controller.get_disconnect_vistrail_vars( \
                self.parentItem().module, self.port.name, var_uuid)
        for conn in to_delete_conns:
            self.scene().remove_connection(conn.id)
        for module in to_delete_modules:
            self.scene().remove_module(module.id)
        self.deleteVistrailVar(var_uuid)
        self.controller.disconnect_vistrail_vars(to_delete_modules,
                                                 to_delete_conns)
        self.setInvalid(False)

    def removeAllVars(self):
        """ removeAllVars() -> None
        Disconnect every vistrail variable from this port
        """
        # Get all connections to vistrail variables for this port
        (to_delete_modules, to_delete_conns) = \
            self.controller.get_disconnect_vistrail_vars( \
                self.parentItem().module, self.port.name)
        for conn in to_delete_conns:
            self.scene().remove_connection(conn.id)
        for module in to_delete_modules:
            self.scene().remove_module(module.id)
        self.deleteAllVistrailVars()
        self.controller.disconnect_vistrail_vars(to_delete_modules,
                                                 to_delete_conns)

    def mousePressEvent(self, event):
        """ mousePressEvent(event: QMouseEvent) -> None
        Prepare for dragging a connection
        """
        if (self.controller and event.buttons() & QtCore.Qt.LeftButton
                and not self.scene().read_only_mode):
            self.dragging = True
            self.setSelected(True)
            event.accept()
        QtGui.QAbstractGraphicsShapeItem.mousePressEvent(self, event)

    def mouseReleaseEvent(self, event):
        """ mouseReleaseEvent(event: QMouseEvent) -> None
        Apply the connection
        """
        if self.tmp_connection_item:
            if self.tmp_connection_item.snapPortItem is not None:
                self.scene().addConnectionFromTmp(self.tmp_connection_item,
                                                  self.parentItem().module,
                                                  self.port.type == "output")
            self.tmp_connection_item.disconnect(True)
            self.scene().removeItem(self.tmp_connection_item)
            self.tmp_connection_item = None
        self.dragging = False
        self.setSelected(False)
        QtGui.QAbstractGraphicsShapeItem.mouseReleaseEvent(self, event)

    def mouseMoveEvent(self, event):
        """ mouseMoveEvent(event: QMouseEvent) -> None
        Change the connection
        """
        if self.dragging:
            if not self.tmp_connection_item:
                z_val = max(self.controller.current_pipeline.modules) + 1
                self.tmp_connection_item = \
                    QGraphicsTmpConnItem(self,
                                         self.union_group or [self],
                                         z_val,
                                         True)
                self.scene().addItem(self.tmp_connection_item)
            self.tmp_connection_item.setCurrentPos(event.scenePos())
            snapPortItem = None
            snapPorts = None
            snapModule = self.scene().findModuleUnder(event.scenePos())
            converters = []
            if snapModule and snapModule != self.parentItem():
                if self.port.type == 'output':
                    portMatch = self.scene().findPortMatch(
                            [self], set(snapModule.inputPorts.values()),
                            fixed_out_pos=event.scenePos(),
                            allow_conversion=True, out_converters=converters)
                    if portMatch[1] is not None:
                        snapPortItem = portMatch[1]
                        snapPorts = portMatch[2]
                elif self.port.type == 'input':
                    portMatch = self.scene().findPortMatch(
                            snapModule.outputPorts.values(), [self],
                            fixed_in_pos=event.scenePos(),
                            allow_conversion=True, out_converters=converters)
                    if portMatch[0] is not None:
                        snapPortItem = portMatch[0]
                        snapPorts = portMatch[0].port
                        # select matching ports in input union
                        self.tmp_connection_item.setStartPort(portMatch[1],
                                                              portMatch[2])
            self.tmp_connection_item.setSnapPort(snapPortItem, snapPorts)
            if snapPortItem:
                tooltip = self.tmp_connection_item.snapPortItem.toolTip()
                if converters:
                    tooltip = ('<strong>conversion required</strong><br/>\n'
                               '%s' % tooltip)
                QtGui.QToolTip.showText(event.screenPos(), tooltip)
            else:
                QtGui.QToolTip.hideText()
            self.tmp_connection_item.setConverting(snapPortItem and converters)
        QtGui.QAbstractGraphicsShapeItem.mouseMoveEvent(self, event)

    def findSnappedPort(self, pos):
        """ findSnappedPort(pos: QPoint) -> Port
        Search all ports of the module under mouse cursor (if any) to
        find the closest matched port
        """
        # FIXME don't hardcode input/output strings...
        snapModule = self.scene().findModuleUnder(pos)
        if snapModule and snapModule!=self.parentItem():
            if self.port.type == 'output':
                return snapModule.getDestPort(pos, self.port)
            elif self.port.type == 'input':
                return snapModule.getSourcePort(pos, self.port)
        else:
            return None

    def itemChange(self, change, value):
        """ itemChange(change: GraphicsItemChange, value: value) -> value
        Do not allow port to be selected
        """
        if change==QtGui.QGraphicsItem.ItemSelectedChange and value:
            return False
        return QtGui.QAbstractGraphicsShapeItem.itemChange(self, change, value)
##############################################################################
# QGraphicsPortItem
class QGraphicsPortRectItem(QAbstractGraphicsPortItem):
    """Port item drawn as a plain rectangle."""
    def draw(self, painter, option, widget=None):
        """Render the port rectangle with the preset pen and brush."""
        painter.drawRect(self.getRect())
class QGraphicsPortEllipseItem(QAbstractGraphicsPortItem):
    """Port item drawn as an ellipse inscribed in the port rectangle."""
    def draw(self, painter, option, widget=None):
        """Render the port ellipse with the preset pen and brush."""
        painter.drawEllipse(self.getRect())
class QGraphicsPortTriangleItem(QAbstractGraphicsPortItem):
    # Port item drawn as a triangle; the optional 'angle' keyword
    # (0, 90, 180 or 270) selects which direction the apex points.
    def __init__(self, *args, **kwargs):
        """Build the triangle polygon fitted inside the port rectangle.

        The 'angle' keyword is consumed here so it is not forwarded to
        the base-class constructor.
        """
        if 'angle' in kwargs:
            angle = kwargs['angle']
            del kwargs['angle']
        else:
            angle = 0
        QAbstractGraphicsPortItem.__init__(self, *args, **kwargs)
        angle = angle % 360
        if angle not in set([0,90,180,270]):
            raise ValueError("Triangle item limited to angles 0,90,180,270.")
        rect = self.getRect()
        # Fit an equilateral triangle (height = side * sqrt(3)/2) in the
        # rect, shrinking whichever dimension would overflow.  For 0/180
        # the base is horizontal, for 90/270 it is vertical.
        if angle == 0 or angle == 180:
            width = rect.width()
            height = width * math.sqrt(3)/2.0
            if height > rect.height():
                height = rect.height()
                width = height * 2.0/math.sqrt(3)
        else:
            height = rect.height()
            width = height * math.sqrt(3)/2.0
            if width > rect.width():
                width = rect.width()
                height = width * 2.0/math.sqrt(3)
        # Edge and midpoint coordinates of the fitted triangle's
        # bounding box, centered within the port rectangle.
        left_x = (rect.width() - width)/2.0 + rect.x()
        right_x = (rect.width() + width) / 2.0 + rect.x()
        mid_x = rect.width() / 2.0 + rect.x()
        top_y = (rect.height() - height)/2.0 + rect.y()
        bot_y = (rect.height() + height)/2.0 + rect.y()
        mid_y = rect.height() / 2.0 + rect.y()
        # Apex direction: 0 -> up, 90 -> right, 180 -> down, 270 -> left.
        if angle == 0:
            self._polygon = QtGui.QPolygonF([QtCore.QPointF(left_x, bot_y),
                                             QtCore.QPointF(mid_x, top_y),
                                             QtCore.QPointF(right_x, bot_y)])
        elif angle == 90:
            self._polygon = QtGui.QPolygonF([QtCore.QPointF(left_x, bot_y),
                                             QtCore.QPointF(left_x, top_y),
                                             QtCore.QPointF(right_x, mid_y)])
        elif angle == 180:
            self._polygon = QtGui.QPolygonF([QtCore.QPointF(left_x, top_y),
                                             QtCore.QPointF(right_x, top_y),
                                             QtCore.QPointF(mid_x, bot_y)])
        elif angle == 270:
            self._polygon = QtGui.QPolygonF([QtCore.QPointF(left_x, mid_y),
                                             QtCore.QPointF(right_x, top_y),
                                             QtCore.QPointF(right_x, bot_y)])

    def draw(self, painter, option, widget=None):
        """Render the precomputed triangle polygon."""
        painter.drawConvexPolygon(self._polygon)
class QGraphicsPortPolygonItem(QAbstractGraphicsPortItem):
    # Port item drawn as an arbitrary polygon.  The required 'points'
    # keyword is a list of (x, y) pairs interpreted relative to the port
    # rectangle; see __init__ for the coordinate conventions.
    def __init__(self, *args, **kwargs):
        """Build the polygon from the 'points' keyword argument.

        Each coordinate of a point is interpreted as follows:
          - None            -> the far edge (right for x, bottom for y)
          - 0 < v <= 1.0001 -> a fraction of the rect's width/height
          - v < 0           -> an offset back from the far edge
          - otherwise       -> an absolute offset from the rect origin
        Resulting coordinates are clamped to the port rectangle.
        Raises ValueError unless at least three points are given.
        """
        if 'points' in kwargs:
            points = kwargs['points']
            del kwargs['points']
        else:
            points = None
        if points is None or len(points) < 3:
            raise ValueError("Must have at least three points")
        QAbstractGraphicsPortItem.__init__(self, *args, **kwargs)
        rect = self.getRect()
        new_points = []
        for p in points:
            if p[0] is None:
                x = rect.x() + rect.width()
            # can't do +1 (2+ is fine)
            elif p[0] != 0 and p[0] > 0 and p[0] < 1.0001:
                x = rect.x() + rect.width() * p[0]
            elif p[0] < 0:
                x = rect.x() + rect.width() + p[0]
            else:
                x = rect.x() + p[0]
            if p[1] is None:
                y = rect.y() + rect.height()
            elif p[1] != 0 and p[1] > 0 and p[1] < 1.0001:
                y = rect.y() + rect.height() * p[1]
            elif p[1] < 0:
                y = rect.y() + rect.height() + p[1]
            else:
                y = rect.y() + p[1]
            # clamp the computed point to the port rectangle
            if x < rect.x():
                x = rect.x()
            # can't do +1 (2+ is fine)
            elif x > (rect.x() + rect.width()):
                x = rect.x() + rect.width()
            if y < rect.y():
                y = rect.y()
            elif y > (rect.y() + rect.height()):
                y = rect.y() + rect.height()
            new_points.append(QtCore.QPointF(x,y))
        self._polygon = QtGui.QPolygonF(new_points)

    def draw(self, painter, option, widget=None):
        """Render the precomputed polygon."""
        painter.drawPolygon(self._polygon)
class QGraphicsPortDiamondItem(QGraphicsPortPolygonItem):
    """Port item drawn as a diamond (a four-point polygon whose
    vertices sit at the midpoints of the port rectangle's edges)."""
    def __init__(self, *args, **kwargs):
        """Inject the diamond vertices and defer to the polygon item."""
        # Fractional coordinates; 0.999999 keeps the value below the
        # 1.0001 "fraction" threshold used by QGraphicsPortPolygonItem.
        kwargs['points'] = [(0, 0.5),
                            (0.5, 0.999999),
                            (0.999999, 0.5),
                            (0.5, 0)]
        QGraphicsPortPolygonItem.__init__(self, *args, **kwargs)
################################################################################
# QGraphicsConfigureItem
class QGraphicsConfigureItem(QtGui.QGraphicsPolygonItem):
"""
QGraphicsConfigureItem is a small triangle shape drawing on top (a child)
of QGraphicsModuleItem
"""
def __init__(self, parent=None, scene=None):
""" QGraphicsConfigureItem(parent: QGraphicsItem, scene: QGraphicsScene)
-> QGraphicsConfigureItem
Create the shape, initialize its pen and brush accordingly
"""
_pen = CurrentTheme.CONFIGURE_PEN
_brush = CurrentTheme.CONFIGURE_BRUSH
_shape = CurrentTheme.CONFIGURE_SHAPE
QtGui.QGraphicsPolygonItem.__init__(self, _shape, parent, scene)
self.setZValue(1)
self.setPen(_pen)
self.setBrush(_brush)
self.ghosted = False
self.controller = None
self.moduleId = None
self.is_breakpoint = False
self.createActions()
def setGhosted(self, ghosted):
""" setGhosted(ghosted: Bool) -> None
Set this link to be ghosted or not
"""
if ghosted <> self.ghosted:
self.ghosted = ghosted
if ghosted:
self.setPen(CurrentTheme.GHOSTED_CONFIGURE_PEN)
self.setBrush(CurrentTheme.GHOSTED_CONFIGURE_BRUSH)
else:
self.setPen(CurrentTheme.CONFIGURE_PEN)
self.setBrush(CurrentTheme.CONFIGURE_BRUSH)
def setBreakpoint(self, breakpoint):
if self.is_breakpoint != breakpoint:
if breakpoint:
self.setBreakpointAct.setText("Remove Breakpoint")
self.setBreakpointAct.setStatusTip("Remove Breakpoint")
else:
self.setBreakpointAct.setText("Set Breakpoint")
self.setBreakpointAct.setStatusTip("Set Breakpoint")
def mousePressEvent(self, event):
""" mousePressEvent(event: QMouseEvent) -> None
Open the context menu
"""
self.scene().clearSelection()
self.parentItem().setSelected(True)
self.ungrabMouse()
self.contextMenuEvent(event)
event.accept()
def contextMenuEvent(self, event):
"""contextMenuEvent(event: QGraphicsSceneContextMenuEvent) -> None
Captures context menu event.
"""
module = self.controller.current_pipeline.modules[self.moduleId]
menu = QtGui.QMenu()
menu.addAction(self.configureAct)
menu.addAction(self.annotateAct)
menu.addAction(self.viewDocumentationAct)
menu.addAction(self.changeModuleLabelAct)
menu.addAction(self.editLoopingAct)
menu.addAction(self.setBreakpointAct)
menu.addAction(self.setWatchedAct)
menu.addAction(self.runModuleAct)
menu.addAction(self.setErrorAct)
if module.is_abstraction() and not module.is_latest_version():
menu.addAction(self.upgradeAbstractionAct)
menu.exec_(event.screenPos())
def createActions(self):
""" createActions() -> None
Create actions related to context menu
"""
self.configureAct = QtGui.QAction("Edit &Configuration\tCtrl+E", self.scene())
self.configureAct.setStatusTip("Edit the Configure of the module")
QtCore.QObject.connect(self.configureAct,
QtCore.SIGNAL("triggered()"),
self.configure)
self.annotateAct = QtGui.QAction("&Annotate", self.scene())
self.annotateAct.setStatusTip("Annotate the module")
QtCore.QObject.connect(self.annotateAct,
QtCore.SIGNAL("triggered()"),
self.annotate)
self.viewDocumentationAct = QtGui.QAction("View &Documentation", self.scene())
self.viewDocumentationAct.setStatusTip("View module documentation")
QtCore.QObject.connect(self.viewDocumentationAct,
QtCore.SIGNAL("triggered()"),
self.viewDocumentation)
self.editLoopingAct = QtGui.QAction("Execution &Options", self.scene())
self.editLoopingAct.setStatusTip("Edit module execution options")
QtCore.QObject.connect(self.editLoopingAct,
QtCore.SIGNAL("triggered()"),
self.editLooping)
self.changeModuleLabelAct = QtGui.QAction("Set Module &Label...", self.scene())
self.changeModuleLabelAct.setStatusTip("Set or remove module label")
QtCore.QObject.connect(self.changeModuleLabelAct,
QtCore.SIGNAL("triggered()"),
self.changeModuleLabel)
self.setBreakpointAct = QtGui.QAction("Set &Breakpoint", self.scene())
self.setBreakpointAct.setStatusTip("Set Breakpoint")
QtCore.QObject.connect(self.setBreakpointAct,
QtCore.SIGNAL("triggered()"),
self.set_breakpoint)
self.setWatchedAct = QtGui.QAction("&Watch Module", self.scene())
self.setWatchedAct.setStatusTip("Watch Module")
QtCore.QObject.connect(self.setWatchedAct,
QtCore.SIGNAL("triggered()"),
self.set_watched)
self.runModuleAct = QtGui.QAction("&Run this module", self.scene())
self.runModuleAct.setStatusTip("Run this module")
QtCore.QObject.connect(self.runModuleAct,
QtCore.SIGNAL("triggered()"),
self.run_module)
self.setErrorAct = QtGui.QAction("Show &Error", self.scene())
self.setErrorAct.setStatusTip("Show Error")
QtCore.QObject.connect(self.setErrorAct,
QtCore.SIGNAL("triggered()"),
self.set_error)
self.upgradeAbstractionAct = QtGui.QAction("&Upgrade Module", self.scene())
self.upgradeAbstractionAct.setStatusTip("Upgrade the subworkflow module")
QtCore.QObject.connect(self.upgradeAbstractionAct,
QtCore.SIGNAL("triggered()"),
self.upgradeAbstraction)
def run_module(self):
self.scene().parent().execute(target=self.moduleId)
def set_breakpoint(self):
""" set_breakpoint() -> None
Sets this module as a breakpoint for execution
"""
if self.moduleId >= 0:
self.scene().toggle_breakpoint(self.moduleId)
self.setBreakpoint(not self.is_breakpoint)
debug = get_default_interpreter().debugger
if debug:
debug.update()
def set_watched(self):
if self.moduleId >= 0:
self.scene().toggle_watched(self.moduleId)
debug = get_default_interpreter().debugger
if debug:
debug.update()
def set_error(self):
if self.moduleId >= 0:
self.scene().print_error(self.moduleId)
def configure(self):
""" configure() -> None
Open the modal configuration window
"""
if self.moduleId>=0:
self.scene().open_configure_window(self.moduleId)
def annotate(self):
""" anotate() -> None
Open the annotations window
"""
if self.moduleId>=0:
self.scene().open_annotations_window(self.moduleId)
def viewDocumentation(self):
""" viewDocumentation() -> None
Show the documentation for the module
"""
assert self.moduleId >= 0
self.scene().open_documentation_window(self.moduleId)
def editLooping(self):
""" editLooping() -> None
Show the looping options for the module
"""
assert self.moduleId >= 0
self.scene().open_looping_window(self.moduleId)
def changeModuleLabel(self):
""" changeModuleLabel() -> None
Show the module label configuration widget
"""
if self.moduleId>=0:
self.scene().open_module_label_window(self.moduleId)
    def upgradeAbstraction(self):
        """ upgradeAbstraction() -> None
        Upgrade the abstraction to the latest version
        """
        if self.moduleId>=0:
            # Dry-run (test_only=True) first, so the user can be warned
            # before anything in the pipeline is mutated.
            (connections_preserved, missing_ports) = self.controller.upgrade_abstraction_module(self.moduleId, test_only=True)
            upgrade_fail_prompt = getattr(get_vistrails_configuration(), 'upgradeModuleFailPrompt', True)
            do_upgrade = True
            if not connections_preserved and upgrade_fail_prompt:
                # Some ports disappear in the upgraded version: list them
                # and ask the user to confirm the destructive change.
                ports_msg = '\n'.join([" - %s port '%s'" % (p[0].capitalize(), p[1]) for p in missing_ports])
                r = QtGui.QMessageBox.question(
                    getBuilderWindow(), 'Modify Pipeline',
                    'Upgrading this module will change the pipeline because the following ports no longer exist in the upgraded module:\n\n'
                    + ports_msg +
                    '\n\nIf you proceed, function calls or connections to these ports will no longer exist and the pipeline may not execute properly.\n\n'
                    'Are you sure you want to proceed?',
                    QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
                    QtGui.QMessageBox.No)
                do_upgrade = (r==QtGui.QMessageBox.Yes)
            if do_upgrade:
                # Perform the real upgrade, rebuild the scene and refresh
                # the version tree.
                self.controller.upgrade_abstraction_module(self.moduleId)
                self.scene().setupScene(self.controller.current_pipeline)
                self.controller.invalidate_version_tree()
class QGraphicsTmpConnItem(QtGui.QGraphicsLineItem):
    """Temporary straight-line item drawn while the user is dragging a
    connection from a port; it can snap to a candidate destination port.
    """
    def __init__(self, startPortItem, startPorts, zValue=1, alwaysDraw=False, parent=None):
        QtGui.QGraphicsLineItem.__init__(self, parent)
        self.startPortItem = startPortItem
        self.startPorts = startPorts
        self.setPen(CurrentTheme.CONNECTION_SELECTED_PEN)
        self.setZValue(zValue)
        self.snapPortItem = None
        # NOTE(review): __init__ sets `snapPort`, but setSnapPort assigns
        # `snapPorts` (plural) — the two attribute names never meet; confirm
        # which one external code reads.
        self.snapPort = None
        self.alwaysDraw = alwaysDraw
        self.currentPos = None

    def updateLine(self):
        # Redraw from the start port to the snapped port, or (when
        # alwaysDraw) to the current mouse position; otherwise deselect.
        if self.startPortItem is not None:
            if self.snapPortItem is not None:
                self.prepareGeometryChange()
                self.setLine(QtCore.QLineF(self.startPortItem.getPosition(),
                                           self.snapPortItem.getPosition()))
                return
            elif self.alwaysDraw and self.currentPos is not None:
                self.prepareGeometryChange()
                self.setLine(QtCore.QLineF(self.startPortItem.getPosition(),
                                           self.currentPos))
                return
        self.disconnect()

    def setStartPort(self, portItem, ports=None):
        # Rebind the dragged-from port and refresh the line.
        self.startPortItem = portItem
        self.startPorts = ports
        self.updateLine()

    def setSnapPort(self, portItem, ports=None):
        # Candidate destination port the line should stick to.
        self.snapPortItem = portItem
        self.snapPorts = ports
        self.updateLine()

    def setCurrentPos(self, pos):
        # Free endpoint used when no snap target (and alwaysDraw is set).
        self.currentPos = pos
        self.updateLine()

    def disconnect(self, override=False):
        # Deselect both endpoint port items; alwaysDraw items keep their
        # start selection unless override forces the release.
        if (not self.alwaysDraw or override) and self.startPortItem:
            self.startPortItem.setSelected(False)
        if self.snapPortItem:
            self.snapPortItem.setSelected(False)

    def hide(self):
        # Force-release selections before hiding the line item.
        self.disconnect(True)
        QtGui.QGraphicsLineItem.hide(self)

    def setConverting(self, converting):
        # Alternate pen while the connection would require a conversion.
        if converting:
            self.setPen(CurrentTheme.CONNECTION_SELECTED_CONVERTING_PEN)
        else:
            self.setPen(CurrentTheme.CONNECTION_SELECTED_PEN)
##############################################################################
# QGraphicsConnectionItem
class QGraphicsConnectionItem(QGraphicsItemInterface,
                              QtGui.QGraphicsPathItem):
    """
    QGraphicsConnectionItem is a connection shape connecting two port items
    """
    def __init__(self,
                 srcPortItem, dstPortItem,
                 srcModule, dstModule,
                 connection,
                 parent=None):
        """ QGraphicsConnectionItem(
        srcPortItem, dstPortItem: QAbstractGraphicsPortItem
        srcModule, dstModule: QGraphicsModuleItem
        connection: core.vistrail.connection.Connection
        parent: QGraphicsItem
        ) -> QGraphicsConnectionItem
        Create the shape, initialize its pen and brush accordingly
        """
        self.srcPortItem = srcPortItem
        self.dstPortItem = dstPortItem
        path = self.create_path(srcPortItem.getPosition(),
                                dstPortItem.getPosition())
        QtGui.QGraphicsPathItem.__init__(self, path, parent)
        self.setFlags(QtGui.QGraphicsItem.ItemIsSelectable)
        # Bump it slightly higher than the highest module
        self.setZValue(max(srcModule.id,
                           dstModule.id) + 0.1)
        self.connectionPen = CurrentTheme.CONNECTION_PEN
        self.connectingModules = (srcModule, dstModule)
        self.ghosted = False
        self.connection = connection
        self.id = connection.id
        # Keep a flag for changing selection state during module selection
        self.useSelectionRules = True

    def setGhosted(self, ghosted):
        """ setGhosted(ghosted: True) -> None
        Set this link to be ghosted or not
        """
        self.ghosted = ghosted
        if ghosted:
            self.connectionPen = CurrentTheme.GHOSTED_CONNECTION_PEN
        else:
            self.connectionPen = CurrentTheme.CONNECTION_PEN

    def set_custom_brush(self, brush):
        # Copy the theme pen, then recolor it with the provided brush.
        self.connectionPen = QtGui.QPen(CurrentTheme.CONNECTION_PEN)
        self.connectionPen.setBrush(brush)

    def paint(self, painter, option, widget=None):
        """ paint(painter: QPainter, option: QStyleOptionGraphicsItem,
                  widget: QWidget) -> None
        Peform actual painting of the connection
        """
        if self.isSelected():
            painter.setPen(CurrentTheme.CONNECTION_SELECTED_PEN)
        else:
            painter.setPen(self.connectionPen)
        painter.drawPath(self.path())

    def setupConnection(self, startPos=None, endPos=None):
        # Recompute the bezier path; omitted endpoints reuse the cached ones.
        path = self.create_path(startPos or self.startPos,
                                endPos or self.endPos)
        self.setPath(path)

    def create_path(self, startPos, endPos):
        # Build the cubic-bezier path between the two port centers; caches
        # startPos/endPos for later setupConnection calls.
        self.startPos = startPos
        self.endPos = endPos
        dx = abs(self.endPos.x() - self.startPos.x())
        dy = (self.startPos.y() - self.endPos.y())
        # This is reasonably ugly logic to get reasonably nice
        # curves. Here goes: we use a cubic bezier p0,p1,p2,p3, where:
        # p0 is the source port center
        # p3 is the destination port center
        # p1 is a control point displaced vertically from p0
        # p2 is a control point displaced vertically from p3
        # We want most curves to be "straight": they shouldn't bend
        # much. However, we want "inverted" connections (connections
        # that go against the natural up-down flow) to bend a little
        # as they go out of the ports. So the logic is:
        # As dy/dx -> oo, we want the control point displacement to go
        # to max(dy/2, m) (m is described below)
        # As dy/dx -> 0, we want the control point displacement to go
        # to m
        # As dy/dx -> -oo, we want the control point displacement to go
        # to max(-dy/2, m)
        # On points away from infinity, we want some smooth transition.
        # I'm using f(x) = 2/pi arctan (x) as the mapping, since:
        # f(-oo) = -1
        # f(0) = 0
        # f(oo) = 1
        # m is the monotonicity breakdown point: this is the minimum
        # displacement when dy/dx is low
        m = float(CurrentTheme.MODULE_LABEL_MARGIN[0]) * 3.0
        # positive_d and negative_d are the displacements when dy/dx is
        # large positive and large negative
        positive_d = max(m/3.0, dy / 2.0)
        negative_d = max(m/3.0, -dy / 4.0)
        if dx == 0.0:
            v = 0.0
        else:
            w = math.atan(dy/dx) * (2 / math.pi)
            if w < 0:
                w = -w
                v = w * negative_d + (1.0 - w) * m
            else:
                v = w * positive_d + (1.0 - w) * m
        displacement = QtCore.QPointF(0.0, v)
        self._control_1 = startPos + displacement
        # !!! MAC OS X BUG !!!
        # the difference between startPos.y and control_1.y cannot be
        # equal to the difference between control_2.y and endPos.y
        self._control_2 = self.endPos - displacement + QtCore.QPointF(0.0, 1e-11)
        # self._control_2 = endPos - displacement

        # draw multiple connections depending on list depth
        def diff(i, depth):
            # Horizontal offset of the i-th parallel strand out of `depth`.
            return QtCore.QPointF((5.0 + 10.0*i)/depth - 5.0, 0.0)

        srcParent = self.srcPortItem.parentItem()
        startDepth = srcParent.module.list_depth + 1 if srcParent else 1
        dstParent = self.dstPortItem.parentItem()
        endDepth = dstParent.module.list_depth + 1 if dstParent else 1
        starts = [diff(i, startDepth) for i in xrange(startDepth)]
        ends = [diff(i, endDepth) for i in xrange(endDepth)]

        # One sub-path per (start strand, end strand) pair, all sharing the
        # same two control points.
        first = True
        for start in starts:
            for end in ends:
                if first:
                    path = QtGui.QPainterPath(self.startPos + start)
                    first = False
                else:
                    path.moveTo(self.startPos + start)
                path.cubicTo(self._control_1, self._control_2,
                             self.endPos + end)
        return path

    def itemChange(self, change, value):
        """ itemChange(change: GraphicsItemChange, value: value) -> value
        If modules are selected, only allow connections between
        selected modules
        """
        # Selection rules to be used only when a module isn't forcing
        # the update
        if (change==QtGui.QGraphicsItem.ItemSelectedChange and
                self.useSelectionRules):
            # Check for a selected module
            selectedItems = self.scene().selectedItems()
            selectedModules = False
            for item in selectedItems:
                if isinstance(item, QGraphicsModuleItem):
                    selectedModules = True
                    break
            if selectedModules:
                # Don't allow a connection between selected
                # modules to be deselected
                if (self.connectingModules[0].isSelected() and
                        self.connectingModules[1].isSelected()):
                    if not value:
                        return True
                # Don't allow a connection to be selected if
                # it is not between selected modules
                else:
                    if value:
                        return False
        self.useSelectionRules = True
        return QtGui.QGraphicsPathItem.itemChange(self, change, value)
##############################################################################
# QGraphicsModuleItem
class QGraphicsModuleItem(QGraphicsItemInterface, QtGui.QGraphicsItem):
"""
QGraphicsModuleItem knows how to draw a Vistrail Module into the
pipeline view. It is usually a rectangular shape with a bold text
in the center. It also has its input/output port shapes as its
children. Another remark is that connections are also children of
module shapes. Each connection belongs to its source module
('output port' end of the connection)
"""
    def __init__(self, parent=None, scene=None):
        """ QGraphicsModuleItem(parent: QGraphicsItem, scene: QGraphicsScene)
        -> QGraphicsModuleItem
        Create the shape, initialize its pen and brush accordingly
        """
        QtGui.QGraphicsItem.__init__(self, parent, scene)
        self.paddedRect = QtCore.QRectF()
        if QtCore.QT_VERSION >= 0x40600:
            #Qt 4.6 specific flags
            self.setFlags(QtGui.QGraphicsItem.ItemIsSelectable |
                          QtGui.QGraphicsItem.ItemIsMovable |
                          QtGui.QGraphicsItem.ItemSendsGeometryChanges)
        else:
            self.setFlags(QtGui.QGraphicsItem.ItemIsSelectable |
                          QtGui.QGraphicsItem.ItemIsMovable)
        self.setAcceptHoverEvents(True)
        self.setFlag(self.ItemIsFocusable)
        self.setZValue(0)
        # Theme-derived fonts/pens/brushes cached per instance; updated by
        # setPainterState when state flags change.
        self.labelFont = CurrentTheme.MODULE_FONT
        self.labelFontMetric = CurrentTheme.MODULE_FONT_METRIC
        self.descFont = CurrentTheme.MODULE_DESC_FONT
        self.descFontMetric = CurrentTheme.MODULE_DESC_FONT_METRIC
        self.modulePen = CurrentTheme.MODULE_PEN
        self.moduleBrush = CurrentTheme.MODULE_BRUSH
        self.labelPen = CurrentTheme.MODULE_LABEL_PEN
        self.customBrush = None
        self.statusBrush = None
        # Sub-rectangles computed by computeBoundingRect.
        self.labelRect = QtCore.QRectF()
        self.descRect = QtCore.QRectF()
        self.abstRect = QtCore.QRectF()
        self.editRect = QtCore.QRectF()
        # Pipeline-model state; populated by setupModule.
        self.id = -1
        self.label = ''
        self.description = ''
        self.inputPorts = {}
        self.outputPorts = {}
        self.union_ports = {}
        self.to_union = {}
        self.port_groups = []
        self.controller = None
        self.module = None
        # Visual state flags; _needs_state_updated triggers setPainterState
        # on the next paint.
        self.ghosted = False
        self.invalid = False
        self._module_shape = None
        self._original_module_shape = None
        self.errorTrace = None
        self.is_breakpoint = False
        self._needs_state_updated = True
        self.progress = 0.0
        self.progressBrush = CurrentTheme.SUCCESS_MODULE_BRUSH
        self.connectionItems = {}
        # Function names currently drawn; kept in sync by
        # update_function_ports for change detection.
        self._cur_function_names = set()
        self.function_overview = ''
        # Inline parameter-edit widgets (optional feature).
        self.show_widgets = get_vistrails_configuration(
                ).check('showInlineParameterWidgets')
        self.function_widgets = []
        self.functions_widget = None
        self.edit_rect = QtCore.QRectF(0.0, 0.0, 0.0, 0.0)
        self.handlePositionChanges = True
def moduleHasChanged(self, core_module):
def module_text_has_changed(m1, m2):
m1_has = '__desc__' in m1.db_annotations_key_index
if m1_has != ('__desc__' in m2.db_annotations_key_index):
return True
if (m1_has and
# m2_has, since m1_has and previous condition
m1.db_annotations_key_index['__desc__'].value.strip()!=
m2.db_annotations_key_index['__desc__'].value.strip()):
return True
return False
# def module_functions_have_changed(m1, m2):
# f1_names = set(f.name for f in m1.functions)
# f2_names = set(f.name for f in m2.functions)
# return (len(f1_names ^ f2_names) > 0)
if not self.invalid and self.show_widgets != get_vistrails_configuration(
).check('showInlineParameterWidgets') and \
core_module.editable_input_ports:
return True
elif self.scenePos().x() != core_module.center.x or \
-self.scenePos().y() != core_module.center.y:
return True
elif module_text_has_changed(self.module, core_module):
return True
# elif module_functions_have_changed(self.module, core_module):
# return True
else:
# check for changed edit widgets
if not self.invalid and core_module.editable_input_ports != \
self.module.editable_input_ports:
# shape has changed so we need to recreate the module
return True
# check for deleted edit widgets
if self.functions_have_been_deleted(core_module):
return True
# Check for changed ports
# _db_name because this shows up in the profile.
cip = sorted([x.key_no_id() for x in self.inputPorts])
cop = sorted([x.key_no_id() for x in self.outputPorts])
d = PortEndPoint.Destination
s = PortEndPoint.Source
ipv = core_module.visible_input_ports
opv = core_module.visible_output_ports
new_ip = []
new_op = []
try:
new_ip = sorted([x.key_no_id()
for x in core_module.destinationPorts()
if (not x.optional or x._db_name in ipv)])
new_op = sorted([x.key_no_id()
for x in core_module.sourcePorts()
if (not x.optional or x._db_name in opv)])
except ModuleRegistryException, e:
debug.critical("MODULE REGISTRY EXCEPTION: %s" % e)
if cip <> new_ip or cop <> new_op:
return True
return False
def functions_have_been_deleted(self, core_module):
# check if a visible function has been deleted
if self.invalid:
return set()
before = self._cur_function_names
after = set(f.name for f in core_module.functions)
if self.invalid:
return before - after
else:
return (before - after) & core_module.editable_input_ports
def moduleFunctionsHaveChanged(self, core_module):
m2 = core_module
f2_names = set(f.name for f in m2.functions)
return (len(self._cur_function_names ^ f2_names) > 0)
    def update_function_ports(self, core_module=None):
        """ update_function_ports(core_module: Module or None) -> None
        Sync port-item connect/disconnect marks with core_module's
        functions. With core_module None (initial setup), all of
        self.module's functions count as added; otherwise added/deleted are
        computed against the previously tracked name set. Updates
        self._cur_function_names and rebinds self.module at the end.
        """
        if core_module is None:
            core_module = self.module
            added_functions = set(f.name for f in self.module.functions)
            deleted_functions = set()
            self._cur_function_names = copy.copy(added_functions)
        else:
            before_names = self._cur_function_names
            after_names = set(f.name for f in core_module.functions)
            added_functions = after_names - before_names
            deleted_functions = before_names - after_names
            self._cur_function_names = copy.copy(after_names)

        if len(deleted_functions) > 0:
            for function_name in deleted_functions:
                try:
                    # Look up the still-registered input spec to find the
                    # matching port item and clear its connected mark.
                    r_spec = self.module.get_port_spec(function_name, 'input')
                    f_spec = PortSpec(id=-1,
                                      name=function_name,
                                      type=PortSpec.port_type_map['input'],
                                      sigstring=r_spec.sigstring)
                    item = self.getInputPortItem(f_spec)
                    if item is not None:
                        item.disconnect()
                except Exception:
                    # Best-effort: a missing spec just leaves the port as-is.
                    pass

        if len(added_functions) > 0:
            for function in core_module.functions:
                if function.name not in added_functions:
                    continue
                # Only mark each added name once, even if it occurs on
                # several functions.
                added_functions.remove(function.name)
                f_spec = PortSpec(id=-1,
                                  name=function.name,
                                  type=PortSpec.port_type_map['input'],
                                  sigstring=function.sigstring)
                item = self.getInputPortItem(f_spec)
                if item is not None:
                    item.connect()
        self.module = core_module
def update_function_values(self, core_module):
""" Updates widget values if they have changed
"""
for function_widget in self.function_widgets:
for f in core_module.functions:
if f.name == function_widget.function.name:
value = [p.strValue for p in f.params]
if function_widget.getContents() != value:
function_widget.setContents(value)
continue
    def setProgress(self, progress):
        # Execution-progress fraction; paint() draws a partial overlay with
        # progressBrush when this is > 0.
        self.progress = progress
    def computeBoundingRect(self):
        """ computeBoundingRect() -> None
        Adjust the module size according to contents
        """
        width = 0
        height = CurrentTheme.MODULE_LABEL_MARGIN[1]
        # for each rect: Add height, adjust min width,
        # set pos to distance to top middle corner, to be adjusted when
        # paddedRect is known
        labelRect = self.labelFontMetric.boundingRect(self.label)
        labelRect.moveTo(-labelRect.width()//2, height)
        height += labelRect.height()
        padding = labelRect.adjusted(-CurrentTheme.MODULE_LABEL_MARGIN[0], 0,
                                     CurrentTheme.MODULE_LABEL_MARGIN[2], 0)
        width = max(width, padding.width())
        if self.description:
            # Description is drawn parenthesized under the label.
            self.description = '(' + self.description + ')'
            descRect = self.descFontMetric.boundingRect(self.description)
            descRect.moveTo(-descRect.width()//2, height)
            height += descRect.height()
            padding = descRect.adjusted(-CurrentTheme.MODULE_LABEL_MARGIN[0], 0,
                                        CurrentTheme.MODULE_LABEL_MARGIN[2], 0)
            width = max(width, padding.width())
        if self.edit_rect.height():
            # Inline parameter-edit area, when present.
            height += CurrentTheme.MODULE_EDIT_MARGIN[1] # top margin
            editRect = self.edit_rect
            editRect.moveTo(-editRect.width()//2, height)
            height += editRect.height()
            padding = editRect.adjusted(-CurrentTheme.MODULE_EDIT_MARGIN[0], 0,
                                        CurrentTheme.MODULE_EDIT_MARGIN[2], 0)
            width = max(width, padding.width())
            height += CurrentTheme.MODULE_EDIT_MARGIN[3] # bottom edit margin
        height += CurrentTheme.MODULE_LABEL_MARGIN[3] # bottom margin
        # move to final position: paddedRect is centered on the origin, and
        # the sub-rects are shifted from top-relative to center-relative.
        self.paddedRect = QtCore.QRectF(-width/2, -height/2, width, height)
        labelRect.translate(0, -height//2)
        self.labelRect = labelRect
        if self.description:
            descRect.translate(0, -height//2)
            self.descRect = descRect
        if self.edit_rect.height():
            editRect.translate(0, -height//2)
            self.editRect = editRect
        # Small corner rect where the '!' outdated-abstraction marker goes.
        self.abstRect = QtCore.QRectF(
            self.paddedRect.left(),
            -self.paddedRect.top()-CurrentTheme.MODULE_LABEL_MARGIN[3],
            CurrentTheme.MODULE_LABEL_MARGIN[0],
            CurrentTheme.MODULE_LABEL_MARGIN[3])
def boundingRect(self):
""" boundingRect() -> QRectF
Returns the bounding box of the module
"""
try:
r = self.paddedRect.adjusted(-2, -2, 2, 2)
except Exception:
r = QtCore.QRectF()
return r
    def setPainterState(self, is_selected=None):
        """ setPainterState(is_selected: bool or None) -> None
        Recompute modulePen/labelPen/moduleBrush from the current state
        flags. Priority for the pen: selected > breakpoint > ghosted >
        invalid > (abstraction/group/plain); for the brush: status >
        custom > breakpoint > ghosted > invalid > default.
        """
        if is_selected is None:
            is_selected = self.isSelected()
        if is_selected:
            self.modulePen = CurrentTheme.MODULE_SELECTED_PEN
            self.labelPen = CurrentTheme.MODULE_LABEL_SELECTED_PEN
        elif self.is_breakpoint:
            self.modulePen = CurrentTheme.BREAKPOINT_MODULE_PEN
            self.labelPen = CurrentTheme.BREAKPOINT_MODULE_LABEL_PEN
        elif self.ghosted:
            self.modulePen = CurrentTheme.GHOSTED_MODULE_PEN
            self.labelPen = CurrentTheme.GHOSTED_MODULE_LABEL_PEN
        # do not show as invalid in search mode
        elif self.invalid and not (self.controller and self.controller.search):
            self.modulePen = CurrentTheme.INVALID_MODULE_PEN
            self.labelPen = CurrentTheme.INVALID_MODULE_LABEL_PEN
        else:
            self.labelPen = CurrentTheme.MODULE_LABEL_PEN
            if self.module is not None and self.module.is_abstraction():
                self.modulePen = CurrentTheme.ABSTRACTION_PEN
            elif self.module is not None and self.module.is_group():
                self.modulePen = CurrentTheme.GROUP_PEN
            else:
                self.modulePen = CurrentTheme.MODULE_PEN

        if self.statusBrush:
            self.moduleBrush = self.statusBrush
        elif self.customBrush:
            self.moduleBrush = self.customBrush
        elif self.is_breakpoint:
            self.moduleBrush = CurrentTheme.BREAKPOINT_MODULE_BRUSH
        elif self.ghosted:
            self.moduleBrush = CurrentTheme.GHOSTED_MODULE_BRUSH
        # do not show as invalid in search mode
        elif self.invalid and not (self.controller and self.controller.search):
            self.moduleBrush = CurrentTheme.INVALID_MODULE_BRUSH
        else:
            self.moduleBrush = CurrentTheme.MODULE_BRUSH
def setGhosted(self, ghosted):
""" setGhosted(ghosted: True) -> None
Set this link to be ghosted or not
"""
if self.ghosted != ghosted:
self.ghosted = ghosted
for port in self.inputPorts.itervalues():
port.setGhosted(ghosted)
for port in self.outputPorts.itervalues():
port.setGhosted(ghosted)
self._needs_state_updated = True
# if ghosted:
# self.modulePen = CurrentTheme.GHOSTED_MODULE_PEN
# self.moduleBrush = CurrentTheme.GHOSTED_MODULE_BRUSH
# self.labelPen = CurrentTheme.GHOSTED_MODULE_LABEL_PEN
# else:
# self.modulePen = CurrentTheme.MODULE_PEN
# self.moduleBrush = CurrentTheme.MODULE_BRUSH
# self.labelPen = CurrentTheme.MODULE_LABEL_PEN
def setInvalid(self, invalid):
if self.invalid != invalid:
self.invalid = invalid
for port in self.inputPorts.itervalues():
port.setInvalid(invalid)
for port in self.outputPorts.itervalues():
port.setInvalid(invalid)
self._needs_state_updated = True
    def setBreakpoint(self, breakpoint):
        """ setBreakpoint(breakpoint: bool) -> None
        Toggle the breakpoint state, swapping in the breakpoint fringe
        shape (and stashing the original shape so it can be restored).
        """
        if self.is_breakpoint != breakpoint:
            self.is_breakpoint = breakpoint
            if breakpoint:
                # Stash the current shape before replacing it.
                self._original_module_shape = self._module_shape
                self.set_module_shape(self.create_shape_from_fringe(
                    CurrentTheme.BREAKPOINT_FRINGE))
            else:
                self._module_shape = self._original_module_shape
            self._needs_state_updated = True
    def set_module_shape(self, module_shape=None):
        # Install a custom polygon shape (or clear it with None); a real
        # shape also replaces paddedRect with the shape's bounding box.
        self._module_shape = module_shape
        if self._module_shape is not None:
            self.paddedRect = self._module_shape.boundingRect()
    def set_custom_brush(self, brush):
        # Custom fill (e.g. the package's module color from setupModule);
        # picked up by setPainterState on the next paint.
        self.customBrush = brush
        self._needs_state_updated = True
    def paint(self, painter, option, widget=None):
        """ paint(painter: QPainter, option: QStyleOptionGraphicsItem,
                  widget: QWidget) -> None
        Peform actual painting of the module
        """
        if self.progress>0.0:
            # Left-anchored sub-rect covering `progress` of the width.
            width = (self.progress-1.0)*self.paddedRect.width()
            progressRect = self.paddedRect.adjusted(0, 0, width, 0)

        if self._needs_state_updated:
            self.setPainterState()
            self._needs_state_updated = False

        # draw module shape
        painter.setBrush(self.moduleBrush)
        painter.setPen(self.modulePen)
        if self._module_shape:
            painter.drawPolygon(self._module_shape)
            if self.progress>0.0:
                # Clip to the progress rect and repaint with progressBrush.
                painter.setClipRect(progressRect)
                painter.setBrush(self.progressBrush)
                painter.drawPolygon(self._module_shape)
                painter.setClipping(False)
            painter.drawPolyline(self._module_shape)
        else:
            painter.fillRect(self.paddedRect, painter.brush())
            if self.progress>0.0:
                painter.fillRect(progressRect, self.progressBrush)
            painter.setBrush(QtCore.Qt.NoBrush)
            painter.drawRect(self.paddedRect)

        # draw module labels
        painter.setPen(self.labelPen)
        painter.setFont(self.labelFont)
        painter.drawText(self.labelRect.adjusted(-10,-10,10,10), QtCore.Qt.AlignCenter, self.label)
        if self.module.is_abstraction() and not self.module.is_latest_version():
            # '!' marker: the abstraction is outdated.
            painter.drawText(self.abstRect, QtCore.Qt.AlignCenter, '!')
        if self.descRect:
            painter.setFont(self.descFont)
            painter.drawText(self.descRect.adjusted(-10,-10,10,10), QtCore.Qt.AlignCenter,
                             self.description)
    def paintToPixmap(self, scale_x, scale_y):
        """ paintToPixmap(scale_x: float, scale_y: float) -> QPixmap
        Render the module (shape, labels and all port items) into a new
        semi-transparent pixmap scaled by (scale_x, scale_y).
        """
        bounding_rect = self.paddedRect.adjusted(-6,-6,6,6)
        center_x = (bounding_rect.width() / 2.0) #* m.m11()
        center_y = (bounding_rect.height() / 2.0) #* m.m22()
        pixmap = QtGui.QPixmap(int(bounding_rect.width() * scale_x),
                               int(bounding_rect.height() * scale_y))
        pixmap.fill(QtGui.QColor(255,255,255,0))
        painter = QtGui.QPainter(pixmap)
        painter.setOpacity(0.5)
        painter.scale(scale_x, scale_y)
        painter.setRenderHints(QtGui.QPainter.Antialiasing |
                               QtGui.QPainter.SmoothPixmapTransform)
        # Module geometry is origin-centered, so translate to the pixmap
        # center before painting.
        painter.translate(center_x, center_y)
        self.paint(painter, QtGui.QStyleOptionGraphicsItem())
        # Port items paint in their own local coordinates; re-apply each
        # item's translation manually.
        for port in self.inputPorts.itervalues():
            m = port.matrix()
            painter.save()
            painter.translate(m.dx(), m.dy())
            port.paint(painter, QtGui.QStyleOptionGraphicsItem())
            painter.restore()
        for port in self.outputPorts.itervalues():
            m = port.matrix()
            painter.save()
            painter.translate(m.dx(), m.dy())
            port.paint(painter, QtGui.QStyleOptionGraphicsItem())
            painter.restore()
        painter.end()
        return pixmap
def adjustWidthToMin(self, minWidth):
""" adjustWidthToContain(minWidth: int) -> None
Resize the module width to at least be minWidth
"""
if minWidth>self.paddedRect.width():
diff = minWidth - self.paddedRect.width() + 1
self.paddedRect.adjust(-diff/2, 0, diff/2, 0)
    def setupModule(self, module, read_only=False):
        """ setupModule(module: Module) -> None
        Set up the item to reflect the info in 'module'
        """
        # Update module info and visual
        self.id = module.id
        self.setZValue(float(self.id))
        self.module = module
        self.union_ports = module.unionPorts() if module.is_valid else {}
        # reverse map: port name -> list of ports in its union group
        self.to_union = dict((p.name, self.union_ports[union])
                             for union, p_list in self.union_ports.items()
                             for p in p_list)
        self.center = copy.copy(module.center)
        # '__desc__' annotation overrides the label; the registry label
        # then becomes the parenthesized description.
        if '__desc__' in module.db_annotations_key_index:
            self.label = module.get_annotation_by_key('__desc__').value.strip()
            self.description = module.label
        else:
            self.label = module.label
            self.description = ''

        # Show inline edit widgets
        if get_vistrails_configuration().check('showInlineParameterWidgets') and \
                module.is_valid and not read_only and module.editable_input_ports:
            self.functions_widget = QGraphicsFunctionsWidget(self.module,
                    self, module.editable_input_ports == set(['value']))
            set_lod(0.5, self.functions_widget)
            self.functions_widget.function_changed.connect(self.function_changed)
            self.function_widgets = self.functions_widget.function_widgets
            self.edit_rect = self.functions_widget.boundingRect()

        self.setToolTip(self.description)
        self.computeBoundingRect()
        # Scene y-axis is flipped relative to pipeline coordinates.
        self.setPos(module.center.x, -module.center.y)

        # Check to see which ports will be shown on the screen
        # setupModule is in a hotpath, performance-wise, which is the
        # reason for the strange ._db_name lookup - we're
        # avoiding property calls
        inputPorts = []
        self.inputPorts = {}
        visibleOptionalInputPorts = []
        self.optionalInputPorts = []

        outputPorts = []
        self.outputPorts = {}
        visibleOptionalOutputPorts = []
        self.optionalOutputPorts = []

        error = None
        if module.is_valid:
            try:
                for p in module.destinationPorts():
                    if not p.optional:
                        inputPorts.append(p)
                    elif p.name in module.visible_input_ports:
                        # add all in union if one is marked as visible
                        for up in self.to_union.get(p.name, [p]):
                            if up not in visibleOptionalInputPorts:
                                visibleOptionalInputPorts.append(up)
                    else:
                        # Make sure it was not added with union
                        if p.name not in visibleOptionalInputPorts:
                            self.optionalInputPorts.append(p)
                inputPorts += visibleOptionalInputPorts

                for p in module.sourcePorts():
                    if not p.optional:
                        outputPorts.append(p)
                    elif p.name in module.visible_output_ports:
                        visibleOptionalOutputPorts.append(p)
                    else:
                        self.optionalOutputPorts.append(p)
                outputPorts += visibleOptionalOutputPorts
            except ModuleRegistryException, e:
                error = e

        # group unions while keeping order
        pos_to_name = {}
        name_to_ports = {}
        for port_spec in inputPorts:
            name = port_spec.union or port_spec.name
            if name not in pos_to_name.itervalues():
                pos = (max(pos_to_name) + 1) if pos_to_name else 0
                pos_to_name[pos] = name
                name_to_ports[name] = []
            name_to_ports[name].append(port_spec)
        self.port_groups = [name_to_ports[name]
                            for _, name in sorted(pos_to_name.iteritems())]

        # Local dictionary lookups are faster than global ones..
        t = CurrentTheme
        (mpm0, mpm1, mpm2, mpm3) = t.MODULE_PORT_MARGIN

        # Adjust the width to fit all ports
        maxPortCount = max(len(self.port_groups), len(outputPorts))
        minWidth = (mpm0 +
                    t.PORT_WIDTH*maxPortCount +
                    t.MODULE_PORT_SPACE*(maxPortCount-1) +
                    mpm2 +
                    t.MODULE_PORT_PADDED_SPACE)
        self.adjustWidthToMin(minWidth)

        # Input ports fill left-to-right from the top-left corner; output
        # ports fill right-to-left from the bottom-right corner.
        self.nextInputPortPos = [self.paddedRect.x() + mpm0,
                                 self.paddedRect.y() + mpm1]
        self.nextOutputPortPos = [self.paddedRect.right() - \
                                      t.PORT_WIDTH - mpm2,
                                  self.paddedRect.bottom() - \
                                      t.PORT_HEIGHT - mpm3]

        # Update input ports
        [x, y] = self.nextInputPortPos
        for ports in self.port_groups:
            # One shared item per union group; all member ports map to it.
            item = self.createPortItem(ports[0], x, y,
                                       ports if len(ports)>1 else [])
            for port in ports:
                self.inputPorts[port] = item
            x += t.PORT_WIDTH + t.MODULE_PORT_SPACE
        self.nextInputPortPos = [x,y]

        # Update output ports
        [x, y] = self.nextOutputPortPos
        for port in reversed(outputPorts):
            self.outputPorts[port] = self.createPortItem(port, x, y)
            x -= t.PORT_WIDTH + t.MODULE_PORT_SPACE
        self.nextOutputPortPos = [x, y]

        # Add a configure button
        y = self.paddedRect.y() + mpm1
        x = (self.paddedRect.right() - t.CONFIGURE_WIDTH
             - mpm2)
        self.createConfigureItem(x, y)

        if module.is_valid:
            try:
                # update module color and shape
                descriptor = module.module_descriptor
                # c = registry.get_module_color(module.package, module.name,
                #                               module.namespace)
                c = descriptor.module_color()
                if c:
                    ic = [int(cl*255) for cl in c]
                    b = QtGui.QBrush(QtGui.QColor(ic[0], ic[1], ic[2]))
                    self.set_custom_brush(b)
                # fringe = registry.get_module_fringe(module.package,
                #                                     module.name,
                #                                     module.namespace)
                fringe = descriptor.module_fringe()
                if fringe:
                    self.set_module_shape(self.create_shape_from_fringe(fringe))
            except ModuleRegistryException, e:
                error = e
            if self.functions_widget:
                self.functions_widget.setPos(self.editRect.topLeft())
            self.update_function_ports()
        else:
            self.setInvalid(True)
    def function_changed(self, name, values):
        """ Called when a function value has changed by the inline edit widget
        """
        controller = self.scene().controller
        module = controller.current_pipeline.modules[self.module.id]
        controller.update_function(module, name, values)

        # The edit may add/remove function ports; resync if so.
        if self.moduleFunctionsHaveChanged(module):
            self.update_function_ports(module)

        if self.isSelected():
            # Re-fetch: update_function may have replaced the module object.
            from vistrails.gui.vistrails_window import _app
            module = controller.current_pipeline.modules[self.module.id]
            _app.notify('module_changed', module)
    def create_shape_from_fringe(self, fringe):
        """ create_shape_from_fringe(fringe: (left, right)) -> QPolygonF
        Build the module's polygon from a (left, right) pair of fringe
        point lists given in unit coordinates; points are scaled by the
        module height and anchored to paddedRect's bottom corners.
        """
        left_fringe, right_fringe = fringe
        # Ensure both fringes start at (0,0) and end at (0,1) so the
        # polygon closes onto the rectangle's corners.
        if left_fringe[0] != (0.0, 0.0):
            left_fringe = [(0.0, 0.0)] + left_fringe
        if left_fringe[-1] != (0.0, 1.0):
            left_fringe = left_fringe + [(0.0, 1.0)]

        if right_fringe[0] != (0.0, 0.0):
            right_fringe = [(0.0, 0.0)] + right_fringe
        if right_fringe[-1] != (0.0, 1.0):
            right_fringe = right_fringe + [(0.0, 1.0)]

        P = QtCore.QPointF
        module_shape = QtGui.QPolygonF()
        height = self.paddedRect.height()

        # right side of shape
        for (px, py) in right_fringe:
            p = P(px, -py)
            p *= height
            p += self.paddedRect.bottomRight()
            module_shape.append(p)

        # left side of shape
        for (px, py) in reversed(left_fringe):
            p = P(px, -py)
            p *= height
            p += self.paddedRect.bottomLeft()
            module_shape.append(p)
        # close polygon
        module_shape.append(module_shape[0])
        return module_shape
    def createPortItem(self, port, x, y, union_group=None):
        """ createPortItem(port: Port, x: int, y: int) -> QGraphicsPortItem
        Create a item from the port spec. The item class is chosen from
        port.shape(): a "triangleN"/"diamond"/"circle"/"ellipse" string, an
        iterable of points (polygon), or the default rectangle.
        """
        # pts = [(0,2),(0,-2), (2,None), (-2,None),
        #        (None,-2), (None,2), (-2,0), (2,0)]
        # pts = [(0,0.2), (0, 0.8), (0.2, None), (0.8, None),
        #        (None, 0.8), (None, 0.2), (0.8,0), (0.2, 0)]
        # portShape = QGraphicsPortPolygonItem(x, y, self.ghosted, self,
        #                                      port.optional, port.min_conns,
        #                                      port.max_conns, points=pts)
        # portShape = QGraphicsPortTriangleItem(x, y, self.ghosted, self,
        #                                       port.optional, port.min_conns,
        #                                       port.max_conns, angle=0)
        # portShape = QGraphicsPortDiamondItem(x, y, self.ghosted, self,
        #                                      port.optional, port.min_conns,
        #                                      port.max_conns)
        if not union_group and port.union and port.union in self.union_ports:
            union_group = self.union_ports[port.union]
        port_klass = QGraphicsPortRectItem
        kwargs = {}
        if union_group:
            kwargs['union_group'] = union_group
        shape = port.shape()
        if shape is not None:
            if isinstance(shape, basestring):
                if shape.startswith("triangle"):
                    port_klass = QGraphicsPortTriangleItem
                    try:
                        # Optional rotation suffix, e.g. "triangle90".
                        kwargs['angle'] = int(shape[8:])
                    except ValueError:
                        kwargs['angle'] = 0
                elif shape == "diamond":
                    port_klass = QGraphicsPortDiamondItem
                elif shape == "circle" or shape == "ellipse":
                    port_klass = QGraphicsPortEllipseItem
            else:
                # Any iterable of points is treated as a custom polygon.
                try:
                    iter(shape)
                except TypeError:
                    pass
                else:
                    port_klass = QGraphicsPortPolygonItem
                    kwargs['points'] = shape
        portShape = port_klass(port, x, y, self.ghosted, self, **kwargs)
        # portShape = QGraphicsPortRectItem(port, x, y, self.ghosted, self)
        portShape.controller = self.controller
        portShape.port = port
        # do not show as invalid in search mode
        if not port.is_valid and not (self.controller and self.controller.search):
            portShape.setInvalid(True)
        return portShape
def createConfigureItem(self, x, y):
""" createConfigureItem(x: int, y: int) -> QGraphicsConfigureItem
Create a item from the configure spec
"""
if self.module.is_valid:
configureShape = QGraphicsConfigureItem(self, self.scene())
configureShape.controller = self.controller
configureShape.moduleId = self.id
configureShape.setGhosted(self.ghosted)
configureShape.setBreakpoint(self.module.is_breakpoint)
configureShape.translate(x, y)
return configureShape
return None
    def getPortItem(self, port, port_dict=None):
        """ getPortItem(port: PortSpec, port_dict: {PortSpec: item}) -> item
        Return the existing port item matching `port`, via the registry when
        the module is valid, else by direct (type, name, sigstring) compare.
        Returns None when nothing matches.
        """
        # NOTE(review): the port_dict=None default is never usable — both
        # loops call port_dict.iteritems() unconditionally, so calling with
        # the default would raise AttributeError. Callers always pass a
        # dict; confirm before relying on the default.
        # print 'looking for port', port.name, port.type, port_type
        registry = get_module_registry()
        # if we haven't validated pipeline, don't try to use the registry
        if self.module.is_valid:
            # check enabled ports
            for (p, item) in port_dict.iteritems():
                if registry.port_and_port_spec_match(port, p):
                    return item
        # FIXME Raise Error!
        # else not available for some reason, just draw port and raise error?
        # can also decide to use Variant/Module types
        # or use types from the signature
        # port_descs = port.descriptors()
        # first, check if we've already added the port
        for (p, item) in port_dict.iteritems():
            if (PortSpec.port_type_map.inverse[port.type] == p.type and
                    port.name == p.name and
                    port.sigstring == p.sigstring):
                return item
        return None
    def buildPortItem(self, port, port_dict, optional_ports, visible_ports,
                      next_pos, next_op, default_sig):
        """buildPortItem(port: Port,
                         port_dict: {PortSpec: QGraphicsPortItem},
                         optional_ports: [PortSpec],
                         visible_ports: set(string),
                         next_pos: [float, float],
                         next_op: operator (operator.add, operator.sub),
                         default_sig: str
                         ) -> QPointF
        Return the scene position of a port matched 'port' in port_dict

        NOTE: `next_pos` is mutated in place -- the x coordinate is
        advanced (or retreated, per `next_op`) after each port is created,
        so the caller's cursor stays in sync.
        """
        registry = get_module_registry()
        # check optional ports: a matching optional spec gets a real item,
        # and every union sibling of `port` is mapped to the same shape
        if self.module.is_valid:
            for p in optional_ports:
                if registry.port_and_port_spec_match(port, p):
                    item = self.createPortItem(p, *next_pos)
                    for union_port in self.to_union.get(port.name, [port]):
                        visible_ports.add(union_port.name)
                        port_dict[union_port] = item
                    next_pos[0] = next_op(next_pos[0],
                                          (CurrentTheme.PORT_WIDTH +
                                           CurrentTheme.MODULE_PORT_SPACE))
                    return item
        # no spec matched: synthesize a PortSpec so the port can still be
        # drawn; it is flagged invalid unless we are in search mode
        if not port.signature or port.signature == '()':
            # or len(port_descs) == 0:
            sigstring = default_sig
        else:
            sigstring = port.signature
        port_type = PortSpec.port_type_map.inverse[port.type]
        # build a short signature for the tooltip: keep only the class
        # names from each 'pkg:Class' element of the signature
        names = []
        for sig in sigstring[1:-1].split(','):
            k = sig.split(':', 2)
            if len(k) < 2:
                names.append(k[0])
            else:
                names.append(k[1])
        short_sigstring = '(' + ','.join(names) + ')'
        tooltip = "%s port %s\n%s" % (port_type.capitalize(),
                                      port.name,
                                      short_sigstring)
        new_spec = PortSpec(id=-1,
                            name=port.name,
                            type=port_type,
                            sigstring=sigstring,
                            tooltip=tooltip,
                            optional=True)
        item = self.createPortItem(new_spec, *next_pos)
        # do not show as invalid in search mode
        if not (self.controller and self.controller.search):
            item.setInvalid(True)
        port_dict[new_spec] = item
        next_pos[0] = next_op(next_pos[0],
                              (CurrentTheme.PORT_WIDTH +
                               CurrentTheme.MODULE_PORT_SPACE))
        return item
def getInputPortItem(self, port, do_create=False):
item = self.getPortItem(port, self.inputPorts)
if not item and do_create:
item = self.buildPortItem(port,
self.inputPorts,
self.optionalInputPorts,
self.module.visible_input_ports,
self.nextInputPortPos,
operator.add,
'(%s:Variant)' % \
get_vistrails_basic_pkg_id())
return item
def getOutputPortItem(self, port, do_create=False):
item = self.getPortItem(port, self.outputPorts)
if not item and do_create:
item = self.buildPortItem(port,
self.outputPorts,
self.optionalOutputPorts,
self.module.visible_output_ports,
self.nextOutputPortPos,
operator.sub,
'(%s:Module)' % \
get_vistrails_basic_pkg_id())
return item
def addConnectionItem(self, item):
self.connectionItems[item.connection.id] = item
def removeConnectionItem(self, item):
if item.connectingModules[0].id == self.module.id:
if item.srcPortItem is not None:
item.srcPortItem.disconnect()
if item.connectingModules[1].id == self.module.id:
if item.dstPortItem is not None:
item.dstPortItem.disconnect()
del self.connectionItems[item.connection.id]
# returns a dictionary of (id, connection) key-value pairs!
def dependingConnectionItems(self):
return self.connectionItems
# this is a generator that yields (connection, is_source [bool]) pairs
def dependingConnectionItemsWithDir(self):
for item in self.connectionItems.itervalues():
if item.connectingModules[0].id == self.id:
yield (item, False)
else:
yield (item, True)
def keyPressEvent(self, event):
""" keyPressEvent(event: QKeyEvent) -> None
Capture 'Del', 'Backspace' for deleting modules.
Ctrl+C, Ctrl+V, Ctrl+A for copy, paste and select all
"""
if (self.scene().controller and
event.key() in [QtCore.Qt.Key_Backspace, QtCore.Qt.Key_Delete]):
if not self.scene().read_only_mode:
self.scene().delete_selected_items()
else:
QtGui.QGraphicsItem.keyPressEvent(self, event)
def mouseReleaseEvent(self, event):
super(QGraphicsModuleItem, self).mouseReleaseEvent(event)
if not self.controller.changed and self.controller.has_move_actions():
self.controller.set_changed(True)
    def hoverEnterEvent(self, event):
        """While Ctrl is held, pop up a tooltip item listing this module's
        set functions (name + truncated parameter values) just below the
        module shape.  The tooltip item lives on the scene and is torn
        down by hoverLeaveEvent().
        """
        if QtGui.QApplication.keyboardModifiers() == QtCore.Qt.ControlModifier:
            scene = self.scene()
            # only one tooltip at a time: drop any previous one
            if scene.function_tooltip:
                scene.removeItem(scene.function_tooltip)
            module = scene.controller.current_pipeline.modules[self.module.id]
            if module.functions:
                function_overview = []
                for f in module.functions:
                    if len(f.params)>1:
                        params = ', '.join([p.strValue for p in f.params])
                    elif len(f.params)>0:
                        params = f.params[0].strValue
                    else:
                        params = ''
                    # truncate long values so the tooltip stays readable
                    if len(params)>100:
                        params = params[:97] + '...'
                    function_template = "<b>%s(</b>%s<b>)</b>"
                    function_overview.append(function_template % (f.name, params))
                template = '<html><p style="background:#FFFFFF;">%s</p></html>'
                self.function_overview = template % '<br/>'.join(function_overview)
            else:
                self.function_overview = ''
            scene.function_tooltip = QtGui.QGraphicsTextItem()
            # anchor the tooltip at the module's bottom-left corner
            pos = self.paddedRect.bottomLeft()+self.pos()
            scene.function_tooltip.setPos(pos)
            scene.function_tooltip.setAcceptHoverEvents(False)
            scene.addItem(scene.function_tooltip)
            scene.function_tooltip.setHtml(self.function_overview)
            # keep it above everything else on the scene
            scene.function_tooltip.setZValue(1000000)
        return QtGui.QGraphicsItem.hoverEnterEvent(self, event)
def hoverLeaveEvent(self, event):
if self.scene().function_tooltip:
self.scene().removeItem(self.scene().function_tooltip)
self.scene().function_tooltip = None
return QtGui.QGraphicsItem.hoverLeaveEvent(self, event)
    def itemChange(self, change, value):
        """ itemChange(change: GraphicsItemChange, value: value) -> value
        Capture move event to also move the connections. Also unselect any
        connections between unselected modules

        Two Qt notifications are handled:
        - ItemPositionChange: drag connection endpoints along with the
          module so they stay attached;
        - ItemSelectedHasChanged: keep the selection a proper subgraph
          (connections selected iff both endpoint modules are), then emit
          the 'moduleSelected' signal.
        """
        # Move connections with modules
        if change==QtGui.QGraphicsItem.ItemPositionChange and \
                self.handlePositionChanges:
            oldPos = self.pos()
            newPos = value
            dis = newPos - oldPos
            for connectionItem, s in self.dependingConnectionItemsWithDir():
                # If both modules are selected, both of them will
                # trigger itemChange events.
                # If we just add 'dis' to both connection endpoints, we'll
                # end up moving each endpoint twice.
                # But we also don't want to call setupConnection twice on these
                # connections, so we ignore one of the endpoint dependencies and
                # perform the change on the other one
                (srcModule, dstModule) = connectionItem.connectingModules
                start_s = srcModule.isSelected()
                end_s = dstModule.isSelected()
                if start_s and end_s and s:
                    continue
                start = connectionItem.startPos
                end = connectionItem.endPos
                if start_s: start += dis
                if end_s: end += dis
                connectionItem.prepareGeometryChange()
                connectionItem.setupConnection(start, end)
        # Do not allow lone connections to be selected with modules.
        # Also autoselect connections between selected modules. Thus the
        # selection is always the subgraph
        elif change==QtGui.QGraphicsItem.ItemSelectedHasChanged:
            # Unselect any connections between modules that are not selected
            for item in self.scene().selectedItems():
                if isinstance(item,QGraphicsConnectionItem):
                    (srcModule, dstModule) = item.connectingModules
                    if (not srcModule.isSelected() or
                        not dstModule.isSelected()):
                        # useSelectionRules is cleared so the connection's
                        # own handler doesn't fight this change
                        item.useSelectionRules = False
                        item.setSelected(False)
            # Handle connections from self
            for item in self.dependingConnectionItems().itervalues():
                # Select any connections between self and other selected modules
                (srcModule, dstModule) = item.connectingModules
                if value:
                    if (srcModule==self and dstModule.isSelected() or
                        dstModule==self and srcModule.isSelected()):
                        # Because we are setting a state variable in the
                        # connection, do not make the change unless it is
                        # actually going to be performed
                        if not item.isSelected():
                            item.useSelectionRules = False
                            item.setSelected(True)
                # Unselect any connections between self and other modules
                else:
                    if item.isSelected():
                        item.useSelectionRules = False
                        item.setSelected(False)
            # Capture only selected modules + or - self for selection signal
            selectedItems = [m for m in self.scene().selectedItems()
                             if isinstance(m, QGraphicsModuleItem)]
            #print "selectedItems", selectedItems
            selectedId = -1
            if len(selectedItems)==1:
                selectedId = selectedItems[0].id
            self.scene().emit(QtCore.SIGNAL('moduleSelected'),
                              selectedId, selectedItems)
            self._needs_state_updated = True
        return QtGui.QGraphicsItem.itemChange(self, change, value)
def choose_converter(converters, parent=None):
    """Ask the user to pick one Converter among `converters`.

    Returns the single element directly when only one is available,
    the chosen converter when the dialog is accepted, or None when it
    is cancelled.
    """
    if len(converters) == 1:
        return converters[0]

    class _ConverterEntry(QtGui.QListWidgetItem):
        # list row that remembers which converter descriptor it stands for
        def __init__(self, conv):
            QtGui.QListWidgetItem.__init__(self, conv.name)
            self.converter = conv

    dlg = QtGui.QDialog(parent)
    dlg.setWindowTitle("Automatic conversion")
    vbox = QtGui.QVBoxLayout()
    blurb = QtGui.QLabel(
        "You are connecting two incompatible ports, however there are "
        "matching Converter modules. Please choose which Converter should "
        "be inserted on this connection:")
    blurb.setWordWrap(True)
    vbox.addWidget(blurb)
    chooser = QtGui.QListWidget()
    chooser.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
    for conv in sorted(converters, key=lambda c: c.name):
        chooser.addItem(_ConverterEntry(conv))
    vbox.addWidget(chooser)
    buttons = QtGui.QDialogButtonBox(
        QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel,
        QtCore.Qt.Horizontal)
    QtCore.QObject.connect(buttons, QtCore.SIGNAL('accepted()'),
                           dlg, QtCore.SLOT('accept()'))
    QtCore.QObject.connect(buttons, QtCore.SIGNAL('rejected()'),
                           dlg, QtCore.SLOT('reject()'))
    vbox.addWidget(buttons)
    # OK stays disabled until the user actually selects a row
    ok_button = buttons.button(QtGui.QDialogButtonBox.Ok)
    ok_button.setEnabled(False)
    QtCore.QObject.connect(
        chooser, QtCore.SIGNAL('itemSelectionChanged()'),
        lambda: ok_button.setEnabled(True))
    dlg.setLayout(vbox)
    if dlg.exec_() != QtGui.QDialog.Accepted:
        return None
    return chooser.selectedItems()[0].converter
class StacktracePopup(QtGui.QDialog):
    """Dialog that displays a module error's stack trace read-only."""
    def __init__(self, errorTrace='', parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.resize(700, 400)
        self.setWindowTitle('Module Error')
        vbox = QtGui.QVBoxLayout()
        self.setLayout(vbox)
        # unwrapped, read-only text area holding the raw traceback
        trace_view = QtGui.QTextEdit('')
        trace_view.insertPlainText(errorTrace)
        trace_view.setReadOnly(True)
        trace_view.setLineWrapMode(QtGui.QTextEdit.NoWrap)
        vbox.addWidget(trace_view)
        close_btn = QtGui.QPushButton('Close', self)
        close_btn.setFixedWidth(100)
        vbox.addWidget(close_btn)
        self.connect(close_btn, QtCore.SIGNAL('clicked()'),
                     self, QtCore.SLOT('close()'))
##############################################################################
# QPipelineScene
class QPipelineScene(QInteractiveGraphicsScene):
"""
QPipelineScene inherits from QInteractiveGraphicsScene to keep track of the
pipeline scenes, i.e. modules, connections, selection
"""
def __init__(self, parent=None):
""" QPipelineScene(parent: QWidget) -> QPipelineScene
Initialize the graphics scene with no shapes
"""
QInteractiveGraphicsScene.__init__(self, parent)
self.setBackgroundBrush(CurrentTheme.PIPELINE_VIEW_BACKGROUND_BRUSH)
self.setSceneRect(QtCore.QRectF(-5000, -5000, 10000, 10000))
self.controller = None
self.modules = {}
self.connections = {}
self.noUpdate = False
self.installEventFilter(self)
self.pipeline_tab = None
# These are the IDs currently present in the scene, used to update it
# faster when switching pipelines via setupScene()
self._old_module_ids = set()
self._old_connection_ids = set()
self._var_selected_port = None
self.read_only_mode = False
self.current_pipeline = None
self.current_version = -1
self.skip_update = False
self.function_tooltip = None
self.tmp_module_item = None
self.tmp_input_conn = None
self.tmp_output_conn = None
def _get_pipeline(self):
warnings.warn("Use of deprecated field 'pipeline' replaced by "
"'current_pipeline'",
category=VistrailsDeprecation)
return self.current_pipeline
pipeline = property(_get_pipeline)
def addModule(self, module, moduleBrush=None):
""" addModule(module: Module, moduleBrush: QBrush) -> QGraphicsModuleItem
Add a module to the scene
"""
moduleItem = QGraphicsModuleItem(None)
if self.controller and self.controller.search:
moduleQuery = (self.controller.current_version, module)
matched = self.controller.search.matchModule(*moduleQuery)
moduleItem.setGhosted(not matched)
moduleItem.controller = self.controller
moduleItem.setupModule(module, self.read_only_mode)
moduleItem.setBreakpoint(module.is_breakpoint)
if moduleBrush:
moduleItem.set_custom_brush(moduleBrush)
self.addItem(moduleItem)
self.modules[module.id] = moduleItem
self._old_module_ids.add(module.id)
# Hide vistrail variable modules
if module.is_vistrail_var():
moduleItem.hide()
return moduleItem
    def addConnection(self, connection, connectionBrush=None):
        """ addConnection(connection: Connection) -> QGraphicsConnectionItem
        Add a connection to the scene.

        Endpoint port shapes are created on demand (do_create=True), the
        new item is registered with both endpoint modules, and finally
        both modules' connections are re-laid-out via update_connections().
        """
        srcModule = self.modules[connection.source.moduleId]
        dstModule = self.modules[connection.destination.moduleId]
        srcPortItem = srcModule.getOutputPortItem(connection.source, True)
        dstPortItem = dstModule.getInputPortItem(connection.destination, True)
        connectionItem = QGraphicsConnectionItem(srcPortItem, dstPortItem,
                                                 srcModule, dstModule,
                                                 connection)
        # mark both port shapes as occupied before the item goes live
        srcPortItem.connect()
        dstPortItem.connect()
        connectionItem.id = connection.id
        connectionItem.connection = connection
        if connectionBrush:
            connectionItem.set_custom_brush(connectionBrush)
        self.addItem(connectionItem)
        self.connections[connection.id] = connectionItem
        self._old_connection_ids.add(connection.id)
        srcModule.addConnectionItem(connectionItem)
        dstModule.addConnectionItem(connectionItem)
        # connections feeding from a vistrail variable stay hidden, but
        # the destination port remembers the variable's uuid
        if srcModule.module.is_vistrail_var():
            connectionItem.hide()
            var_uuid = srcModule.module.get_vistrail_var()
            dstPortItem.addVistrailVar(var_uuid)
        self.update_connections([srcModule.id, dstModule.id])
        return connectionItem
def selected_subgraph(self):
"""Returns the subgraph containing the selected modules and its
mutual connections.
"""
items = self.selectedItems()
modules = [x.id
for x in items
if isinstance(x, QGraphicsModuleItem)]
return self.controller.current_pipeline.graph.subgraph(modules)
# def contextMenuEvent(self, event):
# selectedItems = self.selectedItems()
# if len(selectedItems) == 0:
# return QInteractiveGraphicsScene.contextMenuEvent(self, event)
# else:
# self._context_menu.exec_(event.screenPos())
def clear(self):
""" clear() -> None
Clear the whole scene
"""
self.modules = {}
self.connections = {}
self._old_module_ids = set()
self._old_connection_ids = set()
self.unselect_all()
self.clearItems()
def remove_module(self, m_id):
"""remove_module(m_id): None
Removes module from scene, updating appropriate data structures.
"""
core_module = self.modules[m_id].module
if not core_module.has_annotation_with_key('__vistrail_var__'):
self.removeItem(self.modules[m_id])
del self.modules[m_id]
self._old_module_ids.remove(m_id)
    def remove_connection(self, c_id):
        """remove_connection(c_id): None
        Removes connection from scene, updating appropriate data structures.

        Endpoint modules release the item first, then the shape is removed
        and both modules' remaining connections are re-laid-out.
        NOTE(review): only the *source* module's vistrail-var annotation
        guards removeItem() here -- confirm connections are never hidden
        on account of the destination.
        """
        # if c_id in self.connections:
        connItem = self.connections[c_id]
        (srcModule, dstModule) = connItem.connectingModules
        srcModule.removeConnectionItem(connItem)
        dstModule.removeConnectionItem(connItem)
        if not srcModule.module.has_annotation_with_key('__vistrail_var__'):
            self.removeItem(self.connections[c_id])
        del self.connections[c_id]
        self._old_connection_ids.remove(c_id)
        self.update_connections([srcModule.id, dstModule.id])
    def recreate_module(self, pipeline, m_id):
        """recreate_module(pipeline, m_id): None
        Recreates a module on the scene.

        The module shape and all its attached connections are removed and
        rebuilt from `pipeline`; the previous selection state is restored.
        """
        selected = self.modules[m_id].isSelected()
        # snapshot attached connection ids before tearing anything down
        depending_connections = \
            [c_id for c_id in self.modules[m_id].dependingConnectionItems()]
        # when configuring a python source, maybe connections were deleted
        # but are not in the current pipeline. So we need to check the depending
        # connections of the module just before the configure.
        for c_id in depending_connections:
            self.remove_connection(c_id)
        self.remove_module(m_id)
        self.addModule(pipeline.modules[m_id])
        for c_id in depending_connections:
            # only add back those connections that are in the pipeline
            if c_id in pipeline.connections:
                self.addConnection(pipeline.connections[c_id])
        if selected:
            self.modules[m_id].setSelected(True)
def update_module_functions(self, pipeline, m_id):
""" Used by ports_pane to update modules
"""
module = pipeline.modules[m_id]
if self.modules[m_id].functions_have_been_deleted(module):
self.recreate_module(pipeline, m_id)
return
self.modules[m_id].update_function_values(module)
if self.modules[m_id].moduleFunctionsHaveChanged(module):
self.modules[m_id].update_function_ports(module)
    def setupScene(self, pipeline):
        """ setupScene(pipeline: Pipeline) -> None
        Construct the scene to view a pipeline.

        Rather than rebuilding everything, the scene is diffed against the
        previously-shown pipeline using the cached _old_module_ids /
        _old_connection_ids sets: stale shapes are removed, new ones are
        added, and shared ones are updated in place.
        """
        old_pipeline = self.current_pipeline
        self.current_pipeline = pipeline
        if self.noUpdate: return
        if (pipeline is None or
            (old_pipeline and not old_pipeline.is_valid) or
            (pipeline and not pipeline.is_valid)):
            # an invalid pipeline can't be diffed reliably; start clean
            self.clear()
        if not pipeline: return
        needReset = len(self.items())==0
        try:
            # suppress intermediate relayouts until the diff is complete
            self.skip_update = True
            new_modules = set(pipeline.modules)
            modules_to_be_added = new_modules - self._old_module_ids
            modules_to_be_deleted = self._old_module_ids - new_modules
            common_modules = new_modules.intersection(self._old_module_ids)
            new_connections = set(pipeline.connections)
            connections_to_be_added = new_connections - self._old_connection_ids
            connections_to_be_deleted = self._old_connection_ids - new_connections
            common_connections = new_connections.intersection(self._old_connection_ids)
            # Check if connections to be added require
            # optional ports in modules to be visible
            # check all connections because the visible flag
            # may have been cleared
            for c_id in new_connections:
                connection = pipeline.connections[c_id]
                smid = connection.source.moduleId
                s = connection.source.spec
                if s and s.optional:
                    smm = pipeline.modules[smid]
                    smm.visible_output_ports.add(s.name)
                dmid = connection.destination.moduleId
                d = connection.destination.spec
                if d and d.optional:
                    dmm = pipeline.modules[dmid]
                    dmm.visible_input_ports.add(d.name)
            # remove old connection shapes (before their modules)
            for c_id in connections_to_be_deleted:
                self.remove_connection(c_id)
            # remove old module shapes
            for m_id in modules_to_be_deleted:
                self.remove_module(m_id)
            selected_modules = []
            # create new module shapes
            for m_id in modules_to_be_added:
                self.addModule(pipeline.modules[m_id])
                if self.modules[m_id].isSelected():
                    selected_modules.append(m_id)
            moved = set()
            # Update common modules
            for m_id in common_modules:
                tm_item = self.modules[m_id]
                nm = pipeline.modules[m_id]
                if tm_item.moduleHasChanged(nm):
                    # structural change: rebuild the whole shape
                    self.recreate_module(pipeline, m_id)
                    tm_item = self.modules[m_id]
                elif tm_item.moduleFunctionsHaveChanged(nm):
                    tm_item.update_function_ports(pipeline.modules[m_id])
                tm_item.update_function_values(pipeline.modules[m_id])
                if tm_item.isSelected():
                    selected_modules.append(m_id)
                if self.controller and self.controller.search:
                    # refresh ghosting against the active search query
                    moduleQuery = (self.controller.current_version, nm)
                    matched = \
                        self.controller.search.matchModule(*moduleQuery)
                    tm_item.setGhosted(not matched)
                else:
                    tm_item.setGhosted(False)
                tm_item.setBreakpoint(nm.is_breakpoint)
            # create new connection shapes
            for c_id in connections_to_be_added:
                self.addConnection(pipeline.connections[c_id])
            # Update common connections
            for c_id in common_connections:
                connection = pipeline.connections[c_id]
                pip_c = self.connections[c_id]
                pip_c.connectingModules = (self.modules[connection.source.moduleId],
                                           self.modules[connection.destination.moduleId])
                (srcModule, dstModule) = pip_c.connectingModules
            self._old_module_ids = new_modules
            self._old_connection_ids = new_connections
            self.unselect_all()
            self.reset_module_colors()
            for m_id in selected_modules:
                self.modules[m_id].setSelected(True)
        except ModuleRegistryException, e:
            # a module's package is missing: report and bail to version 0
            debug.print_exc()
            views = self.views()
            assert len(views) > 0
            debug.critical("Missing package/module",
                ("Package '%s' is missing (or module '%s' is not present " +
                 "in that package)") % (e._identifier, e._name))
            self.clear()
            self.controller.change_selected_version(0)
        finally:
            self.skip_update = False
            self.update_connections()
        if needReset and len(self.items())>0:
            self.fitToAllViews()
def findModuleUnder(self, pos):
""" findModuleUnder(pos: QPoint) -> QGraphicsItem
Search all items under pos and return the top-most module item if any
"""
for item in self.items(pos):
if isinstance(item, QGraphicsModuleItem):
return item
return None
def findModulesNear(self, pos, where_mult):
rect = QtCore.QRectF(pos.x()-50+25*where_mult,
(pos.y()-50) + 50*where_mult,
100, 100)
### code to display target rectangle
#
# if where_mult < 0:
# if not hasattr(self, 'tmp_rect'):
# self.tmp_rect = QtGui.QGraphicsRectItem(rect)
# self.tmp_rect.setPen(QtGui.QColor("red"))
# self.addItem(self.tmp_rect)
# else:
# self.tmp_rect.setRect(rect)
# else:
# if not hasattr(self, 'tmp_rect2'):
# self.tmp_rect2 = QtGui.QGraphicsRectItem(rect)
# self.tmp_rect2.setPen(QtGui.QColor("red"))
# self.addItem(self.tmp_rect2)
# else:
# self.tmp_rect2.setRect(rect)
closest_item = None
min_dis = None
for item in self.items(rect):
if isinstance(item, QGraphicsModuleItem) and item.isVisible():
vector = item.scenePos() - pos
dis = vector.x() * vector.x() + vector.y() * vector.y()
if min_dis is None or dis < min_dis:
min_dis = dis
closest_item = item
return closest_item
def findPortsNear(self, pos, where_mult):
width = self.tmp_module_item.paddedRect.width() + 50
rect = QtCore.QRectF(pos.x()-width/2+25*where_mult,
pos.y()-50 + 50*where_mult,
width, 100)
### code to display target rectangle
#
# rect = QtCore.QRectF(pos.x()-50+25*where_mult,
# (pos.y()-50) + 50*where_mult,
# 100, 100)
# if where_mult < 0:
# if not hasattr(self, 'tmp_rect'):
# self.tmp_rect = QtGui.QGraphicsRectItem(rect)
# self.tmp_rect.setPen(QtGui.QColor("red"))
# self.addItem(self.tmp_rect)
# else:
# self.tmp_rect.setRect(rect)
# else:
# if not hasattr(self, 'tmp_rect2'):
# self.tmp_rect2 = QtGui.QGraphicsRectItem(rect)
# self.tmp_rect2.setPen(QtGui.QColor("red"))
# self.addItem(self.tmp_rect2)
# else:
# self.tmp_rect2.setRect(rect)
# if not hasattr(self, 'tmp_rect'):
# self.tmp_rect = QtGui.QGraphicsRectItem(rect)
# self.tmp_rect.setPen(QtGui.QColor("red"))
# self.addItem(self.tmp_rect)
# else:
# self.tmp_rect.setRect(rect)
near_ports = []
for item in self.items(rect):
if isinstance(item, QAbstractGraphicsPortItem) and item.isVisible():
near_ports.append(item)
return near_ports
    def findPortMatch(self, output_ports, input_ports, x_trans=0,
                      fixed_out_pos=None, fixed_in_pos=None,
                      allow_conversion=False, out_converters=None):
        """findPortMatch(output_ports: list(QAbstractGraphicsPortItem),
                         input_ports: list(QAbstractGraphicsPortItem),
                         x_trans: int,
                         fixed_out_pos: QPointF | None,
                         fixed_in_pos: QPointF | None,
                         ) -> tuple(QAbstractGraphicsPortItem,
                                    QAbstractGraphicsPortItem)
        findPortMatch returns a port from output_ports and a port from
        input_ports where the ports are compatible and the distance
        between these ports is minimal with respect to compatible
        ports

        If allow_conversion is True, we also search for ports that are not
        directly matched but can be connected if a Converter module is used. In
        this case, we extend the optional 'out_converters' list with the
        possible Converters' ModuleDescriptors.

        Returns a 3-tuple: (output item, input item, list of matched
        input PortSpecs -- several when a union matches) -- all None when
        nothing matches.
        """
        reg = get_module_registry()
        result = (None, None, None)
        min_dis = None
        selected_convs = None
        for o_item in output_ports:
            if o_item.invalid:
                continue
            for i_item in input_ports:
                if i_item.invalid:
                    continue
                # Check all union types
                # add all matches to iports
                # check without converters first
                for iport in i_item.union_group or [i_item.port]:
                    if reg.ports_can_connect(o_item.port, iport):
                        if fixed_out_pos is not None:
                            out_pos = fixed_out_pos
                        else:
                            out_pos = o_item.getPosition()
                        if fixed_in_pos is not None:
                            in_pos = fixed_in_pos
                        else:
                            in_pos = i_item.getPosition()
                        # squared distance, with an x offset of x_trans
                        vector = (out_pos - in_pos)
                        dis = (vector.x()-x_trans)*(vector.x()-x_trans) + \
                            vector.y()*vector.y()
                        if (result[0] is not None and result[0] == o_item and
                                result[1] == i_item):
                            # additional match in same union
                            result[2].append(iport)
                        elif result[1] is None or dis < min_dis:
                            min_dis = dis
                            result = (o_item, i_item, [iport])
                # a direct match for this pair beats any converter match
                if result[0] == o_item and result[1] == i_item:
                    continue
                convs = []
                # this selects only the first match in a union
                for iport in i_item.union_group or [i_item.port]:
                    if reg.ports_can_connect(o_item.port, iport,
                                             allow_conversion=True,
                                             out_converters=convs):
                        if fixed_out_pos is not None:
                            out_pos = fixed_out_pos
                        else:
                            out_pos = o_item.getPosition()
                        if fixed_in_pos is not None:
                            in_pos = fixed_in_pos
                        else:
                            in_pos = i_item.getPosition()
                        vector = (out_pos - in_pos)
                        dis = (vector.x()-x_trans)*(vector.x()-x_trans) + \
                            vector.y()*vector.y()
                        if result[0] is None or dis < min_dis:
                            min_dis = dis
                            result = (o_item, i_item, [iport])
                            selected_convs = convs
        # expose the converters for the winning match, if the caller asked
        if selected_convs and out_converters is not None:
            out_converters.extend(selected_convs)
        return result
    def updateTmpConnection(self, pos, tmp_connection_item, tmp_module_ports,
                            where_mult, order_f):
        """Refresh (creating on demand) the temporary snap connection shown
        while a module is dragged near `pos`; hides it when no compatible
        port pair is found.  Returns the (possibly new) temp item.
        """
        near_ports = self.findPortsNear(pos, where_mult)
        if len(near_ports) > 0:
            # order_f arranges (near_ports, tmp_module_ports) into the
            # (outputs, inputs) order findPortMatch expects
            (src_item, dst_item, dst_ports) = \
                self.findPortMatch(*order_f([near_ports,tmp_module_ports]),
                                   x_trans=-50)
            if src_item is not None:
                if tmp_connection_item is None:
                    tmp_connection_item = QGraphicsTmpConnItem(dst_item, dst_ports, 1000)
                    self.addItem(tmp_connection_item)
                # We are assuming the first view is the real pipeline view
                v = self.views()[0]
                tmp_connection_item.setStartPort(dst_item, dst_ports)
                tmp_connection_item.setSnapPort(src_item)
                dst_type_str = ' or '.join([('List of ' * dst_port.depth +
                                             dst_port.short_sigstring)
                                            for dst_port in dst_ports])
                # NOTE(review): `dst_port` below leaks out of the list
                # comprehension above (Python 2 scoping) and refers to the
                # last element of dst_ports; under Python 3 this would be a
                # NameError -- confirm intended port for the tooltip label.
                tooltip = "%s %s\n  -> %s %s" % (src_item.port.name,
                                                 'List of ' * src_item.port.depth +
                                                 src_item.port.short_sigstring,
                                                 dst_port.union or dst_port.name,
                                                 dst_type_str)
                QtGui.QToolTip.showText(v.mapToGlobal(
                                        v.mapFromScene((dst_item.getPosition() +
                                                        src_item.getPosition())/2.0)),
                                        tooltip)
                tmp_connection_item.show()
                return tmp_connection_item
        if tmp_connection_item is not None:
            tmp_connection_item.hide()
        QtGui.QToolTip.hideText()
        return tmp_connection_item
def updateTmpInputConnection(self, pos):
self.tmp_input_conn = \
self.updateTmpConnection(pos, self.tmp_input_conn,
set(self.tmp_module_item.inputPorts.values()),
-1, lambda x: x)
def updateTmpOutputConnection(self, pos):
self.tmp_output_conn = \
self.updateTmpConnection(pos, self.tmp_output_conn,
set(self.tmp_module_item.outputPorts.values()),
1, reversed)
def dragEnterEvent(self, event):
""" dragEnterEvent(event: QDragEnterEvent) -> None
Set to accept drops from the module palette
"""
if (self.controller and (
isinstance(event.source(), QModuleTreeWidget) or
isinstance(event.source(), QDragVariableLabel))):
data = event.mimeData()
if not self.read_only_mode:
if hasattr(data, 'items'):
if self.tmp_module_item and \
get_vistrails_configuration().check('autoConnect'):
self.tmp_module_item.setPos(event.scenePos())
self.updateTmpInputConnection(event.scenePos())
self.updateTmpOutputConnection(event.scenePos())
event.accept()
return
elif hasattr(data, 'variableData'):
event.accept()
return
# Ignore if not accepted and returned by this point
event.ignore()
    def dragMoveEvent(self, event):
        """ dragMoveEvent(event: QDragMoveEvent) -> None
        Set to accept drag move event from the module palette.

        Module drags move the placeholder and its snap connections;
        variable drags highlight the nearest compatible input port.
        """
        if (self.controller and (
                isinstance(event.source(), QModuleTreeWidget) or
                isinstance(event.source(), QDragVariableLabel))):
            data = event.mimeData()
            if hasattr(data, 'items') and not self.read_only_mode:
                if self.tmp_module_item and \
                        get_vistrails_configuration().check('autoConnect'):
                    self.tmp_module_item.setPos(event.scenePos())
                    self.updateTmpInputConnection(event.scenePos())
                    self.updateTmpOutputConnection(event.scenePos())
                event.accept()
                return
            elif hasattr(data, 'variableData'):
                # Find nearest suitable port
                snapModule = self.findModuleUnder(event.scenePos())
                nearest_port = None
                if snapModule is not None:
                    # throwaway port item standing in for the variable's
                    # output so findPortMatch can do the pairing
                    tmp_port = QAbstractGraphicsPortItem(None, 0, 0)
                    tmp_port.port = data.variableData[0]
                    (_, nearest_port, iport) = \
                        self.findPortMatch([tmp_port],
                                           set(snapModule.inputPorts.values()),
                                           fixed_out_pos=event.scenePos())
                    del tmp_port
                # Unhighlight previous nearest port
                if self._var_selected_port is not None:
                    self._var_selected_port.setSelected(False)
                self._var_selected_port = nearest_port
                # Highlight new nearest port
                if nearest_port is not None:
                    nearest_port.setSelected(True)
                    QtGui.QToolTip.showText(event.screenPos(), nearest_port.toolTip())
                    event.accept()
                    return
                else:
                    QtGui.QToolTip.hideText()
        # Ignore if not accepted and returned by this point
        if not systemType in ['Darwin']:
            # Workaround: On a Mac, dropEvent isn't called if dragMoveEvent is ignored
            event.ignore()
    def dragLeaveEvent(self, event):
        """Tear down the drag feedback when the cursor leaves the scene:
        temporary snap connections for module drags, port highlight for
        variable drags.
        """
        if (self.controller and isinstance(event.source(), QModuleTreeWidget)):
            if self.tmp_output_conn:
                self.tmp_output_conn.disconnect(True)
                self.removeItem(self.tmp_output_conn)
                self.tmp_output_conn = None
            if self.tmp_input_conn:
                self.tmp_input_conn.disconnect(True)
                self.removeItem(self.tmp_input_conn)
                self.tmp_input_conn = None
        elif isinstance(event.source(), QDragVariableLabel):
            data = event.mimeData()
            if hasattr(data, 'variableData'):
                if self._var_selected_port is not None:
                    # NOTE(review): resets the pen while dragMoveEvent uses
                    # setSelected(False) to unhighlight -- confirm the two
                    # paths are meant to differ.
                    self._var_selected_port.setPen(CurrentTheme.PORT_PEN)
                    self._var_selected_port = None
                event.accept()
def unselect_all(self):
self.clearSelection()
if self.pipeline_tab:
self.pipeline_tab.moduleSelected(-1)
    def createConnectionFromTmp(self, tmp_connection_item, module,
                                start_is_src=False):
        """Commit a temporary snap connection as a real pipeline
        connection ending at `module`.

        Rejects connections that would create a cycle (returns False).
        When the matched union offers several port types, the user picks
        one from a popup menu (None aborts).  Incompatible ports get a
        Converter module spliced in between as a single undoable action.
        Returns True on success.
        """
        def select_type(ports):
            # popup menu letting the user choose among union port specs;
            # returns the chosen PortSpec or None
            selected_port_spec = [None]
            def add_selector(ps):
                def triggered(*args, **kwargs):
                    selected_port_spec[0] = ps
                return triggered
            menu = QtGui.QMenu(self.parent())
            for port_spec in ports:
                type_name = port_spec.type_name()
                label = 'Select destination port type: ' + type_name
                act = QtGui.QAction(label, self.parent())
                act.setStatusTip(label)
                act.triggered.connect(add_selector(port_spec))
                menu.addAction(act)
            menu.exec_(QtGui.QCursor.pos())
            return selected_port_spec[0]

        # resolve which end of the temp item is the source and which port
        # spec the destination should use
        if start_is_src:
            src_port_item = tmp_connection_item.startPortItem
            dst_port_item = tmp_connection_item.snapPortItem
            if len(tmp_connection_item.snapPorts) > 1:
                dst_port = select_type(tmp_connection_item.snapPorts)
                if not dst_port:
                    return
            else:
                dst_port = tmp_connection_item.snapPorts[0]
        else:
            src_port_item = tmp_connection_item.snapPortItem
            dst_port_item = tmp_connection_item.startPortItem
            if len(tmp_connection_item.startPorts) > 1:
                dst_port = select_type(tmp_connection_item.startPorts)
                if not dst_port:
                    return
            else:
                dst_port = tmp_connection_item.startPorts[0]

        # the freshly-dropped module has id < 0; substitute the real id
        if src_port_item.parentItem().id < 0 or start_is_src:
            src_module_id = module.id
            dst_module_id = dst_port_item.parentItem().id
        else:
            src_module_id = src_port_item.parentItem().id
            dst_module_id = module.id

        # refuse the connection if it would make the pipeline graph cyclic
        graph = copy.copy(self.controller.current_pipeline.graph)
        graph.add_edge(src_module_id, dst_module_id)
        try:
            graph.dfs(raise_if_cyclic=True)
        except GraphContainsCycles:
            return False

        reg = get_module_registry()
        if reg.ports_can_connect(src_port_item.port, dst_port):
            # Direct connection
            conn = self.controller.add_connection(src_module_id,
                                                  src_port_item.port,
                                                  dst_module_id,
                                                  dst_port)
            self.addConnection(conn)
        else:
            # Add a converter module
            converters = reg.get_converters(src_port_item.port.descriptors(),
                                            dst_port.descriptors())
            converter = choose_converter(converters)
            if converter is None:
                return
            # drop the converter halfway between the two ports (pipeline y
            # axis is inverted with respect to the scene's)
            src_pos = src_port_item.getPosition()
            dst_pos = dst_port_item.getPosition()
            mod_x = (src_pos.x() + dst_pos.x())/2.0
            mod_y = (src_pos.y() + dst_pos.y())/2.0
            mod = self.controller.create_module_from_descriptor(
                    converter,
                    x=mod_x,
                    y=-mod_y)
            conn1 = self.controller.create_connection(
                    self.controller.current_pipeline.modules[src_module_id],
                    src_port_item.port,
                    mod,
                    'in_value')
            conn2 = self.controller.create_connection(
                    mod, 'out_value',
                    self.controller.current_pipeline.modules[dst_module_id],
                    dst_port)
            # bundle module + both connections into one undoable action
            operations = [('add', mod), ('add', conn1), ('add', conn2)]
            action = create_action(operations)
            self.controller.add_new_action(action)
            self.controller.perform_action(action)

            graphics_mod = self.addModule(mod)
            graphics_mod.update()
            self.addConnection(conn1)
            self.addConnection(conn2)

        return True
def addConnectionFromTmp(self, tmp_connection_item, module, start_is_src):
result = self.createConnectionFromTmp(tmp_connection_item, module,
start_is_src)
self.reset_module_colors()
self._old_connection_ids = \
set(self.controller.current_pipeline.connections)
self._old_module_ids = set(self.controller.current_pipeline.modules)
return result
    def add_module_event(self, event, data):
        """Adds a new module from a drop event.

        Creates the module via the controller at the drop position
        (scene y is negated: pipeline coordinates are y-up), draws its
        graphics item, optionally auto-connects it through the temporary
        connection items, and selects it.
        """
        item = data.items[0]
        self.controller.reset_pipeline_view = False
        self.noUpdate = True
        # -1 means "not an abstraction"; replaced below when it is one.
        internal_version = -1L
        reg = get_module_registry()
        if reg.is_abstraction(item.descriptor):
            internal_version = item.descriptor.module.internal_version
        adder = self.controller.add_module_from_descriptor
        module = adder(item.descriptor,
                       event.scenePos().x(),
                       -event.scenePos().y(),
                       internal_version)
        self.reset_module_colors()
        graphics_item = self.addModule(module)
        graphics_item.update()
        if get_vistrails_configuration().check('autoConnect'):
            # NOTE(review): createConnectionFromTmp is called here without
            # the start_is_src argument used elsewhere -- presumably it has
            # a default; confirm against its definition.
            if self.tmp_output_conn is not None:
                if self.tmp_output_conn.isVisible():
                    self.createConnectionFromTmp(self.tmp_output_conn, module)
                self.tmp_output_conn.disconnect()
                self.removeItem(self.tmp_output_conn)
                self.tmp_output_conn = None
            if self.tmp_input_conn is not None:
                if self.tmp_input_conn.isVisible():
                    self.createConnectionFromTmp(self.tmp_input_conn, module)
                self.tmp_input_conn.disconnect()
                self.removeItem(self.tmp_input_conn)
                self.tmp_input_conn = None
        self.unselect_all()
        # Change selection
        graphics_item.setSelected(True)
        # controller changed pipeline: update ids
        self._old_connection_ids = \
            set(self.controller.current_pipeline.connections)
        self._old_module_ids = set(self.controller.current_pipeline.modules)
        # We are assuming the first view is the real pipeline view
        self.views()[0].setFocus()
        self.noUpdate = False
def add_tmp_module(self, desc):
self.noUpdate = True
self.tmp_module_item = QGraphicsModuleItem(None)
module = Module(id=-1,
name=desc.name,
package=desc.identifier,
location=Location(id=-1,x=0.0,y=0.0),
namespace=desc.namespace,
)
module.is_valid = True
self.tmp_module_item.setupModule(module, True)
self.addItem(self.tmp_module_item)
self.tmp_module_item.hide()
self.tmp_module_item.update()
self.noUpdate = False
return self.tmp_module_item
    def update_connections(self, modules=None):
        """Recompute list depths for the (valid) current pipeline and
        re-route every connection item.

        No-op while ``self.skip_update`` is set (batch deletions).
        """
        if self.skip_update:
            return
        if (self.controller.current_pipeline and
            self.controller.current_pipeline.is_valid):
            try:
                depths = \
                    self.controller.current_pipeline.mark_list_depth(modules)
            except GraphContainsCycles:
                # Pipeline is invalid, we don't really care that the depths are
                # not right
                pass
            else:
                for module_id, list_depth in depths:
                    if module_id in self.modules:
                        self.modules[module_id].module.list_depth = list_depth
        # Re-route all connection items to follow module positions.
        for c in self.connections.itervalues():
            c.setupConnection()
def delete_tmp_module(self):
if self.tmp_module_item is not None:
self.removeItem(self.tmp_module_item)
self.tmp_module_item = None
    def dropEvent(self, event):
        """ dropEvent(event: QDragMoveEvent) -> None
        Accept drop event to add a new module.

        Two drop sources are handled: the module palette tree (adds a
        module) and a dragged vistrail variable label (connects the
        variable to the port under the cursor).  Anything else is
        ignored.
        """
        if (self.controller and (
                isinstance(event.source(), QModuleTreeWidget) or
                isinstance(event.source(), QDragVariableLabel))):
            data = event.mimeData()
            if hasattr(data, 'items') and not self.read_only_mode and \
                    self.controller.current_pipeline == self.current_pipeline:
                # Palette drop: exactly one descriptor is expected.
                assert len(data.items) == 1
                self.add_module_event(event, data)
                event.accept()
                return
            elif hasattr(data, 'variableData'):
                if self._var_selected_port is not None:
                    # Unhighlight selected port and get var data
                    self._var_selected_port.setSelected(False)
                    output_portspec = data.variableData[0]
                    var_uuid = data.variableData[1]
                    var_name = data.variableData[2]
                    descriptor = output_portspec.descriptors()[0]
                    input_module = self._var_selected_port.parentItem().module
                    #input_portspec = self._var_selected_port.port
                    input_port = self._var_selected_port.port.name
                    # Scene y is negated: pipeline coordinates are y-up.
                    m_pos_x = event.scenePos().x()
                    m_pos_y = -event.scenePos().y()
                    (new_conn, new_module) = \
                        self.controller.connect_vistrail_var(descriptor,
                                                             var_uuid,
                                                             input_module,
                                                             input_port,
                                                             m_pos_x,
                                                             m_pos_y)
                    if new_module is not None:
                        self.addModule(new_module)
                    if new_conn is not None:
                        self.addConnection(new_conn)
                    else:
                        # No connection created: the variable was already
                        # attached to this port.
                        msg = 'Vistrail Variable "%s" is already connected' \
                            ' to this port.' % var_name
                        QtGui.QMessageBox.information(None,
                                                      "Already Connected",
                                                      msg)
                    event.accept()
                    return
        # Ignore if not accepted and returned by this point
        event.ignore()
def delete_selected_items(self):
selectedItems = self.selectedItems()
if len(selectedItems)>0:
modules = []
module_ids = []
connection_ids = []
for it in selectedItems:
if isinstance(it, QGraphicsModuleItem):
modules.append(it)
module_ids.append(it.id)
elif isinstance(it, QGraphicsConnectionItem):
connection_ids.append(it.id)
if len(modules)>0:
# add connected vistrail variables
vvms, vvcs = \
self.controller.get_connected_vistrail_vars(module_ids, True)
for vvm in vvms:
if vvm not in module_ids:
modules.append(self.modules[vvm])
module_ids.append(vvm)
for vvc in vvcs:
if vvc not in connection_ids:
connection_ids.append(vvc)
self.noUpdate = True
dep_connection_ids = set()
for m in modules:
dep_connection_ids.update(
m.dependingConnectionItems().iterkeys())
# remove_connection updates the dependency list on the
# other side of connections, cannot use removeItem
try:
skip_update = True
for c_id in dep_connection_ids:
self.remove_connection(c_id)
for m_id in module_ids:
self.remove_module(m_id)
self.controller.delete_module_list(module_ids)
finally:
self.skip_update = False
self.update_connections()
self.updateSceneBoundingRect()
self.reset_module_colors()
self.update()
self.noUpdate = False
# Notify that no module is selected
self.emit(QtCore.SIGNAL('moduleSelected'),
-1, selectedItems)
# Current pipeline changed, so we need to change the
# _old_*_ids. However, remove_module takes care of
# module ids, and the for loop above takes care of
# connection ids. So we don't need to call anything.
else:
try:
self.skip_update = False
for c_id in connection_ids:
self.remove_connection(c_id)
self.controller.reset_pipeline_view = False
self.controller.delete_connection_list(connection_ids)
self.reset_module_colors()
self.controller.reset_pipeline_view = True
finally:
self.skip_update = False
self.update_connections()
# Current pipeline changed, so we need to change the
# _old_connection_ids. However, remove_connection
# above takes care of connection ids, so we don't need
# to call anything.
def keyPressEvent(self, event):
""" keyPressEvent(event: QKeyEvent) -> None
Capture 'Del', 'Backspace' for deleting modules.
Ctrl+C, Ctrl+V, Ctrl+A for copy, paste and select all
"""
if (not self.focusItem() and self.controller and
event.key() in [QtCore.Qt.Key_Backspace, QtCore.Qt.Key_Delete]):
if not self.read_only_mode:
self.delete_selected_items()
else:
QInteractiveGraphicsScene.keyPressEvent(self, event)
# super(QPipelineScene, self).keyPressEvent(event)
def get_selected_module_ids(self):
module_ids = []
for item in self.selectedItems():
if isinstance(item, QGraphicsModuleItem):
module_ids.append(item.module.id)
return module_ids
    def get_selected_item_ids(self, dangling=False):
        """get_selected_item_ids(self, dangling: bool) ->
        (module_ids : list, connection_ids : list)

        Returns the list of selected modules and the connections
        between them.  If dangling is true, it includes connections
        for which only one end point is selected, otherwise it only
        includes connections where both end points are selected.
        Returns None when nothing is selected.
        """
        selectedItems = self.selectedItems()
        if len(selectedItems) <= 0:
            return None
        # Dicts are used as ordered "sets" (py2 has no set literal habit
        # here); values are a dummy 1.
        connection_ids = {}
        module_ids = {}
        for item in selectedItems:
            if isinstance(item, QGraphicsModuleItem):
                module_ids[item.module.id] = 1
        # Add connected vistrail variables
        vvms, vvcs = \
            self.controller.get_connected_vistrail_vars(module_ids)
        for vvm in vvms:
            module_ids[vvm] = 1
        for vvc in vvcs:
            connection_ids[vvc] = 1
        for item in selectedItems:
            if isinstance(item, QGraphicsModuleItem):
                for connItem in item.dependingConnectionItems().itervalues():
                    conn = connItem.connection
                    if not conn.id in connection_ids:
                        source_exists = conn.sourceId in module_ids
                        dest_exists = conn.destinationId in module_ids
                        if source_exists and dest_exists:
                            connection_ids[conn.id] = 1
                        elif dangling and (source_exists or dest_exists):
                            connection_ids[conn.id] = 1
        return (module_ids.keys(), connection_ids.keys())
def group(self):
items = self.get_selected_item_ids(True)
if items is not None:
# self.clear()
self.controller.create_group(items[0], items[1])
self.setupScene(self.controller.current_pipeline)
def ungroup(self):
items = self.get_selected_item_ids(True)
if items is not None:
# self.clear()
self.controller.ungroup_set(items[0])
self.setupScene(self.controller.current_pipeline)
    def layout(self):
        """Auto-layout the selected modules via the controller."""
        if len(self.items()) <= 0:
            return
        def _func(module):
            # Size callback handed to the layout algorithm.
            # NOTE(review): self.modules appears to be keyed by module id
            # everywhere else in this class; indexing by module.shortname
            # here looks suspicious -- confirm it matches the dict keys.
            rect = self.modules[module.shortname].boundingRect()
            return (rect.width(), rect.height())
        selected = [self.modules[i].module for i in self.get_selected_module_ids()]
        self.controller.layout_modules(selected,
                                       module_size_func=_func)
def makeAbstraction(self):
items = self.get_selected_item_ids(True)
if items is not None:
# self.clear()
self.controller.create_abstraction_with_prompt(items[0], items[1])
self.setupScene(self.controller.current_pipeline)
def convertToAbstraction(self):
items = self.get_selected_item_ids(False)
if items is not None:
# self.clear()
self.controller.create_abstractions_from_groups(items[0])
self.setupScene(self.controller.current_pipeline)
def importAbstraction(self):
items = self.get_selected_item_ids(False)
if items is not None:
self.controller.import_abstractions(items[0])
def exportAbstraction(self):
items = self.get_selected_item_ids(False)
if items is not None:
self.controller.export_abstractions(items[0])
def copySelection(self):
""" copySelection() -> None
Copy the current selected modules into clipboard
"""
items = self.get_selected_item_ids(False)
if items is not None:
cb = QtGui.QApplication.clipboard()
text = self.controller.copy_modules_and_connections(items[0],items[1])
cb.setText(text)
def pasteFromClipboard(self, center):
""" pasteFromClipboard(center: (float, float)) -> None
Paste modules/connections from the clipboard into this pipeline view
"""
if self.controller and not self.read_only_mode:
cb = QtGui.QApplication.clipboard()
text = cb.text()
if text=='' or not text.startswith("<workflow"): return
ids = self.controller.paste_modules_and_connections(text, center)
self.setupScene(self.controller.current_pipeline)
self.reset_module_colors()
if len(ids) > 0:
self.unselect_all()
for moduleId in ids:
self.modules[moduleId].setSelected(True)
    def event(self, e):
        """ event(e: QEvent) -> None
        Process the set module color events.

        QModuleStatusEvents are posted from execution callbacks (see
        set_module_* below) so that module items are recolored on the
        GUI thread.
        """
        if e.type()==QModuleStatusEvent.TYPE:
            if e.moduleId>=0:
                item = self.modules.get(e.moduleId, None)
                if not item:
                    return True
                item.setToolTip(e.toolTip)
                item.errorTrace = e.errorTrace
                statusMap =  {
                    0: CurrentTheme.SUCCESS_MODULE_BRUSH,
                    1: CurrentTheme.ERROR_MODULE_BRUSH,
                    2: CurrentTheme.NOT_EXECUTED_MODULE_BRUSH,
                    3: CurrentTheme.ACTIVE_MODULE_BRUSH,
                    4: CurrentTheme.COMPUTING_MODULE_BRUSH,
                    6: CurrentTheme.PERSISTENT_MODULE_BRUSH,
                    7: CurrentTheme.SUSPENDED_MODULE_BRUSH,
                    }
                item.setProgress(e.progress)
                if item.statusBrush is not None and e.status == 3:
                    # do not update, already in cache
                    pass
                elif e.status in statusMap:
                    item.statusBrush = statusMap[e.status]
                else:
                    # Status 5 (progress) and unknown statuses clear the
                    # brush; progress itself was stored above.
                    item.statusBrush = None
                item._needs_state_updated = True
                item.update()
            return True
        return QInteractiveGraphicsScene.event(self, e)
def selectAll(self):
""" selectAll() -> None
Select all module items in the scene
"""
for item in self.items():
if isinstance(item, QGraphicsModuleItem) or \
isinstance(item, QGraphicsConnectionItem):
item.setSelected(True)
def open_configure_window(self, id):
""" open_configure_window(int) -> None
Open the modal configuration window for module with given id
"""
from vistrails.gui.vistrails_window import _app
_app.configure_module()
def perform_configure_done_actions(self, module_id):
if self.controller:
self.reset_module_colors()
self.flushMoveActions()
self.recreate_module(self.controller.current_pipeline, module_id)
def open_documentation_window(self, id):
""" open_documentation_window(int) -> None
Opens the modal module documentation window for module with given id
"""
from vistrails.gui.vistrails_window import _app
_app.show_documentation()
def open_looping_window(self, id):
""" open_looping_window(int) -> None
Opens the modal module looping options window for module with given id
"""
from vistrails.gui.vistrails_window import _app
_app.show_looping_options()
def toggle_breakpoint(self, id):
""" toggle_breakpoint(int) -> None
Toggles the breakpoint attribute for the module with given id
"""
if self.controller:
module = self.controller.current_pipeline.modules[id]
module.toggle_breakpoint()
self.recreate_module(self.controller.current_pipeline, id)
def toggle_watched(self, id):
if self.controller:
module = self.controller.current_pipeline.modules[id]
module.toggle_watched()
def print_error(self, id):
toolTip = str(self.modules[id].toolTip())
errorTrace = self.modules[id].errorTrace
if not toolTip and not errorTrace:
return
text = toolTip
if errorTrace and errorTrace.strip() != 'None':
text += '\n\n' + errorTrace
sp = StacktracePopup(text)
sp.exec_()
def open_annotations_window(self, id):
""" open_annotations_window(int) -> None
Opens the modal annotations window for module with given id
"""
if self.controller:
from vistrails.gui.module_info import QModuleInfo
module_info = QModuleInfo.instance()
module_info.show_annotations()
def open_module_label_window(self, id):
""" open_module_label_window(int) -> None
Opens the modal module label window for setting module label
"""
if self.controller:
module = self.controller.current_pipeline.modules[id]
if module.has_annotation_with_key('__desc__'):
currentLabel = module.get_annotation_by_key('__desc__').value.strip()
else:
currentLabel = ''
(text, ok) = QtGui.QInputDialog.getText(None, 'Set Module Label',
'Enter the module label',
QtGui.QLineEdit.Normal,
currentLabel)
if ok:
if not text:
if module.has_annotation_with_key('__desc__'):
self.controller.delete_annotation('__desc__', id)
self.recreate_module(self.controller.current_pipeline, id)
else:
self.controller.add_annotation(('__desc__', str(text)), id)
self.recreate_module(self.controller.current_pipeline, id)
##########################################################################
# Execution reporting API
def check_progress_canceled(self):
"""Checks if the user have canceled the execution and takes
appropriate action
"""
p = self.controller.progress
if p.wasCanceled():
if p._progress_canceled:
# It has already been confirmed in a progress update
p._progress_canceled = False
raise AbortExecution("Execution aborted by user")
r = QtGui.QMessageBox.question(self.parent(),
'Execution Paused',
'Are you sure you want to abort the execution?',
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
QtGui.QMessageBox.No)
if r == QtGui.QMessageBox.Yes:
raise AbortExecution("Execution aborted by user")
else:
p.goOn()
def set_module_success(self, moduleId):
""" set_module_success(moduleId: int) -> None
Post an event to the scene (self) for updating the module color
"""
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 0, ''))
QtCore.QCoreApplication.processEvents()
def set_module_error(self, moduleId, error, errorTrace=None):
""" set_module_error(moduleId: int, error: str) -> None
Post an event to the scene (self) for updating the module color
"""
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 1, error,
errorTrace=errorTrace))
QtCore.QCoreApplication.processEvents()
def set_module_not_executed(self, moduleId):
""" set_module_not_executed(moduleId: int) -> None
Post an event to the scene (self) for updating the module color
"""
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 2, ''))
QtCore.QCoreApplication.processEvents()
def set_module_active(self, moduleId):
""" set_module_active(moduleId: int) -> None
Post an event to the scene (self) for updating the module color
"""
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 3, ''))
QtCore.QCoreApplication.processEvents()
def set_module_computing(self, moduleId):
""" set_module_computing(moduleId: int) -> None
Post an event to the scene (self) for updating the module color
"""
p = self.controller.progress
if p is not None:
self.check_progress_canceled()
pipeline = self.controller.current_pipeline
try:
module = pipeline.get_module_by_id(moduleId)
except KeyError:
# Module does not exist in pipeline
return
p.setLabelText(module.name)
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 4, ''))
QtCore.QCoreApplication.processEvents()
def set_module_progress(self, moduleId, progress=0.0):
""" set_module_computing(moduleId: int, progress: float) -> None
Post an event to the scene (self) for updating the module color
"""
p = self.controller.progress
if p is not None:
try:
self.check_progress_canceled()
except AbortExecution:
p._progress_canceled = True
raise
status = '%d%% Completed' % int(progress*100)
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 5,
status, progress))
QtCore.QCoreApplication.processEvents()
def set_module_persistent(self, moduleId):
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 6, ''))
QtCore.QCoreApplication.processEvents()
def set_module_suspended(self, moduleId, error):
""" set_module_suspended(moduleId: int, error: str/instance) -> None
Post an event to the scene (self) for updating the module color
"""
status = "Module is suspended, reason: %s" % error
QtGui.QApplication.postEvent(self,
QModuleStatusEvent(moduleId, 7, status))
QtCore.QCoreApplication.processEvents()
def set_execution_progress(self, progress):
p = self.controller.progress
if p is not None:
p.setValue(int(progress * 100))
def reset_module_colors(self):
for module in self.modules.itervalues():
module.statusBrush = None
module._needs_state_updated = True
def hasMoveActions(self):
controller = self.controller
for (mId, item) in self.modules.iteritems():
module = controller.current_pipeline.modules[mId]
(dx,dy) = (item.scenePos().x(), -item.scenePos().y())
if (dx != module.center.x or dy != module.center.y):
return True
return False
def flushMoveActions(self):
""" flushMoveActions() -> None
Update all move actions into vistrail
"""
controller = self.controller
moves = []
for (mId, item) in self.modules.iteritems():
module = controller.current_pipeline.modules[mId]
(dx,dy) = (item.scenePos().x(), -item.scenePos().y())
if (dx != module.center.x or dy != module.center.y):
moves.append((mId, dx, dy))
if len(moves)>0:
controller.quiet = True
controller.move_module_list(moves)
controller.quiet = False
return True
return False
    def set_read_only_mode(self, on):
        """set_read_only_mode(on: bool) -> None
        Prevent the user from adding/removing modules and connections.
        Checked by dropEvent/keyPressEvent/pasteFromClipboard before
        mutating the pipeline."""
        self.read_only_mode = on
class QGraphicsFunctionsWidget(QtGui.QGraphicsWidget):
    """ GraphicsWidget containing all editable functions of a module.

    One QGraphicsFunctionWidget is stacked vertically per editable
    input port; function_changed is re-emitted when any of them changes.
    """
    function_changed = QtCore.pyqtSignal(str, list)
    def __init__(self, module, parent=None, constant=None):
        QtGui.QGraphicsWidget.__init__(self, parent)
        self.function_widgets = []
        height = 0
        width = 0
        for port_spec in module.destinationPorts():
            if port_spec.name in module.editable_input_ports:
                # create default dummies
                params = []
                for psi in port_spec.items:
                    param = ModuleParam(type=psi.descriptor.name,
                                        identifier=psi.descriptor.identifier,
                                        namespace=psi.descriptor.namespace)
                    params.append(param)
                function = ModuleFunction(name=port_spec.name,
                                          parameters=params)
                # Prefer an existing function on the module over the dummy.
                for f in module.functions:
                    if f.name == port_spec.name:
                        function = f
                for psi, param in zip(port_spec.port_spec_items,
                                      function.params):
                    param.port_spec_item = psi
                function_widget = QGraphicsFunctionWidget(function, self, constant)
                function_widget.setPos(0, height)
                function_widget.function_changed.connect(self.function_changed)
                self.function_widgets.append(function_widget)
                # Stack widgets vertically; track the widest for centering.
                height += function_widget.boundingRect().height()
                width = max(width,function_widget.boundingRect().width())
        # center widgets
        for function_widget in self.function_widgets:
            for widget, w in function_widget.widths:
                widget.moveBy((width-w)/2, 0.0)
        self.bounds = QtCore.QRectF(0,0,width,height)
    def boundingRect(self):
        # Fixed bounds computed once in __init__.
        return self.bounds
class QGraphicsFunctionWidget(QtGui.QGraphicsWidget):
    """ GraphicsWidget containing an editable function.

    Renders an optional name label plus one editor widget per parameter,
    stacked vertically.  function_changed(name, values) is emitted when
    any parameter editor changes.
    """
    function_changed = QtCore.pyqtSignal(str, list)
    def __init__(self, function, parent=None, constant=None):
        QtGui.QGraphicsWidget.__init__(self, parent)
        self.function = function
        self.param_widgets = []
        self.widths = [] # (widget, width)
        self.bounds = None
        width = 0
        height = 0
        SCALE = 3.0/4 # make QWidgets a bit smaller than normal
        MAX_WIDTH = 150
        if not constant:
            # add name label
            name = self.function.name
            bounds = CurrentTheme.MODULE_EDIT_FONT_METRIC.boundingRect
            editRect = bounds(name)
            if editRect.width()>MAX_WIDTH:
                # Elide the name until "name..." fits in MAX_WIDTH.
                while bounds(name + '...').width() > MAX_WIDTH:
                    name = name[:-1]
                name += '...'
                editRect = bounds(name)
            width = max(width, editRect.width())
            fname = QtGui.QGraphicsSimpleTextItem(name, self)
            fname.setFont(CurrentTheme.MODULE_EDIT_FONT)
            fname.setPos(-1, -1)
            # Tooltip shows the full name with a shortened signature,
            # e.g. "value(Integer,String)".
            names = []
            sigstring = function.sigstring
            for sig in sigstring[1:-1].split(','):
                k = sig.split(':', 2)
                if len(k) < 2:
                    names.append(k[0])
                else:
                    names.append(k[1])
            short_sigstring = '(' + ','.join(names) + ')'
            tooltip = function.name + short_sigstring
            fname.setToolTip(tooltip)
            #height += bounds(name).height()
            height += 11 # hardcoded because fontmetric can give wrong value
        for i in xrange(len(function.parameters)):
            param = function.parameters[i]
            # check
            psi = param.port_spec_item
            if psi.entry_type is not None:
                # !!only pull off the prefix!! options follow in camelcase
                prefix_end = len(psi.entry_type.lstrip(string.lowercase))
                if prefix_end == 0:
                    entry_type = psi.entry_type
                else:
                    entry_type = psi.entry_type[:-prefix_end]
            else:
                entry_type = None
            Widget = get_widget_class(psi.descriptor, entry_type)
            if hasattr(Widget, 'GraphicsItem') and Widget.GraphicsItem:
                # Native graphics-item editor: scale down to MAX_WIDTH and
                # give it a white background that forwards focus clicks.
                param_widget = Widget.GraphicsItem(param, self)
                # resize to MAX_WIDTH
                rect = param_widget.boundingRect()
                param_widget.setZValue(self.zValue()+0.2)
                scale = max(rect.width(), rect.height())
                if scale>MAX_WIDTH:
                    param_widget.setScale(MAX_WIDTH/scale)
                    rect.setSize(rect.size()*MAX_WIDTH/scale)
                bg = QtGui.QGraphicsRectItem(rect, self)
                # TODO COLOR
                bg.setBrush(QtGui.QBrush(QtGui.QColor('#FFFFFF')))
                bg.setZValue(-1)
                bg.setPos(0, height)
                def get_focusable(widget):
                    # Bind `widget` now (avoids the late-binding-closure bug).
                    return lambda e:widget.setFocus() or widget.mousePressEvent(e)
                bg.mousePressEvent = get_focusable(param_widget)
                param_widget.setPos(0, height)
                self.widths.append((param_widget,rect.width()))
            else:
                # Plain QWidget editor: embed through a proxy, scaled by SCALE.
                param_widget = Widget(param)
                param_widget.setAutoFillBackground(False)
                param_widget.setAttribute(QtCore.Qt.WA_NoSystemBackground, True)
                param_widget.setMaximumSize(MAX_WIDTH/SCALE, MAX_WIDTH/SCALE)
                param_widget.setWindowFlags(QtCore.Qt.BypassGraphicsProxyWidget)
                proxy = QtGui.QGraphicsProxyWidget(self)
                proxy.setWidget(param_widget)
                proxy.setScale(SCALE)
                rect = param_widget.geometry()
                rect.setSize(rect.size()*SCALE)# uninitialized bounds need to be scaled
                rect.moveTo(0.0,0.0)
                proxy.setPos(0, height)
                self.widths.append((proxy,rect.width()))
            width = max(width, rect.width())
            rect.setHeight(rect.height()+2) # space between parameters
            height += rect.height()
            param_widget.contentsChanged.connect(self.param_changed)
            self.param_widgets.append(param_widget)
        self.bounds = QtCore.QRectF(0.0, 0.0, width, height)
    def param_changed(self, values):
        # get values from all parameters (the signal's payload is ignored;
        # the full, current contents are re-read instead)
        values = [p.contents() for p in self.param_widgets]
        self.function_changed.emit(self.function.name, values)
    def setContents(self, values):
        # Push values into the editors without re-emitting change signals.
        for pw, value in zip(self.param_widgets, values):
            pw.setContents(value, silent=True)
    def getContents(self):
        # Current values, one per parameter editor.
        return [pw.contents() for pw in self.param_widgets]
    def boundingRect(self):
        # Fixed bounds computed once in __init__.
        return self.bounds
def set_lod(limit, item, draw=None):
    """Sets the limit of level of detail used when painting items.

    limit: minimum level-of-detail below which nothing is painted.
    item:  the top-level QGraphicsItem whose subtree is patched.
    draw:  internal recursion parameter -- a one-element list shared by
           the whole subtree so children follow the top item's decision.
    """
    # This function replaces the paint() methods of the given item and its
    # children. The new version doesn't actually draw any of the items if the
    # level of detail OF THE TOP ITEM (which is the only one checked) is under
    # the threshold
    # Only the top-level item is checked, because children might have different
    # scales
    paint_orig = item.paint # store reference to original paint method
    top_item = draw is None
    if draw is None:
        draw = [True]

    # Overrides paint() on that item
    def paint_with_lod_check(painter, option, widget):
        if top_item:
            # Mutate the shared flag so all children see this decision.
            draw[0] = option.levelOfDetailFromTransform(
                    painter.worldTransform()) > limit
        if draw[0]:
            return paint_orig(painter, option, widget)
    item.paint = paint_with_lod_check

    # Recursively process children
    for i in item.childItems():
        set_lod(limit, i, draw)
class QModuleStatusEvent(QtCore.QEvent):
    """
    QModuleStatusEvent is trying to handle thread-safe real-time
    module updates in the scene through post-event
    """
    # Custom Qt event type shared by all module-status events.
    TYPE = QtCore.QEvent.Type(QtCore.QEvent.User)
    def __init__(self, moduleId, status, toolTip, progress=0.0,
                 errorTrace=None):
        """ QModuleStatusEvent(type: int) -> None
        Initialize the specific event with the module status. Status 0
        for success, 1 for error, 2 for not executed, 3 for active,
        4 for computing, 5 for progress, 6 for persistent and
        7 for suspended (see QPipelineScene.set_module_*).
        """
        QtCore.QEvent.__init__(self, QModuleStatusEvent.TYPE)
        self.moduleId = moduleId
        self.status = status
        self.toolTip = toolTip
        self.progress = progress
        self.errorTrace = errorTrace
class QPipelineView(QInteractiveGraphicsView, BaseView):
"""
QPipelineView inherits from QInteractiveGraphicsView that will
handle drawing of module, connection shapes and selecting
mechanism.
"""
def __init__(self, parent=None):
""" QPipelineView(parent: QWidget) -> QPipelineView
Initialize the graphics view and its properties
"""
QInteractiveGraphicsView.__init__(self, parent)
BaseView.__init__(self)
self.setScene(QPipelineScene(self))
self.set_title('Pipeline')
self.controller = None
self.detachable = True
self._view_fitted = False
def set_default_layout(self):
from vistrails.gui.module_palette import QModulePalette
from vistrails.gui.module_info import QModuleInfo
self.set_palette_layout(
{QtCore.Qt.LeftDockWidgetArea: QModulePalette,
QtCore.Qt.RightDockWidgetArea: QModuleInfo,
})
def set_action_links(self):
# FIXME execute should be tied to a pipleine_changed signal...
self.action_links = \
{'copy': ('module_changed', self.has_selected_modules),
'paste': ('clipboard_changed', self.clipboard_non_empty),
'layout': ('pipeline_changed', self.pipeline_non_empty),
'group': ('module_changed', self.has_selected_modules),
'ungroup': ('module_changed', self.has_selected_groups),
'showGroup': ('module_changed', self.has_selected_group),
'makeAbstraction': ('module_changed', self.has_selected_modules),
'execute': ('pipeline_changed', self.pipeline_non_empty),
'configureModule': ('module_changed', self.has_selected_module),
'documentModule': ('module_changed', self.has_selected_module),
'makeAbstraction': ('module_changed', self.has_selected_modules),
'convertToAbstraction': ('module_changed',
self.has_selected_group),
'editAbstraction': ('module_changed', self.has_selected_abs),
'importAbstraction': ('module_changed', self.has_selected_abs),
'exportAbstraction': ('module_changed', self.has_selected_abs),
'publishWeb' : ('pipeline_changed', self.check_publish_db),
'publishPaper' : ('pipeline_changed', self.pipeline_non_empty),
'controlFlowAssist': ('pipeline_changed', self.pipeline_non_empty),
'redo': ('version_changed', self.can_redo),
'undo': ('version_changed', self.can_undo),
}
def can_redo(self, versionId):
return self.controller and self.controller.can_redo()
def can_undo(self, versionId):
return self.controller and self.controller.can_undo()
def set_action_defaults(self):
self.action_defaults.update(
{ 'execute': [('setEnabled', True, self.set_execute_action),
('setIcon', False, CurrentTheme.EXECUTE_PIPELINE_ICON),
('setToolTip', False, 'Execute the current pipeline')],
})
def set_execute_action(self):
if self.controller:
return self.pipeline_non_empty(self.controller.current_pipeline)
return False
def execute(self, target=None):
try:
if target is not None:
self.controller.execute_user_workflow(
sinks=[target],
reason="Execute specific module")
else:
self.controller.execute_user_workflow()
except Exception, e:
debug.unexpected_exception(e)
debug.critical("Error executing workflow: %s" % debug.format_exception(e))
else:
from vistrails.gui.vistrails_window import _app
_app.notify('execution_updated')
def publish_to_web(self):
from vistrails.gui.publishing import QVersionEmbed
panel = QVersionEmbed.instance()
panel.switchType('Wiki')
panel.set_visible(True)
def publish_to_paper(self):
from vistrails.gui.publishing import QVersionEmbed
panel = QVersionEmbed.instance()
panel.switchType('Latex')
panel.set_visible(True)
def check_publish_db(self, pipeline):
loc = self.controller.locator
result = False
if hasattr(loc,'host'):
result = True
return result and self.pipeline_non_empty(pipeline)
def has_selected_modules(self, module, only_one=False):
module_ids_len = len(self.scene().get_selected_module_ids())
#print ' module_ids_len:', module_ids_len
if only_one and module_ids_len != 1:
return False
return module_ids_len > 0
def has_selected_module(self, module):
# 'calling has_selected_module'
return self.has_selected_modules(module, True)
def has_selected_groups(self, module, only_one=False):
module_ids = self.scene().get_selected_module_ids()
if len(module_ids) <= 0:
return False
if only_one and len(module_ids) != 1:
return False
for m_id in module_ids:
if not self.scene().current_pipeline.modules[m_id].is_group():
return False
return True
def has_selected_group(self, module):
return self.has_selected_groups(True)
def has_selected_abs(self, module):
module_ids = self.scene().get_selected_module_ids()
if len(module_ids) != 1:
return False
for m_id in module_ids:
if not self.scene().current_pipeline.modules[m_id].is_abstraction():
return False
return True
def clipboard_non_empty(self):
clipboard = QtGui.QApplication.clipboard()
clipboard_text = clipboard.text()
return bool(clipboard_text) #and \
# str(clipboard_text).startswith("<workflow")
def pipeline_non_empty(self, pipeline):
return pipeline is not None and len(pipeline.modules) > 0
def pasteFromClipboard(self):
center = self.mapToScene(self.width()/2.0, self.height()/2.0)
self.scene().pasteFromClipboard((center.x(), -center.y()))
def setQueryEnabled(self, on):
QInteractiveGraphicsView.setQueryEnabled(self, on)
if not self.scene().noUpdate and self.scene().controller:
self.scene().setupScene(self.scene().controller.current_pipeline)
def setReadOnlyMode(self, on):
self.scene().set_read_only_mode(on)
def set_title(self, title):
BaseView.set_title(self, title)
self.setWindowTitle(title)
def set_controller(self, controller):
oldController = self.controller
if oldController != controller:
#if oldController is not None:
# self.disconnect(oldController,
# QtCore.SIGNAL('versionWasChanged'),
# self.version_changed)
# is this needed. It causes errors in api tests
#oldController.current_pipeline_view = None
self.controller = controller
self.scene().controller = controller
# self.connect(controller,
# QtCore.SIGNAL('versionWasChanged'),
# self.version_changed)
# self.module_info.set_controller(controller)
# self.moduleConfig.controller = controller
# controller.current_pipeline_view = self.scene()
def set_to_current(self):
QModuleInfo.instance().setReadOnly(self.scene().read_only_mode)
self.controller.set_pipeline_view(self)
def get_long_title(self):
pip_name = self.controller.get_pipeline_name()
vt_name = self.controller.name
self.long_title = "Pipeline %s from %s" % (pip_name,vt_name)
return self.long_title
def get_controller(self):
return self.controller
def version_changed(self):
self._view_fitted = False
self.scene().setupScene(self.controller.current_pipeline)
def run_control_flow_assist(self):
currentScene = self.scene()
if currentScene.controller:
selected_items = currentScene.get_selected_item_ids(True)
if selected_items is None:
selected_items = ([],[])
selected_module_ids = selected_items[0]
selected_connection_ids = selected_items[1]
if len(selected_module_ids) > 0:
try:
dialog = QControlFlowAssistDialog(
self,
selected_module_ids, selected_connection_ids,
currentScene)
except MissingPackage:
debug.critical("The controlflow package is not available")
else:
dialog.exec_()
else:
QtGui.QMessageBox.warning(
self,
'No modules selected',
'You must select at least one module to use the '
'Control Flow Assistant')
    def done_configure(self, mid):
        """Forward post-configuration cleanup for module *mid* to the scene."""
        self.scene().perform_configure_done_actions(mid)
    def paintModuleToPixmap(self, module_item):
        """Render a module item to a pixmap at the view's current scale.

        m11()/m22() are the horizontal/vertical scale factors of the
        view's transformation matrix.
        """
        m = self.matrix()
        return module_item.paintToPixmap(m.m11(), m.m22())
    def viewSelected(self):
        """Zoom to fit once per version change, the first time the view is
        actually visible (avoids fitting a hidden widget)."""
        if not self._view_fitted and self.isVisible():
            # We only do this once after a version_changed() call
            self.zoomToFit()
            self._view_fitted = True
################################################################################
# Testing
class TestPipelineView(vistrails.gui.utils.TestVisTrailsGUI):
    """GUI regression tests for the pipeline view."""
    def test_quick_change_version_with_ports(self):
        """Switch rapidly between named versions of triangle_count.vt to
        exercise scene rebuilds while ports are present."""
        import vistrails.core.system
        filename = (vistrails.core.system.vistrails_root_directory() +
                    '/tests/resources/triangle_count.vt')
        view = vistrails.api.open_vistrail_from_file(filename)
        vistrails.api.select_version(-1, view.controller)
        vistrails.api.select_version('count + area', view.controller)
        vistrails.api.select_version('writing to file', view.controller)
    def test_change_version_with_common_connections(self):
        """Switch between two versions that share connections; the scene
        must reuse/redraw them without errors."""
        import vistrails.core.system
        filename = (vistrails.core.system.vistrails_root_directory() +
                    '/tests/resources/terminator.vt')
        view = vistrails.api.open_vistrail_from_file(filename)
        vistrails.api.select_version('Image Slices HW', view.controller)
        vistrails.api.select_version('Combined Rendering HW', view.controller)
    def test_switch_mode(self):
        """Cycle twice through pipeline/history/query views."""
        vistrails.api.switch_to_pipeline_view()
        vistrails.api.switch_to_history_view()
        vistrails.api.switch_to_query_view()
        vistrails.api.switch_to_pipeline_view()
        vistrails.api.switch_to_history_view()
        vistrails.api.switch_to_query_view()
    def test_group(self):
        """Build a three-module File chain and group all of it."""
        vistrails.api.new_vistrail()
        basic_pkg = get_vistrails_basic_pkg_id()
        m1 = vistrails.api.add_module(0, 0, basic_pkg, 'File', '')
        m2 = vistrails.api.add_module(0, -100, basic_pkg, 'File', '')
        m3 = vistrails.api.add_module(0, -100, basic_pkg, 'File', '')
        r = vistrails.api.get_module_registry()
        src = r.get_port_spec(basic_pkg, 'File', None, 'value_as_string',
                              'output')
        dst = r.get_port_spec(basic_pkg, 'File', None, 'name', 'input')
        # src = r.module_source_ports(True, basic_pkg, 'File', '')[1]
        # assert src.name == 'value_as_string'
        # dst = r.module_destination_ports(True, basic_pkg, 'File', '')[1]
        # assert dst.name == 'name'
        vistrails.api.add_connection(m1.id, src, m2.id, dst)
        vistrails.api.add_connection(m2.id, src, m3.id, dst)
        vistrails.api.create_group([0, 1, 2], [0, 1])
| bsd-3-clause | 5,503,474,275,681,508,000 | 41.574387 | 173 | 0.55571 | false | 4.365162 | false | false | false |
JetStarBlues/Nand-2-Tetris | PlayArea/screen/screen.py | 1 | 2677 | ''''''''''''''''''''''''' imports '''''''''''''''''''''''''''
# Built ins
import tkinter
import threading
SCREEN_MEMORY_MAP = 16384
''''''''''''''''''''''''' screen '''''''''''''''''''''''''''
class Screen():
    '''
    16 bit screen with a 512 x 256 pixels display.
    Specifications hardcoded for simplicity.
    Data stored using a 256 x 512 array to help with tkinter draw speed
    (In lieu of using a 1 x 8192 array which more closely resembles RAM).

    NOTE(review): tkinter runs in a worker thread here; tkinter is not
    generally thread-safe — confirm this is acceptable for the host app.
    '''
    def __init__( self, main_memory ):
        # main_memory must expose read(address) returning a 16-char bit string.
        self.main_memory = main_memory
        self.N = 16                    # word size in bits
        self.nRegisters = 8192         # words in the screen memory map
        # Screen dimensions ---
        self.width = 512
        self.height = 256
        self.registersPerRow = self.width // self.N
        # Pixel array ---
        # rows x cols of single-bit values (0 = white, 1 = black)
        self.pixels = [ [0] * self.width for _ in range( self.height ) ]
        # Initialize tkinter ---
        self.refreshRate = 100 # ms
        self.root = None
        self.img = None
        threading.Thread(
            target = self._initTkinter,
            name = 'screen_thread'
        ).start()
    def update( self ):
        """Refresh cycle: pull RAM into the pixel array, convert to a
        tkinter color string, and push it to the PhotoImage."""
        # Get screen data from Hack's main memory ---
        self.readRAM()
        # Format pixel array to tkinter string ---
        data = [ '{' + ''.join( map( str, row ) ) + '} ' for row in self.pixels ]
        data = ''.join( data )
        data = data.replace( '0', 'white ' ).replace( '1', 'black ' )
        # Update tkinter ---
        self._updateTkinterImg( data )
    def readRAM( self ):
        """Copy every word of the screen memory map into the pixel array."""
        # Get screen data from Hack's main memory
        for address in range( self.nRegisters ):
            data = self.main_memory.read( SCREEN_MEMORY_MAP + address )
            self.write( data, address )
    def write( self, x, address ):
        """Store the 16-bit string *x* at screen-map word *address*."""
        # Psuedo N bit RAM interface ---
        # Maps RAM access style to pixel array access style
        row = address // self.registersPerRow
        col_0 = address % self.registersPerRow * self.N
        for col, bit in zip( range( col_0, col_0 + self.N ), range( 0, self.N ) ):
            self.pixels[row][col] = x[bit]
    # Tkinter ---
    def _initTkinter( self ):
        """Worker-thread entry: build the window, kick off the first
        update (which schedules itself via after()), and run mainloop."""
        self.root = tkinter.Tk()
        self.root.wm_title('Hack')
        self.root.iconbitmap('favicon.ico')
        self.root.bind( '<Escape>', self._quitTkinter )
        self.img = tkinter.PhotoImage( width = self.width, height = self.height )
        label = tkinter.Label(self.root)
        label.pack()
        label.config( image = self.img )
        self.update()
        self.root.mainloop()
    def _updateTkinterImg( self, data ):
        """Blit the color string and re-arm the refresh timer."""
        self.img.put( data, to = (0, 0, self.width, self.height) )
        self.root.after( self.refreshRate, self.update ) # set timer
    def _quitTkinter( self, ev = None ):
        """Escape-key handler: leave the tkinter mainloop."""
        self.root.quit()
# --- Stand-alone demo -----------------------------------------------------
# Bug fix: the original called Screen() with no arguments, which raised
# TypeError because __init__ requires a main_memory object. A minimal stub
# memory (every address reads as an all-zero word) lets the window open.
class _ZeroMemory:
    """Stand-in for Hack RAM: every address reads as sixteen '0' bits."""
    def read(self, address):
        return '0' * 16

screen = Screen(_ZeroMemory())
v = '1' * 16
# Draw five stacked 16-pixel-wide black bars near the left of the display.
# NOTE(review): Screen.update() re-reads RAM on every refresh, so these
# direct writes are overwritten at the first refresh tick — kept to match
# the original demo's intent; confirm desired behaviour.
screen.write( v, 0 * 32 + 10 )
screen.write( v, 1 * 32 + 10 )
screen.write( v, 2 * 32 + 10 )
screen.write( v, 3 * 32 + 10 )
screen.write( v, 4 * 32 + 10 )
genome21/dcos-cli | tests/test_cmds.py | 3 | 2181 | from dcos import cmds
import pytest
@pytest.fixture
def args():
    """Docopt-style argument dict: commands 'cmd-a' and 'cmd-b' are active,
    'cmd-c' is inactive, plus three positional argument values."""
    return {
        'cmd-a': True,
        'cmd-b': True,
        'cmd-c': False,
        'arg-1': 'arg-1',
        'arg-2': 'arg-2',
        'arg-0': 'arg-0',
    }
def test_single_cmd(args):
    """A single matching command is executed and its return value surfaced."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-a', 'cmd-b'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=function),
    ]
    assert cmds.execute(commands, args) == 1
def test_multiple_cmd(args):
    """With several commands, only the one matching active args runs;
    the inactive 'cmd-c' entry would fail the test if executed."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-c'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=pytest.fail),
        cmds.Command(
            hierarchy=['cmd-a', 'cmd-b'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=function),
    ]
    assert cmds.execute(commands, args) == 1
def test_no_matching_cmd(args):
    """When no command's hierarchy is active, execute() raises."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-c'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=pytest.fail),
    ]
    with pytest.raises(Exception):
        cmds.execute(commands, args)
def test_similar_cmds(args):
    """The more specific hierarchy ['cmd-a', 'cmd-b'] wins over the
    overlapping prefix ['cmd-a']."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-a', 'cmd-b'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=function),
        cmds.Command(
            hierarchy=['cmd-a'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=pytest.fail),
    ]
    assert cmds.execute(commands, args) == 1
def test_missing_cmd(args):
    """A hierarchy key absent from the args dict raises KeyError."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-d'],
            arg_keys=['arg-0', 'arg-1', 'arg-2'],
            function=pytest.fail),
    ]
    with pytest.raises(KeyError):
        returncode, err = cmds.execute(commands, args)
def test_missing_arg(args):
    """An arg_key absent from the args dict raises KeyError."""
    commands = [
        cmds.Command(
            hierarchy=['cmd-a'],
            arg_keys=['arg-3'],
            function=pytest.fail),
    ]
    with pytest.raises(KeyError):
        returncode, err = cmds.execute(commands, args)
def function(*args):
    """Shared success handler for the command tests above.

    Verifies that the positional arguments arrive in order as
    'arg-0', 'arg-1', ... then returns 1 so callers can assert on
    cmds.execute's return value.
    """
    for index, value in enumerate(args):
        assert value == 'arg-{}'.format(index)
    return 1
| apache-2.0 | 6,580,438,426,622,830,000 | 21.030303 | 54 | 0.498854 | false | 3.523425 | true | false | false |
# Packaging script for moira-client.
# distutils is deprecated (PEP 632) and removed from the standard library in
# Python 3.12; prefer setuptools, falling back for very old environments.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Runtime dependencies are maintained in requirements.txt (one per line).
with open('requirements.txt') as f:
    required = f.read().splitlines()

setup(
    name='moira-client',
    version='2.4',
    description='Client for Moira - Alerting system based on Graphite data',
    keywords='moira monitoring client metrics alerting',
    long_description="""
    Moira is a real-time alerting tool, based on Graphite data.
    moira-client is a python client for Moira API.
    Key features:
    - create, update, delete, manage triggers
    - create, delete, update subscriptions
    - manage tags, patterns, notifications, events, contacts
    """,
    author = 'Alexander Lukyanchenko',
    author_email = '[email protected]',
    packages=[
        'moira_client',
        'moira_client.models'
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Operating System :: OS Independent',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Topic :: System :: Monitoring',
        "License :: OSI Approved :: MIT License"
    ],
    url='https://github.com/moira-alert/python-moira-client',
    install_requires=required
)
| mit | -803,249,410,176,711,800 | 31.622222 | 76 | 0.617166 | false | 4.170455 | false | false | false |
dan-cristian/haiot | common/performance.py | 1 | 3139 | __author__ = 'dcristian'
import threading
import utils
import prctl
from main.logger_helper import L
from main import thread_pool
import psutil
import os
from common import Constant
class P:
    # Module-wide performance counters (class attributes used as globals).
    query_count = None                          # queries recorded; None/0 until first add_query()
    query_cumulative_time_miliseconds = None    # running sum of query durations [ms]
    min_query_time_miliseconds = None           # fastest query seen [ms]
    max_query_time_miliseconds = None           # slowest query seen [ms]
    log_file = None                             # CPU-profiling log path; set by init()
def add_query(start_time, query_details=None):
    """Record one query's duration into the P counters and return it [ms].

    Lazily initializes the counters on first use, tracks min/max, and
    logs a snippet of *query_details* whenever a new slowest query is
    seen. Not thread-safe (plain shared class attributes).
    """
    elapsed = int((utils.get_base_location_now_date() - start_time).total_seconds()*1000)
    if not P.query_count:
        # First call (or counter still zero): initialize all aggregates.
        P.query_count = 0
        P.query_cumulative_time_miliseconds = 0
        P.max_query_time_miliseconds = elapsed
        P.min_query_time_miliseconds = elapsed
    if P.max_query_time_miliseconds < elapsed:
        # New slowest query; query_count is >= 1 here, so the average below is safe.
        P.max_query_time_miliseconds = elapsed
        L.l.debug("Longest query details:{}".format(str(query_details)[:50]))
        L.l.debug("Count={} avg={} min={} max={}".format(
            P.query_count, P.query_cumulative_time_miliseconds / P.query_count, P.min_query_time_miliseconds,
            P.max_query_time_miliseconds))
    if P.min_query_time_miliseconds > elapsed:
        P.min_query_time_miliseconds = elapsed
    P.query_count += 1
    P.query_cumulative_time_miliseconds += elapsed
    L.l.debug("Count={} avg={} min={} max={}".format(P.query_count,
                                                     P.query_cumulative_time_miliseconds / P.query_count,
                                                     P.min_query_time_miliseconds, P.max_query_time_miliseconds))
    return elapsed
# https://stackoverflow.com/questions/34361035/python-thread-name-doesnt-show-up-on-ps-or-htop
def _thread_for_ident(ident):
    """Map a thread ident to its live threading.Thread, or None.

    Relies on the private ``threading._active`` table — may break across
    Python versions.
    """
    return threading._active.get(ident)
def _save_threads_cpu_percent(p, interval=0.1):
    """Write one CPU-load line per thread of *p* (a psutil.Process) to P.log_file.

    Each thread's share of the process CPU percentage is estimated from its
    share of total CPU time. The log file is overwritten on every call.
    Cleanup: removed an unused local that shadowed the builtin ``list``.
    """
    total_percent = p.cpu_percent(interval)  # blocks for `interval` seconds
    total_time = sum(p.cpu_times())
    with open(P.log_file, "w") as log:
        total = 0
        for t in p.threads():
            load = round(total_percent * ((t.system_time + t.user_time)/total_time), 2)
            total += load
            th = _thread_for_ident(t.id)
            if th is None:
                tname = "None"
            else:
                tname = th.name
            log.write("{} % \t {} \t\t\t\t {}\n".format(load, tname, t))
        log.write("Total={} %\n".format(total))
def _cpu_profiling():
    """Write a per-thread CPU usage snapshot of this process to P.log_file.

    Bug fix: ``p`` was previously only assigned inside commented-out code,
    so calling this function raised NameError. The process handle is now
    created before use.
    """
    p = psutil.Process(os.getpid())
    _save_threads_cpu_percent(p)
def thread_run():
    """Periodic worker invoked by the thread pool.

    Tags the OS and Python thread names as active, then idle. The CPU
    profiling call is currently disabled.
    """
    prctl.set_name("performance")
    threading.current_thread().name = "performance"
    # _cpu_profiling()
    prctl.set_name("idle_performance")
    threading.current_thread().name = "idle_performance"
def init(log_file):
    """Module entry point: remember the log destination and schedule
    thread_run() every 5 seconds on the shared thread pool."""
    P.log_file = log_file
    thread_pool.add_interval_callable(thread_run, run_interval_second=5)
| gpl-2.0 | -7,687,099,009,642,972,000 | 33.494505 | 117 | 0.591271 | false | 3.328738 | false | false | false |
somemarsupials/bnfparsing | bnfparsing/whitespace.py | 1 | 1761 | # -*- encoding: utf-8 -*-
from functools import wraps
from .utils import NULL
from .exceptions import DelimiterError
""" This module contains decorators used to handle the whitespace
between tokens, when parsing.
"""
def ignore(string):
    """ Whitespace handler that discards whitespace between tokens.

    Whitespace is optional either way: a leading NULL marker (if any) is
    consumed first, then all leading whitespace is stripped before the
    next token is parsed.
    """
    trimmed = string[1:] if string and string[0] == NULL else string
    return trimmed.lstrip()
def ignore_specific(whitespace):
    """ Build a handler that ignores only the given whitespace characters.

    The returned handler consumes a leading NULL marker (if any), then
    strips the chosen whitespace characters from the front of the input
    before the next token is parsed. Returns a function.
    """
    def handler(string):
        trimmed = string[1:] if string and string[0] == NULL else string
        return trimmed.lstrip(whitespace)
    return handler
def require(whitespace, ignore=False):
    """ Factory for handlers that demand an exact whitespace 'phrase'
    between tokens.

    The returned handler raises DelimiterError when the required phrase
    is missing. A leading NULL marker short-circuits the check. With
    ignore=True any extra whitespace beyond the required phrase is also
    stripped. Returns a function.
    """
    required_len = len(whitespace)
    def handler(string):
        if string and string[0] == NULL:
            return string[1:]
        if string[:required_len] != whitespace:
            raise DelimiterError('"%s..." not delimited' % string[:50])
        remainder = string[required_len:]
        return remainder.lstrip() if ignore else remainder
    return handler
| gpl-3.0 | 4,995,026,481,958,532,000 | 30.446429 | 73 | 0.675185 | false | 4.733871 | false | false | false |
ColdSauce/golems | src/game_objects.py | 1 | 38506 | import pygame
class Tile:
    """One map tile: the sprite drawn for it plus a collision flag."""
    def __init__(self, image, solid = False):
        self.sprite = image   # image/surface blitted for this tile
        self.solid = solid    # True = blocks character movement
class MovableCharacter:
    """A character that walks a 50-pixel grid with smooth pixel offsets.

    Position is (gridX, gridY) plus an in-transit pixel offset
    (xOffset/yOffset); a move is "complete" when the offset reaches a
    multiple of 50, at which point the grid coordinate is advanced and
    the offset reset.
    """
    def move(self,speed = 2):
        """Advance the pixel offset by *speed* in the facing direction and
        snap to the next grid cell when a 50-pixel step completes.

        NOTE(review): speeds that do not divide 50 never hit the snap
        condition — confirm callers only use divisors of 50.
        """
        direction = self.current_direction
        if direction == Direction.UP:
            self.yOffset -= speed
        elif direction == Direction.RIGHT:
            self.xOffset += speed
        elif direction == Direction.DOWN:
            self.yOffset += speed
        elif direction == Direction.LEFT:
            self.xOffset -= speed
        if ((direction == Direction.UP or direction == Direction.DOWN) and (self.yOffset % 50 == 0)) or ((direction == Direction.LEFT or direction == Direction.RIGHT) and (self.xOffset % 50 == 0)):
            # Step finished: fold the +/-50 offset into the grid position.
            if(self.yOffset < 0):
                self.gridY -= 1
            elif(self.yOffset > 0):
                self.gridY += 1
            self.yOffset = 0;
            if(self.xOffset < 0):
                self.gridX -= 1
            elif(self.xOffset > 0):
                self.gridX += 1
            self.xOffset = 0;
            self.moving = False;
    def change_direction(self, direction, override_opt = False):
        """Face *direction* and swap in the matching sprite.

        Skips the sprite reload when already facing that way, unless
        override_opt forces it.
        """
        # Optimization
        if not override_opt and self.current_direction == direction:
            return
        self.current_direction = direction
        name = self.directional_sprites[direction]
        self.sprite = self.load_function(name)
    def __init__(self, name, load_function,list_of_bots, directional_sprites,x=0,y=0, gold=0):
        """Create a character at grid cell (x, y).

        load_function maps a sprite name to a loaded sprite;
        directional_sprites maps a Direction value to a sprite name.
        NOTE(review): self.sprite is only assigned in change_direction(),
        so it is undefined until the first direction change — confirm.
        """
        # for now this is just hard coded
        # placeholder sprite's justgonna be one of the directional sprites
        self.moving = False
        self.load_function = load_function
        self.directional_sprites = directional_sprites
        self.gridX = x
        self.gridY = y
        self.xOffset = 0
        self.yOffset = 0
        self.name = name
        self.gold = gold
        self.current_direction = Direction.UP
        self.list_of_bots = list_of_bots
class MainPlayer(MovableCharacter):
    """The player-controlled character; behaviour inherited unchanged."""
    pass
class EnemyPlayer(MovableCharacter):
    """An AI-controlled opponent."""
    def move(self,speed = 2):
        """Deliberate no-op override: enemies stand still until AI exists."""
        pass # for now.. but implement ai
class Direction:
    """Facing/movement directions, clockwise from UP.

    Plain int constants (not an enum) so they can key directly into
    MovableCharacter.directional_sprites.
    """
    UP = 0
    RIGHT = 1
    DOWN = 2
    LEFT = 3
class Element:
    """Elemental affinities for golems and spells; NONE means unaligned."""
    NONE = 0
    FIRE = 1
    EARTH = 2
    WATER = 3
    AIR = 4
#Z- renamed these for code readability, feel free to call them anything in game
class GenericBot:
    """A battle golem: stats, element, known spells and a queue of
    CodeBlocks scripting its behaviour in combat.

    Bug fix: spell_xp, list_of_spells and queue_of_code_blocks previously
    used mutable default arguments (``dict()``, ``[]``, ``list()``).
    Python evaluates defaults once at definition time, so every bot built
    with the defaults shared the same dict/list objects. Each default is
    now a ``None`` sentinel replaced with a fresh container per instance.
    """
    def __init__(self, name, sprite, speed=10, health=100, mana=100,
                 element=Element.NONE, spell_xp=None, list_of_spells=None,
                 queue_of_code_blocks=None, pOwned=False):
        # CodeBlocks queued to run on this bot's turn.
        self.queue_of_code_blocks = list() if queue_of_code_blocks is None else queue_of_code_blocks
        self.name = name
        self.sprite = sprite
        self.baseSpeed = speed    # permanent stat
        self.speed = speed        # current (possibly modified) value
        self.maxHealth = health
        self.health = health
        self.maxMana = mana
        self.mana = mana
        self.element = element
        self.spell_xp = dict() if spell_xp is None else spell_xp  # spell -> xp earned
        self.list_of_spells = [] if list_of_spells is None else list_of_spells
        self.pOwned = pOwned  # Boolean, 'player owned'
        self.location = None  # Gets set once battle begins

    # Let's you change what the string representation of this class is
    def __repr__(self):
        return "Health: {} Mana: {} Speed: {}".format(str(self.health), str(self.mana), str(self.speed))
class Spells:
    """A castable spell: mana cost, base power, per-element multipliers
    and a hit chance.

    Bug fix: ``multiplier=dict()`` was a mutable default argument shared
    between every spell created with the default; it is now a ``None``
    sentinel replaced with a fresh dict per instance.
    """
    def __init__(self, name, mana_cost=25, attack_power=5, multiplier=None, accuracy=0.5):
        self.name = name
        self.mana_cost = mana_cost
        self.attack_power = attack_power
        # Presumably Element -> damage factor — confirm against battle code.
        self.multiplier = dict() if multiplier is None else multiplier
        self.accuracy = accuracy  # hit chance in [0, 1]
class CodeBlock(object):
    """Base class for the visual-programming blocks a golem executes.

    Subclasses draw themselves with pygame and implement the contract
    below. In render(), mode 0 draws the insertion arrow at selIndex,
    mode 1 the selection outline, mode 2 the deletion outline (as seen
    in the subclasses in this module).
    """
    def __init__(self):
        self.font = pygame.font.SysFont("comicsansms", 24)
    # Renders the Block to the screen. Should return the total height of the block.
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        raise NotImplementedError
    # Gets the screen height of the block
    def getRenderHeight(self):
        raise NotImplementedError
    # Gets the count of arrow positions within this block (typically only the one after it)
    def getArrowCount(self):
        return 1
    # Gets the total block count within this block (typically just the one)
    def getBlockCount(self):
        return 1
    # Executes the Block, taking into consideration whether or not this is a calc-mana-cost-only dry run. Should return mana spent in total, or a tuple of (mana total, flag saying 'had hit an End Turn block').
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        pass
    # Inserts a new Block somewhere in the listing. True if successful, false if failed; block containers should implement something other than "always fail"
    def insert(self, blockToInsert, arrowIndex):
        return False
    # Removes the Block at the specific index inside this Block. True if successful, false if failed; block containers should implement something other than "always fail"
    def remove(self, index):
        return False
    # Fetch the Block at a given index.
    def fetch(self, index):
        if(index == 0):
            return self
        else:
            return None
# Comment Block. Does nothing, handy for in-code notes.
class CommentBlock(CodeBlock):
    """Comment Block. Does nothing at runtime; handy for in-code notes."""
    def __init__(self):
        super(CommentBlock, self).__init__()
        self.comment = "";
        self.cwidth, self.cheight = self.font.size("# ")
        self.fontRender = self.font.render("# ", 0, (0, 0, 0), (190, 255, 190))
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        """Draw the green comment bar; see CodeBlock for the mode values."""
        if(mode == 0 and selIndex == 0):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
        pygame.draw.rect(surface, (190, 255, 190), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
        surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
        if(mode == 1 and selIndex == 0):
            pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        if(mode == 2 and selIndex == 0):
            pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        return self.cheight + 8
    def getRenderHeight(self):
        return self.cheight + 8
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        return 0 # Comment blocks do nothing
    def setComment(self, newComment):
        """Change the comment text and re-render the block's label."""
        self.comment = newComment
        self.cwidth, self.cheight = self.font.size("# " + self.comment)
        self.fontRender = self.font.render("# " + self.comment, 0, (0, 0, 0), (190, 255, 190))
# Say Block. Causes the Golem to say a bit of text.
class SayBlock(CodeBlock):
    """Say Block. Makes the golem speak its message via the battle callback.

    Fixes (per the CodeBlock contract): execute() now returns 0 — the
    block costs no mana — and stays silent during a dry run, which the
    base class documents as a calc-mana-cost-only pass.
    """
    def __init__(self):
        super(SayBlock, self).__init__()
        self.message = ""
        self.cwidth, self.cheight = self.font.size("Say \"\"")
        self.fontRender = self.font.render("Say \"\"", 0, (0, 0, 0), (205, 205, 205))
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        """Draw the grey say bar; see CodeBlock for the mode values."""
        if(mode == 0 and selIndex == 0):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
        pygame.draw.rect(surface, (205, 205, 205), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
        surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
        if(mode == 1 and selIndex == 0):
            pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        if(mode == 2 and selIndex == 0):
            pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        return self.cheight + 8
    def getRenderHeight(self):
        return self.cheight + 8
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        if not dryRun:
            callback(ownerBot.name + " says: " + self.message)
        return 0  # speaking is free
    def setMessage(self, newMessage):
        """Change the spoken message and re-render the block's label."""
        self.message = newMessage
        self.cwidth, self.cheight = self.font.size("Say \"" + self.message + "\"")
        self.fontRender = self.font.render("Say \"" + self.message + "\"", 0, (0, 0, 0), (205, 205, 205))
# While Block. Performs a task while a condition remains true.
class WhileBlock(CodeBlock):
    """Container block (rendered as "Do Forever") holding child blocks.

    Bug fix: getRenderHeight() referenced ``self.trueBlocks[i]`` (copied
    from the If-blocks) — WhileBlock has no ``trueBlocks`` and ``i`` was
    undefined, so any call raised. It now sums the children it owns.

    NOTE(review): despite the "Do Forever" label, execute() runs the
    children exactly once per call and ignores dryRun/mana accounting —
    confirm the intended battle semantics before relying on either.
    """
    def __init__(self):
        super(WhileBlock, self).__init__()
        self.blocks = []  # child blocks, executed in order
        _, self.cheight = self.font.size("WAAA")  # only the line height is needed
        self.fontRender = self.font.render("Do Forever", 0, (0, 0, 0), (255, 255, 190))
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        """Draw header bar, indented children, footer bar and left rail;
        see CodeBlock for the mode values. Returns total height."""
        if(mode == 0 and selIndex == 0):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + 196 + 26, yOffset), (xOffset + 196 + 26, yOffset + 6), (xOffset + 196 + 16, yOffset + 1)])
        pygame.draw.rect(surface, (255, 255, 190), (xOffset, yOffset + 1, 196, self.cheight + 6))
        heightsum = self.cheight + 8
        selCount = 1
        for block in self.blocks:
            heightsum += block.render(surface, xOffset + 8, yOffset + heightsum, selIndex - selCount, mode)
            if(mode == 0):
                selCount += block.getArrowCount()
            else:
                selCount += block.getBlockCount()
        if(mode == 0 and selIndex == selCount):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + heightsum + 1), (xOffset - 10, yOffset + 6 + heightsum), (xOffset - 10, yOffset + heightsum), (xOffset + 196 + 26, yOffset + heightsum), (xOffset + 196 + 26, yOffset + 6 + heightsum), (xOffset + 196 + 16, yOffset + heightsum + 1)])
        pygame.draw.rect(surface, (255, 255, 190), (xOffset, yOffset + 1 + heightsum, 196, self.cheight + 6))
        pygame.draw.rect(surface, (255, 255, 190), (xOffset, yOffset + 1, 6, heightsum + self.cheight + 8 - 2))
        surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
        if(mode == 1 and selIndex == 0):
            pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, 196, heightsum + self.cheight + 6), 2)
        if(mode == 2 and selIndex == 0):
            pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, 196, heightsum + self.cheight + 6), 2)
        return heightsum + self.cheight + 8
    def getArrowCount(self):
        # Own arrow + the one before the footer, plus children's arrows.
        rtn = 2
        for block in self.blocks:
            rtn += block.getArrowCount()
        return rtn
    def getBlockCount(self):
        rtn = 1
        for block in self.blocks:
            rtn += block.getBlockCount()
        return rtn
    def getRenderHeight(self):
        # Fixed: previously read self.trueBlocks[i] (undefined on this class).
        heightsum = self.cheight + 8
        for block in self.blocks:
            heightsum += block.getRenderHeight()
        return heightsum + self.cheight + 8
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        for block in self.blocks:
            block.execute(ownerBot, opponentBot, callback)
    def insert(self, blockToInsert, arrowIndex):
        if(arrowIndex == 0): # Insert before current block
            return False # Should have been handled by calling function
        elif(arrowIndex == 1): # Insert before rest of list
            self.blocks = [blockToInsert] + self.blocks
            return True
        elif(arrowIndex == self.getArrowCount() - 1): # Insert at end of list
            self.blocks.append(blockToInsert)
            return True
        else: # Insert into middle of list
            currArrowIndex = arrowIndex - 1
            for i in range(0, len(self.blocks)):
                if(currArrowIndex == 0):
                    self.blocks.insert(i, blockToInsert)
                    return True
                elif(self.blocks[i].insert(blockToInsert, currArrowIndex)):
                    return True
                else:
                    currArrowIndex -= self.blocks[i].getArrowCount()
            return False
    def remove(self, index):
        if(index == 0): # Remove current block
            return False # Should have been handled by calling function
        else:
            currIndex = index - 1
            for i in range(0, len(self.blocks)):
                if(currIndex == 0):
                    del self.blocks[i]
                    return True
                elif(self.blocks[i].remove(currIndex)):
                    return True
                else:
                    currIndex -= self.blocks[i].getBlockCount()
            return False
    def fetch(self, index):
        if(index == 0):
            return self
        else:
            currIndex = index - 1
            for i in range(0, len(self.blocks)):
                if(currIndex == 0):
                    return self.blocks[i]
                rtn = self.blocks[i].fetch(currIndex)
                if(rtn != None): return rtn
                currIndex -= self.blocks[i].getBlockCount()
            return None
# For Block. Performs a task on each Golem being faced. Do not implement, only doing 1v1 battles atm.
#class ForBlock(CodeBlock):
# def __init__(self):
# pass
# def render(self, surface, xOffset = 0, yOffset = 0):
# return 0
# def execute(self, ownerBot, opponentBot, dryRun = False):
# return 0
# End Turn Block. Immediately stops the Golem's execution, and ends their turn.
class EndTurnBlock(CodeBlock):
    """End Turn Block. Immediately stops execution and ends the golem's turn."""
    def __init__(self):
        super(EndTurnBlock, self).__init__()
        self.cwidth, self.cheight = self.font.size("End my Turn")
        self.fontRender = self.font.render("End my Turn", 0, (0, 0, 0), (255, 64, 64))
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        """Draw the red end-turn bar; see CodeBlock for the mode values."""
        if(mode == 0 and selIndex == 0):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
        pygame.draw.rect(surface, (255, 64, 64), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
        surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
        if(mode == 1 and selIndex == 0):
            pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        if(mode == 2 and selIndex == 0):
            pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
        return self.cheight + 8
    def getRenderHeight(self):
        return self.cheight + 8
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        # (mana spent, hit an End Turn block) per the CodeBlock contract.
        return (0, True)
# Branch Block, Mana. Allows for some decision making based on how much Mana a Golem has in reserve.
class IfManaBlock(CodeBlock):
    """Branch Block, Mana: runs trueBlocks when the owner's mana exceeds
    the threshold, otherwise falseBlocks.

    Bug fixes:
    - getRenderHeight() referenced bare ``trueBlocks``/``falseBlocks``
      (missing ``self.``), raising NameError whenever called.
    - fetch() indexed ``self.falseBlocks`` inside the loop that walks
      ``self.trueBlocks``, returning/recursing into the wrong branch
      (and raising IndexError when the lists differ in length).

    NOTE(review): execute() ignores dryRun and returns no mana total —
    confirm against the CodeBlock contract before relying on it.
    """
    def __init__(self):
        super(IfManaBlock, self).__init__()
        self.mthresh = 0          # mana threshold compared against ownerBot.mana
        self.trueBlocks = []      # run when mana > mthresh
        self.falseBlocks = []     # run otherwise
        self.cwidth, self.cheight = self.font.size("If I have more than 999999 Mana")
        self.fontRender = self.font.render("If I have more than 0 Mana", 0, (0, 0, 0), (128, 205, 255))
        self.elseRender = self.font.render("Otherwise", 0, (0, 0, 0), (128, 205, 255))
    def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
        """Draw the if-bar, true branch, "Otherwise" bar, false branch and
        left rail; see CodeBlock for the mode values. Returns total height."""
        if(mode == 0 and selIndex == 0):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
        pygame.draw.rect(surface, (128, 205, 255), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
        heightsum = self.cheight + 8
        selCount = 1
        for i in range(0, len(self.trueBlocks)):
            heightsum += self.trueBlocks[i].render(surface, xOffset + 8, yOffset + heightsum, selIndex - selCount, mode)
            if(mode == 0):
                selCount += self.trueBlocks[i].getArrowCount()
            else:
                selCount += self.trueBlocks[i].getBlockCount()
        if(mode == 0 and selIndex == selCount):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + heightsum + 1), (xOffset - 10, yOffset + 6 + heightsum), (xOffset - 10, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + 6 + heightsum), (xOffset + self.cwidth + 16, yOffset + heightsum + 1)])
        if(mode == 0):
            selCount += 1
        pygame.draw.rect(surface, (128, 205, 255), (xOffset, yOffset + 1 + heightsum, self.cwidth + 16, self.cheight + 6))
        secondBlitHeight = heightsum
        heightsum += self.cheight + 8
        for i in range(0, len(self.falseBlocks)):
            heightsum += self.falseBlocks[i].render(surface, xOffset + 8, yOffset + heightsum, selIndex - selCount, mode)
            if(mode == 0):
                selCount += self.falseBlocks[i].getArrowCount()
            else:
                selCount += self.falseBlocks[i].getBlockCount()
        if(mode == 0 and selIndex == selCount):
            pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + heightsum + 1), (xOffset - 10, yOffset + 6 + heightsum), (xOffset - 10, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + 6 + heightsum), (xOffset + self.cwidth + 16, yOffset + heightsum + 1)])
        pygame.draw.rect(surface, (128, 205, 255), (xOffset, yOffset + 1 + heightsum, self.cwidth + 16, self.cheight + 6))
        pygame.draw.rect(surface, (128, 205, 255), (xOffset, yOffset + 1, 6, heightsum + self.cheight + 8 - 2))
        surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
        surface.blit(self.elseRender, (xOffset + 4, yOffset + secondBlitHeight + 4))
        if(mode == 1 and selIndex == 0):
            pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, heightsum + self.cheight + 6), 2)
        if(mode == 2 and selIndex == 0):
            pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, heightsum + self.cheight + 6), 2)
        return heightsum + self.cheight + 8
    def getArrowCount(self):
        # Leading arrow + one before "Otherwise" + one before the footer.
        rtn = 3
        for block in self.trueBlocks:
            rtn += block.getArrowCount()
        for block in self.falseBlocks:
            rtn += block.getArrowCount()
        return rtn
    def getBlockCount(self):
        rtn = 1
        for block in self.trueBlocks:
            rtn += block.getBlockCount()
        for block in self.falseBlocks:
            rtn += block.getBlockCount()
        return rtn
    def getRenderHeight(self):
        # Fixed: previously accessed bare trueBlocks/falseBlocks (NameError).
        heightsum = self.cheight + 8
        for i in range(0, len(self.trueBlocks)):
            heightsum += self.trueBlocks[i].getRenderHeight()
        heightsum += self.cheight + 8
        for i in range(0, len(self.falseBlocks)):
            heightsum += self.falseBlocks[i].getRenderHeight()
        return heightsum + self.cheight + 8
    def execute(self, ownerBot, opponentBot, callback, dryRun = False):
        if ownerBot.mana > self.mthresh:
            for t_block in self.trueBlocks:
                t_block.execute(ownerBot, opponentBot, callback)
        else:
            for f_block in self.falseBlocks:
                f_block.execute(ownerBot, opponentBot, callback)
    def insert(self, blockToInsert, arrowIndex):
        if(arrowIndex == 0): # Insert before current block
            return False # Should have been handled by calling function
        else: # Insert somewhere inside one of the branches
            currArrowIndex = arrowIndex - 1
            for i in range(0, len(self.trueBlocks)):
                if(currArrowIndex == 0):
                    self.trueBlocks.insert(i, blockToInsert)
                    return True
                elif(self.trueBlocks[i].insert(blockToInsert, currArrowIndex)):
                    return True
                else:
                    currArrowIndex -= self.trueBlocks[i].getArrowCount()
            if(currArrowIndex == 0): # Insert at end of TrueBlocks
                self.trueBlocks.append(blockToInsert)
                return True
            currArrowIndex -= 1  # skip the arrow on the "Otherwise" row
            for i in range(0, len(self.falseBlocks)):
                if(currArrowIndex == 0):
                    self.falseBlocks.insert(i, blockToInsert)
                    return True
                elif(self.falseBlocks[i].insert(blockToInsert, currArrowIndex)):
                    return True
                else:
                    currArrowIndex -= self.falseBlocks[i].getArrowCount()
            if(currArrowIndex == 0): # Insert at end of FalseBlocks
                self.falseBlocks.append(blockToInsert)
                return True
            return False
    def remove(self, index):
        if(index == 0): # Remove current block
            return False # Should have been handled by calling function
        else:
            currIndex = index - 1
            for i in range(0, len(self.trueBlocks)):
                if(currIndex == 0):
                    del self.trueBlocks[i]
                    return True
                elif(self.trueBlocks[i].remove(currIndex)):
                    return True
                else:
                    currIndex -= self.trueBlocks[i].getBlockCount()
            for i in range(0, len(self.falseBlocks)):
                if(currIndex == 0):
                    del self.falseBlocks[i]
                    return True
                elif(self.falseBlocks[i].remove(currIndex)):
                    return True
                else:
                    currIndex -= self.falseBlocks[i].getBlockCount()
            return False
    def fetch(self, index):
        if(index == 0):
            return self
        else:
            currIndex = index - 1
            # Fixed: this loop previously indexed self.falseBlocks.
            for i in range(0, len(self.trueBlocks)):
                if(currIndex == 0):
                    return self.trueBlocks[i]
                rtn = self.trueBlocks[i].fetch(currIndex)
                if(rtn != None): return rtn
                currIndex -= self.trueBlocks[i].getBlockCount()
            for i in range(0, len(self.falseBlocks)):
                if(currIndex == 0):
                    return self.falseBlocks[i]
                rtn = self.falseBlocks[i].fetch(currIndex)
                if(rtn != None): return rtn
                currIndex -= self.falseBlocks[i].getBlockCount()
            return None
    def setThresh(self, newThresh):
        """Change the mana threshold and re-render the block's label."""
        self.mthresh = newThresh
        self.fontRender = self.font.render("If I have more than " + str(self.mthresh) + " Mana", 0, (0, 0, 0), (128, 205, 255))
# Branch Block, Health. Allows for some decision making based on how much Health a Golem has.
class IfOwnHealthBlock(CodeBlock):
    def __init__(self):
        """Set up an empty health-threshold branch block (pink theme).

        trueBlocks presumably run when the owner's health is below
        hthresh, falseBlocks otherwise — mirrors IfManaBlock; confirm
        against execute() (not visible here).
        """
        super(IfOwnHealthBlock, self).__init__()
        self.hthresh = 0          # health threshold
        self.trueBlocks = []
        self.falseBlocks = []
        self.cwidth, self.cheight = self.font.size("If I have less than 999999 Health")
        self.fontRender = self.font.render("If I have less than 0 Health", 0, (0, 0, 0), (255, 200, 200))
        self.elseRender = self.font.render("Otherwise", 0, (0, 0, 0), (255, 200, 200))
def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
if(mode == 0 and selIndex == 0):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
pygame.draw.rect(surface, (255, 200, 200), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
heightsum = self.cheight + 8
selCount = 1
for i in range(0, len(self.trueBlocks)):
heightsum += self.trueBlocks[i].render(surface, xOffset + 8, yOffset + heightsum, selIndex - selCount, mode)
if(mode == 0):
selCount += self.trueBlocks[i].getArrowCount()
else:
selCount += self.trueBlocks[i].getBlockCount()
if(mode == 0 and selIndex == selCount):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + heightsum + 1), (xOffset - 10, yOffset + 6 + heightsum), (xOffset - 10, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + 6 + heightsum), (xOffset + self.cwidth + 16, yOffset + heightsum + 1)])
if(mode == 0):
selCount += 1
pygame.draw.rect(surface, (255, 200, 200), (xOffset, yOffset + 1 + heightsum, self.cwidth + 16, self.cheight + 6))
secondBlitHeight = heightsum
heightsum += self.cheight + 8
for i in range(0, len(self.falseBlocks)):
heightsum += self.falseBlocks[i].render(surface, xOffset + 8, yOffset + heightsum, selIndex - selCount, mode)
if(mode == 0):
selCount += self.falseBlocks[i].getArrowCount()
else:
selCount += self.falseBlocks[i].getBlockCount()
if(mode == 0 and selIndex == selCount):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + heightsum + 1), (xOffset - 10, yOffset + 6 + heightsum), (xOffset - 10, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + heightsum), (xOffset + self.cwidth + 26, yOffset + 6 + heightsum), (xOffset + self.cwidth + 16, yOffset + heightsum + 1)])
pygame.draw.rect(surface, (255, 200, 200), (xOffset, yOffset + 1 + heightsum, self.cwidth + 16, self.cheight + 6))
pygame.draw.rect(surface, (255, 200, 200), (xOffset, yOffset + 1, 6, heightsum + self.cheight + 6))
surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
surface.blit(self.elseRender, (xOffset + 4, yOffset + secondBlitHeight + 4))
if(mode == 1 and selIndex == 0):
pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, heightsum + self.cheight + 6), 2)
if(mode == 2 and selIndex == 0):
pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, heightsum + self.cheight + 6), 2)
return heightsum + self.cheight + 8
def getArrowCount(self):
rtn = 3
for block in self.trueBlocks:
rtn += block.getArrowCount()
for block in self.falseBlocks:
rtn += block.getArrowCount()
return rtn
def getBlockCount(self):
rtn = 1
for block in self.trueBlocks:
rtn += block.getBlockCount()
for block in self.falseBlocks:
rtn += block.getBlockCount()
return rtn
def getRenderHeight(self):
heightsum = self.cheight + 8
for i in range(0, len(self.trueBlocks)):
heightsum += self.trueBlocks[i].getRenderHeight()
heightsum += self.cheight + 8
for i in range(0, len(self.falseBlocks)):
heightsum += self.falseBlocks[i].getRenderHeight()
return heightsum + self.cheight + 8
def execute(self, ownerBot, opponentBot, callback,dryRun = False):
if ownerBot.health < self.hthresh:
for t_block in self.trueBlocks:
t_block.execute(ownerBot, opponentBot,callback)
else:
for f_block in self.falseBlocks:
f_block.execute(ownerBot, opponentBot,callback)
def insert(self, blockToInsert, arrowIndex):
if(arrowIndex == 0): # Insert before current block
return False # Should have been handled by calling function
else: # Insert into middle of list
currArrowIndex = arrowIndex - 1;
for i in range(0, len(self.trueBlocks)):
if(currArrowIndex == 0):
self.trueBlocks.insert(i, blockToInsert)
return True
elif(self.trueBlocks[i].insert(blockToInsert, currArrowIndex)):
return True
else:
currArrowIndex -= self.trueBlocks[i].getArrowCount()
if(currArrowIndex == 0): # Insert at end of TrueBlocks
self.trueBlocks.append(blockToInsert)
return True
currArrowIndex -= 1
for i in range(0, len(self.falseBlocks)):
if(currArrowIndex == 0):
self.falseBlocks.insert(i, blockToInsert)
return True
elif(self.falseBlocks[i].insert(blockToInsert, currArrowIndex)):
return True
else:
currArrowIndex -= self.falseBlocks[i].getArrowCount()
if(currArrowIndex == 0): # Insert at end of FalseBlocks
self.falseBlocks.append(blockToInsert)
return True
return False
def remove(self, index):
if(index == 0): # Remove current block
return False # Should have been handled by calling function
else:
currIndex = index - 1;
for i in range(0, len(self.trueBlocks)):
if(currIndex == 0):
del self.trueBlocks[i]
return True
elif(self.trueBlocks[i].remove(currIndex)):
return True
else:
currIndex -= self.trueBlocks[i].getBlockCount()
for i in range(0, len(self.falseBlocks)):
if(currIndex == 0):
del self.falseBlocks[i]
return True
elif(self.falseBlocks[i].remove(currIndex)):
return True
else:
currIndex -= self.falseBlocks[i].getBlockCount()
return False
def fetch(self, index):
if(index == 0):
return self
else:
currIndex = index - 1;
for i in range(0, len(self.trueBlocks)):
if(currIndex == 0):
return self.falseBlocks[i]
rtn = self.falseBlocks[i].fetch(currIndex)
if(rtn != None): return rtn
currIndex -= self.falseBlocks[i].getBlockCount()
for i in range(0, len(self.falseBlocks)):
if(currIndex == 0):
return self.falseBlocks[i]
rtn = self.falseBlocks[i].fetch(currIndex)
if(rtn != None): return rtn
currIndex -= self.falseBlocks[i].getBlockCount()
return None
def setThresh(self, newThresh):
self.hthresh = newThresh
self.fontRender = self.font.render("If I have less than " + str(self.hthresh) + " Health", 0, (0, 0, 0), (255, 200, 200))
# Heal Block. Causes the Golem to cast the Heal spell, restoring a certain amount of health not controlled by the program.
class HealBlock(CodeBlock):
def __init__(self, heal_amount, mana_cost):
super(HealBlock, self).__init__()
self.cwidth, self.cheight = self.font.size("Cast Heal on myself")
self.mana_cost = mana_cost
self.heal_amount = heal_amount
self.fontRender = self.font.render("Cast Heal on myself", 0, (0, 0, 0), (255, 200, 200))
def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
if(mode == 0 and selIndex == 0):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
pygame.draw.rect(surface, (255, 200, 200), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
if(mode == 1 and selIndex == 0):
pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
if(mode == 2 and selIndex == 0):
pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
return self.cheight + 8
def getRenderHeight(self):
return self.cheight + 8
def execute(self, ownerBot, opponentBot, callback, dryRun = False):
if dryRun:
return (ownerBot.mana - self.mana_cost, False)
callback(ownerBot.name + " healed " + opponentBot.name + " Cost: " + str(self.mana_cost) + " Amount: " + str(self.heal_amount))
ownerBot.mana -= self.mana_cost
ownerBot.health = (ownerBot.health + self.heal_amount)
if ownerBot.health > ownerBot.maxHealth: ownerBot.health = ownerBot.maxHealth
# Fireball Block. Causes the Golem to cast Fireball, dealing ignis damage on an opponent.
class FireballBlock(CodeBlock):
def __init__(self, damage_amount, mana_cost):
super(FireballBlock, self).__init__()
self.cwidth, self.cheight = self.font.size("Cast Fireball at the enemy")
self.mana_cost = mana_cost
self.damage_amount = damage_amount
self.fontRender = self.font.render("Cast Fireball at the enemy", 0, (255, 255, 255), (128, 0, 0))
def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
if(mode == 0 and selIndex == 0):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
pygame.draw.rect(surface, (128, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
if(mode == 1 and selIndex == 0):
pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
if(mode == 2 and selIndex == 0):
pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
return self.cheight + 8
def getRenderHeight(self):
return self.cheight + 8
def execute(self, ownerBot, opponentBot, callback, dryRun = False):
if dryRun:
return (ownerBot.mana - self.mana_cost, False)
callback(ownerBot.name + " hit " + opponentBot.name + "w/ a Fireball!" + " Cost: " + str(self.mana_cost) + " Damage: " + str(self.damage_amount))
ownerBot.mana -= self.mana_cost
opponentBot.health -= self.damage_amount
# Moss Leech Block. Causes the Golem to cast Moss Leech, dealing natura damage on an opponent.
class MossLeechBlock(CodeBlock):
def __init__(self, damage_amount, mana_cost):
super(MossLeechBlock, self).__init__()
self.mana_cost = mana_cost
self.damage_amount = damage_amount
self.cwidth, self.cheight = self.font.size("Cast Moss Leech at the enemy")
self.fontRender = self.font.render("Cast Moss Leech at the enemy", 0, (255, 255, 255), (0, 128, 0))
def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
if(mode == 0 and selIndex == 0):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
pygame.draw.rect(surface, (0, 128, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
if(mode == 1 and selIndex == 0):
pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
if(mode == 2 and selIndex == 0):
pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
return self.cheight + 8
def getRenderHeight(self):
return self.cheight + 8
def execute(self, ownerBot, opponentBot, callback, dryRun = False):
if dryRun:
return (ownerBot.mana - self.mana_cost, False)
callback(ownerBot.name + " moss leeched " + opponentBot.name + "!" + " Cost: " + str(self.mana_cost) + " Damage: " + str(self.damage_amount))
ownerBot.mana -= self.mana_cost
opponentBot.health -= self.damage_amount
# Douse Block. Causes the Golem to cast Douse, dealing aqua damage on an opponent.
class DouseBlock(CodeBlock):
def __init__(self, damage_amount, mana_cost):
super(DouseBlock, self).__init__()
self.cwidth, self.cheight = self.font.size("Cast Douse at the enemy")
self.fontRender = self.font.render("Cast Douse at the enemy", 0, (255, 255, 255), (0, 0, 255))
self.damage_amount = damage_amount
self.mana_cost = mana_cost
def render(self, surface, xOffset = 0, yOffset = 0, selIndex = -1, mode = -1):
if(mode == 0 and selIndex == 0):
pygame.draw.polygon(surface, (255, 255, 255), [(xOffset, yOffset + 1), (xOffset - 10, yOffset + 6), (xOffset - 10, yOffset), (xOffset + self.cwidth + 26, yOffset), (xOffset + self.cwidth + 26, yOffset + 6), (xOffset + self.cwidth + 16, yOffset + 1)])
pygame.draw.rect(surface, (0, 0, 255), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6))
surface.blit(self.fontRender, (xOffset + 4, yOffset + 4))
if(mode == 1 and selIndex == 0):
pygame.draw.rect(surface, (128, 110, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
if(mode == 2 and selIndex == 0):
pygame.draw.rect(surface, (255, 0, 0), (xOffset, yOffset + 1, self.cwidth + 16, self.cheight + 6), 2)
return self.cheight + 8
def getRenderHeight(self):
return self.cheight + 8
def execute(self, ownerBot, opponentBot, callback, dryRun = False):
if dryRun:
return (ownerBot.mana - self.mana_cost, False)
callback(ownerBot.name + " doused " + opponentBot.name + "!" + " Cost: " + str(self.mana_cost) + " Damage: " + str(self.damage_amount))
ownerBot.mana -= self.mana_cost
opponentBot.health -= self.damage_amount
| mit | -1,776,726,705,024,889,000 | 52.185083 | 334 | 0.583727 | false | 3.531041 | false | false | false |
aploium/zmirror | more_configs/config_google_and_zhwikipedia.py | 3 | 8546 | # coding=utf-8
# 这是为Google和中文维基(无缝整合)镜像配置的示例配置文件
#
# 使用方法:
# 1. 复制本文件到 zmirror 根目录(wsgi.py所在目录), 并重命名为 config.py
# 2. 修改 my_host_name 为你自己的域名
#
# 各项设置选项的详细介绍请看 config_default.py 中对应的部分
# 本配置文件假定你的服务器本身在墙外
# 如果服务器本身在墙内(或者在本地环境下测试, 请修改`Proxy Settings`中的设置
#
# 由于google搜索结果经常会出现中文维基, 所以顺便把它也加入了.
# google跟中文维基之间使用了本程序的镜像隔离功能, 可以保证中文维基站的正常使用
#
# 本配置文件试图还原出一个功能完整的google.
# 但是由于程序本身所限, 还是不能[完整]镜像过来整个[google站群]
# 在后续版本会不断增加可用的网站
#
# 以下google服务完全可用:
# google网页搜索/学术/图片/新闻/图书/视频(搜索)/财经/APP搜索/翻译/网页快照/...
# google搜索与中文维基百科无缝结合
# 以下服务部分可用:
# gg地图(地图可看, 左边栏显示不正常)/G+(不能登录)
# 以下服务暂不可用(因为目前无法解决登录的问题):
# 所有需要登录的东西, docs之类的
#
# Github: https://github.com/aploium/zmirror
# ############## Local Domain Settings ##############
my_host_name = '127.0.0.1'
my_host_scheme = 'http://'
my_host_port = None # None表示使用默认端口, 可以设置成非标准端口, 比如 81
# ############## Target Domain Settings ##############
target_domain = 'www.google.com.hk'
target_scheme = 'https://'
# 这里面大部分域名都是通过 `enable_automatic_domains_whitelist` 自动采集的, 我只是把它们复制黏贴到了这里
# 实际镜像一个新的站时, 手动只需要添加很少的几个域名就可以了.
# 自动采集(如果开启的话)会不断告诉你新域名
external_domains = (
'www.google.com',
'webcache.googleusercontent.com', # Google网页快照
'images.google.com.hk',
'images.google.com',
'apis.google.com',
# Google学术
'scholar.google.com.hk',
'scholar.google.com',
# 中文维基百科
'zh.wikipedia.org',
'zh.m.wikipedia.org',
'upload.wikipedia.org',
'meta.wikimedia.org',
'login.wikimedia.org',
# Google静态资源域名
'ssl.gstatic.com',
'www.gstatic.com',
'encrypted-tbn0.gstatic.com',
'encrypted-tbn1.gstatic.com',
'encrypted-tbn2.gstatic.com',
'encrypted-tbn3.gstatic.com',
'csi.gstatic.com',
'fonts.googleapis.com',
# Google登陆支持
'accounts.google.com',
'accounts.youtube.com',
'accounts.google.com.hk',
'myaccount.google.com',
'myaccount.google.com.hk',
'ajax.googleapis.com',
'translate.google.com',
'translate.google.com.hk',
'video.google.com.hk',
'books.google.com',
'cloud.google.com',
'analytics.google.com',
'security.google.com',
'investor.google.com',
'families.google.com',
'clients1.google.com',
'clients2.google.com',
'clients3.google.com',
'clients4.google.com',
'clients5.google.com',
'talkgadget.google.com',
'news.google.com.hk',
'news.google.com',
'support.google.com',
'docs.google.com',
'books.google.com.hk',
'chrome.google.com',
'profiles.google.com',
'feedburner.google.com',
'cse.google.com',
'sites.google.com',
'productforums.google.com',
'encrypted.google.com',
'm.google.com',
'research.google.com',
'maps.google.com.hk',
'hangouts.google.com',
'developers.google.com',
'get.google.com',
'afp.google.com',
'groups.google.com',
'payments.google.com',
'photos.google.com',
'play.google.com',
'mail.google.com',
'code.google.com',
'tools.google.com',
'drive.google.com',
'script.google.com',
'goto.google.com',
'calendar.google.com',
'wallet.google.com',
'privacy.google.com',
'ipv4.google.com',
'video.google.com',
'store.google.com',
'fi.google.com',
'apps.google.com',
'events.google.com',
'notifications.google.com',
'plus.google.com',
'dl.google.com',
'manifest.googlevideo.com',
'storage.googleapis.com',
'gg.google.com',
'scholar.googleusercontent.com',
'translate.googleusercontent.com',
't0.gstatic.com',
't1.gstatic.com',
't2.gstatic.com',
't3.gstatic.com',
's-v6exp1-ds.metric.gstatic.com',
'ci4.googleusercontent.com',
'gp3.googleusercontent.com',
'accounts.gstatic.com',
# For Google Map (optional)
'maps-api-ssl.google.com',
'maps.gstatic.com',
'maps.google.com',
'fonts.gstatic.com',
'lh1.googleusercontent.com',
'lh2.googleusercontent.com',
'lh3.googleusercontent.com',
'lh4.googleusercontent.com',
'lh5.googleusercontent.com',
'lh6.googleusercontent.com',
# 'upload.wikimedia.org',
'id.google.com.hk',
'id.google.com',
# misc
'inputtools.google.com',
'inbox.google.com',
'-v6exp3-v4.metric.gstatic.com',
'-v6exp3-ds.metric.gstatic.com',
'if-v6exp3-v4.metric.gstatic.com',
'public.talk.google.com',
'ie.talkgadget.google.com',
'client-channel.google.com',
'maps.googleapis.com',
'people-pa.clients6.google.com',
'myphonenumbers-pa.googleapis.com',
'clients6.google.com',
'staging.talkgadget.google.com',
'preprod.hangouts.sandbox.google.com',
'dev-hangoutssearch-pa-googleapis.sandbox.google.com',
'picasaweb.google.com',
'schemas.google.com',
'contact.talk.google.com',
'groupchat.google.com',
'friendconnectchat.google.com',
'muvc.google.com',
'bot.talk.google.com',
'prom.corp.google.com',
'stun.l.google.com',
'stun1.l.google.com',
'stun2.l.google.com',
'stun3.l.google.com',
'stun4.l.google.com',
'onetoday.google.com',
'plus.googleapis.com',
'youtube.googleapis.com',
'picasa.google.com',
"www-onepick-opensocial.googleusercontent.com",
'plus.sandbox.google.com',
# gmail misc
'gmail.com',
'www.gmail.com',
'chatenabled.mail.google.com',
'filetransferenabled.mail.google.com',
'gmail.google.com',
'googlemail.l.google.com',
'isolated.mail.google.com',
'm.gmail.com',
'm.googlemail.com',
'mail-settings.google.com',
'm.android.com',
)
# 强制所有Google站点使用HTTPS
force_https_domains = 'ALL'
# 自动动态添加域名
enable_automatic_domains_whitelist = True
domains_whitelist_auto_add_glob_list = (
'*.google.com', '*.gstatic.com', '*.google.com.hk', '*.googleapis.com', "*.googleusercontent.com",)
# ############## Proxy Settings ##############
# 如果你在墙内使用本配置文件, 请指定一个墙外的http代理
is_use_proxy = False
# 代理的格式及SOCKS代理, 请看 http://docs.python-requests.org/en/latest/user/advanced/#proxies
requests_proxies = dict(
http='http://127.0.0.1:8123',
https='https://127.0.0.1:8123',
)
# ############## Sites Isolation ##############
enable_individual_sites_isolation = True
# 镜像隔离, 用于支持Google和维基共存
isolated_domains = {'zh.wikipedia.org', 'zh.m.wikipedia.org'}
# ############## URL Custom Redirect ##############
url_custom_redirect_enable = True
url_custom_redirect_list = {
# 这是一个方便的设置, 如果你访问 /wiki ,程序会自动重定向到后面这个长长的wiki首页
'/wiki': '/extdomains/https-zh.wikipedia.org/',
# 这是gmail
'/gmail': '/extdomains/mail.google.com/mail/u/0/h/girbaeneuj90/',
}
# ############# Additional Functions #############
# 移除google搜索结果页面的url跳转
# 原理是往页面中插入一下面这段js
# js来自: http://userscripts-mirror.org/scripts/review/117942
custom_inject_content = {
"head_first": [
{
"content": r"""<script>
function checksearch(){
var list = document.getElementById('ires');
if(list){
document.removeEventListener('DOMNodeInserted',checksearch,false);
document.addEventListener('DOMNodeInserted',clear,false)
}
};
function clear(){
var i; var items = document.querySelectorAll('a[onmousedown]');
for(i =0;i<items.length;i++){
items[i].removeAttribute('onmousedown');
}
};
document.addEventListener('DOMNodeInserted',checksearch,false)
</script>""",
"url_regex": r"^www\.google(?:\.[a-z]{2,3}){1,2}",
},
]
}
| mit | 8,131,184,257,439,234,000 | 25.753623 | 103 | 0.639085 | false | 2.228123 | false | false | false |
k1203/meeting | public/app/assets/plugins/vector-map/converter/simplifier.py | 234 | 5985 | import argparse
import sys
import os
from osgeo import ogr
from osgeo import osr
import anyjson
import shapely.geometry
import shapely.ops
import codecs
import time
format = '%.8f %.8f'
tolerance = 0.01
infile = '/Users/kirilllebedev/Maps/50m-admin-0-countries/ne_50m_admin_0_countries.shp'
outfile = 'map.shp'
# Open the datasource to operate on.
in_ds = ogr.Open( infile, update = 0 )
in_layer = in_ds.GetLayer( 0 )
in_defn = in_layer.GetLayerDefn()
# Create output file with similar information.
shp_driver = ogr.GetDriverByName( 'ESRI Shapefile' )
if os.path.exists('map.shp'):
shp_driver.DeleteDataSource( outfile )
shp_ds = shp_driver.CreateDataSource( outfile )
shp_layer = shp_ds.CreateLayer( in_defn.GetName(),
geom_type = in_defn.GetGeomType(),
srs = in_layer.GetSpatialRef() )
in_field_count = in_defn.GetFieldCount()
for fld_index in range(in_field_count):
src_fd = in_defn.GetFieldDefn( fld_index )
fd = ogr.FieldDefn( src_fd.GetName(), src_fd.GetType() )
fd.SetWidth( src_fd.GetWidth() )
fd.SetPrecision( src_fd.GetPrecision() )
shp_layer.CreateField( fd )
# Load geometries
geometries = []
for feature in in_layer:
geometry = feature.GetGeometryRef()
geometryType = geometry.GetGeometryType()
if geometryType == ogr.wkbPolygon or geometryType == ogr.wkbMultiPolygon:
shapelyGeometry = shapely.wkb.loads( geometry.ExportToWkb() )
#if not shapelyGeometry.is_valid:
#buffer to fix selfcrosses
#shapelyGeometry = shapelyGeometry.buffer(0)
if shapelyGeometry:
geometries.append(shapelyGeometry)
in_layer.ResetReading()
start = int(round(time.time() * 1000))
# Simplification
points = []
connections = {}
counter = 0
for geom in geometries:
counter += 1
polygons = []
if isinstance(geom, shapely.geometry.Polygon):
polygons.append(geom)
else:
for polygon in geom:
polygons.append(polygon)
for polygon in polygons:
if polygon.area > 0:
lines = []
lines.append(polygon.exterior)
for line in polygon.interiors:
lines.append(line)
for line in lines:
for i in range(len(line.coords)-1):
indexFrom = i
indexTo = i+1
pointFrom = format % line.coords[indexFrom]
pointTo = format % line.coords[indexTo]
if pointFrom == pointTo:
continue
if not (pointFrom in connections):
connections[pointFrom] = {}
connections[pointFrom][pointTo] = 1
if not (pointTo in connections):
connections[pointTo] = {}
connections[pointTo][pointFrom] = 1
print int(round(time.time() * 1000)) - start
simplifiedLines = {}
pivotPoints = {}
def simplifyRing(ring):
coords = list(ring.coords)[0:-1]
simpleCoords = []
isPivot = False
pointIndex = 0
while not isPivot and pointIndex < len(coords):
pointStr = format % coords[pointIndex]
pointIndex += 1
isPivot = ((len(connections[pointStr]) > 2) or (pointStr in pivotPoints))
pointIndex = pointIndex - 1
if not isPivot:
simpleRing = shapely.geometry.LineString(coords).simplify(tolerance)
if len(simpleRing.coords) <= 2:
return None
else:
pivotPoints[format % coords[0]] = True
pivotPoints[format % coords[-1]] = True
simpleLineKey = format % coords[0]+':'+format % coords[1]+':'+format % coords[-1]
simplifiedLines[simpleLineKey] = simpleRing.coords
return simpleRing
else:
points = coords[pointIndex:len(coords)]
points.extend(coords[0:pointIndex+1])
iFrom = 0
for i in range(1, len(points)):
pointStr = format % points[i]
if ((len(connections[pointStr]) > 2) or (pointStr in pivotPoints)):
line = points[iFrom:i+1]
lineKey = format % line[-1]+':'+format % line[-2]+':'+format % line[0]
if lineKey in simplifiedLines:
simpleLine = simplifiedLines[lineKey]
simpleLine = list(reversed(simpleLine))
else:
simpleLine = shapely.geometry.LineString(line).simplify(tolerance).coords
lineKey = format % line[0]+':'+format % line[1]+':'+format % line[-1]
simplifiedLines[lineKey] = simpleLine
simpleCoords.extend( simpleLine[0:-1] )
iFrom = i
if len(simpleCoords) <= 2:
return None
else:
return shapely.geometry.LineString(simpleCoords)
def simplifyPolygon(polygon):
simpleExtRing = simplifyRing(polygon.exterior)
if simpleExtRing is None:
return None
simpleIntRings = []
for ring in polygon.interiors:
simpleIntRing = simplifyRing(ring)
if simpleIntRing is not None:
simpleIntRings.append(simpleIntRing)
return shapely.geometry.Polygon(simpleExtRing, simpleIntRings)
results = []
for geom in geometries:
polygons = []
simplePolygons = []
if isinstance(geom, shapely.geometry.Polygon):
polygons.append(geom)
else:
for polygon in geom:
polygons.append(polygon)
for polygon in polygons:
simplePolygon = simplifyPolygon(polygon)
if not (simplePolygon is None or simplePolygon._geom is None):
simplePolygons.append(simplePolygon)
if len(simplePolygons) > 0:
results.append(shapely.geometry.MultiPolygon(simplePolygons))
else:
results.append(None)
# Process all features in input layer.
in_feat = in_layer.GetNextFeature()
counter = 0
while in_feat is not None:
if results[counter] is not None:
out_feat = ogr.Feature( feature_def = shp_layer.GetLayerDefn() )
out_feat.SetFrom( in_feat )
out_feat.SetGeometryDirectly(
ogr.CreateGeometryFromWkb(
shapely.wkb.dumps(
results[counter]
)
)
)
shp_layer.CreateFeature( out_feat )
out_feat.Destroy()
else:
print 'geometry is too small: '+in_feat.GetField(16)
in_feat.Destroy()
in_feat = in_layer.GetNextFeature()
counter += 1
# Cleanup
shp_ds.Destroy()
in_ds.Destroy()
print int(round(time.time() * 1000)) - start | mit | 6,680,298,553,978,392,000 | 28.2 | 87 | 0.664495 | false | 3.535145 | false | false | false |
mdtareque/difflet-backend | scripts/similarity.py | 1 | 1449 | #!/usr/bin/python
import nltk
from nltk.corpus import wordnet
""" Similarity between two words
Threshold decided manually, can be tweaked after discussion """
""""
Sample Output:
similarity( sparrow, parrot ) = 0.869565217391 ==> Very similar
similarity( ship, boat ) = 0.909090909091 ==> Very similar
similarity( cat, elephant ) = 0.814814814815 ==> Little similar
similarity( dolphin, ship ) = 0.296296296296 ==> Not similar
similarity( giraffe, tiger ) = 0.521739130435 ==> Not similar
similarity( sheep, ship ) = 0.296296296296 ==> Not similar
similarity( ship, cat ) = 0.32 ==> Not similar
"""
def similarity(a, b):
suf=".n.01"
a, b = a+suf, b+suf
w1 = wordnet.synset(a)
w2 = wordnet.synset(b)
sim = w1.wup_similarity(w2)
#print sim,
output=""
if sim >= 0.85:
output="Very similar"
elif sim >= 0.65:
output="Little similar"
else:
output="Not similar"
print 'similarity({:>15}, {:15}) = {:<15} ==> {} '.format(a[:a.find('.')],b[:b.find('.')], sim, output)
sim = similarity
# very similar
print
sim("sparrow", "parrot")
sim("ship", "boat")
# little similar
print
sim("cat", "elephant")
# not similar
print
sim("dolphin", "ship")
sim("giraffe", "tiger")
sim("sheep", "ship")
sim("ship", "cat")
| apache-2.0 | -8,353,782,072,916,627,000 | 23.559322 | 107 | 0.556936 | false | 3.170678 | false | false | false |
brean/python-pathfinding | test/grid_test.py | 1 | 1538 | # -*- coding: utf-8 -*-
from pathfinding.core.diagonal_movement import DiagonalMovement
from pathfinding.core.grid import Grid
from pathfinding.finder.a_star import AStarFinder
import numpy as np
BORDERLESS_GRID = """
xxx
xxx
"""
BORDER_GRID = """
+---+
| |
| |
+---+
"""
WALKED_GRID = """
+---+
|s# |
|xe |
+---+
"""
SIMPLE_MATRIX = [
[1, 1, 1],
[1, 0, 1],
[1, 1, 1]
]
SIMPLE_WALKED = """
+---+
|sx |
| #x|
| e|
+---+
"""
def test_str():
"""
test printing the grid
"""
grid = Grid(height=2, width=3)
assert grid.grid_str(border=False, empty_chr='x') == BORDERLESS_GRID[1:-1]
assert grid.grid_str(border=True) == BORDER_GRID[1:-1]
grid.nodes[0][1].walkable = False
start = grid.nodes[0][0]
end = grid.nodes[1][1]
path = [(0, 1)]
assert grid.grid_str(path, start, end) == WALKED_GRID[1:-1]
def test_empty():
"""
special test for empty values
"""
matrix = ()
grid = Grid(matrix=matrix)
assert grid.grid_str() == '++\n||\n++'
matrix = np.array(matrix)
grid = Grid(matrix=matrix)
assert grid.grid_str() == '++\n||\n++'
def test_numpy():
"""
test grid from numpy array
"""
matrix = np.array(SIMPLE_MATRIX)
grid = Grid(matrix=matrix)
start = grid.node(0, 0)
end = grid.node(2, 2)
finder = AStarFinder(diagonal_movement=DiagonalMovement.always)
path, runs = finder.find_path(start, end, grid)
assert grid.grid_str(path, start, end) == SIMPLE_WALKED[1:-1]
if __name__ == '__main__':
test_str()
| mit | -8,421,383,694,970,449,000 | 17.094118 | 78 | 0.570221 | false | 2.761221 | true | false | false |
ItsLastDay/academic_university_2016-2018 | subjects/MachineLearning/hw01/knn.py | 1 | 13052 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import KDTree
from sklearn.model_selection import LeaveOneOut
import sys
import abc
import math
import time
import os.path
import random
from multiprocessing import Pool
RESULT_DIR = './exp_result'
class KNN(metaclass=abc.ABCMeta):
@abc.abstractmethod
def classify(self, obj, throw_closest=False):
pass
class KNN_Euclidean(KNN):
def __init__(self, objects, labels, k):
self.objects = objects
self.labels = labels
self.n = objects.shape[0]
self.k = k
def classify(self, obj, throw_closest=False):
obj_repeated = np.array([obj,] * self.n)
obj_repeated = np.repeat(obj, self.n).values.reshape((len(obj), self.n)).T
obj_repeated -= self.objects
norms = np.linalg.norm(obj_repeated, axis=1)
indices = np.argpartition(norms, self.k + throw_closest)
norms_indices = [(norms[i], i) for i in indices[:self.k + throw_closest]]
norms_indices.sort()
match_indices = [p[1] for p in norms_indices[int(throw_closest):]]
votes_for_first_class = sum(self.labels[match_indices])
# The majority class is `0`. I guess that this is `no-spam` class.
if votes_for_first_class * 2 > self.k:
return 1
return 0
class KNN_Euclidean_KD(KNN):
def __init__(self, objects, labels, k):
self.objects_tree = KDTree(objects)
self.labels = labels
self.k = k
def classify(self, obj, throw_closest=False):
neighbours_idx = self.objects_tree.query([obj],
self.k + int(throw_closest),
sort_results=True,
return_distance=False)[0][int(throw_closest):]
votes = self.labels[neighbours_idx]
votes_for_first_class = sum(votes)
if votes_for_first_class * 2 > self.k:
return 1
return 0
class KNN_Radius(KNN):
def __init__(self, objects, labels, radius):
self.objects = objects
self.labels = labels
self.n = objects.shape[0]
self.radius = radius
def classify(self, obj, throw_closest=False):
obj_repeated = np.array([obj,] * self.n)
obj_repeated = np.repeat(obj, self.n).values.reshape((len(obj), self.n)).T
obj_repeated -= self.objects
norms = np.linalg.norm(obj_repeated, axis=1)
mask = norms <= self.radius
if throw_closest:
mask &= norms > 0
votes = self.labels[mask]
votes_for_first_class = sum(votes)
if votes_for_first_class * 2 > len(votes):
return 1
return 0
def classify_one_obj(idx, row, classifier, right_label):
guess = classifier.classify(row, True)
return guess == right_label
def leave_one_out_unparallel(classifier_cls, objects, labels, **kwargs):
classifier = classifier_cls(objects, labels, **kwargs)
num_objects = objects.shape[0]
args = [(i, objects.iloc[i], classifier, labels.iloc[i]) for i in range(num_objects)]
right_guesses = 0
for i in range(num_objects):
obj = objects.iloc[i]
ground_truth = labels.iloc[i]
predicted = classifier.classify(obj, True)
if i % 100 == 0:
print('iteration i=',i)
sys.stdout.flush()
right_guesses += predicted == ground_truth
return right_guesses / float(num_objects)
def experiment_loo_unparallel():
classifier_cls = KNN_Euclidean
print('Non-normalized data:')
objects, labels = read_data()
for k in [1, 3]:
print('k=', k)
loo_val = leave_one_out_unparallel(classifier_cls, objects, labels, k=k)
print(loo_val)
print('Normalized data')
objects, labels = read_normalized_data()
for k in [1, 3]:
print('k=', k)
loo_val = leave_one_out_unparallel(classifier_cls, objects, labels, k=k)
print(loo_val)
def leave_one_out(classifier_cls, objects, labels, **kwargs):
classifier = classifier_cls(objects, labels, **kwargs)
num_objects = objects.shape[0]
args = [(i, objects.iloc[i], classifier, labels.iloc[i]) for i in range(num_objects)]
right_guesses = 0
with Pool() as p:
right_guesses = sum(p.starmap(classify_one_obj, args))
return right_guesses / float(num_objects)
def read_data():
data = pd.read_csv('spambase.csv')
objects, labels = data.iloc[:, :-1], data.iloc[:, -1]
return objects, labels
def read_normalized_data():
objects, labels = read_data()
cols = objects.shape[1]
rows = objects.shape[0]
max_by_col = [0.001 for i in range(cols)]
for i in range(rows):
row = objects.iloc[i]
for j in range(cols):
val = row.iloc[j]
max_by_col[j] = max(max_by_col[j], abs(val))
objects /= np.array(max_by_col)
return objects, labels
def simple_test(objects, labels):
NUM = 330
row = objects.iloc[NUM]
classifier = KNN_Euclidean(objects, labels, 10)
print(classifier.classify(row, True))
print(labels.iloc[NUM])
def generic_euclidean_loo(classifier_cls, objects, labels, filename):
k_values = list(range(1, 10 + 1))
loo_values = []
times = []
for k in k_values:
print('Loo: k = ', k)
time_before = time.time()
loo_val = leave_one_out(classifier_cls, objects, labels, k=k)
print('loo value: ', loo_val)
time_after = time.time()
time_delta = time_after - time_before
times.append(time_delta)
loo_values.append(loo_val)
dct = {'K': k_values, 'LOO': loo_values, 'Time': times}
frame = pd.DataFrame(dct)
frame.to_csv(os.path.join(RESULT_DIR, filename))
def generic_radius_loo(classifier_cls, objects, labels, filename, maximal_radius):
number_of_computations = 0
radiuses = []
loo_vals = []
times = []
def f(radius, saved=dict()):
nonlocal number_of_computations
# Cache already computed results.
key = radius * 10 ** 100
if key not in saved:
time_before = time.time()
saved[key] = leave_one_out(classifier_cls, objects, labels, radius=radius)
time_after = time.time()
times.append(time_after - time_before)
radiuses.append(radius)
loo_vals.append(saved[key])
number_of_computations += 1
return saved[key]
le = 0
rg = maximal_radius
# Assume that LOO has unique maximum.
# Use golden ratio search http://www.essie.ufl.edu/~kgurl/Classes/Lect3421/NM6_optim_s02.pdf
# for lesser number of LOO calculations (which are *painfully* slow).
golden_ratio = (1 + math.sqrt(5)) / 2
NUM_ITER = 10
for it in range(NUM_ITER):
print('Iteration {}/{}:'.format(it + 1, NUM_ITER), 'left', le, 'right', rg)
len_seg = (rg - le)
len_left = len_seg / (1 + golden_ratio)
len_right = len_left * golden_ratio
mid1 = le + len_left
mid2 = le + len_right
loo_mid1 = f(mid1)
loo_mid2 = f(mid2)
print('LOO values:', loo_mid1, loo_mid2)
if loo_mid1 < loo_mid2:
le = mid1
else:
rg = mid2
# Total number of computations is 27 vs 40 in regular ternary search (if we always choose rg = mid2).
print('Total number of LOO computations:', number_of_computations)
dct = {'Radius': radiuses, 'LOO': loo_vals, 'Time': times}
frame = pd.DataFrame(dct)
frame.to_csv(os.path.join(RESULT_DIR, filename))
def experiment_loo_euclidean():
objects, labels = read_data()
generic_euclidean_loo(KNN_Euclidean, objects, labels, 'loo_euclidean.csv')
def experiment_loo_radius():
objects, labels = read_data()
generic_radius_loo(KNN_Radius, objects, labels, 'loo_radius.csv', 100.0)
def experiment_loo_euclidean_normalized():
objects, labels = read_normalized_data()
generic_euclidean_loo(KNN_Euclidean, objects, labels, 'loo_euclidean_normalized.csv')
def experiment_loo_radius_normalized():
objects, labels = read_normalized_data()
generic_radius_loo(KNN_Radius, objects, labels, 'loo_radius_normalized.csv', 1.0)
def classify_one_obj_sklearn (k, objects, labels, train_index, test_index):
x_train, x_test = objects[train_index], objects[test_index]
y_train, y_test = labels[train_index], labels[test_index]
classifier = KNeighborsClassifier(k)
classifier.fit(x_train, y_train)
return classifier.predict(x_test)[0] == y_test[0]
def generic_test_sklearn(objects, labels, filename):
# http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.LeaveOneOut.html
loo = LeaveOneOut()
objects = np.array(objects)
labels = np.array(labels)
k_values = list(range(1, 10 + 1))
loo_values = []
times = []
for k in k_values:
print('Loo: k = ', k)
classifier = KNeighborsClassifier(k, n_jobs=-1)
right_guesses = 0
time_before = time.time()
args = []
for train_index, test_index in loo.split(objects):
args.append((k, objects, labels, train_index, test_index))
print('Computed args')
right_guesses = 0
with Pool() as p:
right_guesses = sum(p.starmap(classify_one_obj_sklearn, args))
loo_val = right_guesses / float(labels.shape[0])
print('Loo value: ', loo_val)
time_after = time.time()
times.append(time_after - time_before)
loo_values.append(loo_val)
dct = {'K': k_values, 'LOO': loo_values, 'Time': times}
frame = pd.DataFrame(dct)
frame.to_csv(os.path.join(RESULT_DIR, filename))
def experiment_loo_sklearn():
objects, labels = read_data()
generic_test_sklearn(objects, labels, 'loo_euclidean_sklearn.csv')
objects, labels = read_normalized_data()
generic_test_sklearn(objects, labels, 'loo_euclidean_sklearn_normalized.csv')
def experiment_time_kdtree():
objects, labels = read_data()
generic_euclidean_loo(KNN_Euclidean_KD, objects, labels, 'loo_euclidean_kdtree.csv')
objects, labels = read_normalized_data()
generic_euclidean_loo(KNN_Euclidean_KD, objects, labels, 'loo_euclidean_kdtree_normalized.csv')
def write_result_euclidean():
fname_prefixes = ['loo_euclidean', 'loo_euclidean_kdtree', 'loo_euclidean_sklearn']
for suffix in ['.csv', '_normalized.csv']:
fnames = [os.path.join(RESULT_DIR, fname_prefix + suffix) for fname_prefix in fname_prefixes]
csv_files = [pd.read_csv(fname) for fname in fnames]
loos = [csv['LOO'] for csv in csv_files]
loos = np.matrix(loos).T
df = pd.DataFrame(loos, index=csv_files[0]['K'],
columns=['my_knn', 'kdtree_knn', 'sklearn_knn'])
df.plot()
plt.title('KNN on {} data'.format('non-normalized' if suffix == '.csv' else 'normalized'))
plt.xlabel('K')
plt.ylabel('LOO')
#plt.ylim([0.5, 1.0])
plt.savefig(os.path.join(RESULT_DIR, 'knn_euclidean{}.png'.format('' if suffix == '.csv' else '_normalized')))
for suffix in ['.csv']:
fnames = [os.path.join(RESULT_DIR, fname_prefix + suffix) for fname_prefix in fname_prefixes]
csv_files = [pd.read_csv(fname) for fname in fnames]
loos = [csv['Time'][:-1] for csv in csv_files]
loos = np.matrix(loos).T
df = pd.DataFrame(loos, index=csv_files[0]['K'][:-1],
columns=['my_knn', 'kdtree_knn', 'sklearn_knn'])
df.plot()
plt.title('Time of one LOO iteration')
plt.xlabel('K')
plt.ylabel('Time (seconds)')
plt.savefig(os.path.join(RESULT_DIR, 'knn_time.png'))
def write_result_radius():
fname_prefixes = ['loo_radius']
for suffix in ['.csv', '_normalized.csv']:
fnames = [os.path.join(RESULT_DIR, fname_prefix + suffix) for fname_prefix in fname_prefixes]
csv_files = [pd.read_csv(fname) for fname in fnames]
frame = csv_files[0]
frame.sort_values('Radius', inplace=True)
loos = [frame['LOO']]
loos = np.matrix(loos).T
df = pd.DataFrame(loos, index=frame['Radius'], columns=['my_knn'])
df.plot()
plt.title('KNN on {} data'.format('non-normalized' if suffix == '.csv' else 'normalized'))
plt.xlabel('Radius')
plt.ylabel('LOO')
plt.savefig(os.path.join(RESULT_DIR, 'knn_radius{}.png'.format('' if suffix == '.csv' else '_normalized')))
def compute_graphic_results():
write_result_euclidean()
write_result_radius()
def main():
#experiment_loo_unparallel()
#experiment_loo_euclidean()
#experiment_loo_radius()
#experiment_loo_euclidean_normalized()
#experiment_loo_radius_normalized()
#experiment_loo_sklearn()
#experiment_time_kdtree()
compute_graphic_results()
if __name__ == '__main__':
main()
| gpl-3.0 | 8,926,484,933,648,611,000 | 28.731207 | 118 | 0.613316 | false | 3.350963 | true | false | false |
cneill/barbican | functionaltests/api/v1/models/order_models.py | 2 | 1661 | """
Copyright 2014-2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from functionaltests.api.v1.models.base_models import BaseModel
class OrderModel(BaseModel):
def __init__(self, type=None, name=None, status=None, secret_ref=None,
expiration=None, updated=None, created=None, meta=None,
payload_content_type=None, order_ref=None, container_ref=None,
error_status_code=None, error_reason=None,
sub_status=None, sub_status_message=None, creator_id=None):
super(OrderModel, self).__init__()
self.type = type
self.name = name
self.status = status
self.sub_status = sub_status
self.sub_status_message = sub_status_message
self.secret_ref = secret_ref
self.expiration = expiration
self.updated = updated
self.created = created
self.meta = meta
self.payload_content_type = payload_content_type
self.order_ref = order_ref
self.container_ref = container_ref
self.error_status_code = error_status_code
self.error_reason = error_reason
self.creator_id = creator_id
| apache-2.0 | 7,290,632,580,848,813,000 | 38.547619 | 79 | 0.680313 | false | 4.081081 | false | false | false |
Worldev/UsefulShell | modules/workurl/workurl.py | 1 | 1096 | import webbrowser
import os
def work_web():
""" Opens a website and saves it to tmp/url_work.txt if wanted """
if lang == 'ca':
listdir = os.listdir('tmp')
if 'url_work.txt' in listdir:
work_url = open('tmp/url_work.txt', 'r').read()
else:
work_url = input('Url (sense http://): ')
save = input('Guardar? ')
save = save.lower()
else:
listdir = os.listdir('tmp')
if 'url_work.txt' in listdir:
work_url = open('tmp/url_work.txt', 'r').read()
else:
work_url = input('Url (without http://): ')
save = input('Save? ')
save = save.lower()
try:
if save == "yes" or save == "sí" or save == "y" or save == "s":
document = open('tmp/url_work.txt', 'w')
document.write(work_url)
document.close()
webbrowser.open('http://' + work_url)
else:
webbrowser.open('http://' + work_url) #work link
except NameError:
webbrowser.open('http://' + work_url) #work link
| gpl-3.0 | 5,171,995,902,833,589,000 | 34.322581 | 71 | 0.49863 | false | 3.625828 | false | false | false |
tanyaweaver/data-structures | src/graph.py | 1 | 5618 | #!/usr/bin/env python
# -*- coding: utf -8 -*-
from __future__ import unicode_literals, division
class Graph(object):
"""Defining class Graph."""
def __init__(self, iterable=None):
"""Initiate an instance of class Graph."""
self._dict = {}
try:
for item in iterable:
try:
self._dict.setdefault(item, [])
except TypeError as e:
e.args = (
'Node must be an immutable data'
' type (string, integer, tuple, etc.)',
)
raise
except TypeError:
if iterable is not None:
self._dict.setdefault(iterable, [])
def add_node(self, n):
"""Add a node to graph."""
try:
self._dict.setdefault(n, [])
except TypeError as e:
e.args = (
'Node must be an immutable data'
' type (string, integer, tuple, etc.)',
)
raise
def add_edge(self, n1, n2, weight):
"""Add a edge from n1 to n2."""
new_node = self._dict.setdefault(n1, [])
self._dict.setdefault(n2, [])
for tup in new_node:
if n2 == tup[0]:
raise ValueError('This edge already exists.')
else:
new_node.append((n2, weight))
def nodes(self):
"""Show all nodes."""
return self._dict.keys()
def edges(self):
"""Show all edges."""
list_key_value = self._dict.items()
list_edges = []
for pair in list_key_value:
for tup in pair[1]:
list_edges.append((pair[0], tup[0], tup[1]),)
return list_edges
def del_node(self, n):
"""Delete a node from graph."""
if n in self._dict:
del self._dict[n]
for key in self._dict:
for tup in self._dict[key]:
if n in tup:
self._dict[key].remove(tup)
else:
raise KeyError('No such node in the graph.')
def del_edge(self, n1, n2):
"""Delete a edge from n1 to n2."""
try:
for tup in self._dict[n1]:
if n2 in tup:
self._dict[n1].remove(tup)
except KeyError as e:
e.args = ('No such edge exists',)
raise
def has_node(self, n):
"""Check if n is a node of graph."""
if n in self._dict.keys():
return True
else:
return False
def neighbors(self, n):
"""Return a list of nodes that have edge connect to n."""
try:
return [x[0] for x in self._dict[n]]
except KeyError as e:
e.agrs = ('Node not in the graph',)
raise
def adjacent(self, n1, n2):
"""Check if 2 nodes has connection."""
try:
n1_neighbors = [x[0] for x in self._dict[n1]]
n2_neighbors = [x[0] for x in self._dict[n2]]
return n2 in n1_neighbors or n1 in n2_neighbors
except KeyError as e:
e.agrs = ('Node not in the graph',)
raise
def depth_first_traversal(self, start):
"""
Perform a full depth-traversal of the graph beggining at start.
Return full visited path when traversal is complete.
Raise a ValueError, if the graph is empty.
"""
if self._dict == {}:
raise ValueError("Can't traverse an empty graph.")
path_list = [start]
visited_list = [start]
current_node = start
while current_node:
for n in self.neighbors(current_node):
if n not in path_list:
path_list.append(n)
visited_list.append(n)
current_node = n
break
else:
try:
visited_list.pop()
current_node = visited_list[-1]
except IndexError:
break
return path_list
def breadth_first_traversal(self, start):
"""
Perform a full breadth-traversal of the graph beggining at start.
Return full visited path when traversal is complete.
Raise a ValueError, if the graph is empty.
"""
if self._dict == {}:
raise ValueError("Can't traverse an empty graph.")
path_list = [start]
pending_list = []
current_node = start
while current_node:
for n in self.neighbors(current_node):
if n not in path_list:
path_list.append(n)
pending_list.append(n)
try:
current_node = pending_list.pop(0)
except IndexError:
break
return path_list
if __name__ == '__main__':
iterable = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
gr = Graph(iterable)
edges = [
(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8),
(8, 9), (9, 10), (1, 3), (1, 4), (1, 5), (1, 7), (1, 8), (2, 5),
(2, 6), (2, 7), (2, 8), (2, 9), (3, 5), (3, 7), (3, 8), (3, 9)
]
for edge in edges:
gr.add_edge(edge[0], edge[1])
breadth = gr.breadth_first_traversal(1)
depth = gr.depth_first_traversal(1)
print(
"For a graph with nodes {} \n and edges\n {}\n"
"the results are:\n depth traversal: {},\n breadth traversal: {}."
.format(iterable, edges, depth, breadth)
)
| mit | -6,529,629,342,918,017,000 | 31.853801 | 78 | 0.473656 | false | 3.995733 | false | false | false |
guillermooo-forks/Vintageous | vi/dot_file.py | 9 | 1610 | from Vintageous import PluginLogger
import sublime
import os
_logger = PluginLogger(__name__)
class DotFile(object):
def __init__(self, path):
self.path = path
@staticmethod
def from_user():
path = os.path.join(sublime.packages_path(), 'User', '.vintageousrc')
return DotFile(path)
def run(self):
try:
with open(self.path, 'r') as f:
for line in f:
cmd, args = self.parse(line)
if cmd:
_logger.info('[DotFile] running: {0} {1}'.format(cmd, args))
sublime.active_window().run_command(cmd, args)
except FileNotFoundError:
pass
def parse(self, line):
try:
_logger.info('[DotFile] parsing line: {0}'.format(line))
if line.startswith((':map ')):
line = line[1:]
return ('ex_map', {'command_line': line.rstrip()})
if line.startswith((':omap ')):
line = line[len(':omap '):]
return ('ex_omap', {'cmd': line.rstrip()})
if line.startswith((':vmap ')):
line = line[len(':vmap '):]
return ('ex_vmap', {'cmd': line.rstrip()})
if line.startswith((':let ')):
line = line[1:]
return ('ex_let', {'command_line': line.strip()})
except Exception:
print('Vintageous: bad config in dotfile: "%s"' % line.rstrip())
_logger.debug('bad config inf dotfile: "%s"', line.rstrip())
return None, None
| mit | -6,728,869,773,391,696,000 | 29.377358 | 84 | 0.490683 | false | 3.995037 | false | false | false |
Princeton-CDH/winthrop-django | winthrop/common/tests.py | 1 | 2693 | from django.test import TestCase
from django.core.exceptions import ValidationError
import pytest
from .models import Named, Notable, DateRange
class TestNamed(TestCase):
def test_str(self):
named_obj = Named(name='foo')
assert str(named_obj) == 'foo'
class TestNotable(TestCase):
def test_has_notes(self):
noted = Notable()
assert False == noted.has_notes()
noted.notes = 'some text'
assert True == noted.has_notes()
noted.notes = ''
assert False == noted.has_notes()
noted.notes = None
assert False == noted.has_notes()
class TestDateRange(TestCase):
def test_dates(self):
span = DateRange()
# no dates set
assert '' == span.dates
# date range with start and end
span.start_year = 1900
span.end_year = 1901
assert '1900-1901' == span.dates
# start and end dates are same year = single year
span.end_year = span.start_year
assert span.start_year == span.dates
# start date but no end
span.end_year = None
assert '1900-' == span.dates
# end date but no start
span.end_year = 1950
span.start_year = None
assert '-1950' == span.dates
def test_clean_fields(self):
with pytest.raises(ValidationError):
DateRange(start_year=1901, end_year=1900).clean_fields()
# should not raise exception
# - same year is ok (single year range)
DateRange(start_year=1901, end_year=1901).clean_fields()
# - end after start
DateRange(start_year=1901, end_year=1905).clean_fields()
# - only one date set
DateRange(start_year=1901).clean_fields()
DateRange(end_year=1901).clean_fields()
# exclude set
DateRange(start_year=1901, end_year=1900).clean_fields(exclude=['start_year'])
DateRange(start_year=1901, end_year=1900).clean_fields(exclude=['end_year'])
class TestRobotsTxt(TestCase):
'''Test for default robots.txt inclusion'''
def test_robots_txt(self):
res = self.client.get('/robots.txt')
# successfully gets robots.txt
assert res.status_code == 200
# is text/plain
assert res['Content-Type'] == 'text/plain'
# uses robots.txt template
assert 'robots.txt' in [template.name for template in res.templates]
with self.settings(DEBUG=False):
res = self.client.get('/robots.txt')
self.assertContains(res, 'Disallow: /admin')
with self.settings(DEBUG=True):
res = self.client.get('/robots.txt')
self.assertContains(res, 'Disallow: /')
| apache-2.0 | 261,411,447,796,503,740 | 32.246914 | 86 | 0.608986 | false | 3.766434 | true | false | false |
NoahCristino/montyhall | montyhall.py | 1 | 2651 | from random import randint
def montyhall(firstdoor="duck", switchornot="duck"):
"""
This function can be called with no parameters, so you can play the game, or with params to simulate the games
"""
sim = False
if firstdoor == "duck" or switchornot == "duck":
sim = True
doors = ["", "", ""]
cardoor = randint(0,2)
doors[cardoor] = "car"
for idx, door in enumerate(doors):
if door != "car":
doors[idx] = "goat"
if sim:
print("You're on a game show, and you're given the choice of three doors: Behind one door is a car; behind the others, goats.")
print("D D D")
print("1 2 3")
firstpick = 999
if sim:
firstpick = int(input("Pick a door: ")) - 1
else:
firstpick = int(firstdoor) - 1
if sim:
print("I will now open a door!")
others = doors
st = others[firstpick]
others[firstpick] = "none"
notfound = True
reveal = 0
while notfound:
r = randint(0,2)
if others[r] == "goat":
reveal = r
notfound = False
if sim:
print("Door "+str(reveal+1)+" is a goat!")
newprint = doors
pstr = ""
for idx, np in enumerate(newprint):
if idx != reveal:
if pstr == "":
pstr = "D"
else:
pstr = pstr + " D"
else:
if pstr == "":
pstr = "G"
else:
pstr = pstr + " G"
if sim:
print(pstr)
if sim:
switch = input("You selected door "+str(firstpick+1)+" would you like to change? (y/n): ")
else:
switch = switchornot
newpick = "no"
if switch == "y":
ndoors = doors
pdoo = ""
for idx, nd in enumerate(ndoors):
if idx != reveal and idx != firstpick:
pdoo = str(idx + 1)
newpick = doors[idx]
if sim:
print("You have switched to door "+pdoo)
print(newpick)
else:
return newpick
else:
if sim:
print("You have stayed with door " + str(firstpick+1))
print(st)
else:
return st
"""
This code calls the sim function tons of times and displays the results
"""
#Settings
switchfail = 0
switchwin = 0
switchtimes = 10000
stayfail = 0
staywin = 0
staytimes = 10000
#Don't touch code below unless you know what you are doing.
for i in range(switchtimes):
mh = montyhall(randint(1,3), "y")
if mh == "car":
switchwin = switchwin + 1
else:
switchfail = switchfail + 1
for i in range(staytimes):
mh2 = montyhall(randint(1,3), "n")
if mh2 == "car":
staywin = staywin + 1
else:
stayfail = stayfail + 1
print("== MONTY HALL ==")
print("Switching "+str(switchtimes)+" times")
print("Staying "+str(staytimes)+" times")
print("=== RESULTS ===")
print("Switch: "+str((switchwin/switchtimes) * 100)+"% win, "+str((switchfail/switchtimes) * 100)+"% lose")
print("Stay: "+str((staywin/staytimes) * 100)+"% win, "+str((stayfail/staytimes) * 100)+"% lose")
| mit | 3,311,140,192,704,882,000 | 24.490385 | 129 | 0.632214 | false | 2.653654 | false | false | false |
pandada8/mirrord | setup.py | 1 | 1389 | from setuptools import setup, find_packages
from codecs import open
from os import path
__version__ = '0.0.1'
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# get the dependencies and installs
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
all_reqs = f.read().split('\n')
install_requires = [x.strip() for x in all_reqs if 'git+' not in x]
dependency_links = [x.strip().replace('git+', '') for x in all_reqs if 'git+' not in x]
setup(
name='mirrord',
version=__version__,
description='yet another mirror daemon',
long_description=long_description,
url='https://github.com/pandada8/mirrord',
download_url='https://github.com/pandada8/mirrord/tarball/' + __version__,
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
],
keywords='',
entry_points={
'console_scripts': [
'mirrord = mirrord.daemon:run'
]
},
packages=find_packages(exclude=['docs', 'tests*']),
include_package_data=True,
author='Pandada8',
install_requires=install_requires,
dependency_links=dependency_links,
author_email='[email protected]'
)
| mit | -1,364,647,403,577,741,800 | 29.866667 | 87 | 0.648668 | false | 3.516456 | false | false | false |
alexgorban/models | research/autoaugment/wrn.py | 5 | 5415 | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builds the Wide-ResNet Model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import custom_ops as ops
import numpy as np
import tensorflow as tf
def residual_block(
x, in_filter, out_filter, stride, activate_before_residual=False):
"""Adds residual connection to `x` in addition to applying BN->ReLU->3x3 Conv.
Args:
x: Tensor that is the output of the previous layer in the model.
in_filter: Number of filters `x` has.
out_filter: Number of filters that the output of this layer will have.
stride: Integer that specified what stride should be applied to `x`.
activate_before_residual: Boolean on whether a BN->ReLU should be applied
to x before the convolution is applied.
Returns:
A Tensor that is the result of applying two sequences of BN->ReLU->3x3 Conv
and then adding that Tensor to `x`.
"""
if activate_before_residual: # Pass up RELU and BN activation for resnet
with tf.variable_scope('shared_activation'):
x = ops.batch_norm(x, scope='init_bn')
x = tf.nn.relu(x)
orig_x = x
else:
orig_x = x
block_x = x
if not activate_before_residual:
with tf.variable_scope('residual_only_activation'):
block_x = ops.batch_norm(block_x, scope='init_bn')
block_x = tf.nn.relu(block_x)
with tf.variable_scope('sub1'):
block_x = ops.conv2d(
block_x, out_filter, 3, stride=stride, scope='conv1')
with tf.variable_scope('sub2'):
block_x = ops.batch_norm(block_x, scope='bn2')
block_x = tf.nn.relu(block_x)
block_x = ops.conv2d(
block_x, out_filter, 3, stride=1, scope='conv2')
with tf.variable_scope(
'sub_add'): # If number of filters do not agree then zero pad them
if in_filter != out_filter:
orig_x = ops.avg_pool(orig_x, stride, stride)
orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
x = orig_x + block_x
return x
def _res_add(in_filter, out_filter, stride, x, orig_x):
"""Adds `x` with `orig_x`, both of which are layers in the model.
Args:
in_filter: Number of filters in `orig_x`.
out_filter: Number of filters in `x`.
stride: Integer specifying the stide that should be applied `orig_x`.
x: Tensor that is the output of the previous layer.
orig_x: Tensor that is the output of an earlier layer in the network.
Returns:
A Tensor that is the result of `x` and `orig_x` being added after
zero padding and striding are applied to `orig_x` to get the shapes
to match.
"""
if in_filter != out_filter:
orig_x = ops.avg_pool(orig_x, stride, stride)
orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
x = x + orig_x
orig_x = x
return x, orig_x
def build_wrn_model(images, num_classes, wrn_size):
"""Builds the WRN model.
Build the Wide ResNet model from https://arxiv.org/abs/1605.07146.
Args:
images: Tensor of images that will be fed into the Wide ResNet Model.
num_classes: Number of classed that the model needs to predict.
wrn_size: Parameter that scales the number of filters in the Wide ResNet
model.
Returns:
The logits of the Wide ResNet model.
"""
kernel_size = wrn_size
filter_size = 3
num_blocks_per_resnet = 4
filters = [
min(kernel_size, 16), kernel_size, kernel_size * 2, kernel_size * 4
]
strides = [1, 2, 2] # stride for each resblock
# Run the first conv
with tf.variable_scope('init'):
x = images
output_filters = filters[0]
x = ops.conv2d(x, output_filters, filter_size, scope='init_conv')
first_x = x # Res from the beginning
orig_x = x # Res from previous block
for block_num in range(1, 4):
with tf.variable_scope('unit_{}_0'.format(block_num)):
activate_before_residual = True if block_num == 1 else False
x = residual_block(
x,
filters[block_num - 1],
filters[block_num],
strides[block_num - 1],
activate_before_residual=activate_before_residual)
for i in range(1, num_blocks_per_resnet):
with tf.variable_scope('unit_{}_{}'.format(block_num, i)):
x = residual_block(
x,
filters[block_num],
filters[block_num],
1,
activate_before_residual=False)
x, orig_x = _res_add(filters[block_num - 1], filters[block_num],
strides[block_num - 1], x, orig_x)
final_stride_val = np.prod(strides)
x, _ = _res_add(filters[0], filters[3], final_stride_val, x, first_x)
with tf.variable_scope('unit_last'):
x = ops.batch_norm(x, scope='final_bn')
x = tf.nn.relu(x)
x = ops.global_avg_pool(x)
logits = ops.fc(x, num_classes)
return logits
| apache-2.0 | 2,077,167,805,379,991,600 | 33.272152 | 80 | 0.648569 | false | 3.451243 | false | false | false |
fusionbox/django-two-factor-auth | two_factor/forms.py | 4 | 5418 | from binascii import unhexlify
from time import time
from django import forms
from django.forms import ModelForm, Form
from django.utils.translation import ugettext_lazy as _
from django_otp.forms import OTPAuthenticationFormMixin
from django_otp.oath import totp
from django_otp.plugins.otp_totp.models import TOTPDevice
from two_factor.utils import totp_digits
try:
from otp_yubikey.models import RemoteYubikeyDevice, YubikeyDevice
except ImportError:
RemoteYubikeyDevice = YubikeyDevice = None
from .models import (PhoneDevice, get_available_phone_methods,
get_available_methods)
class MethodForm(forms.Form):
method = forms.ChoiceField(label=_("Method"),
initial='generator',
widget=forms.RadioSelect)
def __init__(self, **kwargs):
super(MethodForm, self).__init__(**kwargs)
self.fields['method'].choices = get_available_methods()
class PhoneNumberMethodForm(ModelForm):
method = forms.ChoiceField(widget=forms.RadioSelect, label=_('Method'))
class Meta:
model = PhoneDevice
fields = 'number', 'method',
def __init__(self, **kwargs):
super(PhoneNumberMethodForm, self).__init__(**kwargs)
self.fields['method'].choices = get_available_phone_methods()
class PhoneNumberForm(ModelForm):
class Meta:
model = PhoneDevice
fields = 'number',
class DeviceValidationForm(forms.Form):
token = forms.IntegerField(label=_("Token"), min_value=1, max_value=int('9' * totp_digits()))
error_messages = {
'invalid_token': _('Entered token is not valid.'),
}
def __init__(self, device, **args):
super(DeviceValidationForm, self).__init__(**args)
self.device = device
def clean_token(self):
token = self.cleaned_data['token']
if not self.device.verify_token(token):
raise forms.ValidationError(self.error_messages['invalid_token'])
return token
class YubiKeyDeviceForm(DeviceValidationForm):
token = forms.CharField(label=_("YubiKey"))
error_messages = {
'invalid_token': _("The YubiKey could not be verified."),
}
def clean_token(self):
self.device.public_id = self.cleaned_data['token'][:-32]
return super(YubiKeyDeviceForm, self).clean_token()
class TOTPDeviceForm(forms.Form):
token = forms.IntegerField(label=_("Token"), min_value=0, max_value=int('9' * totp_digits()))
error_messages = {
'invalid_token': _('Entered token is not valid.'),
}
def __init__(self, key, user, metadata=None, **kwargs):
super(TOTPDeviceForm, self).__init__(**kwargs)
self.key = key
self.tolerance = 1
self.t0 = 0
self.step = 30
self.drift = 0
self.digits = totp_digits()
self.user = user
self.metadata = metadata or {}
@property
def bin_key(self):
"""
The secret key as a binary string.
"""
return unhexlify(self.key.encode())
def clean_token(self):
token = self.cleaned_data.get('token')
validated = False
t0s = [self.t0]
key = self.bin_key
if 'valid_t0' in self.metadata:
t0s.append(int(time()) - self.metadata['valid_t0'])
for t0 in t0s:
for offset in range(-self.tolerance, self.tolerance):
if totp(key, self.step, t0, self.digits, self.drift + offset) == token:
self.drift = offset
self.metadata['valid_t0'] = int(time()) - t0
validated = True
if not validated:
raise forms.ValidationError(self.error_messages['invalid_token'])
return token
def save(self):
return TOTPDevice.objects.create(user=self.user, key=self.key,
tolerance=self.tolerance, t0=self.t0,
step=self.step, drift=self.drift,
digits=self.digits,
name='default')
class DisableForm(forms.Form):
understand = forms.BooleanField(label=_("Yes, I am sure"))
class AuthenticationTokenForm(OTPAuthenticationFormMixin, Form):
otp_token = forms.IntegerField(label=_("Token"), min_value=1,
max_value=int('9' * totp_digits()))
def __init__(self, user, initial_device, **kwargs):
"""
`initial_device` is either the user's default device, or the backup
device when the user chooses to enter a backup token. The token will
be verified against all devices, it is not limited to the given
device.
"""
super(AuthenticationTokenForm, self).__init__(**kwargs)
self.user = user
# YubiKey generates a OTP of 44 characters (not digits). So if the
# user's primary device is a YubiKey, replace the otp_token
# IntegerField with a CharField.
if RemoteYubikeyDevice and YubikeyDevice and \
isinstance(initial_device, (RemoteYubikeyDevice, YubikeyDevice)):
self.fields['otp_token'] = forms.CharField(label=_('YubiKey'))
def clean(self):
self.clean_otp(self.user)
return self.cleaned_data
class BackupTokenForm(AuthenticationTokenForm):
otp_token = forms.CharField(label=_("Token"))
| mit | 8,383,093,971,943,688,000 | 32.652174 | 97 | 0.608712 | false | 3.989691 | false | false | false |
skirpichev/omg | diofant/polys/galoistools.py | 1 | 18750 | """Dense univariate polynomials with coefficients in Galois fields."""
import math
import random
from ..ntheory import factorint
from .densearith import (dmp_add, dmp_add_term, dmp_mul, dmp_quo, dmp_rem,
dmp_sqr, dmp_sub)
from .densebasic import dmp_degree_in, dmp_normal, dmp_one_p
from .densetools import dmp_ground_monic
from .euclidtools import dmp_gcd
from .polyconfig import query
from .polyutils import _sort_factors
def dup_gf_pow_mod(f, n, g, K):
"""
Compute ``f**n`` in ``GF(q)[x]/(g)`` using repeated squaring.
Given polynomials ``f`` and ``g`` in ``GF(q)[x]`` and a non-negative
integer ``n``, efficiently computes ``f**n (mod g)`` i.e. the remainder
of ``f**n`` from division by ``g``, using the repeated squaring algorithm.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> g = R.to_dense(x + 1)
>>> dup_gf_pow_mod(f, 3, g, R.domain)
[]
References
==========
* :cite:`Gathen1999modern`, algorithm 4.8
"""
if not n:
return [K.one]
elif n == 1:
return dmp_rem(f, g, 0, K)
elif n == 2:
return dmp_rem(dmp_sqr(f, 0, K), g, 0, K)
h = [K.one]
while True:
if n & 1:
h = dmp_mul(h, f, 0, K)
h = dmp_rem(h, g, 0, K)
n -= 1
n >>= 1
if not n:
break
f = dmp_sqr(f, 0, K)
f = dmp_rem(f, g, 0, K)
return h
def dup_gf_compose_mod(g, h, f, K):
"""
Compute polynomial composition ``g(h)`` in ``GF(q)[x]/(f)``.
Examples
========
>>> R, x = ring('x', FF(5))
>>> g = R.to_dense(3*x**2 + 2*x + 4)
>>> h = R.to_dense(2*x**2 + 2*x + 2)
>>> f = R.to_dense(4*x + 3)
>>> dup_gf_compose_mod(g, h, f, R.domain)
[4 mod 5]
"""
if not g:
return []
comp = [g[0]]
for a in g[1:]:
comp = dmp_mul(comp, h, 0, K)
comp = dmp_add_term(comp, a, 0, 0, K)
comp = dmp_rem(comp, f, 0, K)
return comp
def dup_gf_trace_map(a, b, c, n, f, K):
"""
Compute polynomial trace map in ``GF(q)[x]/(f)``.
Given a polynomial ``f`` in ``GF(q)[x]``, polynomials ``a``, ``b``,
``c`` in the quotient ring ``GF(q)[x]/(f)`` such that ``b = c**t
(mod f)`` for some positive power ``t`` of ``q``, and a positive
integer ``n``, returns a mapping::
a -> a**t**n, a + a**t + a**t**2 + ... + a**t**n (mod f)
In factorization context, ``b = x**q mod f`` and ``c = x mod f``.
This way we can efficiently compute trace polynomials in equal
degree factorization routine, much faster than with other methods,
like iterated Frobenius algorithm, for large degrees.
Examples
========
>>> R, x = ring('x', FF(5))
>>> a = R.to_dense(x + 2)
>>> b = R.to_dense(4*x + 4)
>>> c = R.to_dense(x + 1)
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> dup_gf_trace_map(a, b, c, 4, f, R.domain)
([1 mod 5, 3 mod 5], [1 mod 5, 3 mod 5])
References
==========
* :cite:`Gathen1992ComputingFM`, algorithm 5.2
"""
u = dup_gf_compose_mod(a, b, f, K)
v = b
if n & 1:
U = dmp_add(a, u, 0, K)
V = b
else:
U = a
V = c
n >>= 1
while n:
u = dmp_add(u, dup_gf_compose_mod(u, v, f, K), 0, K)
v = dup_gf_compose_mod(v, v, f, K)
if n & 1:
U = dmp_add(U, dup_gf_compose_mod(u, V, f, K), 0, K)
V = dup_gf_compose_mod(v, V, f, K)
n >>= 1
return dup_gf_compose_mod(a, V, f, K), U
def dup_gf_random(n, K):
"""
Generate a random polynomial in ``GF(q)[x]`` of degree ``n``.
Examples
========
>>> dup_gf_random(4, FF(5)) # doctest: +SKIP
[1 mod 5, 4 mod 5, 4 mod 5, 2 mod 5, 1 mod 5]
"""
return [K.one] + [K(random.randint(0, K.order - 1)) for i in range(n)]
def dup_gf_irreducible(n, K):
"""
Generate random irreducible polynomial of degree ``n`` in ``GF(q)[x]``.
Examples
========
>>> dup_gf_irreducible(4, FF(5)) # doctest: +SKIP
[1 mod 5, 2 mod 5, 4 mod 5, 4 mod 5, 3 mod 5]
>>> dup_gf_irreducible_p(_, FF(5))
True
"""
while True:
f = dup_gf_random(n, K)
if dup_gf_irreducible_p(f, K):
return f
def dup_gf_irred_p_ben_or(f, K):
"""
Ben-Or's polynomial irreducibility test over finite fields.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**10 + 4*x**9 + 2*x**8 + 2*x**7 + 3*x**6 +
... 2*x**5 + 4*x**4 + x**3 + 4*x**2 + 4)
>>> dup_gf_irred_p_ben_or(f, R.domain)
True
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> dup_gf_irred_p_ben_or(f, R.domain)
False
References
==========
* :cite:`Ben-Or1981ff`
"""
n, q = dmp_degree_in(f, 0, 0), K.order
if n <= 1:
return True
x = [K.one, K.zero]
f = dmp_ground_monic(f, 0, K)
H = h = dup_gf_pow_mod(x, q, f, K)
for i in range(n//2):
g = dmp_sub(h, x, 0, K)
if dmp_one_p(dmp_gcd(f, g, 0, K), 0, K):
h = dup_gf_compose_mod(h, H, f, K)
else:
return False
return True
def dup_gf_irred_p_rabin(f, K):
"""
Rabin's polynomial irreducibility test over finite fields.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**10 + 4*x**9 + 2*x**8 + 2*x**7 + 3*x**6 +
... 2*x**5 + 4*x**4 + x**3 + 4*x**2 + 4)
>>> dup_gf_irred_p_rabin(f, R.domain)
True
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> dup_gf_irred_p_rabin(f, R.domain)
False
References
==========
* :cite:`Gathen1999modern`, algorithm 14.36
"""
n, q = dmp_degree_in(f, 0, 0), K.order
if n <= 1:
return True
x = [K.one, K.zero]
f = dmp_ground_monic(f, 0, K)
indices = {n//d for d in factorint(n)}
H = h = dup_gf_pow_mod(x, q, f, K)
for i in range(1, n):
if i in indices:
g = dmp_sub(h, x, 0, K)
if not dmp_one_p(dmp_gcd(f, g, 0, K), 0, K):
return False
h = dup_gf_compose_mod(h, H, f, K)
return h == x
_irred_methods = {
'ben-or': dup_gf_irred_p_ben_or,
'rabin': dup_gf_irred_p_rabin,
}
def dup_gf_irreducible_p(f, K):
"""
Test irreducibility of a polynomial ``f`` in ``GF(q)[x]``.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**10 + 4*x**9 + 2*x**8 + 2*x**7 + 3*x**6 +
... 2*x**5 + 4*x**4 + x**3 + 4*x**2 + 4)
>>> dup_gf_irreducible_p(f, R.domain)
True
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> dup_gf_irreducible_p(f, R.domain)
False
"""
method = query('GF_IRRED_METHOD')
return _irred_methods[method](f, K)
def dup_gf_primitive_p(f, K):
"""Test if ``f`` is a primitive polynomial over ``GF(p)``."""
p = K.characteristic
assert K.order == p
if not dup_gf_irreducible_p(f, K):
return False
n = dmp_degree_in(f, 0, 0)
t = [K.one] + [K.zero]*n
for m in range(n, p**n - 1):
r = dmp_rem(t, f, 0, K)
if r == [K.one]:
return False
t = dmp_mul(r, [K.one, K.zero], 0, K)
return True
def dup_gf_Qmatrix(f, K):
"""
Calculate Berlekamp's ``Q`` matrix.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(3*x**2 + 2*x + 4)
>>> dup_gf_Qmatrix(f, R.domain)
[[1 mod 5, 0 mod 5],
[3 mod 5, 4 mod 5]]
>>> f = R.to_dense(x**4 + 1)
>>> dup_gf_Qmatrix(f, R.domain)
[[1 mod 5, 0 mod 5, 0 mod 5, 0 mod 5],
[0 mod 5, 4 mod 5, 0 mod 5, 0 mod 5],
[0 mod 5, 0 mod 5, 1 mod 5, 0 mod 5],
[0 mod 5, 0 mod 5, 0 mod 5, 4 mod 5]]
References
==========
* :cite:`Geddes1992algorithms`, algorithm 8.5
"""
n, q = dmp_degree_in(f, 0, 0), K.order
r = [K.one] + [K.zero]*(n - 1)
Q = [r.copy()] + [[]]*(n - 1)
for i in range(1, (n - 1)*q + 1):
c, r[1:], r[0] = r[-1], r[:-1], K.zero
for j in range(n):
r[j] -= c*f[-j - 1]
if not (i % q):
Q[i//q] = r.copy()
return Q
def dup_gf_berlekamp(f, K):
"""
Factor a square-free polynomial over finite fields of small order.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**4 + 1)
>>> dup_gf_berlekamp([1, 0, 0, 0, 1], R.domain)
[[1 mod 5, 0 mod 5, 2 mod 5], [1 mod 5, 0 mod 5, 3 mod 5]]
References
==========
* :cite:`Geddes1992algorithms`, algorithm 8.4
* :cite:`Knuth1985seminumerical`, section 4.6.2
"""
from .solvers import RawMatrix
Q = dup_gf_Qmatrix(f, K)
Q = RawMatrix(Q) - RawMatrix.eye(len(Q))
V = Q.T.nullspace()
for i, v in enumerate(V):
V[i] = dmp_normal(list(reversed(v)), 0, K)
factors = [f]
for v in V[1:]:
for f in list(factors):
for s in range(K.order):
h = dmp_add_term(v, -K(s), 0, 0, K)
g = dmp_gcd(f, h, 0, K)
if not dmp_one_p(g, 0, K) and g != f:
factors.remove(f)
f = dmp_quo(f, g, 0, K)
factors.extend([f, g])
if len(factors) == len(V):
return _sort_factors(factors, multiple=False)
return _sort_factors(factors, multiple=False)
def dup_gf_ddf_zassenhaus(f, K):
"""
Cantor-Zassenhaus: Deterministic Distinct Degree Factorization.
Given a monic square-free polynomial ``f`` in ``GF(q)[x]``, computes
partial distinct degree factorization ``f_1 ... f_d`` of ``f`` where
``deg(f_i) != deg(f_j)`` for ``i != j``. The result is returned as a
list of pairs ``(f_i, e_i)`` where ``deg(f_i) > 0`` and ``e_i > 0``
is an argument to the equal degree factorization routine.
Examples
========
>>> R, x = ring('x', FF(11))
>>> f = R.to_dense(x**15 - 1)
>>> dup_gf_ddf_zassenhaus(f, R.domain)
[([1 mod 11, 0 mod 11, 0 mod 11, 0 mod 11, 0 mod 11, 10 mod 11], 1),
([1 mod 11, 0 mod 11, 0 mod 11, 0 mod 11, 0 mod 11, 1 mod 11, 0 mod 11,
0 mod 11, 0 mod 11, 0 mod 11, 1 mod 11], 2)]
To obtain factorization into irreducibles, use equal degree factorization
procedure (EDF) with each of the factors.
References
==========
* :cite:`Gathen1999modern`, algorithm 14.3
* :cite:`Geddes1992algorithms`, algorithm 8.8
See Also
========
dup_gf_edf_zassenhaus
"""
factors, q = [], K.order
g, x = [[K.one, K.zero]]*2
for i in range(1, dmp_degree_in(f, 0, 0)//2 + 1):
g = dup_gf_pow_mod(g, q, f, K)
h = dmp_gcd(f, dmp_sub(g, x, 0, K), 0, K)
if not dmp_one_p(h, 0, K):
factors.append((h, i))
f = dmp_quo(f, h, 0, K)
g = dmp_rem(g, f, 0, K)
if not dmp_one_p(f, 0, K):
factors += [(f, dmp_degree_in(f, 0, 0))]
return factors
def dup_gf_edf_zassenhaus(f, n, K):
"""
Cantor-Zassenhaus: Probabilistic Equal Degree Factorization.
Given a monic square-free polynomial ``f`` in ``GF(q)[x]`` and
an integer ``n``, such that ``n`` divides ``deg(f)``, returns all
irreducible factors ``f_1,...,f_d`` of ``f``, each of degree ``n``.
EDF procedure gives complete factorization over Galois fields.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**3 + x**2 + x + 1)
>>> dup_gf_edf_zassenhaus(f, 1, R.domain)
[[1 mod 5, 1 mod 5], [1 mod 5, 2 mod 5], [1 mod 5, 3 mod 5]]
References
==========
* :cite:`Geddes1992algorithms`, algorithm 8.9
See Also
========
dup_gf_ddf_zassenhaus
"""
factors = [f]
if dmp_degree_in(f, 0, 0) <= n:
return factors
p, q = K.characteristic, K.order
N = dmp_degree_in(f, 0, 0) // n
while len(factors) < N:
r = dup_gf_random(2*n - 1, K)
if p == 2:
h = r
for i in range(1, n):
r = dup_gf_pow_mod(r, q, f, K)
h = dmp_add(h, r, 0, K)
else:
h = dup_gf_pow_mod(r, (q**n - 1) // 2, f, K)
h = dmp_add_term(h, -K.one, 0, 0, K)
g = dmp_gcd(f, h, 0, K)
if not dmp_one_p(g, 0, K) and g != f:
factors = (dup_gf_edf_zassenhaus(g, n, K) +
dup_gf_edf_zassenhaus(dmp_quo(f, g, 0, K), n, K))
return _sort_factors(factors, multiple=False)
def dup_gf_ddf_shoup(f, K):
"""
Kaltofen-Shoup: Deterministic Distinct Degree Factorization.
Given a monic square-free polynomial ``f`` in ``GF(q)[x]``, computes
partial distinct degree factorization ``f_1 ... f_d`` of ``f`` where
``deg(f_i) != deg(f_j)`` for ``i != j``. The result is returned as a
list of pairs ``(f_i, e_i)`` where ``deg(f_i) > 0`` and ``e_i > 0``
is an argument to the equal degree factorization routine.
Notes
=====
This algorithm is an improved version of Zassenhaus algorithm for
large ``deg(f)`` and order ``q`` (especially for ``deg(f) ~ lg(q)``).
Examples
========
>>> R, x = ring('x', FF(3))
>>> f = R.to_dense(x**6 - x**5 + x**4 + x**3 - x)
>>> dup_gf_ddf_shoup(f, R.domain)
[([1 mod 3, 1 mod 3, 0 mod 3], 1), ([1 mod 3, 1 mod 3, 0 mod 3, 1 mod 3, 2 mod 3], 2)]
References
==========
* :cite:`Kaltofen1998subquadratic`, algorithm D
* :cite:`Shoup1995factor`
* :cite:`Gathen1992frobenious`
See Also
========
dup_gf_edf_shoup
"""
n, q = dmp_degree_in(f, 0, 0), K.order
k = math.ceil(math.sqrt(n//2))
x = [K.one, K.zero]
h = dup_gf_pow_mod(x, q, f, K)
# U[i] = x**(q**i)
U = [x, h] + [K.zero]*(k - 1)
for i in range(2, k + 1):
U[i] = dup_gf_compose_mod(U[i - 1], h, f, K)
h, U = U[k], U[:k]
# V[i] = x**(q**(k*(i+1)))
V = [h] + [K.zero]*(k - 1)
for i in range(1, k):
V[i] = dup_gf_compose_mod(V[i - 1], h, f, K)
factors = []
for i, v in enumerate(V):
h, j = [K.one], k - 1
for u in U:
g = dmp_sub(v, u, 0, K)
h = dmp_mul(h, g, 0, K)
h = dmp_rem(h, f, 0, K)
g = dmp_gcd(f, h, 0, K)
f = dmp_quo(f, g, 0, K)
for u in reversed(U):
h = dmp_sub(v, u, 0, K)
F = dmp_gcd(g, h, 0, K)
if not dmp_one_p(F, 0, K):
factors.append((F, k*(i + 1) - j))
g, j = dmp_quo(g, F, 0, K), j - 1
if not dmp_one_p(f, 0, K):
factors.append((f, dmp_degree_in(f, 0, 0)))
return factors
def dup_gf_edf_shoup(f, n, K):
"""
Gathen-Shoup: Probabilistic Equal Degree Factorization.
Given a monic square-free polynomial ``f`` in ``GF(q)[x]`` and
an integer ``n``, such that ``n`` divides ``deg(f)``, returns all
irreducible factors ``f_1,...,f_d`` of ``f``, each of degree ``n``.
EDF procedure gives complete factorization over Galois fields.
Notes
=====
This algorithm is an improved version of Zassenhaus algorithm for
large ``deg(f)`` and order ``q`` (especially for ``deg(f) ~ lg(q)``).
Examples
========
>>> R, x = ring('x', FF(2917))
>>> f = R.to_dense(x**2 + 2837*x + 2277)
>>> dup_gf_edf_shoup(f, 1, R.domain)
[[1 mod 2917, 852 mod 2917], [1 mod 2917, 1985 mod 2917]]
References
==========
* :cite:`Shoup1991ffactor`
* :cite:`Gathen1992ComputingFM`, algorithm 3.6
See Also
========
dup_gf_ddf_shoup
"""
q, p = K.order, K.characteristic
N = dmp_degree_in(f, 0, 0)
if not N:
return []
if N <= n:
return [f]
factors, x = [f], [K.one, K.zero]
r = dup_gf_random(N - 1, K)
h = dup_gf_pow_mod(x, q, f, K)
H = dup_gf_trace_map(r, h, x, n - 1, f, K)[1]
if p == 2:
h1 = dmp_gcd(f, H, 0, K)
h2 = dmp_quo(f, h1, 0, K)
factors = dup_gf_edf_shoup(h1, n, K) + dup_gf_edf_shoup(h2, n, K)
else:
h = dup_gf_pow_mod(H, (q - 1)//2, f, K)
h1 = dmp_gcd(f, h, 0, K)
h2 = dmp_gcd(f, dmp_add_term(h, -K.one, 0, 0, K), 0, K)
h3 = dmp_quo(f, dmp_mul(h1, h2, 0, K), 0, K)
factors = (dup_gf_edf_shoup(h1, n, K) + dup_gf_edf_shoup(h2, n, K) +
dup_gf_edf_shoup(h3, n, K))
return _sort_factors(factors, multiple=False)
def dup_gf_zassenhaus(f, K):
"""
Factor a square-free polynomial over finite fields of medium order.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**2 + 4*x + 3)
>>> dup_gf_zassenhaus(f, R.domain)
[[1 mod 5, 1 mod 5], [1 mod 5, 3 mod 5]]
"""
factors = []
for factor, n in dup_gf_ddf_zassenhaus(f, K):
factors += dup_gf_edf_zassenhaus(factor, n, K)
return _sort_factors(factors, multiple=False)
def dup_gf_shoup(f, K):
"""
Factor a square-free polynomial over finite fields of large order.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**2 + 4*x + 3)
>>> dup_gf_shoup(f, R.domain)
[[1 mod 5, 1 mod 5], [1 mod 5, 3 mod 5]]
"""
factors = []
for factor, n in dup_gf_ddf_shoup(f, K):
factors += dup_gf_edf_shoup(factor, n, K)
return _sort_factors(factors, multiple=False)
_factor_methods = {
'berlekamp': dup_gf_berlekamp, # ``p`` : small
'zassenhaus': dup_gf_zassenhaus, # ``p`` : medium
'shoup': dup_gf_shoup, # ``p`` : large
}
def dup_gf_factor_sqf(f, K):
"""
Factor a square-free polynomial ``f`` in ``GF(q)[x]``.
Returns its complete factorization into irreducibles::
f_1(x) f_2(x) ... f_d(x)
where each ``f_i`` is a monic polynomial and ``gcd(f_i, f_j) == 1``,
for ``i != j``. The result is given as a list of factors of ``f``.
Square-free factors of ``f`` can be factored into irreducibles over
finite fields using three very different methods:
Berlekamp
efficient for very small values of order ``q`` (usually ``q < 25``)
Cantor-Zassenhaus
efficient on average input and with "typical" ``q``
Shoup-Kaltofen-Gathen
efficient with very large inputs and order
If you want to use a specific factorization method - set
``GF_FACTOR_METHOD`` configuration option with one of ``"berlekamp"``,
``"zassenhaus"`` or ``"shoup"`` values.
Examples
========
>>> R, x = ring('x', FF(5))
>>> f = R.to_dense(x**2 + 4*x + 3)
>>> dup_gf_factor_sqf(f, R.domain)
[[1 mod 5, 1 mod 5], [1 mod 5, 3 mod 5]]
References
==========
* :cite:`Gathen1999modern`, chapter 14
"""
method = query('GF_FACTOR_METHOD')
return _factor_methods[method](f, K)
| bsd-3-clause | 7,375,662,049,824,548,000 | 23.541885 | 90 | 0.494293 | false | 2.737626 | false | false | false |
samisalkosuo/mazingame | mazingame/gameclasses.py | 1 | 2362 | #Classes for MazingGame.
#
# The MIT License (MIT)
# Copyright (c) 2015, 2016 Sami Salkosuo
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from mazepy import mazepy
class Player:
def __init__(self,name):
self.name=name
#row and column are location in maze grid
self.row=0
self.column=0
self.startingRow=0
self.startingColumn=0
#screeenRow and screenColumn are location in screen
self.screenRow=0
self.screenColumn=0
self.visitedCells=[]
self.symbol="@"
def addVisitedCell(self,cell):
self.visitedCells.append(cell)
def __str__(self):
output="Name: %s. Maze position:[%d,%d]. Screen position: [%d,%d]" % (self.name,self.row,self.column,self.screenRow,self.screenColumn)
return output
class Goal:
def __init__(self,row,column,screenRow,screenColumn):
self.row=row
self.column=column
self.screenRow=screenRow
self.screenColumn=screenColumn
self.symbol="X"
def __str__(self):
output="Goal maze position: [%d,%d], screen position: [%d,%d]" % (self.row,self.column,self.screenRow,self.screenColumn)
return output
class MazingCell(mazepy.Cell):
pass
class GameGrid(mazepy.Grid):
def contentsOf(self,cell):
return cell.getContent()
| mit | -2,358,601,345,035,029,000 | 32.267606 | 142 | 0.696867 | false | 3.923588 | false | false | false |
timshenkao/StringKernelSVM | stringSVM.py | 1 | 10377 | import numpy as np
from sklearn import svm
from sklearn.datasets import fetch_20newsgroups
from sklearn.svm import libsvm
import sys
from time import time
from functools import wraps
LIBSVM_IMPL = ['c_svc', 'nu_svc', 'one_class', 'epsilon_svr', 'nu_svr']
def caching():
"""
Cache decorator. Arguments to the cached function must be hashable.
"""
def decorate_func(func):
cache = dict()
# separating positional and keyword args
kwarg_point = object()
@wraps(func)
def cache_value(*args, **kwargs):
key = args
if kwargs:
key += (kwarg_point,) + tuple(sorted(kwargs.items()))
if key in cache:
result = cache[key]
else:
result = func(*args, **kwargs)
cache[key] = result
return result
def cache_clear():
"""
Clear the cache
"""
cache.clear()
# Clear the cache
cache_value.cache_clear = cache_clear
return cache_value
return decorate_func
class StringKernelSVM(svm.SVC):
"""
Implementation of string kernel from article:
H. Lodhi, C. Saunders, J. Shawe-Taylor, N. Cristianini, and C. Watkins.
Text classification using string kernels. Journal of Machine Learning Research, 2, 2002 .
svm.SVC is a basic class from scikit-learn for SVM classification (in multiclass case, it uses one-vs-one approach)
"""
def __init__(self, subseq_length=3, lambda_decay=0.5):
"""
Constructor
:param lambda_decay: lambda parameter for the algorithm
:type lambda_decay: float
:param subseq_length: maximal subsequence length
:type subseq_length: int
"""
self.lambda_decay = lambda_decay
self.subseq_length = subseq_length
svm.SVC.__init__(self, kernel='precomputed')
@caching()
def _K(self, n, s, t):
"""
K_n(s,t) in the original article; recursive function
:param n: length of subsequence
:type n: int
:param s: document #1
:type s: str
:param t: document #2
:type t: str
:return: float value for similarity between s and t
"""
if min(len(s), len(t)) < n:
return 0
else:
part_sum = 0
for j in range(1, len(t)):
if t[j] == s[-1]:
#not t[:j-1] as in the article but t[:j] because of Python slicing rules!!!
part_sum += self._K1(n - 1, s[:-1], t[:j])
result = self._K(n, s[:-1], t) + self.lambda_decay ** 2 * part_sum
return result
@caching()
def _K1(self, n, s, t):
"""
K'_n(s,t) in the original article; auxiliary intermediate function; recursive function
:param n: length of subsequence
:type n: int
:param s: document #1
:type s: str
:param t: document #2
:type t: str
:return: intermediate float value
"""
if n == 0:
return 1
elif min(len(s), len(t)) < n:
return 0
else:
part_sum = 0
for j in range(1, len(t)):
if t[j] == s[-1]:
#not t[:j-1] as in the article but t[:j] because of Python slicing rules!!!
part_sum += self._K1(n - 1, s[:-1], t[:j]) * (self.lambda_decay ** (len(t) - (j + 1) + 2))
result = self.lambda_decay * self._K1(n, s[:-1], t) + part_sum
return result
def _gram_matrix_element(self, s, t, sdkvalue1, sdkvalue2):
"""
Helper function
:param s: document #1
:type s: str
:param t: document #2
:type t: str
:param sdkvalue1: K(s,s) from the article
:type sdkvalue1: float
:param sdkvalue2: K(t,t) from the article
:type sdkvalue2: float
:return: value for the (i, j) element from Gram matrix
"""
if s == t:
return 1
else:
try:
return self._K(self.subseq_length, s, t) / \
(sdkvalue1 * sdkvalue2) ** 0.5
except ZeroDivisionError:
print("Maximal subsequence length is less or equal to documents' minimal length."
"You should decrease it")
sys.exit(2)
def string_kernel(self, X1, X2):
"""
String Kernel computation
:param X1: list of documents (m rows, 1 column); each row is a single document (string)
:type X1: list
:param X2: list of documents (m rows, 1 column); each row is a single document (string)
:type X2: list
:return: Gram matrix for the given parameters
"""
len_X1 = len(X1)
len_X2 = len(X2)
# numpy array of Gram matrix
gram_matrix = np.zeros((len_X1, len_X2), dtype=np.float32)
sim_docs_kernel_value = {}
#when lists of documents are identical
if X1 == X2:
#store K(s,s) values in dictionary to avoid recalculations
for i in range(len_X1):
sim_docs_kernel_value[i] = self._K(self.subseq_length, X1[i], X1[i])
#calculate Gram matrix
for i in range(len_X1):
for j in range(i, len_X2):
gram_matrix[i, j] = self._gram_matrix_element(X1[i], X2[j], sim_docs_kernel_value[i],
sim_docs_kernel_value[j])
#using symmetry
gram_matrix[j, i] = gram_matrix[i, j]
#when lists of documents are not identical but of the same length
elif len_X1 == len_X2:
sim_docs_kernel_value[1] = {}
sim_docs_kernel_value[2] = {}
#store K(s,s) values in dictionary to avoid recalculations
for i in range(len_X1):
sim_docs_kernel_value[1][i] = self._K(self.subseq_length, X1[i], X1[i])
for i in range(len_X2):
sim_docs_kernel_value[2][i] = self._K(self.subseq_length, X2[i], X2[i])
#calculate Gram matrix
for i in range(len_X1):
for j in range(i, len_X2):
gram_matrix[i, j] = self._gram_matrix_element(X1[i], X2[j], sim_docs_kernel_value[1][i],
sim_docs_kernel_value[2][j])
#using symmetry
gram_matrix[j, i] = gram_matrix[i, j]
#when lists of documents are neither identical nor of the same length
else:
sim_docs_kernel_value[1] = {}
sim_docs_kernel_value[2] = {}
min_dimens = min(len_X1, len_X2)
#store K(s,s) values in dictionary to avoid recalculations
for i in range(len_X1):
sim_docs_kernel_value[1][i] = self._K(self.subseq_length, X1[i], X1[i])
for i in range(len_X2):
sim_docs_kernel_value[2][i] = self._K(self.subseq_length, X2[i], X2[i])
#calculate Gram matrix for square part of rectangle matrix
for i in range(min_dimens):
for j in range(i, min_dimens):
gram_matrix[i, j] = self._gram_matrix_element(X1[i], X2[j], sim_docs_kernel_value[1][i],
sim_docs_kernel_value[2][j])
#using symmetry
gram_matrix[j, i] = gram_matrix[i, j]
#if more rows than columns
if len_X1 > len_X2:
for i in range(min_dimens, len_X1):
for j in range(len_X2):
gram_matrix[i, j] = self._gram_matrix_element(X1[i], X2[j], sim_docs_kernel_value[1][i],
sim_docs_kernel_value[2][j])
#if more columns than rows
else:
for i in range(len_X1):
for j in range(min_dimens, len_X2):
gram_matrix[i, j] = self._gram_matrix_element(X1[i], X2[j], sim_docs_kernel_value[1][i],
sim_docs_kernel_value[2][j])
print sim_docs_kernel_value
return gram_matrix
def fit(self, X, Y):
gram_matr = self.string_kernel(X, X)
self.__X = X
super(svm.SVC, self).fit(gram_matr, Y)
def predict(self, X):
svm_type = LIBSVM_IMPL.index(self.impl)
if not self.__X:
print('You should train the model first!!!')
sys.exit(3)
else:
gram_matr_predict_new = self.string_kernel(X, self.__X)
gram_matr_predict_new = np.asarray(gram_matr_predict_new, dtype=np.float64, order='C')
return libsvm.predict(
gram_matr_predict_new, self.support_, self.support_vectors_, self.n_support_,
self.dual_coef_, self._intercept_,
self._label, self.probA_, self.probB_,
svm_type=svm_type,
kernel=self.kernel, C=self.C, nu=self.nu,
probability=self.probability, degree=self.degree,
shrinking=self.shrinking, tol=self.tol, cache_size=self.cache_size,
coef0=self.coef0, gamma=self._gamma, epsilon=self.epsilon)
if __name__ == '__main__':
cur_f = __file__.split('/')[-1]
if len(sys.argv) != 3:
print >> sys.stderr, 'usage: ' + cur_f + ' <maximal subsequence length> <lambda (decay)>'
sys.exit(1)
else:
subseq_length = int(sys.argv[1])
lambda_decay = float(sys.argv[2])
#The dataset is the 20 newsgroups dataset. It will be automatically downloaded, then cached.
t_start = time()
news = fetch_20newsgroups(subset='train')
X_train = news.data[:10]
Y_train = news.target[:10]
print('Data fetched in %.3f seconds' % (time() - t_start))
clf = StringKernelSVM(subseq_length=subseq_length, lambda_decay=lambda_decay)
t_start = time()
clf.fit(X_train, Y_train)
print('Model trained in %.3f seconds' % (time() - t_start))
t_start = time()
result = clf.predict(news.data[10:14])
print('New data predicted in %.3f seconds' % (time() - t_start))
print result | bsd-3-clause | 6,206,781,025,852,649,000 | 38.162264 | 119 | 0.522213 | false | 3.648734 | false | false | false |
100Shapes/100shapes.github.com | fabfile.py | 1 | 1220 | from fabric.api import local, abort, env
from fabric.contrib import django
django.project('ohs_site')
from django.conf import settings
import os
BUILD_DIR = settings.BUILD_DIR
def setup():
local('cp ohs_site/offline/sample.env ohs_site/.env')
local('nano ohs_site/.env')
def build_site():
e = getattr(env, 'environment', None)
if e == 'production':
local("foreman run python manage.py build --skip-media --skip-static")
else:
local("python manage.py build")
def build_extras():
if not os.path.exists(BUILD_DIR):
os.makedirs(BUILD_DIR)
local('cp ohs_site/extras/* %s' % BUILD_DIR)
def build_blog():
pass
# blog = settings.STATICBLOG_COMPILE_DIRECTORY
# if not os.path.exists(blog):
# os.makedirs(blog)
# e = getattr(env, 'environment', None)
# if e == 'production':
# local("foreman run python manage.py update_blog --all")
# else:
# local("python manage.py update_blog")
def build():
build_site()
build_blog()
build_extras()
def deploy():
local('python manage.py collectstatic --noinput')
env.environment = 'production'
build()
local('foreman run python manage.py collectstatic --noinput')
local('ghp-import -p %s' % BUILD_DIR, capture=True)
env.environment = None
| mit | -1,133,104,399,933,351,700 | 21.181818 | 72 | 0.692623 | false | 2.939759 | false | false | false |
basho/riak-python-client | riak/tests/test_kv.py | 1 | 29624 | # -*- coding: utf-8 -*-
# Copyright 2010-present Basho Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import os
import sys
import unittest
from six import string_types, PY2, PY3
from time import sleep
from riak import ConflictError, RiakError, ListError
from riak import RiakClient, RiakBucket, BucketType
from riak.resolver import default_resolver, last_written_resolver
from riak.tests import RUN_KV, RUN_RESOLVE, PROTOCOL
from riak.tests.base import IntegrationTestBase
from riak.tests.comparison import Comparison
try:
    # Prefer simplejson when installed (faster C implementation); fall back
    # to the standard-library json module otherwise.
    import simplejson as json
except ImportError:
    import json
if PY2:
    # On Python 2 use the C-accelerated cPickle module.
    import cPickle
    test_pickle_dumps = cPickle.dumps
    test_pickle_loads = cPickle.loads
else:
    # On Python 3 the stdlib pickle module is already accelerated.
    import pickle
    test_pickle_dumps = pickle.dumps
    test_pickle_loads = pickle.loads
# Bucket used by sibling tests; allow_mult is enabled on it in setUpModule
# and its properties are cleared again in tearDownModule.
testrun_sibs_bucket = 'sibsbucket'
# Bucket whose properties get mutated by tests; cleared in tearDownModule.
testrun_props_bucket = 'propsbucket'
def setUpModule():
    """Enable sibling support on the shared sibling-test bucket.

    Runs once before any test in this module; skipped entirely when the
    KV test suite is disabled via RUN_KV.
    """
    if not RUN_KV:
        return
    client = IntegrationTestBase.create_client()
    client.bucket(testrun_sibs_bucket).allow_mult = True
    client.close()
def tearDownModule():
    """Reset the properties of the buckets this module mutated.

    Runs once after all tests in this module; skipped when the KV test
    suite is disabled via RUN_KV.
    """
    if not RUN_KV:
        return
    client = IntegrationTestBase.create_client()
    client.bucket(testrun_sibs_bucket).clear_properties()
    client.bucket(testrun_props_bucket).clear_properties()
    client.close()
class NotJsonSerializable(object):
    """Helper value type that deliberately cannot be serialized as JSON.

    Equality treats the positional arguments as an unordered collection,
    while keyword arguments must match key-for-key and value-for-value.
    """

    def __init__(self, *args, **kwargs):
        self.args = list(args)
        self.kwargs = kwargs

    def __eq__(self, other):
        # Positional argument counts must agree before anything else.
        if len(self.args) != len(other.args):
            return False
        # Keyword arguments: same number of entries...
        if len(self.kwargs) != len(other.kwargs):
            return False
        # ...and every one of ours must be present in the other with an
        # equal value.
        for name, value in self.kwargs.items():
            if other.kwargs[name] != value:
                return False
        # Compare positional args order-insensitively.
        return sorted(self.args) == sorted(other.args)
class KVUnitTests(unittest.TestCase):
    """Unit tests asserting that key/bucket listing APIs raise ListError.

    These use an unconnected RiakClient, so the ListError must be raised
    client-side before any network traffic happens.
    """

    def test_list_keys_exception(self):
        client = RiakClient()
        btype = BucketType(client, 'test')
        bucket = RiakBucket(client, 'test', btype)
        with self.assertRaises(ListError):
            bucket.get_keys()

    def test_stream_buckets_exception(self):
        client = RiakClient()
        with self.assertRaises(ListError):
            buckets = []
            for chunk in client.stream_buckets():
                buckets.extend(chunk)

    def test_stream_keys_exception(self):
        client = RiakClient()
        with self.assertRaises(ListError):
            keys = []
            for chunk in client.stream_keys('test'):
                keys.extend(chunk)

    def test_ts_stream_keys_exception(self):
        client = RiakClient()
        with self.assertRaises(ListError):
            keys = []
            for chunk in client.ts_stream_keys('test'):
                keys.extend(chunk)
@unittest.skipUnless(RUN_KV, 'RUN_KV is 0')
class BasicKVTests(IntegrationTestBase, unittest.TestCase, Comparison):
def test_no_returnbody(self):
bucket = self.client.bucket(self.bucket_name)
o = bucket.new(self.key_name, "bar").store(return_body=False)
self.assertEqual(o.vclock, None)
@unittest.skipUnless(PROTOCOL == 'pbc', 'Only available on pbc')
def test_get_no_returnbody(self):
bucket = self.client.bucket(self.bucket_name)
o = bucket.new(self.key_name, "Ain't no body")
o.store()
stored_object = bucket.get(self.key_name, head_only=True)
self.assertFalse(stored_object.data)
list_of_objects = bucket.multiget([self.key_name], head_only=True)
for stored_object in list_of_objects:
self.assertFalse(stored_object.data)
def test_many_link_headers_should_work_fine(self):
bucket = self.client.bucket(self.bucket_name)
o = bucket.new("lots_of_links", "My god, it's full of links!")
for i in range(0, 300):
link = ("other", "key%d" % i, "next")
o.add_link(link)
o.store()
stored_object = bucket.get("lots_of_links")
self.assertEqual(len(stored_object.links), 300)
def test_is_alive(self):
self.assertTrue(self.client.is_alive())
    def test_store_and_get(self):
        """Round-trip a random value and exercise bucket/key name encodings.

        Verifies the basic store/fetch cycle, then checks how unicode and
        byte-string bucket names, keys, and values are handled on Python 2
        versus Python 3.
        """
        bucket = self.client.bucket(self.bucket_name)
        rand = self.randint()
        obj = bucket.new('foo', rand)
        obj.store()
        # Fetch it back and verify every piece of identifying metadata.
        obj = bucket.get('foo')
        self.assertTrue(obj.exists)
        self.assertEqual(obj.bucket.name, self.bucket_name)
        self.assertEqual(obj.key, 'foo')
        self.assertEqual(obj.data, rand)
        # unicode objects are fine, as long as they don't
        # contain any non-ASCII chars
        if PY2:
            self.client.bucket(unicode(self.bucket_name))  # noqa
        else:
            self.client.bucket(self.bucket_name)
        # Non-ASCII bucket names: rejected on Python 2, accepted on Python 3.
        if PY2:
            self.assertRaises(TypeError, self.client.bucket, u'búcket')
            self.assertRaises(TypeError, self.client.bucket, 'búcket')
        else:
            self.client.bucket(u'búcket')
            self.client.bucket('búcket')
        bucket.get(u'foo')
        # Non-ASCII keys and values: same split in behavior between
        # Python 2 (TypeError) and Python 3 (allowed).
        if PY2:
            self.assertRaises(TypeError, bucket.get, u'føø')
            self.assertRaises(TypeError, bucket.get, 'føø')
            self.assertRaises(TypeError, bucket.new, u'foo', 'éå')
            self.assertRaises(TypeError, bucket.new, u'foo', 'éå')
            self.assertRaises(TypeError, bucket.new, 'foo', u'éå')
            self.assertRaises(TypeError, bucket.new, 'foo', u'éå')
        else:
            bucket.get(u'føø')
            bucket.get('føø')
            bucket.new(u'foo', 'éå')
            bucket.new(u'foo', 'éå')
            bucket.new('foo', u'éå')
            bucket.new('foo', u'éå')
        # Store/fetch again with an explicit content type and charset.
        obj2 = bucket.new('baz', rand, 'application/json')
        obj2.charset = 'UTF-8'
        obj2.store()
        obj2 = bucket.get('baz')
        self.assertEqual(obj2.data, rand)
def test_store_obj_with_unicode(self):
bucket = self.client.bucket(self.bucket_name)
data = {u'føø': u'éå'}
obj = bucket.new('foo', data)
obj.store()
obj = bucket.get('foo')
self.assertEqual(obj.data, data)
def test_store_unicode_string(self):
bucket = self.client.bucket(self.bucket_name)
data = u"some unicode data: \u00c6"
obj = bucket.new(self.key_name, encoded_data=data.encode('utf-8'),
content_type='text/plain')
obj.charset = 'utf-8'
obj.store()
obj2 = bucket.get(self.key_name)
self.assertEqual(data, obj2.encoded_data.decode('utf-8'))
    def test_string_bucket_name(self):
        """Bucket names must be strings; unicode names are PY2-restricted."""
        # Things that are not strings cannot be bucket names
        for bad in (12345, True, None, {}, []):
            with self.assert_raises_regex(TypeError, 'must be a string'):
                self.client.bucket(bad)
            with self.assert_raises_regex(TypeError, 'must be a string'):
                RiakBucket(self.client, bad, None)
        # Unicode bucket names are not supported in Python 2.x,
        # if they can't be encoded to ASCII. This should be changed in a
        # future release.
        if PY2:
            with self.assert_raises_regex(TypeError,
                                          'Unicode bucket names '
                                          'are not supported'):
                self.client.bucket(u'føø')
        else:
            self.client.bucket(u'føø')
        # This is fine, since it's already ASCII
        self.client.bucket('ASCII')
    def test_generate_key(self):
        """Storing with key=None makes Riak generate a random, slash-free key."""
        # Ensure that Riak generates a random key when
        # the key passed to bucket.new() is None.
        bucket = self.client.bucket(self.bucket_name)
        o = bucket.new(None, data={})
        self.assertIsNone(o.key)
        o.store()
        self.assertIsNotNone(o.key)
        self.assertNotIn('/', o.key)
        existing_keys = bucket.get_keys()
        self.assertEqual(len(existing_keys), 1)
    def maybe_store_keys(self):
        """Seed 'random_key_bucket' with ~1000 keys once, guarded by a sentinel key."""
        skey = 'rkb-init'
        bucket = self.client.bucket('random_key_bucket')
        sobj = bucket.get(skey)
        if sobj.exists:
            return
        for key in range(1, 1000):
            o = bucket.new(None, data={})
            o.store()
        o = bucket.new(skey, data={})
        o.store()
    def test_stream_keys(self):
        """Streamed key listing must match the regular full listing."""
        self.maybe_store_keys()
        bucket = self.client.bucket('random_key_bucket')
        regular_keys = bucket.get_keys()
        self.assertNotEqual(len(regular_keys), 0)
        streamed_keys = []
        for keylist in bucket.stream_keys():
            self.assertNotEqual([], keylist)
            for key in keylist:
                self.assertIsInstance(key, string_types)
            streamed_keys += keylist
        self.assertEqual(sorted(regular_keys), sorted(streamed_keys))
    def test_stream_keys_timeout(self):
        """A very small timeout on stream_keys must raise RiakError."""
        self.maybe_store_keys()
        bucket = self.client.bucket('random_key_bucket')
        streamed_keys = []
        with self.assertRaises(RiakError):
            for keylist in self.client.stream_keys(bucket, timeout=1):
                self.assertNotEqual([], keylist)
                for key in keylist:
                    self.assertIsInstance(key, string_types)
                streamed_keys += keylist
    def test_stream_keys_abort(self):
        """Aborting a key stream mid-iteration must leave the connection usable."""
        self.maybe_store_keys()
        bucket = self.client.bucket('random_key_bucket')
        regular_keys = bucket.get_keys()
        self.assertNotEqual(len(regular_keys), 0)
        try:
            for keylist in bucket.stream_keys():
                raise RuntimeError("abort")
        except RuntimeError:
            pass
        # If the stream was closed correctly, this will not error
        robj = bucket.get(regular_keys[0])
        self.assertEqual(len(robj.siblings), 1)
        self.assertEqual(True, robj.exists)
    def test_bad_key(self):
        """Non-string keys must raise TypeError on get()."""
        bucket = self.client.bucket(self.bucket_name)
        obj = bucket.new()
        with self.assertRaises(TypeError):
            bucket.get(None)
        with self.assertRaises(TypeError):
            self.client.get(obj)
        with self.assertRaises(TypeError):
            bucket.get(1)
    def test_binary_store_and_get(self):
        """Round-trip raw bytes, and read JSON back through the binary accessor."""
        bucket = self.client.bucket(self.bucket_name)
        # Store as binary, retrieve as binary, then compare...
        rand = str(self.randint())
        if PY2:
            rand = bytes(rand)
        else:
            rand = bytes(rand, 'utf-8')
        obj = bucket.new(self.key_name, encoded_data=rand,
                         content_type='text/plain')
        obj.store()
        obj = bucket.get(self.key_name)
        self.assertTrue(obj.exists)
        self.assertEqual(obj.encoded_data, rand)
        # Store as JSON, retrieve as binary, JSON-decode, then compare...
        data = [self.randint(), self.randint(), self.randint()]
        key2 = self.randname()
        obj = bucket.new(key2, data)
        obj.store()
        obj = bucket.get(key2)
        self.assertEqual(data, json.loads(obj.encoded_data.decode()))
    def test_blank_binary_204(self):
        """Storing an empty binary body (HTTP 204 case) must not error."""
        bucket = self.client.bucket(self.bucket_name)
        # this should *not* raise an error
        empty = ""
        if PY2:
            empty = bytes(empty)
        else:
            empty = bytes(empty, 'utf-8')
        obj = bucket.new('foo2', encoded_data=empty, content_type='text/plain')
        obj.store()
        obj = bucket.get('foo2')
        self.assertTrue(obj.exists)
        self.assertEqual(obj.encoded_data, empty)
    def test_custom_bucket_encoder_decoder(self):
        """Per-bucket encoder/decoder registration round-trips pickled data."""
        bucket = self.client.bucket(self.bucket_name)
        # Teach the bucket how to pickle
        bucket.set_encoder('application/x-pickle', test_pickle_dumps)
        bucket.set_decoder('application/x-pickle', test_pickle_loads)
        data = {'array': [1, 2, 3], 'badforjson': NotJsonSerializable(1, 3)}
        obj = bucket.new(self.key_name, data, 'application/x-pickle')
        obj.store()
        obj2 = bucket.get(self.key_name)
        self.assertEqual(data, obj2.data)
    def test_custom_client_encoder_decoder(self):
        """Client-wide encoder/decoder registration round-trips pickled data."""
        bucket = self.client.bucket(self.bucket_name)
        # Teach the client how to pickle
        self.client.set_encoder('application/x-pickle', test_pickle_dumps)
        self.client.set_decoder('application/x-pickle', test_pickle_loads)
        data = {'array': [1, 2, 3], 'badforjson': NotJsonSerializable(1, 3)}
        obj = bucket.new(self.key_name, data, 'application/x-pickle')
        obj.store()
        obj2 = bucket.get(self.key_name)
        self.assertEqual(data, obj2.data)
    def test_unknown_content_type_encoder_decoder(self):
        """Unknown content types bypass encoding and round-trip raw bytes."""
        # Bypass the content_type encoders
        bucket = self.client.bucket(self.bucket_name)
        data = "some funny data"
        if PY3:
            # Python 3.x needs to store binaries
            data = data.encode()
        obj = bucket.new(self.key_name,
                         encoded_data=data,
                         content_type='application/x-frobnicator')
        obj.store()
        obj2 = bucket.get(self.key_name)
        self.assertEqual(data, obj2.encoded_data)
    def test_text_plain_encoder_decoder(self):
        """text/plain content round-trips through the default codecs."""
        bucket = self.client.bucket(self.bucket_name)
        data = "some funny data"
        obj = bucket.new(self.key_name, data, content_type='text/plain')
        obj.store()
        obj2 = bucket.get(self.key_name)
        self.assertEqual(data, obj2.data)
    def test_missing_object(self):
        """Fetching a missing key yields exists=False and data=None."""
        bucket = self.client.bucket(self.bucket_name)
        obj = bucket.get(self.key_name)
        self.assertFalse(obj.exists)
        # Object with no siblings should not raise the ConflictError
        self.assertIsNone(obj.data)
    def test_delete(self):
        """Object.delete() removes the key; reload() then shows it gone."""
        bucket = self.client.bucket(self.bucket_name)
        rand = self.randint()
        obj = bucket.new(self.key_name, rand)
        obj.store()
        obj = bucket.get(self.key_name)
        self.assertTrue(obj.exists)
        obj.delete()
        obj.reload()
        self.assertFalse(obj.exists)
    def test_bucket_delete(self):
        """bucket.delete(key) removes the key as well."""
        bucket = self.client.bucket(self.bucket_name)
        rand = self.randint()
        obj = bucket.new(self.key_name, rand)
        obj.store()
        bucket.delete(self.key_name)
        obj.reload()
        self.assertFalse(obj.exists)
    def test_set_bucket_properties(self):
        """Bucket property writes are visible from other client connections."""
        bucket = self.client.bucket(testrun_props_bucket)
        # Test setting allow mult...
        bucket.allow_mult = True
        # Test setting nval...
        bucket.n_val = 1
        c2 = self.create_client()
        bucket2 = c2.bucket(testrun_props_bucket)
        self.assertTrue(bucket2.allow_mult)
        self.assertEqual(bucket2.n_val, 1)
        # Test setting multiple properties...
        bucket.set_properties({"allow_mult": False, "n_val": 2})
        c3 = self.create_client()
        bucket3 = c3.bucket(testrun_props_bucket)
        self.assertFalse(bucket3.allow_mult)
        self.assertEqual(bucket3.n_val, 2)
        # clean up!
        c2.close()
        c3.close()
    def test_if_none_match(self):
        """Storing with if_none_match=True over an existing key must fail."""
        bucket = self.client.bucket(self.bucket_name)
        obj = bucket.get(self.key_name)
        obj.delete()
        obj.reload()
        self.assertFalse(obj.exists)
        obj.data = ["first store"]
        obj.content_type = 'application/json'
        obj.store()
        obj.data = ["second store"]
        with self.assertRaises(Exception):
            obj.store(if_none_match=True)
    def test_siblings(self):
        """Divergent writes create siblings; shortcut access raises ConflictError until resolved."""
        # Set up the bucket, clear any existing object...
        bucket = self.client.bucket(testrun_sibs_bucket)
        obj = bucket.get(self.key_name)
        bucket.allow_mult = True
        # Even if it previously existed, let's store a base resolved version
        # from which we can diverge by sending a stale vclock.
        obj.data = 'start'
        obj.content_type = 'text/plain'
        obj.store()
        vals = set(self.generate_siblings(obj, count=5))
        # Make sure the object has five siblings...
        obj = bucket.get(self.key_name)
        self.assertEqual(len(obj.siblings), 5)
        # When the object is in conflict, using the shortcut methods
        # should raise the ConflictError
        with self.assertRaises(ConflictError):
            obj.data
        # Get each of the values - make sure they match what was
        # assigned
        vals2 = set([sibling.data for sibling in obj.siblings])
        self.assertEqual(vals, vals2)
        # Resolve the conflict, and then do a get...
        resolved_sibling = obj.siblings[3]
        obj.siblings = [resolved_sibling]
        self.assertEqual(len(obj.siblings), 1)
        obj.store()
        self.assertEqual(len(obj.siblings), 1)
        self.assertEqual(obj.data, resolved_sibling.data)
    @unittest.skipUnless(RUN_RESOLVE, "RUN_RESOLVE is 0")
    def test_resolution(self):
        """Resolver precedence: object overrides bucket overrides client; None falls through."""
        bucket = self.client.bucket(testrun_sibs_bucket)
        obj = bucket.get(self.key_name)
        bucket.allow_mult = True
        # Even if it previously existed, let's store a base resolved version
        # from which we can diverge by sending a stale vclock.
        obj.data = 'start'
        obj.content_type = 'text/plain'
        obj.store()
        vals = self.generate_siblings(obj, count=5, delay=1.01)
        # Make sure the object has five siblings when using the
        # default resolver
        obj = bucket.get(self.key_name)
        obj.reload()
        self.assertEqual(len(obj.siblings), 5)
        # Setting the resolver on the client object to use the
        # "last-write-wins" behavior
        self.client.resolver = last_written_resolver
        obj.reload()
        self.assertEqual(obj.resolver, last_written_resolver)
        self.assertEqual(1, len(obj.siblings))
        self.assertEqual(obj.data, vals[-1])
        # Set the resolver on the bucket to the default resolver,
        # overriding the resolver on the client
        bucket.resolver = default_resolver
        obj.reload()
        self.assertEqual(obj.resolver, default_resolver)
        self.assertEqual(len(obj.siblings), 5)
        # Define our own custom resolver on the object that returns
        # the maximum value, overriding the bucket and client resolvers
        def max_value_resolver(obj):
            obj.siblings = [max(obj.siblings, key=lambda s: s.data), ]
        obj.resolver = max_value_resolver
        obj.reload()
        self.assertEqual(obj.resolver, max_value_resolver)
        self.assertEqual(obj.data, max(vals))
        # Setting the resolver to None on all levels reverts to the
        # default resolver.
        obj.resolver = None
        self.assertEqual(obj.resolver, default_resolver)  # set by bucket
        bucket.resolver = None
        self.assertEqual(obj.resolver, last_written_resolver)  # set by client
        self.client.resolver = None
        self.assertEqual(obj.resolver, default_resolver)  # reset
        self.assertEqual(bucket.resolver, default_resolver)  # reset
        self.assertEqual(self.client.resolver, default_resolver)  # reset
    @unittest.skipUnless(RUN_RESOLVE, "RUN_RESOLVE is 0")
    def test_resolution_default(self):
        """With no resolver configured, client and bucket report default_resolver."""
        # If no resolver is setup, be sure to resolve to default_resolver
        bucket = self.client.bucket(testrun_sibs_bucket)
        self.assertEqual(self.client.resolver, default_resolver)
        self.assertEqual(bucket.resolver, default_resolver)
    def test_tombstone_siblings(self):
        """Tombstones may appear among siblings; live siblings keep their values."""
        # Set up the bucket, clear any existing object...
        bucket = self.client.bucket(testrun_sibs_bucket)
        obj = bucket.get(self.key_name)
        bucket.allow_mult = True
        obj.data = 'start'
        obj.content_type = 'text/plain'
        obj.store(return_body=True)
        obj.delete()
        vals = set(self.generate_siblings(obj, count=4))
        obj = bucket.get(self.key_name)
        # TODO this used to be 5, only
        siblen = len(obj.siblings)
        self.assertTrue(siblen == 4 or siblen == 5)
        non_tombstones = 0
        for sib in obj.siblings:
            if sib.exists:
                non_tombstones += 1
            self.assertTrue(not sib.exists or sib.data in vals)
        self.assertEqual(non_tombstones, 4)
    def test_store_of_missing_object(self):
        """Storing into a previously-missing key works for JSON and binary bodies."""
        bucket = self.client.bucket(self.bucket_name)
        # for json objects
        o = bucket.get(self.key_name)
        self.assertEqual(o.exists, False)
        o.data = {"foo": "bar"}
        o.content_type = 'application/json'
        o = o.store()
        self.assertEqual(o.data, {"foo": "bar"})
        self.assertEqual(o.content_type, "application/json")
        o.delete()
        # for binary objects
        o = bucket.get(self.randname())
        self.assertEqual(o.exists, False)
        if PY2:
            o.encoded_data = "1234567890"
        else:
            o.encoded_data = "1234567890".encode()
        o.content_type = 'application/octet-stream'
        o = o.store()
        if PY2:
            self.assertEqual(o.encoded_data, "1234567890")
        else:
            self.assertEqual(o.encoded_data, "1234567890".encode())
        self.assertEqual(o.content_type, "application/octet-stream")
        o.delete()
    def test_store_metadata(self):
        """User metadata stored with an object is returned on fetch."""
        bucket = self.client.bucket(self.bucket_name)
        rand = self.randint()
        obj = bucket.new(self.key_name, rand)
        obj.usermeta = {'custom': 'some metadata'}
        obj.store()
        obj = bucket.get(self.key_name)
        self.assertEqual('some metadata', obj.usermeta['custom'])
    def test_list_buckets(self):
        """get_buckets() includes a bucket once it holds at least one key."""
        bucket = self.client.bucket(self.bucket_name)
        bucket.new("one", {"foo": "one", "bar": "red"}).store()
        buckets = self.client.get_buckets()
        self.assertTrue(self.bucket_name in [x.name for x in buckets])
    def test_stream_buckets(self):
        """Streaming bucket listing also includes the populated bucket."""
        bucket = self.client.bucket(self.bucket_name)
        bucket.new(self.key_name, data={"foo": "one",
                                        "bar": "baz"}).store()
        buckets = []
        for bucket_list in self.client.stream_buckets():
            buckets.extend(bucket_list)
        self.assertTrue(self.bucket_name in [x.name for x in buckets])
    def test_stream_buckets_abort(self):
        """Aborting a bucket stream mid-iteration leaves the connection usable."""
        bucket = self.client.bucket(self.bucket_name)
        bucket.new(self.key_name, data={"foo": "one",
                                        "bar": "baz"}).store()
        try:
            for bucket_list in self.client.stream_buckets():
                raise RuntimeError("abort")
        except RuntimeError:
            pass
        robj = bucket.get(self.key_name)
        self.assertTrue(robj.exists)
        self.assertEqual(len(robj.siblings), 1)
    def test_get_params(self):
        """Quorum/notfound tuning parameters are accepted by bucket.get()."""
        bucket = self.client.bucket(self.bucket_name)
        bucket.new(self.key_name, data={"foo": "one",
                                        "bar": "baz"}).store()
        bucket.get(self.key_name, basic_quorum=False)
        bucket.get(self.key_name, basic_quorum=True)
        bucket.get(self.key_name, notfound_ok=True)
        bucket.get(self.key_name, notfound_ok=False)
        missing = bucket.get('missing-key', notfound_ok=True,
                             basic_quorum=True)
        self.assertFalse(missing.exists)
    def test_preflist(self):
        """Preference lists report three primary nodes from the known node set."""
        nodes = ['[email protected]', '[email protected]']
        bucket = self.client.bucket(self.bucket_name)
        bucket.new(self.key_name, data={"foo": "one",
                                        "bar": "baz"}).store()
        try:
            preflist = bucket.get_preflist(self.key_name)
            preflist2 = self.client.get_preflist(bucket, self.key_name)
            for pref in (preflist, preflist2):
                self.assertEqual(len(pref), 3)
                self.assertIn(pref[0]['node'], nodes)
                [self.assertTrue(node['primary']) for node in pref]
        except NotImplementedError as e:
            raise unittest.SkipTest(e)
    def generate_siblings(self, original, count=5, delay=None):
        """Create *count* sibling values for *original* by re-storing with its stale vclock.

        Returns the list of random string values written, in write order.
        An optional *delay* (seconds) separates writes so timestamps differ.
        """
        vals = []
        for _ in range(count):
            while True:
                randval = str(self.randint())
                if randval not in vals:
                    break
            other_obj = original.bucket.new(key=original.key,
                                            data=randval,
                                            content_type='text/plain')
            other_obj.vclock = original.vclock
            other_obj.store()
            vals.append(randval)
            if delay:
                sleep(delay)
        return vals
@unittest.skipUnless(RUN_KV, 'RUN_KV is 0')
class BucketPropsTest(IntegrationTestBase, unittest.TestCase):
    """Tests for reading, writing and clearing bucket properties."""
    def test_rw_settings(self):
        """r/w/dw/rw accept ints and named quora, and can be reset."""
        bucket = self.client.bucket(testrun_props_bucket)
        self.assertEqual(bucket.r, "quorum")
        self.assertEqual(bucket.w, "quorum")
        self.assertEqual(bucket.dw, "quorum")
        self.assertEqual(bucket.rw, "quorum")
        bucket.w = 1
        self.assertEqual(bucket.w, 1)
        bucket.r = "quorum"
        self.assertEqual(bucket.r, "quorum")
        bucket.dw = "all"
        self.assertEqual(bucket.dw, "all")
        bucket.rw = "one"
        self.assertEqual(bucket.rw, "one")
        bucket.set_properties({'w': 'quorum',
                               'r': 'quorum',
                               'dw': 'quorum',
                               'rw': 'quorum'})
        bucket.clear_properties()
    def test_primary_quora(self):
        """pr/pw default to 0 and accept ints or named quora."""
        bucket = self.client.bucket(testrun_props_bucket)
        self.assertEqual(bucket.pr, 0)
        self.assertEqual(bucket.pw, 0)
        bucket.pr = 1
        self.assertEqual(bucket.pr, 1)
        bucket.pw = "quorum"
        self.assertEqual(bucket.pw, "quorum")
        bucket.set_properties({'pr': 0, 'pw': 0})
        bucket.clear_properties()
    def test_clear_bucket_properties(self):
        """clear_properties() restores allow_mult=False and n_val=3 defaults."""
        bucket = self.client.bucket(testrun_props_bucket)
        bucket.allow_mult = True
        self.assertTrue(bucket.allow_mult)
        bucket.n_val = 1
        self.assertEqual(bucket.n_val, 1)
        # Test setting clearing properties...
        self.assertTrue(bucket.clear_properties())
        self.assertFalse(bucket.allow_mult)
        self.assertEqual(bucket.n_val, 3)
@unittest.skipUnless(RUN_KV, 'RUN_KV is 0')
class KVFileTests(IntegrationTestBase, unittest.TestCase):
    """Tests for creating objects directly from files on disk."""
    def test_store_binary_object_from_file(self):
        """new_from_file() guesses a Python mimetype for this source file."""
        bucket = self.client.bucket(self.bucket_name)
        obj = bucket.new_from_file(self.key_name, __file__)
        obj.store()
        obj = bucket.get(self.key_name)
        self.assertNotEqual(obj.encoded_data, None)
        is_win32 = sys.platform == 'win32'
        self.assertTrue(obj.content_type == 'text/x-python' or
                        (is_win32 and obj.content_type == 'text/plain') or
                        obj.content_type == 'application/x-python-code')
    def test_store_binary_object_from_file_should_use_default_mimetype(self):
        """Unknown extensions fall back to application/octet-stream."""
        bucket = self.client.bucket(self.bucket_name)
        filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                os.pardir, os.pardir, 'README.md')
        obj = bucket.new_from_file(self.key_name, filepath)
        obj.store()
        obj = bucket.get(self.key_name)
        self.assertEqual(obj.content_type, 'application/octet-stream')
    def test_store_binary_object_from_file_should_fail_if_file_not_found(self):
        """A missing source file raises IOError and stores nothing."""
        bucket = self.client.bucket(self.bucket_name)
        with self.assertRaises(IOError):
            bucket.new_from_file(self.key_name, 'FILE_NOT_FOUND')
        obj = bucket.get(self.key_name)
        # self.assertEqual(obj.encoded_data, None)
        self.assertFalse(obj.exists)
@unittest.skipUnless(RUN_KV, 'RUN_KV is 0')
class CounterTests(IntegrationTestBase, unittest.TestCase):
    """Tests for legacy (1.4-style) counter operations."""
    def test_counter_requires_allow_mult(self):
        """Counter updates on a bucket without allow_mult must fail."""
        bucket = self.client.bucket(self.bucket_name)
        if bucket.allow_mult:
            bucket.allow_mult = False
        self.assertFalse(bucket.allow_mult)
        with self.assertRaises(Exception):
            bucket.update_counter(self.key_name, 10)
    def test_counter_ops(self):
        """Increment, read back, and decrement a counter."""
        bucket = self.client.bucket(testrun_sibs_bucket)
        self.assertTrue(bucket.allow_mult)
        # Non-existent counter has no value
        self.assertEqual(None, bucket.get_counter(self.key_name))
        # Update the counter
        bucket.update_counter(self.key_name, 10)
        self.assertEqual(10, bucket.get_counter(self.key_name))
        # Update with returning the value
        self.assertEqual(15, bucket.update_counter(self.key_name, 5,
                                                   returnvalue=True))
        # Now try decrementing
        self.assertEqual(10, bucket.update_counter(self.key_name, -5,
                                                   returnvalue=True))
| apache-2.0 | 2,122,825,286,487,329,500 | 34.995134 | 79 | 0.595613 | false | 3.752917 | true | false | false |
sujoykroy/motion-picture | editor/example/render_server.py | 1 | 8491 | import requests
import json
import sys
import os
import tarfile
import shutil
from MotionPicture import *
import imp
import argparse
import getpass
# Command-line interface: server address, credentials, project name,
# local workspace, and parallelism/booking knobs.
parser = argparse.ArgumentParser(description="Render MotionPicture video segment from remote.")
parser.add_argument("--booking", nargs="?", default="-1", type=int,
                    help="Number of booking in one attempt")
parser.add_argument("--addr", required=True,
                    dest="base_url", help="URL of rendering server")
parser.add_argument("--user", required=True,
                    help="Username to login to server")
parser.add_argument("--project", required=True, help="Project name")
parser.add_argument("--workspace", required=True, help="Local directory to save project data")
parser.add_argument("--process", nargs="?", default=1, type=int,
                    help="Number of process to spawn")
args = parser.parse_args()
# Unpack parsed options into module-level configuration values.
username = args.user
passwd = getpass.getpass("[{0}]'s password:".format(args.user))
base_url = args.base_url
project_name = args.project
workbase_path = args.workspace
process_count = args.process
# argparse applies type=int to string defaults, so this is -1
# ("keep booking until the server runs out") when --booking is omitted.
booking_per_call = args.booking
def write_json_to_file(filepath, data):
    """Serialize *data* as JSON into *filepath* (overwriting any content).

    Uses a context manager so the file handle is closed even if
    json.dump raises; the original leaked the handle on error.
    """
    with open(filepath, "w") as f:
        json.dump(data, f)
def write_segment_status(segments_folder, segment):
    """Persist *segment* as status.txt inside its zero-padded per-id folder.

    Bug fix: the original wrote the module-global ``active_segment``
    instead of the ``segment`` argument, so when several bookings were
    made in one batch, every booking's status file contained the first
    booking's data (the folder name and the file content disagreed).
    """
    segment_folder = os.path.join(segments_folder, u"{0:04}".format(segment["id"]))
    if not os.path.exists(segment_folder):
        os.makedirs(segment_folder)
    status_file = os.path.join(segment_folder, "status.txt")
    write_json_to_file(status_file, segment)
# REST endpoint roots of the rendering server.
mp_url = base_url + "/mp"
project_url = mp_url + "/project/" + project_name
# A single session keeps the login cookie for all subsequent calls.
s = requests.session()
login_data = {
    "username": username,
    "password": passwd
}
r=s.post(mp_url+'/login', login_data)
jr = json.loads(r.text)
if jr["result"] == "success":
    print("Fetching information about project [{0}]".format(project_name))
    r=s.get(project_url+'/info')
    project = json.loads(r.text)
    if project.get("id"):
        extras = project["extras"]
        # Local workspace layout: <workspace>/<project>/<data file>,
        # plus an "_extracted" folder and a "segments" folder.
        local_project_path = os.path.join(workbase_path, u"{0}".format(project_name))
        if not os.path.exists(local_project_path):
            os.makedirs(local_project_path)
        local_data_path = os.path.join(local_project_path, project["data_file_name"])
        if not os.path.isfile(local_data_path):
            # Download the project archive with a simple progress meter.
            print("Downloading data-file of project [{0}]".format(project_name))
            f = open(local_data_path, "wb")
            r = s.get(project_url+"/data_file", stream=True)
            total_size = int(r.headers.get("Content-Length"))
            downloaded_size = 0
            for data in r.iter_content(chunk_size=4096):
                f.write(data)
                downloaded_size += len(data)
                percent = int(downloaded_size*100./total_size)
                sys.stdout.write("\rProgress: {0:>5}% (of {2:>10}/{1:<10} byte)".format(
                    percent, total_size, downloaded_size))
            f.close()
            print("\n")
        extracted_folder = local_data_path + u"_extracted"
        if not os.path.exists(extracted_folder):
            # Extraction only happens when the folder is first created.
            os.makedirs(extracted_folder)
            print("Extracting data-file of project [{0}]".format(project_name))
            tar_ref = tarfile.open(local_data_path, "r")
            tar_ref.extractall(extracted_folder)
        segments_folder = os.path.join(local_project_path, u"segments")
        if not os.path.exists(segments_folder):
            os.makedirs(segments_folder)
        # Main work loop: pick up a booked segment (local state first,
        # then the server), render it, and upload the result.
        while True:
            active_segment = None
            # Resume any locally "Booked" segment before asking for more.
            for filename in sorted(os.listdir(segments_folder)):
                folder = os.path.join(segments_folder , filename)
                if not os.path.isdir(folder):
                    continue
                status_file = os.path.join(folder, "status.txt")
                if not os.path.isfile(status_file):
                    seg = dict()
                else:
                    f = open(status_file, "r")
                    seg = json.load(f)
                    f.close()
                if seg.get("status") in ("Booked",):
                    active_segment = seg
                    break
            #Get new booking
            if not active_segment:
                bc = 0
                # booking_per_call < 0 means "book exactly one per pass".
                while bc<booking_per_call or booking_per_call<0:
                    print("Fetching next booking of project [{0}]".format(project_name))
                    r=s.get(project_url+'/book')
                    booking = json.loads(r.text)
                    if booking.get("id"):
                        booking["status"] = "Booked"
                        if not active_segment:
                            active_segment = booking
                        write_segment_status(segments_folder, booking)
                        print("Segment id {0} is booked.".format(booking["id"]))
                        bc += 1
                    else:
                        break
                    if booking_per_call<0:
                        break
                if bc == 0:
                    # Nothing left to book: all segments are done.
                    print("No booking can be made.")
                    break
            if active_segment:
                segment_folder = os.path.join(segments_folder, u"{0:04}".format(active_segment["id"]))
                if not os.path.exists(segment_folder):
                    os.makedirs(segment_folder)
                status_file = os.path.join(segment_folder, "status.txt")
                write_json_to_file(status_file, active_segment)
                # Render into a temp file, then rename, so a crash never
                # leaves a half-written "video" file behind.
                temp_output_filename = os.path.join(
                    segment_folder, "temp_video{0}".format(extras["file_ext"]))
                output_filename = os.path.join(
                    segment_folder, "video{0}".format(extras["file_ext"]))
                #Make the movie
                if not os.path.isfile(output_filename):
                    # Optional project hook executed before rendering.
                    pre_script = extras.get("pre_script")
                    if pre_script:
                        pre_script_path = os.path.join(extracted_folder, pre_script)
                        if os.path.isfile(pre_script_path):
                            imp.load_source("pre_script", pre_script_path)
                    print("Building segment id-{2}:{0}-{1}".format(
                        active_segment["start_time"], active_segment["end_time"], active_segment["id"]))
                    doc_filename = os.path.join(extracted_folder, project["main_filename"])
                    kwargs = dict(
                        src_filename = doc_filename,
                        dest_filename = temp_output_filename,
                        time_line = project["time_line_name"],
                        start_time = active_segment["start_time"],
                        end_time = active_segment["end_time"],
                        resolution = extras.get("resolution", "1280x720"),
                        process_count=process_count
                    )
                    # Pass through optional encoder settings when provided.
                    extra_param_names = ["ffmpeg_params", "bit_rate", "codec", "fps"]
                    for param_name in extra_param_names:
                        if param_name in extras:
                            kwargs[param_name] = extras[param_name]
                    ThreeDShape.HQRender = True
                    doc_movie = DocMovie(**kwargs)
                    doc_movie.make()
                    if os.path.isfile(temp_output_filename):
                        shutil.move(temp_output_filename, output_filename)
                if os.path.isfile(output_filename):
                    segment_url = project_url+"/segment/{0}".format(active_segment["id"])
                    #Upload
                    print("Uploading segment id-{2}:{0}-{1}".format(
                        active_segment["start_time"], active_segment["end_time"], active_segment["id"]))
                    video_file = open(output_filename, "rb")
                    r=s.post(segment_url+"/upload", files={"video": video_file})
                    response = json.loads(r.text)
                    if response.get("result") == "success":
                        # Mark the segment Uploaded so it is skipped next pass.
                        active_segment["status"] = "Uploaded"
                        write_json_to_file(status_file, active_segment)
                        print("Segment id-{2}:{0}-{1} is uploaded.".format(
                            active_segment["start_time"], active_segment["end_time"], active_segment["id"]))
                        active_segment = None
        #end while
    else:
        print("No project with name [{0}] is found.".format(project_name))
| gpl-3.0 | -5,244,898,743,986,705,000 | 40.622549 | 104 | 0.535862 | false | 4.139932 | false | false | false |
gljohn/meterd | meterd/currentcostd.py | 1 | 2227 | #!/usr/bin/python
##################################################
# currentcostd.py #
# #
# @author Gareth John <[email protected]> #
##################################################
import sys
import time
import logging
import logging.handlers
from daemon import Daemon
from meter import Meter
from parser import *
class MeterDaemon(Daemon):
    """Daemonized CurrentCost meter reader.

    Polls the serial meter every 6 seconds, parses each sample and
    submits it; logs to /var/log/meterd.log.
    """
    def __init__(self, pid):
        """Configure file logging and the serial Meter; *pid* is the pidfile path."""
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.INFO)
        ##create a file handler
        handler = logging.FileHandler('/var/log/meterd.log')
        handler.setLevel(logging.INFO)
        ##create a logging format
        formatter = logging.Formatter('%(asctime)s - '
                                      + '%(name)s - '
                                      + '%(levelname)s - '
                                      + '%(message)s')
        handler.setFormatter(formatter)
        ##add the handlers to the logger
        self.logger.addHandler(handler)
        self.logger.info('Logging started...')
        self.logger.info('Pid is:' + pid)
        super(MeterDaemon, self).__init__(pid)
        # Serial meter on /dev/ttyUSB0 at 57600 baud, 8 data bits.
        self.m = Meter('/dev/ttyUSB0',
                       57600,
                       8,
                       'mastermeter',
                       'watts',
                       self.logger)
    def run(self):
        """Daemon main loop: read, parse and submit a sample every 6 seconds."""
        self.m.open()
        self.logger.debug('Meter open.')
        while 1:
            self.m.read()
            self.m.parse()
            self.m.submit()
            time.sleep(6)
    def stop(self):
        """Close the serial port before stopping the daemon."""
        self.m.close()
        self.logger.debug('Meter close.')
        super(MeterDaemon, self).stop()
if __name__ == "__main__":
    # Simple start/stop/restart command-line front end for the daemon.
    daemon = MeterDaemon('/tmp/meter-daemon.pid')
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        else:
            print("Unknown command")
            sys.exit(2)
        sys.exit(0)
    else:
        print("usage: %s start|stop|restart" % sys.argv[0])
        sys.exit(2)
sbaogang/teletraan | deploy-board/deploy_board/webapp/env_views.py | 1 | 53714 | # Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Collection of all env related views
"""
from django.middleware.csrf import get_token
from django.shortcuts import render, redirect
from django.views.generic import View
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.contrib import messages
from deploy_board.settings import IS_PINTEREST
from django.conf import settings
import agent_report
import common
import random
import json
from helpers import builds_helper, environs_helper, agents_helper, ratings_helper, deploys_helper, \
systems_helper, environ_hosts_helper, clusters_helper, tags_helper, groups_helper, schedules_helper
import math
from dateutil.parser import parse
import calendar
from deploy_board.webapp.agent_report import TOTAL_ALIVE_HOST_REPORT, TOTAL_HOST_REPORT, ALIVE_STAGE_HOST_REPORT, \
FAILED_HOST_REPORT, UNKNOWN_HOST_REPORT, PROVISION_HOST_REPORT
from diff_match_patch import diff_match_patch
import traceback
import logging
import os
import datetime
import time
if IS_PINTEREST:
from helpers import s3_helper, autoscaling_groups_helper, private_builds_helper
# Cookie bookkeeping for the "recently visited environments" list.
ENV_COOKIE_NAME = 'teletraan.env.names'
ENV_COOKIE_CAPACITY = 5
# Pagination defaults for environment/deploy listings.
DEFAULT_TOTAL_PAGES = 7
DEFAULT_PAGE_SIZE = 30
BUILD_STAGE = "BUILD"
DEFAULT_ROLLBACK_DEPLOY_NUM = 6
# Cookies remembering the user's deploy-progress view preferences.
STATUS_COOKIE_NAME = 'sort-by-status'
MODE_COOKIE_NAME = 'show-mode'
log = logging.getLogger(__name__)
class EnvListView(View):
    """Landing page listing all environment names, one page at a time."""

    def get(self, request):
        """Render the paginated environment list with the global TELETRAAN tag."""
        page_index = int(request.GET.get('page_index', '1'))
        page_size = int(request.GET.get('page_size', DEFAULT_PAGE_SIZE))
        env_names = environs_helper.get_all_env_names(
            request, index=page_index, size=page_size)
        teletraan_tag = tags_helper.get_latest_by_targe_id(request, 'TELETRAAN')
        context = {
            "names": env_names,
            "pageIndex": page_index,
            "pageSize": DEFAULT_PAGE_SIZE,
            "disablePrevious": page_index <= 1,
            "disableNext": len(env_names) < DEFAULT_PAGE_SIZE,
            "envs_tag": teletraan_tag,
        }
        return render(request, 'environs/envs_landing.html', context)
class OverrideItem(object):
    """Value holder pairing a config key with its root value and its override."""

    def __init__(self, key=None, root=None, override=None):
        """Record the key plus its root-level and stage-override values."""
        self.key, self.root, self.override = key, root, override
def get_all_stages2(envs, stage):
    """Return (sorted stage names, selected env dict) for *envs*.

    Picks the env whose stageName equals *stage*; when *stage* is falsy
    or not present in *envs*, falls back to the alphabetically first stage.
    """
    stage_key = "stageName"
    names = sorted(e[stage_key] for e in envs)
    selected = None
    if stage:
        # Mirror the original: the last matching env wins.
        for candidate in envs:
            if candidate[stage_key] == stage:
                selected = candidate
    if selected is None:
        stage = names[0]
        for candidate in envs:
            if candidate[stage_key] == stage:
                selected = candidate
                break
    return names, selected
def _fetch_param_with_cookie(request, param_name, cookie_name, default):
"""Gets a parameter from the GET request, or from the cookie, or the default. """
saved_value = request.COOKIES.get(cookie_name, default)
return request.GET.get(param_name, saved_value)
def update_deploy_progress(request, name, stage):
    """Render the deploy-progress fragment for env *name*/*stage* and
    persist the user's view preferences (show mode, status sort) as cookies."""
    show_mode = _fetch_param_with_cookie(
        request, 'showMode', MODE_COOKIE_NAME, 'complete')
    sort_by_status = _fetch_param_with_cookie(
        request, 'sortByStatus', STATUS_COOKIE_NAME, 'true')
    env = environs_helper.get_env_by_stage(request, name, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    report = agent_report.gen_report(request, env, progress,
                                     sortByStatus=sort_by_status)
    report.showMode = show_mode
    report.sortByStatus = sort_by_status
    html = render_to_string('deploys/deploy_progress.tmpl',
                            {"report": report, "env": env})
    response = HttpResponse(html)
    # Remember the preferences for subsequent requests.
    response.set_cookie(MODE_COOKIE_NAME, show_mode)
    response.set_cookie(STATUS_COOKIE_NAME, sort_by_status)
    return response
def removeEnvCookie(request, name):
    """Drop *name* from the recent-envs cookie value.

    Keeps at most ENV_COOKIE_CAPACITY of the remaining names, in their
    original order; returns "" when the cookie is absent.
    """
    if ENV_COOKIE_NAME not in request.COOKIES:
        return ""
    kept = []
    for candidate in request.COOKIES[ENV_COOKIE_NAME].split(','):
        if len(kept) >= ENV_COOKIE_CAPACITY:
            break
        if candidate != name:
            kept.append(candidate)
    return ','.join(kept)
def genEnvCookie(request, name):
    """Return the updated recent-envs cookie value with *name* moved to the front.

    At most ENV_COOKIE_CAPACITY names are kept; duplicates of *name*
    already in the cookie are dropped.
    """
    if ENV_COOKIE_NAME not in request.COOKIES:
        return name
    recent = [name]
    for candidate in request.COOKIES[ENV_COOKIE_NAME].split(','):
        if len(recent) >= ENV_COOKIE_CAPACITY:
            break
        if candidate != name:
            recent.append(candidate)
    return ','.join(recent)
def getRecentEnvNames(request):
    """Return the list of recently visited env names from the cookie, or None."""
    raw = request.COOKIES.get(ENV_COOKIE_NAME)
    return raw.split(',') if raw is not None else None
def get_recent_envs(request):
    """Render the recently-visited environments sidebar fragment."""
    names = getRecentEnvNames(request)
    html = render_to_string('environs/simple_envs.tmpl', {
        "envNames": names,
        "isPinterest": IS_PINTEREST,
    })
    return HttpResponse(html)
def check_feedback_eligible(request, username):
    """Decide whether to prompt *username* for feedback.

    Only Pinterest deployments with an eligible user qualify, and even
    then the prompt fires for roughly 11% of requests (randrange <= 10).
    """
    if not (username and ratings_helper.is_user_eligible(request, username) and IS_PINTEREST):
        return False
    return random.randrange(0, 100) <= 10
class EnvLandingView(View):
    """Landing page for an environment stage.

    Shows capacity/provisioning state when nothing has been deployed yet,
    otherwise the live deploy-progress report. Also maintains the
    recently-visited-env and display-preference cookies.
    """

    def get(self, request, name, stage=None):
        # All stages of this env; bail out to the home page if none exist.
        envs = environs_helper.get_all_env_stages(request, name)
        if len(envs) == 0:
            return redirect('/')
        stages, env = common.get_all_stages(envs, stage)
        env_promote = environs_helper.get_env_promotes_config(request, name, env['stageName'])
        stage = env['stageName']
        username = request.teletraan_user_id.name
        # Randomly ask a small fraction of eligible users for feedback.
        request_feedback = check_feedback_eligible(request, username)
        groups = environs_helper.get_env_capacity(request, name, stage, capacity_type="GROUP")
        metrics = environs_helper.get_env_metrics_config(request, name, stage)
        alarms = environs_helper.get_env_alarms_config(request, name, stage)
        env_tag = tags_helper.get_latest_by_targe_id(request, env['id'])
        basic_cluster_info = None
        capacity_info = {'groups': groups}
        if IS_PINTEREST:
            basic_cluster_info = clusters_helper.get_cluster(request, env.get('clusterName'))
            capacity_info['cluster'] = basic_cluster_info
        if not env['deployId']:
            # Nothing deployed yet: show capacity and provisioning hosts
            # instead of a deploy report.
            capacity_hosts = deploys_helper.get_missing_hosts(request, name, stage)
            provisioning_hosts = environ_hosts_helper.get_hosts(request, name, stage)
            if IS_PINTEREST:
                basic_cluster_info = clusters_helper.get_cluster(request, env.get('clusterName'))
                if basic_cluster_info and basic_cluster_info.get('capacity'):
                    # Pad the host list with placeholder rows so the page
                    # reflects the cluster's full requested capacity.
                    hosts_in_cluster = groups_helper.get_group_hosts(request, env.get('clusterName'))
                    num_to_fake = basic_cluster_info.get('capacity') - len(hosts_in_cluster)
                    for i in range(num_to_fake):
                        faked_host = {}
                        faked_host['hostName'] = 'UNKNOWN'
                        faked_host['hostId'] = 'UNKNOWN'
                        faked_host['state'] = 'PROVISIONED'
                        provisioning_hosts.append(faked_host)
            response = render(request, 'environs/env_landing.html', {
                "envs": envs,
                "env": env,
                "env_promote": env_promote,
                "stages": stages,
                "metrics": metrics,
                "alarms": alarms,
                "request_feedback": request_feedback,
                "groups": groups,
                "capacity_hosts": capacity_hosts,
                "provisioning_hosts": provisioning_hosts,
                "basic_cluster_info": basic_cluster_info,
                "capacity_info": json.dumps(capacity_info),
                "env_tag": env_tag,
                "pinterest": IS_PINTEREST,
                "csrf_token": get_token(request),
            })
            showMode = 'complete'
            sortByStatus = 'true'
        else:
            # Get deploy progress
            progress = deploys_helper.update_progress(request, name, stage)
            # Display preferences: request param first, then cookie, then default.
            showMode = _fetch_param_with_cookie(
                request, 'showMode', MODE_COOKIE_NAME, 'complete')
            sortByStatus = _fetch_param_with_cookie(
                request, 'sortByStatus', STATUS_COOKIE_NAME, 'true')
            report = agent_report.gen_report(request, env, progress, sortByStatus=sortByStatus)
            report.showMode = showMode
            report.sortByStatus = sortByStatus
            response = render(request, 'environs/env_landing.html', {
                "envs": envs,
                "env": env,
                "env_promote": env_promote,
                "stages": stages,
                "report": report,
                "has_deploy": True,
                "metrics": metrics,
                "alarms": alarms,
                "request_feedback": request_feedback,
                "groups": groups,
                "basic_cluster_info": basic_cluster_info,
                "capacity_info": json.dumps(capacity_info),
                "env_tag": env_tag,
                "pinterest": IS_PINTEREST,
            })
        # save preferences
        response.set_cookie(ENV_COOKIE_NAME, genEnvCookie(request, name))
        response.set_cookie(MODE_COOKIE_NAME, showMode)
        response.set_cookie(STATUS_COOKIE_NAME, sortByStatus)
        return response
def _compute_range(totalItems, thisPageIndex, totalItemsPerPage, totalPagesToShow):
totalPages = int(math.ceil(float(totalItems) / totalItemsPerPage))
if totalItems <= 0:
return range(0), 0, 0
halfPagesToShow = totalPagesToShow / 2
startPageIndex = thisPageIndex - halfPagesToShow
if startPageIndex <= 0:
startPageIndex = 1
endPageIndex = startPageIndex + totalPagesToShow
if endPageIndex > totalPages:
endPageIndex = totalPages + 1
prevPageIndex = thisPageIndex - 1
nextPageIndex = thisPageIndex + 1
if nextPageIndex > totalPages:
nextPageIndex = 0
return range(startPageIndex, endPageIndex), prevPageIndex, nextPageIndex
def _convert_time(date_str, time_str):
    """Convert a date/time string pair to epoch milliseconds.

    Times are interpreted as Pacific (-08:00); a missing time defaults
    to midnight.
    """
    dt = parse("%s %s -08:00" % (date_str, time_str or "00:00:00"))
    return calendar.timegm(dt.utctimetuple()) * 1000
def _convert_2_timestamp(date_str):
    """Parse *date_str* (Pacific time by default) into epoch milliseconds."""
    parsed = parse(date_str)
    return calendar.timegm(parsed.utctimetuple()) * 1000
def _get_commit_info(request, commit, repo=None, branch='master'):
    """Resolve (repo, branch, commitDate) for a commit.

    Teletraan's own build records are consulted first; if the commit has
    no build, fall back to querying the SCM directly, which requires
    *repo*. Returns (None, None, None) when the commit cannot be resolved.
    """
    builds = builds_helper.get_builds(request, commit=commit)
    if builds:
        build = builds[0]
        return build['repo'], build['branch'], build['commitDate']
    if not repo:
        # Without repo, we can not call the SCM api, return None
        log.error("Repo is expected when query based on commit which has no build")
        return None, None, None
    try:
        commit_info = builds_helper.get_commit(request, repo, commit)
        return repo, branch, commit_info['date']
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior.
        log.error(traceback.format_exc())
        return None, None, None
def _gen_deploy_query_filter(request, from_date, from_time, to_date, to_time, size, reverse_date,
                             operator, commit, repo, branch):
    """Build the filter dict, human-readable title and URL query string
    for a deploy-history search.

    Returns (filter, filter_title, query_string); filter is None when the
    given commit cannot be resolved to repo/branch/date (bad commit).
    """
    filter = {}
    filter_text = ""
    query_string = ""
    bad_commit = False
    if commit:
        filter_text += "commit=%s, " % commit
        query_string += "commit=%s&" % commit
        if repo:
            filter_text += "repo=%s, " % repo
            query_string += "repo=%s&" % repo
        if branch:
            filter_text += "branch=%s, " % branch
            query_string += "branch=%s&" % branch
        repo, branch, commit_date = _get_commit_info(request, commit, repo, branch)
        if repo and branch and commit_date:
            filter['repo'] = repo
            filter['branch'] = branch
            filter['commit'] = commit
            filter['commitDate'] = commit_date
        else:
            bad_commit = True
    if from_date:
        if from_time:
            filter_text += "from=%sT%s, " % (from_date, from_time)
            query_string += "from_date=%s&from_time=%s&" % (from_date, from_time)
        else:
            filter_text += "from=%s, " % from_date
            # Bug fix: this branch used to emit "from_time=%s" % from_time,
            # i.e. "from_time=None", dropping the date from the query string.
            query_string += "from_date=%s&" % from_date
        after = _convert_time(from_date, from_time)
        filter['after'] = after
    if to_date:
        if to_time:
            filter_text += "to=%sT%s, " % (to_date, to_time)
            query_string += "to_date=%s&to_time=%s&" % (to_date, to_time)
        else:
            filter_text += "to=%s, " % to_date
            # Bug fix: same as above, for the 'to' bound.
            query_string += "to_date=%s&" % to_date
        before = _convert_time(to_date, to_time)
        filter['before'] = before
    if reverse_date and reverse_date.lower() == "true":
        filter_text += "earliest deploy first, "
        filter['oldestFirst'] = True
        query_string += "reverse_date=true&"
    if operator:
        filter_text += "operator=%s, " % operator
        filter['operator'] = operator
        query_string += "operator=%s&" % operator
    if size != DEFAULT_PAGE_SIZE:
        filter_text += "page_size=%s, " % size
        query_string += "page_size=%s&" % size
    if filter_text:
        # Strip the trailing ", " from the accumulated text.
        filter_title = "Filter (%s)" % filter_text[:-2]
    else:
        filter_title = "Filter"
    if bad_commit:
        return None, filter_title, query_string
    else:
        return filter, filter_title, query_string
def _gen_deploy_summary(request, deploys, for_env=None):
    """Attach env, build and tag info to each deploy for display.

    When *for_env* is given it is used for every summary; otherwise each
    deploy's env is fetched individually.
    """
    summaries = []
    for deploy in deploys:
        env = for_env if for_env else environs_helper.get(request, deploy['envId'])
        build_with_tag = builds_helper.get_build_and_tag(request, deploy['buildId'])
        summaries.append({
            'deploy': deploy,
            'env': env,
            'build': build_with_tag['build'],
            'buildTag': build_with_tag['tag'],
        })
    return summaries
def get_all_deploys(request):
    """Paged, filterable deploy history across all environments.

    Filter parameters (dates, commit/repo/branch, operator, ordering) come
    from the query string; an unresolvable commit renders an empty page.
    """
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', DEFAULT_PAGE_SIZE))
    from_date = request.GET.get('from_date')
    from_time = request.GET.get('from_time')
    to_date = request.GET.get('to_date')
    to_time = request.GET.get('to_time')
    commit = request.GET.get('commit')
    repo = request.GET.get('repo')
    branch = request.GET.get('branch')
    reverse_date = request.GET.get('reverse_date')
    operator = request.GET.get('operator')
    if not branch:
        branch = 'master'
    filter, filter_title, query_string = \
        _gen_deploy_query_filter(request, from_date, from_time, to_date, to_time, size,
                                 reverse_date, operator, commit, repo, branch)
    if filter is None:
        # specified a bad commit
        return render(request, 'deploys/all_history.html', {
            "deploy_summaries": [],
            "filter_title": filter_title,
            "pageIndex": index,
            "pageSize": size,
            "from_date": from_date,
            "from_time": from_time,
            "to_date": to_date,
            "to_time": to_time,
            "commit": commit,
            "repo": repo,
            "branch": branch,
            "reverse_date": reverse_date,
            "operator": operator,
            'pageRange': range(0),
            "prevPageIndex": 0,
            "nextPageIndex": 0,
            "query_string": query_string,
        })
    filter['pageIndex'] = index
    filter['pageSize'] = size
    result = deploys_helper.get_all(request, **filter)
    deploy_summaries = _gen_deploy_summary(request, result['deploys'])
    # Build the pagination window from the backend's total count.
    page_range, prevPageIndex, nextPageIndex = _compute_range(result['total'], index, size,
                                                              DEFAULT_TOTAL_PAGES)
    return render(request, 'deploys/all_history.html', {
        "deploy_summaries": deploy_summaries,
        "filter_title": filter_title,
        "pageIndex": index,
        "pageSize": size,
        "from_date": from_date,
        "from_time": from_time,
        "to_date": to_date,
        "to_time": to_time,
        "commit": commit,
        "repo": repo,
        "branch": branch,
        "reverse_date": reverse_date,
        "operator": operator,
        'pageRange': page_range,
        "prevPageIndex": prevPageIndex,
        "nextPageIndex": nextPageIndex,
        "query_string": query_string,
    })
def get_env_deploys(request, name, stage):
    """Paged, filterable deploy history for one environment stage.

    Same filters as get_all_deploys, but scoped to the env's id; an
    unresolvable commit renders an empty page.
    """
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', DEFAULT_PAGE_SIZE))
    from_date = request.GET.get('from_date', None)
    from_time = request.GET.get('from_time', None)
    to_date = request.GET.get('to_date', None)
    to_time = request.GET.get('to_time', None)
    commit = request.GET.get('commit', None)
    repo = request.GET.get('repo', None)
    branch = request.GET.get('branch', None)
    reverse_date = request.GET.get('reverse_date', None)
    operator = request.GET.get('operator', None)
    filter, filter_title, query_string = \
        _gen_deploy_query_filter(request, from_date, from_time, to_date, to_time, size,
                                 reverse_date, operator, commit, repo, branch)
    if filter is None:
        # Bad commit: render the page with no results.
        return render(request, 'environs/env_history.html', {
            "envs": envs,
            "env": env,
            "stages": stages,
            "deploy_summaries": [],
            "filter_title": filter_title,
            "pageIndex": index,
            "pageSize": size,
            "from_date": from_date,
            "from_time": from_time,
            "to_date": to_date,
            "to_time": to_time,
            "commit": commit,
            "repo": repo,
            "branch": branch,
            "reverse_date": reverse_date,
            "operator": operator,
            'pageRange': range(0),
            "prevPageIndex": 0,
            "nextPageIndex": 0,
            "query_string": query_string,
            "pinterest": IS_PINTEREST
        })
    filter['envId'] = [env['id']]
    filter['pageIndex'] = index
    filter['pageSize'] = size
    result = deploys_helper.get_all(request, **filter)
    deploy_summaries = _gen_deploy_summary(request, result['deploys'], for_env=env)
    page_range, prevPageIndex, nextPageIndex = _compute_range(result['total'], index, size,
                                                              DEFAULT_TOTAL_PAGES)
    return render(request, 'environs/env_history.html', {
        "envs": envs,
        "env": env,
        "stages": stages,
        "deploy_summaries": deploy_summaries,
        "filter_title": filter_title,
        "pageIndex": index,
        "pageSize": size,
        "from_date": from_date,
        "from_time": from_time,
        "to_date": to_date,
        "to_time": to_time,
        "commit": commit,
        "repo": repo,
        "branch": branch,
        "reverse_date": reverse_date,
        "operator": operator,
        'pageRange': page_range,
        "prevPageIndex": prevPageIndex,
        "nextPageIndex": nextPageIndex,
        "query_string": query_string,
        "pinterest": IS_PINTEREST
    })
def get_env_names(request):
    """Return every env name as a JSON array (single page, capped at 10000)."""
    # TODO create a loop to get all names
    names = environs_helper.get_all_env_names(request, index=1, size=10000)
    return HttpResponse(json.dumps(names), content_type="application/json")
def search_envs(request, filter):
    """Search env names by *filter*; redirect straight to a unique match."""
    matches = environs_helper.get_all_env_names(request, name_filter=filter,
                                                index=1, size=10000)
    if not matches:
        return redirect('/envs/')
    if len(matches) == 1:
        return redirect('/env/%s/' % matches[0])
    envs_tag = tags_helper.get_latest_by_targe_id(request, 'TELETRAAN')
    return render(request, 'environs/envs_landing.html', {
        "names": matches,
        "pageIndex": 1,
        "pageSize": DEFAULT_PAGE_SIZE,
        "disablePrevious": True,
        "disableNext": True,
        "envs_tag": envs_tag,
    })
def post_create_env(request):
    """Create a new env/stage, optionally cloned from an existing stage."""
    # TODO how to validate envName
    params = request.POST
    env_name = params["env_name"]
    stage_name = params["stage_name"]
    clone_env_name = params.get("clone_env_name")
    clone_stage_name = params.get("clone_stage_name")
    description = params.get('description')
    if clone_env_name and clone_stage_name:
        common.clone_from_stage_name(request, env_name, stage_name, clone_env_name,
                                     clone_stage_name, description)
    else:
        payload = {
            'envName': env_name,
            'stageName': stage_name,
            'description': description,
        }
        environs_helper.create_env(request, payload)
    return redirect('/env/' + env_name + '/' + stage_name + '/config/')
class EnvNewDeployView(View):
    """Pick-a-build page (GET) plus the POST that kicks off the deploy."""

    def get(self, request, name, stage):
        env = environs_helper.get_env_by_stage(request, name, stage)
        env_promote = environs_helper.get_env_promotes_config(request, name, stage)
        current_build = None
        if env.get('deployId'):
            deploy = deploys_helper.get(request, env['deployId'])
            current_build = builds_helper.get_build(request, deploy['buildId'])
        return render(request, 'deploys/new_deploy.html', {
            "env": env,
            "env_promote": env_promote,
            "buildName": env['buildName'],
            "current_build": current_build,
            "pageIndex": 1,
            "pageSize": common.DEFAULT_BUILD_SIZE,
        })

    def post(self, request, name, stage):
        common.deploy(request, name, stage)
        # The ngapp2 A/B stages share one combined deploy page.
        if name in ('ngapp2-A', 'ngapp2-B'):
            return redirect("/env/ngapp2/deploy/?stage=2")
        return redirect('/env/%s/%s/deploy' % (name, stage))
def post_add_stage(request, name):
    """Add a stage to env *name*, optionally cloned from another stage."""
    # TODO how to validate stage name
    params = request.POST
    stage = params.get("stage")
    from_stage = params.get("from_stage")
    description = params.get("description")
    if from_stage:
        common.clone_from_stage_name(request, name, stage, name, from_stage, description)
    else:
        payload = {
            'envName': name,
            'stageName': stage,
            'description': description,
        }
        environs_helper.create_env(request, payload)
    return redirect('/env/' + name + '/' + stage + '/config/')
def remove_stage(request, name, stage):
    """Delete a stage; prune the env from the recent-env cookie when the
    last stage is gone."""
    # TODO so we need to make sure the capacity is empty???
    environs_helper.delete_env(request, name, stage)
    remaining = environs_helper.get_all_env_stages(request, name)
    response = redirect('/env/' + name)
    if not remaining:
        cookie_value = removeEnvCookie(request, name)
        if cookie_value:
            response.set_cookie(ENV_COOKIE_NAME, cookie_value)
        else:
            response.delete_cookie(ENV_COOKIE_NAME)
    return response
def get_builds(request, name, stage):
    """Render the list of builds newer than the currently deployed one.

    Shows a lock indicator when the stage auto-promotes from the build
    stage. An env with no buildName renders an empty list.
    """
    env = environs_helper.get_env_by_stage(request, name, stage)
    env_promote = environs_helper.get_env_promotes_config(request, name, stage)
    show_lock = False
    if env_promote['type'] == 'AUTO' and env_promote['predStage'] and \
            env_promote['predStage'] == environs_helper.BUILD_STAGE:
        show_lock = True
    # Bug fix: the original test ('buildName' not in env and not
    # env['buildName']) raised KeyError when the key was absent and never
    # fired when it was present but empty; env.get covers both cases.
    if not env.get('buildName'):
        html = render_to_string('builds/simple_builds.tmpl', {
            "builds": [],
            "env": env,
            "show_lock": show_lock,
        })
        return HttpResponse(html)
    current_publish_date = 0
    if env.get('deployId'):
        deploy = deploys_helper.get(request, env['deployId'])
        build = builds_helper.get_build(request, deploy['buildId'])
        current_publish_date = build['publishDate']
    # return only the new builds
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', common.DEFAULT_BUILD_SIZE))
    builds = builds_helper.get_builds_and_tags(request, name=env['buildName'], pageIndex=index,
                                               pageSize=size)
    new_builds = []
    for build in builds:
        if build['build']['publishDate'] > current_publish_date:
            new_builds.append(build)
    html = render_to_string('builds/simple_builds.tmpl', {
        "builds": new_builds,
        "current_publish_date": current_publish_date,
        "env": env,
        "show_lock": show_lock,
    })
    return HttpResponse(html)
def upload_private_build(request, name, stage):
    # Hand the uploaded build artifact (the 'file' form field) off to the
    # private-build helper for this env stage.
    return private_builds_helper.handle_uploaded_build(request, request.FILES['file'], name, stage)
def get_groups(request, name, stage):
    """Render the simple capacity-group list for an env stage."""
    groups = common.get_env_groups(request, name, stage)
    rendered = render_to_string('groups/simple_groups.tmpl', {
        "groups": groups,
    })
    return HttpResponse(rendered)
def deploy_build(request, name, stage, build_id):
    """Render the confirmation page for deploying *build_id* to an env stage."""
    env = environs_helper.get_env_by_stage(request, name, stage)
    current_build = None
    deploy_state = None
    if env.get('deployId'):
        # Fetch the current deploy once; the original called
        # deploys_helper.get twice for the same deployId.
        current_deploy = deploys_helper.get(request, env['deployId'])
        current_build = builds_helper.get_build(request, current_deploy['buildId'])
        deploy_state = current_deploy['state']
    build = builds_helper.get_build_and_tag(request, build_id)
    builds = [build]
    scm_url = systems_helper.get_scm_url(request)
    html = render_to_string('deploys/deploy_build.html', {
        "env": env,
        "builds": builds,
        "current_build": current_build,
        "scm_url": scm_url,
        "buildName": env.get('buildName'),
        "branch": env.get('branch'),
        "csrf_token": get_token(request),
        "deployState": deploy_state,
        "overridePolicy": env.get('overridePolicy'),
    })
    return HttpResponse(html)
def deploy_commit(request, name, stage, commit):
    """Show the builds of *commit* so one can be chosen for deployment."""
    env = environs_helper.get_env_by_stage(request, name, stage)
    builds = builds_helper.get_builds_and_tags(request, commit=commit)
    current_build = None
    if env.get('deployId'):
        deploy = deploys_helper.get(request, env['deployId'])
        current_build = builds_helper.get_build(request, deploy['buildId'])
    scm_url = systems_helper.get_scm_url(request)
    context = {
        "env": env,
        "builds": builds,
        "current_build": current_build,
        "scm_url": scm_url,
        "buildName": env.get('buildName'),
        "branch": env.get('branch'),
        "csrf_token": get_token(request),
    }
    return HttpResponse(render_to_string('deploys/deploy_build.html', context))
def promote_to(request, name, stage, deploy_id):
    """Promote *deploy_id* to every stage listed in POST 'toStages'.

    Redirects to the deploy page of the last stage promoted to.
    """
    params = request.POST
    description = params['description']
    to_stage = None
    for to_stage in params['toStages'].split(','):
        deploys_helper.promote(request, name, to_stage, deploy_id, description)
    return redirect('/env/%s/%s/deploy' % (name, to_stage))
def restart(request, name, stage):
    """Restart the current deploy on this env stage."""
    common.restart(request, name, stage)
    target = '/env/%s/%s/deploy' % (name, stage)
    return redirect(target)
def rollback_to(request, name, stage, deploy_id):
    """Roll this env stage back to *deploy_id*."""
    common.rollback_to(request, name, stage, deploy_id)
    target = '/env/%s/%s/deploy' % (name, stage)
    return redirect(target)
def rollback(request, name, stage):
    """Show the rollback candidate list for an env stage.

    Pulls the most recent deploys (minus the current one), defaults the
    rollback target to the newest SUCCEEDED deploy unless 'to_deploy_id'
    is passed, and annotates the chosen target's branch/commit/build.
    """
    to_deploy_id = request.GET.get('to_deploy_id', None)
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    result = deploys_helper.get_all(request, envId=[env['id']], pageIndex=1,
                                    pageSize=DEFAULT_ROLLBACK_DEPLOY_NUM)
    deploys = result.get("deploys")
    # The first entry is the current deploy; it is not a rollback candidate.
    if deploys:
        deploys.pop(0)
    # append the build info
    deploy_summaries = []
    branch = None
    commit = None
    build_id = None
    for deploy in deploys:
        build_info = builds_helper.get_build_and_tag(request, deploy['buildId'])
        build = build_info["build"]
        tag = build_info.get("tag", None)
        summary = {}
        summary['deploy'] = deploy
        summary['build'] = build
        summary['tag'] = tag
        if not to_deploy_id and deploy['state'] == 'SUCCEEDED':
            to_deploy_id = deploy['id']
        if to_deploy_id and to_deploy_id == deploy['id']:
            branch = build['branch']
            commit = build['commitShort']
            build_id = build['id']
        deploy_summaries.append(summary)
    # Bug fix: the context dict listed "envs" twice; the duplicate is dropped.
    html = render_to_string("environs/env_rollback.html", {
        "envs": envs,
        "stages": stages,
        "env": env,
        "deploy_summaries": deploy_summaries,
        "to_deploy_id": to_deploy_id,
        "branch": branch,
        "commit": commit,
        "build_id": build_id,
        "csrf_token": get_token(request),
    })
    return HttpResponse(html)
def get_deploy(request, name, stage, deploy_id):
    """Show the detail page of a single deploy."""
    deploy = deploys_helper.get(request, deploy_id)
    build = builds_helper.get_build(request, deploy['buildId'])
    env = environs_helper.get_env_by_stage(request, name, stage)
    context = {
        "deploy": deploy,
        "csrf_token": get_token(request),
        "build": build,
        "env": env,
    }
    return render(request, 'environs/env_deploy_details.html', context)
def promote(request, name, stage, deploy_id):
    """Show the promote dialog for a deploy, listing every stage of the
    env together with its promote configuration."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    env_wrappers = []
    for temp_env in envs:
        env_wrapper = {}
        env_wrapper["env"] = temp_env
        env_wrapper["env_promote"] = environs_helper.get_env_promotes_config(request,
                                                                            temp_env['envName'],
                                                                            temp_env['stageName'])
        env_wrappers.append(env_wrapper)
    deploy = deploys_helper.get(request, deploy_id)
    build = builds_helper.get_build(request, deploy['buildId'])
    # Bug fix: the context dict listed "envs" twice; the duplicate is dropped.
    html = render_to_string("environs/env_promote.html", {
        "envs": envs,
        "stages": stages,
        "env": env,
        "env_wrappers": env_wrappers,
        "deploy": deploy,
        "build": build,
        "csrf_token": get_token(request),
    })
    return HttpResponse(html)
def pause(request, name, stage):
    """Pause the ongoing deploy and return to the deploy page."""
    deploys_helper.pause(request, name, stage)
    target = '/env/%s/%s/deploy' % (name, stage)
    return redirect(target)
def resume(request, name, stage):
    """Resume a paused deploy and return to the deploy page."""
    deploys_helper.resume(request, name, stage)
    target = '/env/%s/%s/deploy' % (name, stage)
    return redirect(target)
def enable_env_change(request, name, stage):
    """Re-allow changes on one env stage, recording the POSTed reason."""
    description = request.POST.get('description')
    environs_helper.enable_env_changes(request, name, stage, description)
    return redirect('/env/%s/%s/deploy' % (name, stage))
def disable_env_change(request, name, stage):
    """Block changes on one env stage, recording the POSTed reason."""
    description = request.POST.get('description')
    environs_helper.disable_env_changes(request, name, stage, description)
    return redirect('/env/%s/%s/deploy' % (name, stage))
def enable_all_env_change(request):
    """Re-allow changes across every environment (system-wide unlock)."""
    description = request.POST.get('description')
    environs_helper.enable_all_env_changes(request, description)
    return redirect('/envs/')
def disable_all_env_change(request):
    """Block changes across every environment (system-wide lock)."""
    description = request.POST.get('description')
    environs_helper.disable_all_env_changes(request, description)
    return redirect('/envs/')
# get all reachable hosts
def get_hosts(request, name, stage):
    """List every reachable host for the stage, grouped by deploy id."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    agents = agents_helper.get_agents(request, env['envName'], env['stageName'])
    agents_wrapper = {}
    for agent in agents:
        agents_wrapper.setdefault(agent['deployId'], []).append(agent)
    return render(request, 'environs/env_hosts.html', {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": "All hosts",
    })
# get total alive hosts (hostStage == -1000)
# get alive hosts by using deploy_id and its stage (hostStage = 0 ~ 8)
def get_hosts_by_deploy(request, name, stage, deploy_id):
    """List alive hosts for one deploy.

    The optional 'hostStage' GET parameter narrows the list to a single
    deploy stage; absent, all alive hosts are reported.
    """
    hostStage = request.GET.get('hostStage')
    if hostStage is None:
        hostStage = TOTAL_ALIVE_HOST_REPORT
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    agents_wrapper = agent_report.gen_agent_by_deploy(progress, deploy_id,
                                                      ALIVE_STAGE_HOST_REPORT, hostStage)
    return render(request, 'environs/env_hosts.html', {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": "All hosts with deploy " + deploy_id,
    })
# reset all failed hosts for this env, this deploy
def reset_failed_hosts(request, name, stage, deploy_id):
    """Retry every failed agent of this deploy; returns an empty JSON stub."""
    agents_helper.reset_failed_agents(request, name, stage, deploy_id)
    payload = json.dumps({'html': ''})
    return HttpResponse(payload, content_type="application/json")
# retry failed deploy stage for this env, this host
def reset_deploy(request, name, stage, host_id):
    """Retry the failed deploy stage on one host; returns an empty JSON stub."""
    agents_helper.retry_deploy(request, name, stage, host_id)
    payload = json.dumps({'html': ''})
    return HttpResponse(payload, content_type="application/json")
# pause deploy for this this env, this host
def pause_deploy(request, name, stage, host_id):
    """Pause the deploy on one host; returns an empty JSON stub."""
    agents_helper.pause_deploy(request, name, stage, host_id)
    payload = json.dumps({'html': ''})
    return HttpResponse(payload, content_type="application/json")
# resume deploy stage for this env, this host
def resume_deploy(request, name, stage, host_id):
    """Resume the deploy on one host; returns an empty JSON stub."""
    agents_helper.resume_deploy(request, name, stage, host_id)
    payload = json.dumps({'html': ''})
    return HttpResponse(payload, content_type="application/json")
# pause hosts for this env and stage
def pause_hosts(request, name, stage):
    """Pause deploys on the hosts listed in POST 'hostIds' (comma separated).

    Without 'hostIds' the helper is called with None (all hosts).
    """
    post_params = request.POST
    host_ids = None
    if 'hostIds' in post_params:
        host_ids = [h.strip() for h in post_params['hostIds'].split(',')]
    environs_helper.pause_hosts(request, name, stage, host_ids)
    return redirect('/env/{}/{}/'.format(name, stage))
# resume hosts for this env and stage
def resume_hosts(request, name, stage):
    """Resume deploys on the hosts listed in POST 'hostIds' (comma separated).

    Without 'hostIds' the helper is called with None (all hosts).
    """
    post_params = request.POST
    host_ids = None
    if 'hostIds' in post_params:
        host_ids = [h.strip() for h in post_params['hostIds'].split(',')]
    environs_helper.resume_hosts(request, name, stage, host_ids)
    return redirect('/env/{}/{}/'.format(name, stage))
# reset hosts for this env and stage
def reset_hosts(request, name, stage):
    """Reset the hosts listed in POST 'hostIds' (comma separated).

    Without 'hostIds' the helper is called with None (all hosts).
    """
    post_params = request.POST
    host_ids = None
    if 'hostIds' in post_params:
        host_ids = [h.strip() for h in post_params['hostIds'].split(',')]
    environs_helper.reset_hosts(request, name, stage, host_ids)
    return redirect('/env/{}/{}/hosts'.format(name, stage))
# get total unknown(unreachable) hosts
def get_unknown_hosts(request, name, stage):
    """List unreachable (unknown) hosts for the current deploy."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    agents_wrapper = agent_report.gen_agent_by_deploy(progress, env['deployId'],
                                                      UNKNOWN_HOST_REPORT)
    # Typo fix: the page title previously read "Unknow hosts".
    title = "Unknown hosts"
    return render(request, 'environs/env_hosts.html', {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": title,
    })
# get provisioning hosts
def get_provisioning_hosts(request, name, stage):
    """List hosts that are still provisioning for the current deploy."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    agents_wrapper = agent_report.gen_agent_by_deploy(progress, env['deployId'],
                                                      PROVISION_HOST_REPORT)
    context = {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": "Provisioning hosts",
    }
    return render(request, 'environs/env_hosts.html', context)
# get total (unknown+alive) hosts
def get_all_hosts(request, name, stage):
    """List all hosts (alive plus unknown) for the current deploy."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    agents_wrapper = agent_report.gen_agent_by_deploy(progress, env['deployId'],
                                                      TOTAL_HOST_REPORT)
    context = {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": "All hosts",
    }
    return render(request, 'environs/env_hosts.html', context)
# get failed (but alive) hosts (agent status > 0)
def get_failed_hosts(request, name, stage):
    """List alive-but-failed hosts (agent status > 0) with a retry action."""
    envs = environs_helper.get_all_env_stages(request, name)
    stages, env = common.get_all_stages(envs, stage)
    progress = deploys_helper.update_progress(request, name, stage)
    agents_wrapper = agent_report.gen_agent_by_deploy(progress, env['deployId'],
                                                      FAILED_HOST_REPORT)
    # .get with a default avoids a KeyError when no agent failed for this
    # deploy (the original indexed agents_wrapper[env['deployId']] directly).
    failed_agents = agents_wrapper.get(env['deployId'], [])
    failed_hosts = [agent['hostId'] for agent in failed_agents]
    host_ids = ",".join(failed_hosts)
    return render(request, 'environs/env_hosts.html', {
        "envs": envs,
        "env": env,
        "stages": stages,
        "agents_wrapper": agents_wrapper,
        "title": "Failed Hosts",
        "is_retryable": True,
        "host_ids": host_ids,
        "pinterest": IS_PINTEREST,
    })
def get_pred_deploys(request, name, stage):
    """List deploys on the predecessor stage that are newer than what is
    currently deployed here (i.e. promotion candidates)."""
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', DEFAULT_PAGE_SIZE))
    env = environs_helper.get_env_by_stage(request, name, stage)
    env_promote = environs_helper.get_env_promotes_config(request, name, stage)
    show_lock = False
    predStage = env_promote.get('predStage')
    # A non-MANUAL promote type with a predecessor means auto-promotion is
    # active, so show the lock indicator.
    if env_promote['type'] != "MANUAL" and predStage:
        show_lock = True
    current_startDate = 0
    if not predStage or predStage == "BUILD":
        deploys = []
    else:
        pred_env = environs_helper.get_env_by_stage(request, name, predStage)
        result = deploys_helper.get_all(request, envId=[pred_env['id']], pageIndex=index,
                                        pageSize=size)
        deploys = result["deploys"]
        if env.get('deployId'):
            deploy = deploys_helper.get(request, env['deployId'])
            build = builds_helper.get_build(request, deploy['buildId'])
            current_startDate = build['publishDate']
    deploy_wrappers = []
    for deploy in deploys:
        build = builds_helper.get_build(request, deploy['buildId'])
        # Only builds newer than the currently deployed one are candidates.
        if build['publishDate'] > current_startDate:
            deploy_wrapper = {}
            deploy_wrapper['deploy'] = deploy
            deploy_wrapper['build'] = build
            deploy_wrappers.append(deploy_wrapper)
    html = render_to_string('deploys/simple_pred_deploys.tmpl', {
        "deploy_wrappers": deploy_wrappers,
        "envName": name,
        "stageName": predStage,
        "show_lock": show_lock,
        "current_startDate": current_startDate,
    })
    return HttpResponse(html)
def warn_for_deploy(request, name, stage, buildId):
    """ Returns a warning message if:
    1. The build has been tagged as a bad build
    2. a build doesn't have a successful deploy on the preceding stage.
    TODO: we would have call backend twice since the getAllDeploys call does not support filtering on multiple states;
    Also, getAllDeploys return all deploys with commits after the specific commit, it would be good if there is options
    to return the exact matched deploys.
    """
    build_info = builds_helper.get_build_and_tag(request, buildId)
    build = build_info["build"]
    tag = build_info.get("tag")
    if tag is not None and tag["value"] == tags_helper.TagValue.BAD_BUILD:
        html = render_to_string('warn_deploy_bad_build.tmpl', {
            'tag': tag,
        })
        return HttpResponse(html)
    env_promote = environs_helper.get_env_promotes_config(request, name, stage)
    pred_stage = env_promote.get('predStageName')
    # No predecessor (or the predecessor is the build stage): nothing to warn.
    if not pred_stage or pred_stage == BUILD_STAGE:
        return HttpResponse("")
    pred_env = environs_helper.get_env_by_stage(request, name, pred_stage)
    filter = {}
    filter['envId'] = [pred_env['id']]
    filter['commit'] = build['commit']
    filter['repo'] = build['repo']
    filter['oldestFirst'] = True
    # First look for an in-flight successful deploy on the predecessor...
    filter['deployState'] = "SUCCEEDING"
    filter['pageIndex'] = 1
    filter['pageSize'] = 1
    result = deploys_helper.get_all(request, **filter)
    succeeding_deploys = result['deploys']
    if succeeding_deploys:
        return HttpResponse("")
    # ...then for a completed one (two calls because the backend cannot
    # filter on multiple states at once; see the TODO above).
    filter['deployState'] = "SUCCEEDED"
    result = deploys_helper.get_all(request, **filter)
    succeeded_deploys = result['deploys']
    if succeeded_deploys:
        return HttpResponse("")
    html = render_to_string('warn_no_success_deploy_in_pred.tmpl', {
        'envName': name,
        'predStageName': pred_stage,
    })
    return HttpResponse(html)
def get_env_config_history(request, name, stage):
    """Show the paged config-change history for an env stage."""
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', DEFAULT_PAGE_SIZE))
    env = environs_helper.get_env_by_stage(request, name, stage)
    configs = environs_helper.get_config_history(request, name, stage, index, size)
    # Escape characters that would break the change string when embedded in
    # a URL/attribute; applied in the same order as before (',' first).
    escapes = ((",", ", "), ("#", "%23"), ("\"", "%22"),
               ("{", "%7B"), ("}", "%7D"), ("_", "%5F"))
    for config in configs:
        replaced_config = config["configChange"]
        for old, new in escapes:
            replaced_config = replaced_config.replace(old, new)
        config["replaced_config"] = replaced_config
    return render(request, 'configs/config_history.html', {
        "envName": name,
        "stageName": stage,
        "envId": env['id'],
        "configs": configs,
        "pageIndex": index,
        "pageSize": DEFAULT_PAGE_SIZE,
        "disablePrevious": index <= 1,
        "disableNext": len(configs) < DEFAULT_PAGE_SIZE,
    })
def _parse_config_comparison(query_dict):
configs = {}
for key, value in query_dict.iteritems():
if key.startswith('chkbox_'):
id = key[len('chkbox_'):]
split_data = value.split('_')
config_change = split_data[1]
configs[id] = config_change
return configs
def get_config_comparison(request, name, stage):
    """Return the two selected config changes as JSON for comparison.

    Responds with an empty body when fewer than two boxes were checked.
    """
    configs = _parse_config_comparison(request.POST)
    if len(configs) > 1:
        # list() is required on Python 3, where dict.keys() returns a view
        # that cannot be indexed.
        ids = list(configs.keys())
        change1 = configs[ids[0]]
        change2 = configs[ids[1]]
        return HttpResponse(json.dumps({'change1': change1, 'change2': change2}),
                            content_type="application/json")
    return HttpResponse("", content_type="application/json")
def show_config_comparison(request, name, stage):
    """Render a colored side-by-side diff of two config changes."""
    change1 = request.GET.get('change1')
    change2 = request.GET.get('change2')
    differ = GenerateDiff()
    diffs = differ.diff_main(change1, change2)
    differ.diff_cleanupSemantic(diffs)
    return render(request, 'configs/env_config_comparison_result.html', {
        "envName": name,
        "stageName": stage,
        "oldChange": differ.old_content(diffs),
        "newChange": differ.new_content(diffs),
    })
def get_deploy_schedule(request, name, stage):
    """Render the deploy-schedule page for one environment stage."""
    env = environs_helper.get_env_by_stage(request, name, stage)
    envs = environs_helper.get_all_env_stages(request, name)
    # Fixed idiom: compare to None with `is not`, and drop the stray
    # trailing semicolon the original carried.
    schedule_id = env.get('scheduleId')
    if schedule_id is not None:
        schedule = schedules_helper.get_schedule(request, name, stage, schedule_id)
    else:
        # The stage has no schedule configured yet.
        schedule = None
    agent_number = agents_helper.get_agents_total_by_env(request, env["id"])
    return render(request, 'deploys/deploy_schedule.html', {
        "envs": envs,
        "env": env,
        "schedule": schedule,
        "agent_number": agent_number,
    })
class GenerateDiff(diff_match_patch):
    """Render the two sides of a diff_match_patch diff as HTML fragments.

    ``old_content`` highlights deletions (red), ``new_content`` highlights
    insertions (green); unchanged runs are wrapped in plain <span> tags.
    """

    @staticmethod
    def _escape(data):
        """HTML-escape *data* and insert <br> line breaks for readability.

        Fix: the escape targets had degenerated into identity replacements
        (e.g. replacing "&" with "&"), so raw markup leaked into the page;
        proper HTML entities are restored here. '&' must be escaped first
        so the entities added afterwards survive.
        """
        return (data.replace("&", "&amp;")
                    .replace("<", "&lt;")
                    .replace(">", "&gt;")
                    .replace("\n", "<br>")
                    .replace(",", ",<br>"))

    def old_content(self, diffs):
        """Return HTML for the old side of *diffs* (deletions highlighted)."""
        html = []
        for (flag, data) in diffs:
            text = self._escape(data)
            if flag == self.DIFF_DELETE:
                html.append("""<b style=\"background:#FFB5B5;
                \">%s</b>""" % text)
            elif flag == self.DIFF_EQUAL:
                html.append("<span>%s</span>" % text)
        return "".join(html)

    def new_content(self, diffs):
        """Return HTML for the new side of *diffs* (insertions highlighted)."""
        html = []
        for (flag, data) in diffs:
            text = self._escape(data)
            if flag == self.DIFF_INSERT:
                html.append("""<b style=\"background:#97f697;
                \">%s</b>""" % text)
            elif flag == self.DIFF_EQUAL:
                html.append("<span>%s</span>" % text)
        return "".join(html)
def get_new_commits(request, name, stage):
    """Render the commits added since the last completed deploy of this stage."""
    env = environs_helper.get_env_by_stage(request, name, stage)
    current_deploy = deploys_helper.get(request, env['deployId'])
    current_build = builds_helper.get_build(request, current_deploy['buildId'])
    startSha = current_build['commit']
    repo = current_build['repo']
    scm_url = systems_helper.get_scm_url(request)
    # Default diff link compares the current commit against itself; it is
    # replaced below once a previous deploy (and its commit) is known.
    diffUrl = "%s/%s/compare/%s...%s" % (scm_url, repo, startSha, startSha)
    last_deploy = common.get_last_completed_deploy(request, env)
    if not last_deploy:
        # Nothing to diff against: render with start == end.
        return render(request, 'deploys/deploy_commits.html', {
            "env": env,
            "title": "No previous deploy found!",
            "startSha": startSha,
            "endSha": startSha,
            "repo": repo,
            "diffUrl": diffUrl,
        })
    last_build = builds_helper.get_build(request, last_deploy['buildId'])
    endSha = last_build['commit']
    diffUrl = "%s/%s/compare/%s...%s" % (scm_url, repo, endSha, startSha)
    return render(request, 'deploys/deploy_commits.html', {
        "env": env,
        "startSha": startSha,
        "endSha": endSha,
        "repo": repo,
        "title": "Commits since last deploy",
        "diffUrl": diffUrl,
    })
def compare_deploys(request, name, stage):
    """Render the commit list between a chosen deploy and an end point.

    The end point is the ``end_deploy`` query parameter when given,
    otherwise the deploy immediately preceding ``start_deploy`` (falling
    back to ``start_deploy`` itself when there is none).
    """
    start_deploy_id = request.GET.get('start_deploy', None)
    start_deploy = deploys_helper.get(request, start_deploy_id)
    start_build = builds_helper.get_build(request, start_deploy['buildId'])
    startSha = start_build['commit']
    repo = start_build['repo']
    end_deploy_id = request.GET.get('end_deploy', None)
    if end_deploy_id:
        end_deploy = deploys_helper.get(request, end_deploy_id)
    else:
        env = environs_helper.get_env_by_stage(request, name, stage)
        previous = common.get_previous_deploy(request, env, start_deploy)
        end_deploy = previous if previous else start_deploy
    end_build = builds_helper.get_build(request, end_deploy['buildId'])
    endSha = end_build['commit']
    commits, truncated, new_start_sha = common.get_commits_batch(
        request, repo, startSha, endSha, keep_first=True)
    context = {
        "commits": commits,
        "start_sha": new_start_sha,
        "end_sha": endSha,
        "repo": repo,
        "truncated": truncated,
        "show_checkbox": False,
    }
    return HttpResponse(render_to_string('builds/commits.tmpl', context))
def compare_deploys_2(request, name, stage):
    """Render the commits between the two builds selected via checkboxes."""
    env = environs_helper.get_env_by_stage(request, name, stage)
    configs = {}
    # items() instead of the Python-2-only iteritems() keeps this working
    # on both Python 2 and Python 3.
    for key, value in request.GET.items():
        if key.startswith('chkbox_'):
            index = key[len('chkbox_'):]
            configs[index] = value
    # list() materializes the keys so they can be indexed; on Python 3
    # dict.keys() returns a non-indexable view.
    indexes = list(configs)
    start_build_id = configs[indexes[0]]
    end_build_id = configs[indexes[1]]
    # Order the two selections by their numeric checkbox index.
    if int(indexes[0]) > int(indexes[1]):
        start_build_id = configs[indexes[1]]
        end_build_id = configs[indexes[0]]
    start_build = builds_helper.get_build(request, start_build_id)
    startSha = start_build['commit']
    repo = start_build['repo']
    end_build = builds_helper.get_build(request, end_build_id)
    endSha = end_build['commit']
    scm_url = systems_helper.get_scm_url(request)
    diffUrl = "%s/%s/compare/%s...%s" % (scm_url, repo, endSha, startSha)
    return render(request, 'deploys/deploy_commits.html', {
        "env": env,
        "startSha": startSha,
        "endSha": endSha,
        "repo": repo,
        "title": "Commits between the two deploys",
        "diffUrl": diffUrl,
    })
def add_instance(request, name, stage):
    """Launch additional hosts for a group and report the outcome.

    Hosts are launched either through the group's auto-scaling group or
    directly into a chosen subnet, depending on the posted form fields.
    A success or error flash message is attached to the request and the
    user is redirected back to the deploy page.
    """
    params = request.POST
    groupName = params['groupName']
    num = int(params['instanceCnt'])
    asg_status = params['asgStatus']

    # Decide whether to launch through the ASG or directly into a subnet.
    subnet = params.get('subnet')
    launch_in_asg = True
    if subnet is not None:
        if asg_status == 'UNKNOWN':
            launch_in_asg = False
    elif 'customSubnet' in params:
        launch_in_asg = False

    try:
        if launch_in_asg:
            autoscaling_groups_helper.launch_hosts(request, groupName, num, None)
            content = 'Capacity increased by {}'.format(num)
            messages.add_message(request, messages.SUCCESS, content)
        elif not subnet:
            content = 'Failed to launch hosts to group {}. Please choose subnets in' \
                      ' <a href="https://deploy.pinadmin.com/groups/{}/config/">group config</a>.' \
                      ' If you have any question, please contact your friendly Teletraan owners' \
                      ' for immediate assistance!'.format(groupName, groupName)
            messages.add_message(request, messages.ERROR, content)
        else:
            host_ids = autoscaling_groups_helper.launch_hosts(request, groupName, num, subnet)
            if len(host_ids) > 0:
                content = '{} hosts have been launched to group {} (host ids: {})'.format(num, groupName, host_ids)
                messages.add_message(request, messages.SUCCESS, content)
            else:
                content = 'Failed to launch hosts to group {}. Please make sure the' \
                          ' <a href="https://deploy.pinadmin.com/groups/{}/config/">group config</a>' \
                          ' is correct. If you have any question, please contact your friendly Teletraan owners' \
                          ' for immediate assistance!'.format(groupName, groupName)
                messages.add_message(request, messages.ERROR, content)
    except:
        # Log every failure before letting it propagate to the caller.
        log.error(traceback.format_exc())
        raise
    return redirect('/env/{}/{}/deploy'.format(name, stage))
def get_tag_message(request):
    """Render the latest Teletraan-wide tag message as an HTML fragment."""
    latest_tag = tags_helper.get_latest_by_targe_id(request, 'TELETRAAN')
    context = {'envs_tag': latest_tag}
    return HttpResponse(render_to_string('environs/tag_message.tmpl', context))
def update_schedule(request, name, stage):
    """Update the deploy schedule of an environment stage from POST data."""
    params = request.POST
    # Copy exactly the three schedule fields the backend expects.
    data = {field: params[field]
            for field in ('cooldownTimes', 'hostNumbers', 'totalSessions')}
    schedules_helper.update_schedule(request, name, stage, data)
    # Empty JSON body signals success to the AJAX caller.
    return HttpResponse(json.dumps(''))
def delete_schedule(request, name, stage):
    """Delete the deploy schedule of the given environment stage."""
    schedules_helper.delete_schedule(request, name, stage)
    # Empty JSON body signals success to the AJAX caller.
    return HttpResponse(json.dumps(''))
def override_session(request, name, stage):
    """Force the deploy schedule to jump to the session given in the query string."""
    session_num = request.GET.get('session_num')
    schedules_helper.override_session(request, name, stage, session_num)
    # Empty JSON body signals success to the AJAX caller.
    return HttpResponse(json.dumps(''))
| apache-2.0 | 736,445,885,809,333,800 | 35.244265 | 119 | 0.602618 | false | 3.679545 | true | false | false |
jrbl/python-wtf | test_wtf.py | 1 | 1043 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 Joseph Blaylock <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from wtf import wtf
class Shape(object):
    """Demo base class whose constructor triggers a wtf() debug dump."""
    def __init__(self, pastry=None, **kwargs):
        # NOTE(review): `pastry` and **kwargs are accepted but never stored;
        # presumably they exist only to appear in the wtf() frame dump.
        self.initialized = True
        # Dump locals to stderr only, watching the 'centroid' variable.
        wtf(to_out=False, to_err=True, wvars=['centroid'])
class Square(Shape):
    """Demo subclass that forwards a fixed `pastry` value to Shape."""
    def __init__(self, **kwargs):
        super(Square, self).__init__(pastry='Croissant', **kwargs)
if __name__ == "__main__":
    # Exercise the wtf() dump: 'centroid' is watched; 'mass' merely appears
    # among the constructor's keyword arguments.
    square = Square(centroid=(0,9), mass=3e7)
| apache-2.0 | 1,199,571,914,817,227,000 | 31.59375 | 76 | 0.674976 | false | 3.465116 | false | false | false |
eponvert/texnlp | src/main/python/score_tags.py | 2 | 3070 | ###############################################################################
## Copyright (C) 2007 Jason Baldridge, The University of Texas at Austin
##
## This library is free software; you can redistribute it and#or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
##############################################################################
import sys
import gzip
import fnmatch
# Python 2 script: compares a gold-standard tag file against model output
# (one "word tag" pair per line, blank lines separating sentences) and
# prints word/sentence accuracy plus the five most frequent tag confusions.
gold_tag_filename = sys.argv[1]
model_tag_filename = sys.argv[2]
# Either input may be gzip-compressed; detect by extension.
# NOTE(review): file() is the Python-2-only alias of open().
if fnmatch.fnmatch(gold_tag_filename,"*.gz"):
    gold_tag_file = gzip.open(gold_tag_filename)
else:
    gold_tag_file = file(gold_tag_filename,"r")
if fnmatch.fnmatch(model_tag_filename,"*.gz"):
    model_tag_file = gzip.open(model_tag_filename)
else:
    model_tag_file = file(model_tag_filename,"r")
gold_lines = gold_tag_file.readlines()
model_lines = model_tag_file.readlines()
# Running tallies; `errors` counts (gold_tag, model_tag) confusion pairs.
word_correct = 0
word_total = 0
sentence_correct = 0
sentence_total = 0
errors = {}
all_correct = True
for linenumber in range(len(gold_lines)):
    gold_line = gold_lines[linenumber].strip()
    model_line = model_lines[linenumber].strip()
    if (model_line == ""):
        # Blank line = sentence boundary; both files must agree on it.
        if (gold_line != ""):
            print "Something wrong -- different length on sentence for gold and model."
            print "Gold:",gold_line
            print "Model:",model_line
        if all_correct:
            sentence_correct += 1
        sentence_total +=1
        all_correct = True
    else:
        # Each non-blank line is "<token> <tag> ..."; compare field 2.
        gitems = gold_line.split()
        mitems = model_line.split()
        gtag = gitems[1]
        mtag = mitems[1]
        if gtag == mtag:
            word_correct += 1
        else:
            all_correct = False
            errors[(gtag,mtag)] = errors.get((gtag,mtag),0)+1
        word_total += 1
    # NOTE(review): both files were already fully consumed by readlines()
    # above, so these readline() calls always return '' — dead code.
    gold_line = gold_tag_file.readline().strip()
    model_line = model_tag_file.readline().strip()
word_accuracy = (word_correct/float(word_total))*100.0
sentence_accuracy = (sentence_correct/float(sentence_total))*100.0
print "Word accuracy: %2.3f (%d/%d)" % (word_accuracy, word_correct, word_total)
print "Sent accuracy: %2.3f (%d/%d)" % (sentence_accuracy, sentence_correct, sentence_total)
# Sort confusion pairs by descending count (Python-2 cmp-style sort).
to_sort = []
for (gtag,mtag) in errors:
    to_sort.append((errors[(gtag,mtag)],gtag,mtag))
to_sort.sort(lambda x,y:cmp(y[0],x[0]));
print "\nMost common errors:"
print "Err\tGold\tModel\n---------------------"
for i in range (0,min(5,len(to_sort))):
    print "\t".join([str(x) for x in to_sort[i]])
| lgpl-3.0 | -4,555,898,188,288,726,500 | 30.979167 | 92 | 0.62215 | false | 3.50057 | false | false | false |
mariuszkowalski/BioCounter | analysis/export_statistics.py | 1 | 1332 | #!/usr/bin/env python3
import codecs
import csv
class ExportStatistics:
    '''
    Class used for exporting the created in software
    statistics.
    '''

    @staticmethod
    def export_to_csv(file_path, samples, statistics):
        '''
        Exports the basic statistics form the application to the CSV file format.

        Args:
            file_path: string - contains full file path with file name.
            samples: instance - class Samples.
            statistics: instance - class Statistics.

        Return:
            No return in method.
        '''
        # Header row: empty corner cell, then the four qualifier labels.
        header_row = [
            '',
            samples.qualifiers[1],
            samples.qualifiers[2],
            samples.qualifiers[3],
            samples.qualifiers[4]]

        # One row per sample: name followed by its four per-qualifier stats.
        matrix = []
        for key, value in samples.names.items():
            temp = [value, statistics.stats[key][1], statistics.stats[key][2],
                    statistics.stats[key][3], statistics.stats[key][4]]
            matrix.append(temp)

        # Fix: the csv module requires the file to be opened with
        # newline='' so it fully controls line endings; codecs.open()
        # (used previously) does not support the newline argument and
        # produced spurious blank rows on some platforms.
        with open(file_path, 'w', encoding='utf-8', newline='') as csv_file:
            file_writer = csv.writer(csv_file, delimiter=';')
            file_writer.writerow(header_row)
            for element in matrix:
                file_writer.writerow(element)
tuc-osg/micropython | tests/extmod/vfs_fat_ramdisk.py | 8 | 2135 | import sys
import uos
import uerrno
try:
    # Skip the whole test on ports built without FAT VFS support.
    uos.VfsFat
except AttributeError:
    print("SKIP")
    sys.exit()
class RAMFS:
    """Minimal in-RAM block device implementing the uos block protocol."""

    SEC_SIZE = 512  # bytes per sector

    def __init__(self, blocks):
        # Backing store: `blocks` zero-filled sectors.
        self.data = bytearray(blocks * self.SEC_SIZE)

    def readblocks(self, n, buf):
        """Copy len(buf) bytes starting at sector *n* into *buf*."""
        #print("readblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
        start = n * self.SEC_SIZE
        # Slice assignment copies in a single C-level operation instead of
        # the original Python-level byte-by-byte loop.
        buf[:] = self.data[start:start + len(buf)]

    def writeblocks(self, n, buf):
        """Copy *buf* into the backing store starting at sector *n*."""
        #print("writeblocks(%s, %x)" % (n, id(buf)))
        start = n * self.SEC_SIZE
        # Writing through a memoryview keeps the copy in place: an
        # out-of-range write raises instead of silently growing the store.
        memoryview(self.data)[start:start + len(buf)] = buf

    def ioctl(self, op, arg):
        """Answer block-device ioctls: 4 = sector count, 5 = sector size."""
        #print("ioctl(%d, %r)" % (op, arg))
        if op == 4:  # BP_IOCTL_SEC_COUNT
            return len(self.data) // self.SEC_SIZE
        if op == 5:  # BP_IOCTL_SEC_SIZE
            return self.SEC_SIZE
# Build a 48-sector RAM disk; skip on memory-constrained ports.
try:
    bdev = RAMFS(48)
except MemoryError:
    print("SKIP")
    sys.exit()
# Format the RAM disk and confirm no test data is present yet.
uos.VfsFat.mkfs(bdev)
print(b"FOO_FILETXT" not in bdev.data)
print(b"hello!" not in bdev.data)
vfs = uos.VfsFat(bdev, "/ramdisk")
# statvfs on a path outside the mount must fail with ENODEV.
try:
    vfs.statvfs("/null")
except OSError as e:
    print(e.args[0] == uerrno.ENODEV)
print("statvfs:", vfs.statvfs("/ramdisk"))
print("getcwd:", vfs.getcwd())
# stat of a missing file must fail with ENOENT.
try:
    vfs.stat("no_file.txt")
except OSError as e:
    print(e.args[0] == uerrno.ENOENT)
# Create a file and verify it shows up in listings, stat and raw sectors.
with vfs.open("foo_file.txt", "w") as f:
    f.write("hello!")
print(vfs.listdir())
print("stat root:", vfs.stat("/"))
print("stat disk:", vfs.stat("/ramdisk/"))
print("stat file:", vfs.stat("foo_file.txt"))
print(b"FOO_FILETXT" in bdev.data)
print(b"hello!" in bdev.data)
# Exercise directory creation and chdir.
vfs.mkdir("foo_dir")
vfs.chdir("foo_dir")
print("getcwd:", vfs.getcwd())
print(vfs.listdir())
with vfs.open("sub_file.txt", "w") as f:
    f.write("subdir file")
# chdir into a regular file must fail with ENOENT.
try:
    vfs.chdir("sub_file.txt")
except OSError as e:
    print(e.args[0] == uerrno.ENOENT)
vfs.chdir("..")
print("getcwd:", vfs.getcwd())
# After umount every operation must fail with ENODEV.
vfs.umount()
try:
    vfs.listdir()
except OSError as e:
    print(e.args[0] == uerrno.ENODEV)
try:
    vfs.getcwd()
except OSError as e:
    print(e.args[0] == uerrno.ENODEV)
# Remounting the same block device works, including bytes paths.
vfs = uos.VfsFat(bdev, "/ramdisk")
print(vfs.listdir(b""))
| mit | 688,574,128,968,513,800 | 20.138614 | 65 | 0.594379 | false | 2.695707 | false | false | false |
bitcraft/pyglet | tests/unittests/integration/text_tests/empty_bold.py | 1 | 1233 | """Test that an empty document doesn't break, even when its
(nonexistent) text is set to bold.
"""
__noninteractive = True
import unittest
from pyglet import gl
from pyglet import graphics
from pyglet.text import document
from pyglet.text import layout
from pyglet import window
class TestWindow(window.Window):
    """Window that lays out an empty document of *doctype* and bolds it."""
    def __init__(self, doctype, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.batch = graphics.Batch()
        # doctype is a document class (e.g. UnformattedDocument); the
        # instance starts with empty text.
        self.document = doctype()
        self.layout = layout.IncrementalTextLayout(self.document,
                                                   self.width, self.height,
                                                   batch=self.batch)
        # The regression under test: styling the zero-length range of an
        # empty document must not raise.
        self.document.set_style(0, len(self.document.text), {"bold": True})
    def on_draw(self):
        # White background, then draw the (empty) layout batch.
        gl.glClearColor(1, 1, 1, 1)
        self.clear()
        self.batch.draw()
class TestCase(unittest.TestCase):
    """Bold styling on an empty document must not crash, for both doc types."""
    def testUnformatted(self):
        self.window = TestWindow(document.UnformattedDocument)
        self.window.dispatch_events()
        self.window.close()
    def testFormatted(self):
        self.window = TestWindow(document.FormattedDocument)
        self.window.dispatch_events()
        self.window.close()
| bsd-3-clause | 6,124,426,004,767,012,000 | 26.4 | 75 | 0.616383 | false | 4.237113 | true | false | false |
asomya/test | horizon/dashboards/nova/networks/ports/views.py | 1 | 2247 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright Cisco Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Nova keypairs.
"""
import logging
from django import http
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
from django.views.generic import View, TemplateView
from django.utils.translation import ugettext as _
from horizon import api
from horizon import forms
from horizon import tables
from horizon import exceptions
from .forms import CreatePorts, AttachPort, DetachPort
from .tables import PortsTable
import pickle
LOG = logging.getLogger(__name__)
class CreatePortsView(forms.ModalFormView):
    """Modal dialog for creating ports on a Quantum network."""
    form_class = CreatePorts
    template_name = 'nova/networks/ports/create.html'
class PortsView(tables.DataTableView):
    """Table view listing the ports of one Quantum network."""
    table_class = PortsTable
    template_name = 'nova/networks/ports/index.html'

    def get_data(self):
        """Fetch the network's ports; show an error and return [] on failure."""
        network_id = self.kwargs['network_id']
        try:
            ports = api.quantum_port_list(self.request, network_id)
        except Exception:
            # Fix: catch Exception instead of a bare except so that
            # SystemExit/KeyboardInterrupt are not swallowed here.
            ports = []
            msg = _('Unable to retrieve network details.')
            exceptions.handle(self.request, msg)
        return ports
class AttachPortView(forms.ModalFormView):
    """Modal dialog for attaching a port to a free instance interface."""
    form_class = AttachPort
    template_name = 'nova/networks/ports/attach.html'
    def get_initial(self):
        # Offer only interfaces that are not already attached to a port.
        interfaces = api.get_free_interfaces(self.request)
        return {"interfaces": interfaces}
class DetachPortView(forms.ModalFormView):
    """Modal dialog for detaching a port from its instance interface."""
    form_class = DetachPort
    template_name = 'nova/networks/ports/detach.html'
| apache-2.0 | 8,638,973,069,664,146,000 | 29.780822 | 78 | 0.727192 | false | 4.026882 | false | false | false |
osergeev/bike | test_ga.py | 1 | 3154 | #test_ga.py
from genetic import *
import matplotlib.pyplot as plt
import random
class Eq1(object):
    """Downward parabola on [10, 50] whose maximum, 10000, is at x = 30."""
    def __init__(self):
        # Known optimum and search bounds used by the GA driver.
        self.bestval = 10000.0
        self.minval = 10.0
        self.maxval = 50.0
    def get_result(self, x):
        """Evaluate the fitness function at x."""
        return self.eq1(x)
    def eq1(self, x):
        """Parabola -10*(x - 30)^2 + 10000."""
        offset = x - 30.0
        return -10.0 * offset * offset + 10000.0
class Eq2(object):
    """Quartic x**4 - 10*x**2 + 9; on [-2, 2] its maximum is 9, at x = 0.

    The original docstring claimed a maximum value of 10, and bestval was
    10.0, but eq2 can never reach 10 on [-2, 2]: eq2(0) == 9 is the true
    optimum (the endpoints give -15).
    """
    def __init__(self):
        # Best achievable value on [minval, maxval]: eq2(0) == 9.
        self.bestval = 9.0
        self.minval = -2.0
        self.maxval = 2.0
    def get_result(self, x):
        """Evaluate the fitness function at x."""
        return self.eq2(x)
    def eq2(self, x):
        """Quartic polynomial with roots at +/-1 and +/-3."""
        return x**4 - 10*x**2 + 9
class Eq3(object):
    """ vector function with eq1 = x, eq2 = y """
    def __init__(self):
        self.eq1 = Eq1()
        self.eq2 = Eq2()
        # Best achievable value is the sum of the two component optima.
        self.bestval = self.eq1.bestval + self.eq2.bestval
        # Per-dimension bounds: [x bounds, y bounds].
        self.minval = [self.eq1.minval,self.eq2.minval]
        self.maxval = [self.eq1.maxval,self.eq2.maxval]
    def get_result(self, vallst):
        # vallst is a 2-element sequence [x, y]; fitness is the sum of
        # both component functions.
        return self.eq1.get_result(vallst[0]) + self.eq2.get_result(vallst[1])
class TestGA(object):
def __init__(self, neq, nmember, ngeneration):
self._nmember = nmember
self._ngeneration = ngeneration
self._eq = neq
def run(self):
#build member
if self._eq == 1:
eq = Eq1()
elif self._eq == 2:
eq = Eq2()
elif self._eq == 3:
eq = Eq3()
else:
print "Select an equation"
exit
valuelist = []
fitnesslist = []
for i in range(self._nmember):
if self._eq == 1 or self._eq == 2:
value = random.uniform(eq.minval,eq.maxval)
elif self._eq == 3:
value = [random.uniform(eq.minval[0],eq.maxval[0]),random.uniform(eq.minval[1],eq.maxval[1])]
valuelist.append([value])
fitnesslist.append(eq.get_result(value))
ga = Generation()
for i in range(len(valuelist)):
ga.build_member(valuelist[i],fitnesslist[i],[eq.minval],[eq.maxval])
newgeneration = ga.next()
xlist = []
ylist = []
for ngeneration in range(0,self._ngeneration):
h = 0
maxfit = 0
fitnesslist = []
valuelistlist = []
for member in newgeneration:
h += 1
for gene in member.chromosome:
fitness = eq.get_result(gene.val)
fitnesslist.append(fitness)
valuelistlist.append([gene.val])
if fitness > maxfit:
maxfit = fitness
ga = Generation(valuelistlist,fitnesslist,[eq.minval],[eq.maxval])
newgeneration = ga.next()
xlist.append(ngeneration)
ylist.append(maxfit)
plt.plot(xlist,ylist, 'ro')
plt.axis([0, self._ngeneration, 0, eq.bestval])
plt.show()
if __name__ == "__main__":
    # Run the demo (equation 1, 20 members, 100 generations) only when
    # executed as a script, so importing this module stays side-effect free.
    test1 = TestGA(1, 20, 100)
    test1.run()
| gpl-2.0 | 4,118,245,763,700,684,000 | 26.911504 | 109 | 0.502219 | false | 3.524022 | false | false | false |
quantmind/pulsar | pulsar/async/mixins.py | 1 | 6580 | import time
from asyncio import Queue, CancelledError
DEFAULT_LIMIT = 2**16
class FlowControl:
    """A protocol mixin for flow control logic.

    This implements the protocol methods :meth:`pause_writing`,
    :meth:`resume_writing`.

    NOTE(review): this mixin expects the concrete protocol to provide
    ``closed``, ``transport``, ``logger``, ``_loop``, ``_buffer`` (a deque)
    and ``changed()`` — confirm against the protocol base class.
    """
    # Pause reading once the outgoing buffer exceeds twice this limit.
    _b_limit = 2*DEFAULT_LIMIT
    _paused = False
    _buffer_size = 0
    _waiter = None

    def write(self, data):
        """Write ``data`` into the wire.

        Returns ``None``, or a :class:`~asyncio.Future` if this
        protocol has paused reading because its buffer grew too large.

        Raises:
            ConnectionResetError: if the connection is already closed.
        """
        if self.closed:
            raise ConnectionResetError(
                'Transport closed - cannot write on %s' % self
            )
        else:
            t = self.transport
            if self._paused or self._buffer:
                # Writing is throttled or older data is still queued:
                # buffer this chunk and flush whatever we can.
                self._buffer.appendleft(data)
                self._buffer_size += len(data)
                self._write_from_buffer()
                if self._buffer_size > 2 * self._b_limit:
                    if self._waiter and not self._waiter.cancelled():
                        # Fix: the original passed two arguments for three
                        # '%' placeholders, which raised when the record
                        # was formatted; `self` is the missing first value.
                        self.logger.warning(
                            '%s buffer size is %d: limit is %d',
                            self, self._buffer_size, self._b_limit
                        )
                    else:
                        # Stop reading until the buffer drains; the future
                        # is resolved in resume_writing().
                        t.pause_reading()
                        self._waiter = self._loop.create_future()
            else:
                t.write(data)
            self.changed()
            return self._waiter

    def pause_writing(self):
        '''Called by the transport when the buffer goes over the
        high-water mark

        Successive calls to this method will fails unless
        :meth:`resume_writing` is called first.
        '''
        assert not self._paused
        self._paused = True

    def resume_writing(self, exc=None):
        '''Resume writing.

        Successive calls to this method will fails unless
        :meth:`pause_writing` is called first.
        '''
        assert self._paused
        self._paused = False
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            if not waiter.done():
                # Wake up whoever awaited the future returned by write().
                if exc is None:
                    waiter.set_result(None)
                else:
                    waiter.set_exception(exc)
            self.transport.resume_reading()
        self._write_from_buffer()

    # INTERNAL CALLBACKS
    def _write_from_buffer(self):
        """Flush buffered chunks to the transport while writing is allowed."""
        t = self.transport
        if not t:
            return
        while not self._paused and self._buffer:
            if t.is_closing():
                self.producer.logger.debug(
                    'Transport closed - cannot write on %s', self
                )
                break
            # Chunks were appended left, so pop right to keep FIFO order.
            data = self._buffer.pop()
            self._buffer_size -= len(data)
            self.transport.write(data)

    def _set_flow_limits(self, _, exc=None):
        # NOTE(review): `_limit` is not defined on this mixin — presumably
        # supplied by the concrete protocol (or `_b_limit` was intended);
        # confirm before relying on this callback.
        if not exc:
            self.transport.set_write_buffer_limits(high=self._limit)

    def _wakeup_waiter(self, _, exc=None):
        # Wake up the writer if currently paused.
        if not self._paused:
            return
        self.resume_writing(exc=exc)
class Timeout:
    '''Adds a timeout for idle connections to protocols
    '''
    # Current timeout in seconds (0/None disables the idle timer).
    _timeout = None
    # Handle returned by loop.call_later for the pending idle check.
    _timeout_handler = None

    @property
    def timeout(self):
        """Idle timeout in seconds currently configured for this protocol."""
        return self._timeout

    @timeout.setter
    def timeout(self, timeout):
        '''Set a new :attr:`timeout` for this protocol
        '''
        # First assignment also hooks the connection lifecycle events so
        # the timer starts/stops with the connection.
        if self._timeout is None:
            self.event('connection_made').bind(self._add_timeout)
            self.event('connection_lost').bind(self._cancel_timeout)
        self._timeout = timeout or 0
        self._add_timeout(None)

    # INTERNALS
    def _timed_out(self):
        # Fired by the event loop; close only if the connection has really
        # been idle for the full timeout, otherwise re-arm for the rest.
        if self.last_change:
            gap = time.time() - self.last_change
            if gap < self._timeout:
                self._timeout_handler = None
                return self._add_timeout(None, timeout=self._timeout-gap)
        self.close()
        self.logger.debug('Closed idle %s.', self)

    def _add_timeout(self, _, exc=None, timeout=None):
        # (Re)schedule the idle check; no-op on closed connections or when
        # the connection ended with an error.
        if not self.closed:
            self._cancel_timeout(_, exc=exc)
            timeout = timeout or self._timeout
            if timeout and not exc:
                self._timeout_handler = self._loop.call_later(
                    timeout, self._timed_out
                )

    def _cancel_timeout(self, _, exc=None, **kw):
        # Cancel any pending idle check.
        if self._timeout_handler:
            self._timeout_handler.cancel()
            self._timeout_handler = None
class Pipeline:
    """Pipeline protocol consumers once reading is finished

    This mixin can be used by TCP connections to pipeline response writing
    """
    _pipeline = None

    def pipeline(self, consumer):
        """Add a consumer to the pipeline
        """
        # Lazily create the queue/worker on first use and make sure it is
        # torn down when the connection is lost.
        if self._pipeline is None:
            self._pipeline = ResponsePipeline(self)
            self.event('connection_lost').bind(self._close_pipeline)
        self._pipeline.put(consumer)

    def close_pipeline(self):
        # Detach the pipeline first so no new consumers can be queued,
        # then cancel its worker task; returns the task (or None).
        if self._pipeline:
            p, self._pipeline = self._pipeline, None
            return p.close()

    def _close_pipeline(self, _, **kw):
        # connection_lost event-callback adapter.
        self.close_pipeline()
class ResponsePipeline:
    """Maintains a queue of responses to send back to the client
    """
    __slots__ = ('connection', 'queue', 'debug', 'worker', 'put')

    def __init__(self, connection):
        self.connection = connection
        self.queue = Queue(loop=connection._loop)
        self.debug = connection._loop.get_debug()
        # Long-lived task draining the queue; cancelled via close().
        self.worker = self.queue._loop.create_task(self._process())
        # Alias so callers enqueue without touching the queue directly.
        self.put = self.queue.put_nowait

    async def _process(self):
        # Drain consumers one at a time, writing each response in order.
        while True:
            try:
                consumer = await self.queue.get()
                if self.debug:
                    self.connection.producer.logger.debug(
                        'Connection pipeline process %s', consumer
                    )
                await consumer.write_response()
            except (CancelledError, GeneratorExit, RuntimeError):
                # Normal shutdown paths: stop quietly.
                break
            except Exception:
                # Any other failure is fatal for the connection.
                self.connection.producer.logger.exception(
                    'Critical exception in %s response pipeline',
                    self.connection
                )
                self.connection.close()
                break
        # help gc
        self.connection = None
        self.queue = None
        self.worker = None
        self.put = None

    def close(self):
        # Cancel the worker; callers may await the returned task.
        self.worker.cancel()
        return self.worker
| bsd-3-clause | -3,372,958,613,512,494,600 | 30.037736 | 74 | 0.537538 | false | 4.500684 | false | false | false |
open-austin/influence-texas | src/influencetx/legislators/models.py | 1 | 1822 | from django.db import models
from influencetx.core import constants, utils
import logging
log = logging.getLogger(__name__)
class Legislator(models.Model):
    """A Texas legislator, synced from the Open States API."""

    # Legislator ID from Open States API.
    openstates_leg_id = models.CharField(max_length=48, db_index=True)
    # Texans for Public Justice filer id; 0 means not yet matched.
    tpj_filer_id = models.IntegerField(default=0, blank=True, db_index=True)
    tx_lege_id = models.CharField(max_length=48, blank=True, db_index=True)
    name = models.CharField(max_length=45)
    first_name = models.CharField(max_length=20, blank=True)
    last_name = models.CharField(max_length=20, blank=True)
    party = models.CharField(max_length=1, choices=constants.PARTY_CHOICES)
    chamber = models.CharField(max_length=6, choices=constants.CHAMBER_CHOICES)
    district = models.IntegerField()
    # updated_at field from Open States API. Used to check whether legislator-detail needs update
    openstates_updated_at = models.DateTimeField()
    url = models.URLField(blank=True)
    photo_url = models.URLField(blank=True)

    @property
    def initial(self):
        """First initial used for placeholder image."""
        return self.name[0]

    @property
    def party_label(self):
        """User-friendly party label."""
        return utils.party_label(self.party)

    @property
    def chamber_label(self):
        """User-friendly label for chamber of congress."""
        return utils.chamber_label(self.chamber)

    def __str__(self):
        return f'{self.name} {self.tx_lege_id}'
class LegislatorIdMap(models.Model):
    """Maps TPJ filer ids to Open States legislator ids."""
    # Provide mapping between TPJ FILER_ID and Legislator ID from Open States API.
    openstates_leg_id = models.CharField(max_length=48, db_index=True)
    tpj_filer_id = models.IntegerField(db_index=True)

    def __str__(self):
        return f'{self.openstates_leg_id!r} {self.tpj_filer_id}'
| gpl-2.0 | -8,889,635,678,502,600,000 | 36.958333 | 97 | 0.697036 | false | 3.457306 | false | false | false |
beast-arena/beast-arena | Beast.py | 2 | 1039 | # -*- coding: utf-8 -*-
import random
class Beast(object):
    """ Basic Beast class (implements Random move "stragegy") """

    def __init__(self):
        # Environment string received from the server; updated each move.
        self.environment = None

    def bewege(self, paramString):
        """
        calculates the destination where to move
        @param paramString string which is given by the server containing the
               beasts energy, environment and the round ten rounds before
        @return destination move which is calculated by the client beast
        """
        # paramString layout: "<energy>;<environment>;<last ten rounds...>"
        params = paramString.split(';', 2)
        # Idiom fix: truthiness test instead of `len(params[0]) > 0`.
        energy = int(params[0]) if params[0] else 0
        self.environment = params[1]
        # History of the last ten rounds; parsed here but not used yet.
        world_last_ten_rounds = params[2].rstrip(';')
        # Guard clause: a beast without energy does not move.
        if energy <= 0:
            return
        whitelisted_moves = (0, 2, 4, 6, 7, 8, 10, 11, 12, 13, 14, 16, 17,
                             18, 20, 22, 24, '?')
        return random.choice(whitelisted_moves)
| gpl-3.0 | -6,030,475,482,796,140,000 | 31.46875 | 98 | 0.559192 | false | 4.172691 | false | false | false |
prymitive/upaas-admin | upaas_admin/apps/applications/api.py | 1 | 12209 | # -*- coding: utf-8 -*-
"""
:copyright: Copyright 2013-2014 by Łukasz Mierzwa
:contact: [email protected]
"""
from __future__ import unicode_literals
import logging
import mongoengine
from django.core import exceptions
from django.http import HttpResponseNotFound, HttpResponseBadRequest
from django.conf.urls import url
from django.utils.translation import ugettext as _
from tastypie_mongoengine.resources import MongoEngineResource
from tastypie_mongoengine.fields import ReferenceField, ReferencedListField
from tastypie.resources import ALL
from tastypie.authorization import Authorization, ReadOnlyAuthorization
from tastypie.exceptions import Unauthorized
from tastypie.utils import trailing_slash
from tastypie.http import HttpCreated
from mongoengine.errors import ValidationError
from upaas.config.metadata import MetadataConfig
from upaas_admin.apps.applications.models import Application, Package
from upaas_admin.common.apiauth import UpaasApiKeyAuthentication
from upaas_admin.common.uwsgi import fetch_json_stats
log = logging.getLogger(__name__)
class ApplicationAuthorization(Authorization):
    """Restrict API access so users only see and modify their own apps."""

    def read_list(self, object_list, bundle):
        # Narrow any list query to applications owned by the requester.
        log.debug(_("Limiting query to user owned apps (length: "
                    "{length})").format(length=len(object_list)))
        return object_list.filter(owner=bundle.request.user)

    def read_detail(self, object_list, bundle):
        return bundle.obj.owner == bundle.request.user

    def create_detail(self, object_list, bundle):
        return bundle.obj.owner == bundle.request.user

    def update_list(self, object_list, bundle):
        return object_list.filter(owner=bundle.request.user)

    def update_detail(self, object_list, bundle):
        # Application name is immutable: force the stored name back into
        # the payload before the update is applied.
        bundle.data['name'] = bundle.obj.name
        return bundle.obj.owner == bundle.request.user

    def delete_list(self, object_list, bundle):
        # Deleting applications through the API is not allowed.
        raise Unauthorized(_("Unauthorized for such operation"))

    def delete_detail(self, object_list, bundle):
        raise Unauthorized(_("Unauthorized for such operation"))
class ApplicationResource(MongoEngineResource):
    """REST resource exposing a user's applications."""

    # Read-only embedded representations of related documents.
    current_package = ReferenceField(
        'upaas_admin.apps.applications.api.PackageResource', 'current_package',
        full=True, null=True, readonly=True)
    run_plan = ReferenceField(
        'upaas_admin.apps.scheduler.api.RunPlanResource', 'run_plan',
        full=True, null=True, readonly=True)
    running_tasks = ReferencedListField(
        'upaas_admin.apps.tasks.api.TaskResource', 'running_tasks', null=True,
        readonly=True)

    class Meta:
        always_return_data = True
        queryset = Application.objects.all()
        resource_name = 'application'
        # Raw package/task lists are exposed via dedicated fields instead.
        excludes = ['packages', 'tasks']
        filtering = {
            'id': ALL,
            'name': ALL,
            'owner': ALL,
        }
        authentication = UpaasApiKeyAuthentication()
        authorization = ApplicationAuthorization()
    def __init__(self, *args, **kwargs):
        super(ApplicationResource, self).__init__(*args, **kwargs)
        # Owner comes from the authenticated user, never from the payload.
        self.fields['owner'].readonly = True
    def dehydrate(self, bundle):
        """Add computed, read-only fields to the serialized application."""
        instances = 0
        if bundle.obj.run_plan:
            # One instance per backend the run plan is scheduled on.
            instances = len(bundle.obj.run_plan.backends)
        bundle.data['package_count'] = len(bundle.obj.packages)
        bundle.data['instance_count'] = instances
        bundle.data['can_start'] = bundle.obj.can_start
        return bundle
    def obj_create(self, bundle, request=None, **kwargs):
        """Create an application for the requesting user after validating
        its metadata payload."""
        # TODO use MongoCleanedDataFormValidation ??
        metadata = bundle.data.get('metadata')
        if not metadata:
            raise exceptions.ValidationError(_('Missing metadata'))
        try:
            # Parse only to validate; the raw string is what gets stored.
            MetadataConfig.from_string(metadata)
        except Exception as e:
            raise exceptions.ValidationError(
                _('Invalid metadata: {err}').format(err=e))

        log.debug(_("Going to create new application for user "
                    "'{name}'").format(name=bundle.request.user.username))
        try:
            # Force ownership to the authenticated user.
            return super(MongoEngineResource, self).obj_create(
                bundle, request=request, owner=bundle.request.user, **kwargs)
        except mongoengine.ValidationError as e:
            log.warning(_("Can't create new application, invalid data "
                          "payload: {msg}").format(msg=e.message))
            raise exceptions.ValidationError(e.message)
        except mongoengine.NotUniqueError as e:
            log.warning(_("Can't create new application, duplicated fields: "
                          "{msg}").format(msg=e.message))
            raise exceptions.ValidationError(e.message)
def prepend_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<id>\w[\w/-]*)/build%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('build_package'), name="build"),
url(r"^(?P<resource_name>%s)/(?P<id>\w[\w/-]*)/start%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('start_application'), name="start"),
url(r"^(?P<resource_name>%s)/(?P<id>\w[\w/-]*)/stop%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('stop_application'), name="stop"),
url(r"^(?P<resource_name>%s)/(?P<id>\w[\w/-]*)/update%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('update_application'), name="update"),
url(r"^(?P<resource_name>%s)/(?P<id>\w[\w/-]*)/instances%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('instances'), name="instances"),
]
def get_app(self, kwargs):
try:
return Application.objects(
**self.remove_api_resource_names(kwargs)).first()
except ValidationError:
return None
def build_package(self, request, **kwargs):
self.method_check(request, allowed=['put'])
try:
force_fresh = bool(int(request.GET.get('force_fresh', 0)))
except:
force_fresh = False
interpreter_version = request.GET.get('interpreter_version') or None
app = self.get_app(kwargs)
if app:
if interpreter_version and (
interpreter_version not in
app.supported_interpreter_versions):
return HttpResponseBadRequest(
_("Unsupported interpreter version"))
return self.create_response(request, app.build_package(
force_fresh=force_fresh,
interpreter_version=interpreter_version),
response_class=HttpCreated)
else:
return HttpResponseNotFound(_("No such application"))
def start_application(self, request, **kwargs):
self.method_check(request, allowed=['put'])
app = self.get_app(kwargs)
if app:
if app.current_package:
return self.create_response(request, app.start_application(),
response_class=HttpCreated)
else:
return HttpResponseBadRequest(
_("No package built or no metadata registered for app "
"'{name}' with id '{id}'").format(name=app.name,
id=app.id))
else:
return HttpResponseNotFound(_("No such application"))
def stop_application(self, request, **kwargs):
self.method_check(request, allowed=['put'])
app = self.get_app(kwargs)
if app:
if not app.run_plan:
return HttpResponseBadRequest(_(
"Application is already stopped"))
if app.current_package:
return self.create_response(request, app.stop_application(),
response_class=HttpCreated)
else:
return HttpResponseBadRequest(
_("No package built or no metadata registered for app "
"'{name}' with id '{id}'").format(name=app.name,
id=app.id))
else:
return HttpResponseNotFound(_("No such application"))
def update_application(self, request, **kwargs):
self.method_check(request, allowed=['put'])
app = self.get_app(kwargs)
if app:
if app.run_plan:
return self.create_response(request, app.update_application(),
response_class=HttpCreated)
else:
return HttpResponseBadRequest(_("Application is stopped"))
else:
return HttpResponseNotFound(_("No such application"))
def instances(self, request, **kwargs):
self.method_check(request, allowed=['get'])
stats = []
app = self.get_app(kwargs)
if not app:
return HttpResponseNotFound(_("No such application"))
if app.run_plan:
for backend_conf in app.run_plan.backends:
backend_data = {
'name': backend_conf.backend.name,
'ip': str(backend_conf.backend.ip),
'limits': {
'workers_min': backend_conf.workers_min,
'workers_max': backend_conf.workers_max,
'memory_per_worker': app.run_plan.memory_per_worker,
'memory_per_worker_bytes':
app.run_plan.memory_per_worker * 1024 * 1024,
'backend_memory': app.run_plan.memory_per_worker *
backend_conf.workers_max,
'backend_memory_bytes':
app.run_plan.memory_per_worker *
backend_conf.workers_max * 1024 * 1024,
}}
s = fetch_json_stats(str(backend_conf.backend.ip),
backend_conf.stats)
stats.append({'backend': backend_data, 'stats': s})
return self.create_response(request, stats)
class PackageAuthorization(ReadOnlyAuthorization):
def read_list(self, object_list, bundle):
log.debug(_("Limiting query to user owned apps (length: "
"{length})").format(length=len(object_list)))
return object_list.filter(
application__in=bundle.request.user.applications)
def read_detail(self, object_list, bundle):
return bundle.obj.application.owner == bundle.request.user
def delete_list(self, object_list, bundle):
active_pkgs = []
for app in bundle.request.user.applications:
if app:
active_pkgs.append(app.current_package.id)
return object_list.filter(
application__in=bundle.request.user.applications,
id__not__in=active_pkgs)
def delete_detail(self, object_list, bundle):
if (bundle.obj.application.owner == bundle.request.user) and (
bundle.obj.id != bundle.obj.application.current_package.id):
return True
return False
class PackageResource(MongoEngineResource):
application = ReferenceField(
'upaas_admin.apps.applications.api.ApplicationResource',
'application', readonly=True)
class Meta:
always_return_data = True
queryset = Package.objects.all()
resource_name = 'package'
filtering = {
'id': ALL,
}
authentication = UpaasApiKeyAuthentication()
authorization = PackageAuthorization()
def obj_delete(self, bundle, **kwargs):
bundle.obj = self.obj_get(bundle=bundle, **kwargs)
self.authorized_delete_detail(self.get_object_list(bundle.request),
bundle)
if bundle.obj.id != bundle.obj.application.current_package.id:
return super(PackageResource, self).obj_delete(bundle, **kwargs)
return HttpResponseBadRequest(_("Package in use"))
| gpl-3.0 | 88,815,950,642,673,780 | 39.829431 | 79 | 0.589531 | false | 4.400865 | false | false | false |
3dfxsoftware/cbss-addons | mrp_bom_subproduct_cost/mrp_subproduct.py | 1 | 2731 | # -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2010 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo ([email protected])
############################################################################
# Coded by: Luis Torres ([email protected])
############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
import decimal_precision as dp
class mrp_subproduct(osv.Model):
_inherit = 'mrp.subproduct'
def _calc_cost(self, cr, uid, ids, field_name, arg, context):
res = {}
for i in self.browse(cr, uid, ids):
res[i.id] = self.compute_bom_cost(cr, uid, [i.id])
return res
def _calc_cost_u(self, cr, uid, ids, field_name, arg, context):
'''
funcion para el calculo del costo unitario, el cual es: product cost/ product qty
@cost = se almacena el costo unitario final.
@res = diccionario usado para retornar el id y el costo unitario.
'''
res = {}
for i in self.browse(cr, uid, ids):
cost = 0.00
cost = i.product_id.standard_price
res[i.id] = cost
return res
_columns = {
'cost_t': fields.function(_calc_cost, method=True, type='float', digits_compute=dp.get_precision('Cost_Bom'), string='Cost', store=False),
'cost_u': fields.function(_calc_cost_u, method=True, type='float', digits_compute=dp.get_precision('Cost_Bom'), string='Unit Cost', store=False),
}
def compute_bom_cost(self, cr, uid, ids, *args):
for i in self.browse(cr, uid, ids):
cost = 0.00
cost = i.product_id.standard_price*i.product_qty * \
i.product_uom.factor_inv * i.product_id.uom_id.factor
return cost
| gpl-2.0 | 1,877,546,884,443,324,400 | 39.761194 | 153 | 0.56829 | false | 3.846479 | false | false | false |
arc64/datawi.re | datawire/views/api/entities.py | 1 | 2324 | from flask import Blueprint, request
from flask.ext.login import current_user
from werkzeug.exceptions import BadRequest
from apikit import obj_or_404, jsonify, Pager, request_data
from datawire.model import Entity, Collection, db
from datawire.model.forms import EntityForm
from datawire import authz
blueprint = Blueprint('entities', __name__)
@blueprint.route('/api/1/entities', methods=['GET'])
def index():
collection_ids = Collection.user_ids(current_user)
filter_collections = request.args.getlist('collection')
if len(filter_collections):
try:
collection_ids = [l for l in collection_ids if l
in filter_collections]
except ValueError:
raise BadRequest()
prefix = request.args.get('prefix')
q = Entity.by_collection(collection_ids, prefix=prefix)
return jsonify(Pager(q))
@blueprint.route('/api/1/entities', methods=['POST', 'PUT'])
def create():
data = EntityForm().deserialize(request_data())
authz.require(data['collection'])
authz.require(authz.collection_write(data['collection'].id))
entity = Entity.create(data, current_user)
db.session.commit()
return view(entity.id)
@blueprint.route('/api/1/entities/_suggest', methods=['GET'])
def suggest():
prefix = request.args.get('prefix')
results = Entity.suggest_prefix(prefix, authz.authz_collection('read'))
return jsonify({'results': results})
@blueprint.route('/api/1/entities/<id>', methods=['GET'])
def view(id):
entity = obj_or_404(Entity.by_id(id))
authz.require(authz.collection_read(entity.collection_id))
return jsonify(entity)
@blueprint.route('/api/1/entities/<id>', methods=['POST', 'PUT'])
def update(id):
entity = obj_or_404(Entity.by_id(id))
authz.require(authz.collection_write(entity.collection_id))
data = EntityForm().deserialize(request_data())
authz.require(data['list'])
authz.require(authz.collection_write(data['list'].id))
entity.update(data)
db.session.commit()
return view(entity.id)
@blueprint.route('/api/1/entities/<id>', methods=['DELETE'])
def delete(id):
entity = obj_or_404(Entity.by_id(id))
authz.require(authz.collection_write(entity.collection_id))
entity.delete()
db.session.commit()
return jsonify({'status': 'ok'})
| mit | -3,900,141,859,984,782,300 | 31.732394 | 75 | 0.685026 | false | 3.58642 | false | false | false |
shlomif/PySolFC | pysollib/kivy/tkutil.py | 1 | 14609 | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------#
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------#
# kivy implementation:
# most of the code will not be used, but some important function have been
# emulated.
from __future__ import division
import logging
import os
from array import array
from kivy.clock import Clock
from kivy.core.image import Image as CoreImage
from kivy.core.text import Label as CoreLabel
from kivy.graphics.texture import Texture
from pysollib.kivy.LApp import LImage
from pysollib.kivy.LApp import LTopLevel0
# ************************************************************************
# * window manager util
# ************************************************************************
def wm_withdraw(window):
window.wm_withdraw()
def wm_map(window, maximized=0):
return
# ************************************************************************
# * window util
# ************************************************************************
def setTransient(window, parent, relx=None, rely=None, expose=1):
# Make an existing toplevel window transient for a parent.
#
# The window must exist but should not yet have been placed; in
# other words, this should be called after creating all the
# subwidget but before letting the user interact.
# not used in kivy (highly tk specific).
return
def makeToplevel(parent, title=None):
print('tkutil: makeTopLevel')
# Create a Toplevel window.
#
window = LTopLevel0(parent, title)
# window = LTopLevelPopup(parent, title)
return window.content
def make_help_toplevel(app, title=None):
# Create an independent Toplevel window.
window = app.top
# from pysollib.winsystems import init_root_window
# window = Tkinter.Tk(className=TITLE)
# init_root_window(window, app)
return window
# ************************************************************************
# * bind wrapper - Tkinter doesn't properly delete all bindings
# ************************************************************************
__mfx_bindings = {}
__mfx_wm_protocols = ("WM_DELETE_WINDOW", "WM_TAKE_FOCUS", "WM_SAVE_YOURSELF")
def bind(widget, sequence, func, add=None):
# logging.info('tkutil: bind %s %s %s %s '
# % (widget, sequence, func, add))
# logging.info('tkutil: bind canvas = ' % str(widget.canvas))
if hasattr(widget, 'bindings'):
# logging.info('tkutil: bind %s %s %s %s '
# % (sequence, widget, func, add))
widget.bindings[sequence] = func
else:
# logging.info('tkutil: bind failed %s %s' % (sequence, widget))
pass
if (sequence == '<KeyPress-Left>'):
return
if (sequence == '<KeyPress-Right>'):
return
if (sequence == '<KeyPress-Prior>'):
return
if (sequence == '<KeyPress-Next>'):
return
if (sequence == '<KeyPress-Up>'):
return
if (sequence == '<KeyPress-Down>'):
return
if (sequence == '<KeyPress-Begin>'):
return
if (sequence == '<KeyPress-Home>'):
return
if (sequence == '<KeyPress-End>'):
return
if (sequence == '<KeyPress-Down>'):
return
if (sequence == '<4>'):
return
if (sequence == '<5>'):
return
if (sequence == '<1>'):
return
if (sequence == '<Motion>'):
return
if (sequence == '<ButtonRelease-1>'):
return
if (sequence == '<Control-1>'):
return
if (sequence == '<Shift-1>'):
return
if (sequence == '<Double-1>'):
return
if (sequence == '<3>'):
return
if (sequence == '<2>'):
return
if (sequence == '<Control-3>'):
return
if (sequence == '<Enter>'):
return
if (sequence == '<Leave>'):
return
if (sequence == '<Unmap>'):
return
if (sequence == '<Configure>'):
return
pass
def unbind_destroy(widget):
# logging.info('tkutil: unbind %s' % (widget))
widget.bindings = []
pass
# ************************************************************************
# * timer wrapper - Tkinter doesn't properly delete all commands
# ************************************************************************
def after(widget, ms, func, *args):
print('tkutil: after(%s, %s, %s, %s)' % (widget, ms, func, args))
if (ms == 'idle'):
print('demo use')
Clock.schedule_once(lambda dt: func(), 1.0)
elif (isinstance(ms, int)):
# print('ms: play timer (accounting)')
# Clock.schedule_once(lambda dt: func(), float(ms)/1000.0)
# makes not sense, drains battery!
pass
def after_idle(widget, func, *args):
print('tkutil: after_idle()')
return after(widget, "idle", func, *args)
def after_cancel(t):
print('tkutil: after_cancel()')
pass
# ************************************************************************
# * image handling
# ************************************************************************
def makeImage(file=None, data=None, dither=None, alpha=None):
kw = {}
if data is None:
assert file is not None
kw["source"] = file
# print('makeImage: source = %s' % file)
# if (file=='/home/lb/PRG/Python/Kivy/pysolfc/data/images/redeal.gif'):
# y = self.yy
else:
assert data is not None
kw["texture"] = data
# ob das geht ?? - kommt das vor ?
# yy = self.yy
'''
if 'source' in kw:
logging.info ("makeImage: " + kw["source"])
if 'texture' in kw:
logging.info ("makeImage: " + str(kw["texture"]))
'''
return LImage(**kw)
loadImage = makeImage
def copyImage(image, x, y, width, height):
# return Image(source=image.source)
# return Image(texture=image.texture)
return image
def fillTexture(texture, fill, outline=None, owidth=1):
# logging.info("fillImage: t=%s, f=%s o=%s, w=%s" %
# (texture, fill, outline, owidth))
# O.K. Kivy
if not fill and not outline:
return
width = texture.width
height = texture.height
ox = round(owidth)
ow = int(ox) # muss int sein!
if width <= 2 * ow or height <= 2 * ow:
fill = fill or outline
outline = None
if not fill:
fi0 = 0
fi1 = 0
fi2 = 0
fi3 = 0
else:
# wir erwarten Werte als '#xxxxxx' (color Werte in Tk notation)
# (optional mit transparenz)
if (fill[0] == '#'):
fill = fill[1:]
fi0 = int(fill[0:2], 16)
fi1 = int(fill[2:4], 16)
fi2 = int(fill[4:6], 16)
fi3 = 255
if len(fill) >= 8:
fi3 = int(fill[6:8], 16)
if not outline:
f = (fi0, fi1, fi2, fi3) * width
f = (f, ) * height
assert len(f) == height
f = sum(f, ())
assert len(f) == height * width * 4
arr = array('B', f)
texture.blit_buffer(arr, colorfmt='rgba', bufferfmt='ubyte')
else:
if (outline[0] == '#'):
outline = outline[1:]
ou0 = int(outline[0:2], 16)
ou1 = int(outline[2:4], 16)
ou2 = int(outline[4:6], 16)
ou3 = 255
if len(outline) >= 8:
ou3 = int(outline[6:8], 16)
l1 = (
ou0,
ou1,
ou2,
ou3,
) * width
l2 = (ou0, ou1, ou2, ou3, ) * ow + (fi0, fi1, fi2, fi3, ) * \
(width - 2 * ow) + (ou0, ou1, ou2, ou3, ) * ow
f = (l1, ) * ow + (l2, ) * (height - 2 * ow) + (l1, ) * ow
assert len(f) == height
f = sum(f, ())
assert len(f) == height * width * 4
arr = array('B', f)
texture.blit_buffer(arr, colorfmt='rgba', bufferfmt='ubyte')
def createImage(width, height, fill, outline=None, outwidth=1):
logging.info("createImage: w=%s, h=%s, f=%s, o=%s, ow=%s" %
(width, height, fill, outline, outwidth))
# test stellungen:
# if (fill==None):
# fill = '#00cc00'
# if (outline==None):
# outline = '#ff00ff'
# if (fill is None and (outline is None or outline == '')):
# outline = '#fff000'
# outwidth = 1
texture = Texture.create(size=(width, height), colorfmt='rgba')
fillTexture(texture, fill, outline, outwidth)
image = LImage(texture=texture)
# logging.info("createImage: LImage create %s" % image)
return image
def shadowImage(image, color='#3896f8', factor=0.3):
logging.info("shadowImage: ")
# TBD.
return None
# Kivy nicht benötigt. aber - was tut das ?
# wurde aufgerufen, als der erste König auf die Foundation
# gezogen wurde. (möglicherweise eine Gewonnen! - Markierung).
def markImage(image):
logging.info("markImage: ")
return None
def _createImageMask(texture, color):
col = 0
if (color == 'black'):
col = 0
if (color == 'white'):
col = 255
g = texture.pixels
arr = array('B', g)
for mx in range(int(len(arr) / 4)):
m = 4 * mx
if arr[m + 3] < 128:
arr[m + 3] = 0
arr[m] = arr[m + 1] = arr[m + 2] = 0
else:
arr[m + 3] = 32
arr[m] = arr[m + 1] = arr[m + 2] = col
mask = Texture.create(size=texture.size, colorfmt='rgba')
mask.blit_buffer(arr, colorfmt='rgba', bufferfmt='ubyte')
return mask
def _scaleTextureToSize(texture, size):
width = size[0]
height = size[1]
g = texture.pixels
ag = array('B', g)
gw, gh = texture.size
# print('size:',width,height)
# print('texture size:',gw,gh)
bb = array('B', [0 for x in range(width * height * 4)])
# print ('bb length: ',len(bb))
# print ('gg length: ',gw*gh*4)
scalex = width / gw
scaley = height / gh
# scale, x und y offset bestimmen.
scale = scaley
if (scalex < scaley):
scale = scalex
offx = (width - gw * scale) / 2
offy = (height - gh * scale) / 2
# print ('scale: ',scalex,'/',scaley,' -> ',scale)
# print ('offs: ',offx,'/',offy)
for bi in range(height):
bline = bi * width
if (bi >= offy) and (bi < (height - offy)):
# transfer
ai = gh - int((bi - offy) / scale) - 1
aline = ai * gw
for bk in range(width):
bpos = (bline + bk) * 4
if (bk >= offx) and (bk < (width - offx)):
# transfer
ak = int((bk - offx) / scale)
apos = (aline + ak) * 4
bb[bpos] = ag[apos]
bb[bpos + 1] = ag[apos + 1]
bb[bpos + 2] = ag[apos + 2]
bb[bpos + 3] = ag[apos + 3]
else:
# transparent
bb[bpos + 3] = 0
else:
# transparent
for bk in range(width):
bb[(bline + bk) * 4 + 3] = 0
stext = Texture.create(size=(width, height), colorfmt='rgba')
stext.blit_buffer(bb, colorfmt='rgba', bufferfmt='ubyte')
return stext
def _pasteTextureTo(texture, totexture):
g = texture.pixels
ag = array('B', g)
gw, gh = texture.size
t = totexture.pixels
at = array('B', t)
tw, th = totexture.size
if (tw != gw) or (th != gh):
return
for i in range(int(len(ag) / 4)):
i4 = i * 4
if ag[i4 + 3] > 128:
at[i4] = ag[i4]
at[i4 + 1] = ag[i4 + 1]
at[i4 + 2] = ag[i4 + 2]
at[i4 + 3] = ag[i4 + 3]
stext = Texture.create(size=(tw, th), colorfmt='rgba')
stext.blit_buffer(at, colorfmt='rgba', bufferfmt='ubyte')
return stext
def createBottom(image, color='white', backfile=None):
backfilebase = None
if backfile is not None:
backfilebase = os.path.basename(backfile)
logging.info("createBottom: %s | %s" % (color, backfilebase))
# print('createBottom:',image)
# th = 1 # thickness
# size = (w - th * 2, h - th * 2)
# original: zeichnet noch eine outline um die karte - können wir nicht.
tmp0 = _createImageMask(image.texture, color)
if backfile:
tmp1 = CoreImage(backfile)
txtre = _scaleTextureToSize(tmp1.texture, image.texture.size)
tmp = _pasteTextureTo(txtre, tmp0)
else:
tmp = tmp0
img = LImage(texture=tmp)
img.size[0] = image.getWidth()
img.size[1] = image.getHeight()
return img
'''
im = image._pil_image
th = 1 # thickness
sh = Image.new('RGBA', im.size, color)
out = Image.composite(sh, im, im)
w, h = im.size
size = (w - th * 2, h - th * 2)
tmp = Image.new('RGBA', size, color)
tmp.putalpha(60)
mask = out.resize(size, Image.ANTIALIAS)
out.paste(tmp, (th, th), mask)
if backfile:
back = Image.open(backfile).convert('RGBA')
w0, h0 = back.size
w1, h1 = im.size
a = min(float(w1) / w0, float(h1) / h0)
a = a * 0.9
w0, h0 = int(w0 * a), int(h0 * a)
back = back.resize((w0, h0), Image.ANTIALIAS)
x, y = (w1 - w0) / 2, (h1 - h0) / 2
out.paste(back, (x, y), back)
return PIL_Image(image=out)
'''
# ************************************************************************
# * font utils
# ************************************************************************
def get_text_width(text, font, root=None):
logging.info("get_text_width: %s %s" % (text, font))
label = CoreLabel()
label.text = text
label.refresh()
return label.content_width
# return Font(root=root, font=font).measure(text)
| gpl-3.0 | 4,965,557,221,259,048,000 | 27.03263 | 79 | 0.510236 | false | 3.469945 | false | false | false |
bruinxiong/SENet.mxnet | symbol_se_inception_v4.py | 1 | 20343 | """
Inception V4, suitable for images with around 299 x 299 (original)
Implemented the following paper:
Szegedy C, Ioffe S, Vanhoucke V. Inception-v4, inception-resnet and the impact of residual connections on learning[J]. arXiv preprint arXiv:1602.07261, 2016.
Jie Hu, Li Shen, Gang Sun. "Squeeze-and-Excitation Networks" https://arxiv.org/pdf/1709.01507v1.pdf
This modified version is based on the original Inception-v4 architecture but changes the input size to 224 x 224.
Modified by Lin Xiong, May-27, 2017
Added Squeeze-and-Excitation block by Lin Xiong Oct-30, 2017
Thanks to Cher Keng Heng
"""
#import find_mxnet
import mxnet as mx
def Conv(data, num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=None, suffix='', withRelu=True, withBn=False, bn_mom=0.9, workspace=256):
    """Build a convolution symbol, optionally followed by BatchNorm and ReLU.

    Layer names are derived from ``name`` + ``suffix`` with the standard
    ``_conv2d`` / ``_bn`` / ``_relu`` endings so the block integrates with
    the naming scheme used throughout this file.
    """
    out = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=kernel,
                             stride=stride, pad=pad,
                             name='%s%s_conv2d' % (name, suffix),
                             workspace=workspace)
    if withBn:
        # BN is inserted between the convolution and the activation.
        out = mx.sym.BatchNorm(data=out, fix_gamma=False, eps=2e-5,
                               momentum=bn_mom,
                               name='%s%s_bn' % (name, suffix))
    if withRelu:
        out = mx.sym.Activation(data=out, act_type='relu',
                                name='%s%s_relu' % (name, suffix))
    return out
# Input Shape is 299*299*3 (old)
# Input Shape is 224*224*3 (new)
def inception_stem(name, data,
                   num_1_1=32, num_1_2=32, num_1_3=64,
                   num_2_1=96,
                   num_3_1=64, num_3_2=96,
                   num_4_1=64, num_4_2=64, num_4_3=64, num_4_4=96,
                   num_5_1=192,
                   bn_mom=0.9):
    """Inception-v4 stem: initial downsampling trunk applied before the
    Inception-A blocks.

    Three stages, each splitting into two parallel paths that are
    channel-concatenated:
      1. three 3x3 convs, then max-pool || strided 3x3 conv
      2. 1x1 -> 3x3 path || 1x1 -> 7x1 -> 1x7 -> 3x3 path
      3. max-pool || strided 3x3 conv, followed by a shared BN + ReLU

    ``num_*`` arguments are the filter counts of the individual convolutions;
    ``bn_mom`` is the momentum passed to every BatchNorm. Returns the symbol
    after the final ReLU.
    """
    # Stage 1: three stacked 3x3 convs (first one strided) ...
    stem_3x3 = Conv(data=data, num_filter=num_1_1, kernel=(3, 3), stride=(2, 2), name=('%s_conv' % name), bn_mom=bn_mom, workspace=256)
    stem_3x3 = Conv(data=stem_3x3, num_filter=num_1_2, kernel=(3, 3), name=('%s_stem' % name), suffix='_conv', bn_mom=bn_mom, workspace=256)
    stem_3x3 = Conv(data=stem_3x3, num_filter=num_1_3, kernel=(3, 3), pad=(1, 1), name=('%s_stem' % name),
                    suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    # ... then downsample twice in parallel (max-pool || strided conv) and concat.
    pool1 = mx.sym.Pooling(data=stem_3x3, kernel=(3, 3), stride=(2, 2), pad=(0, 0), pool_type='max',
                           name=('%s_%s_pool1' % ('max', name)))
    stem_1_3x3 = Conv(data=stem_3x3, num_filter=num_2_1, kernel=(3, 3), stride=(2, 2), name=('%s_stem_1' % name),
                      suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    concat1 = mx.sym.Concat(*[pool1, stem_1_3x3], name=('%s_concat_1' % name))
    # Stage 2, path A: 1x1 -> 3x3.
    stem_1_1x1 = Conv(data=concat1, num_filter=num_3_1, name=('%s_stem_1' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    stem_1_3x3 = Conv(data=stem_1_1x1, num_filter=num_3_2, kernel=(3, 3), name=('%s_stem_1' % name), suffix='_conv_3', bn_mom=bn_mom, workspace=256)
    # Stage 2, path B: 1x1 -> factorized 7x7 (7x1 then 1x7) -> 3x3.
    stem_2_1x1 = Conv(data=concat1, num_filter=num_4_1, name=('%s_stem_2' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    stem_2_7x1 = Conv(data=stem_2_1x1, num_filter=num_4_2, kernel=(7, 1), pad=(3, 0), name=('%s_stem_2' % name),
                      suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    stem_2_1x7 = Conv(data=stem_2_7x1, num_filter=num_4_3, kernel=(1, 7), pad=(0, 3), name=('%s_stem_2' % name),
                      suffix='_conv_3', bn_mom=bn_mom, workspace=256)
    stem_2_3x3 = Conv(data=stem_2_1x7, num_filter=num_4_4, kernel=(3, 3), name=('%s_stem_2' % name), suffix='_conv_4', bn_mom=bn_mom, workspace=256)
    concat2 = mx.sym.Concat(*[stem_1_3x3, stem_2_3x3], name=('%s_concat_2' % name))
    # Stage 3: final parallel downsampling (max-pool || strided conv, no ReLU
    # on the conv because BN + ReLU are applied once after the concat).
    pool2 = mx.sym.Pooling(data=concat2, kernel=(3, 3), stride=(2, 2), pad=(0, 0), pool_type='max',
                           name=('%s_%s_pool2' % ('max', name)))
    stem_3_3x3 = Conv(data=concat2, num_filter=num_5_1, kernel=(3, 3), stride=(2, 2), name=('%s_stem_3' % name),
                      suffix='_conv_1', withRelu=False, bn_mom=bn_mom, workspace=256)
    concat3 = mx.sym.Concat(*[pool2, stem_3_3x3], name=('%s_concat_3' % name))
    bn1 = mx.sym.BatchNorm(data=concat3, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=('%s_bn1' % name))
    act1 = mx.sym.Activation(data=bn1, act_type='relu', name=('%s_relu1' % name))
    return act1
# Output Shape is 25*25*384
# Input Shape is 25*25*384
def InceptionA(name, data,
               num_1_1=96,
               num_2_1=96,
               num_3_1=64, num_3_2=96,
               num_4_1=64, num_4_2=96, num_4_3=96,
               bn_mom=0.9):
    """Inception-A block: four parallel branches concatenated channel-wise.

    Branches: avg-pool + 1x1 conv, plain 1x1 conv, 1x1 -> 3x3, and
    1x1 -> 3x3 -> 3x3. Each branch ends without ReLU; a single BN + ReLU
    is applied after the concat. All strides are 1, so the spatial size of
    ``data`` is preserved.

    ``name`` prefixes every layer name, ``num_*`` are the per-conv filter
    counts, and ``bn_mom`` is the momentum used by every BatchNorm.
    """
    # Branch 1: 3x3 average pooling (stride 1, padded) followed by 1x1 conv.
    a1 = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type='avg',
                        name=('%s_%s_pool1' % ('avg', name)))
    a1 = Conv(data=a1, num_filter=num_1_1, name=('%s_a_1' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 2: plain 1x1 conv.
    a2 = Conv(data=data, num_filter=num_2_1, name=('%s_a_2' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 3: 1x1 conv -> 3x3 conv.
    a3 = Conv(data=data, num_filter=num_3_1, name=('%s_a_3' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    a3 = Conv(data=a3, num_filter=num_3_2, kernel=(3, 3), pad=(1, 1), name=('%s_a_3' % name), suffix='_conv_2',
              withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 4: 1x1 conv -> 3x3 conv -> 3x3 conv.
    a4 = Conv(data=data, num_filter=num_4_1, name=('%s_a_4' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    a4 = Conv(data=a4, num_filter=num_4_2, kernel=(3, 3), pad=(1, 1), name=('%s_a_4' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    a4 = Conv(data=a4, num_filter=num_4_3, kernel=(3, 3), pad=(1, 1), name=('%s_a_4' % name), suffix='_conv_3',
              withRelu=False, bn_mom=bn_mom, workspace=256)
    # Fuse the branches, then apply a shared BN + ReLU.
    m = mx.sym.Concat(*[a1, a2, a3, a4], name=('%s_a_concat1' % name))
    # Fix: pass momentum=bn_mom so this BatchNorm honours the block's
    # bn_mom argument (it previously fell back to the framework default,
    # unlike the BN calls in Conv and inception_stem).
    m = mx.sym.BatchNorm(data=m, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=('%s_a_bn1' % name))
    m = mx.sym.Activation(data=m, act_type='relu', name=('%s_a_relu1' % name))
    return m
# Output Shape is 25*25*384
# Input Shape is 12*12*1024
def InceptionB(name, data,
               num_1_1=128,
               num_2_1=384,
               num_3_1=192, num_3_2=224, num_3_3=256,
               num_4_1=192, num_4_2=192, num_4_3=224, num_4_4=224, num_4_5=256,
               bn_mom=0.9):
    """Inception-B block: four parallel branches concatenated channel-wise.

    Branches: avg-pool + 1x1 conv, plain 1x1 conv, 1x1 -> 1x7 -> 7x1
    (factorized 7x7), and 1x1 -> (1x7 -> 7x1) x 2. Each branch ends
    without ReLU; a single BN + ReLU follows the concat. All strides are
    1, so the spatial size of ``data`` is preserved.

    ``name`` prefixes every layer name, ``num_*`` are the per-conv filter
    counts, and ``bn_mom`` is the momentum used by every BatchNorm.
    """
    # Branch 1: 3x3 average pooling (stride 1, padded) followed by 1x1 conv.
    b1 = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type='avg',
                        name=('%s_%s_pool1' % ('avg', name)))
    b1 = Conv(data=b1, num_filter=num_1_1, name=('%s_b_1' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 2: plain 1x1 conv.
    b2 = Conv(data=data, num_filter=num_2_1, name=('%s_b_2' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 3: 1x1 conv -> factorized 7x7 (1x7 then 7x1).
    b3 = Conv(data=data, num_filter=num_3_1, name=('%s_b_3' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    b3 = Conv(data=b3, num_filter=num_3_2, kernel=(1, 7), pad=(0, 3), name=('%s_b_3' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    b3 = Conv(data=b3, num_filter=num_3_3, kernel=(7, 1), pad=(3, 0), name=('%s_b_3' % name), suffix='_conv_3',
              withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 4: 1x1 conv -> two stacked factorized 7x7 convolutions.
    b4 = Conv(data=data, num_filter=num_4_1, name=('%s_b_4' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    b4 = Conv(data=b4, num_filter=num_4_2, kernel=(1, 7), pad=(0, 3), name=('%s_b_4' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    b4 = Conv(data=b4, num_filter=num_4_3, kernel=(7, 1), pad=(3, 0), name=('%s_b_4' % name), suffix='_conv_3', bn_mom=bn_mom, workspace=256)
    b4 = Conv(data=b4, num_filter=num_4_4, kernel=(1, 7), pad=(0, 3), name=('%s_b_4' % name), suffix='_conv_4', bn_mom=bn_mom, workspace=256)
    b4 = Conv(data=b4, num_filter=num_4_5, kernel=(7, 1), pad=(3, 0), name=('%s_b_4' % name), suffix='_conv_5',
              withRelu=False, bn_mom=bn_mom, workspace=256)
    # Fuse the branches, then apply a shared BN + ReLU.
    m = mx.sym.Concat(*[b1, b2, b3, b4], name=('%s_b_concat1' % name))
    # Fix: pass momentum=bn_mom so this BatchNorm honours the block's
    # bn_mom argument (it previously fell back to the framework default).
    m = mx.sym.BatchNorm(data=m, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=('%s_b_bn1' % name))
    m = mx.sym.Activation(data=m, act_type='relu', name=('%s_b_relu1' % name))
    return m
# Output Shape is 12*12*1024
# Input Shape is 5*5*1536
def InceptionC(name, data,
               num_1_1=256,
               num_2_1=256,
               num_3_1=384, num_3_2=256, num_3_3=256,
               num_4_1=384, num_4_2=448, num_4_3=512, num_4_4=256, num_4_5=256,
               bn_mom=0.9):
    """Inception-C block: six output branches concatenated channel-wise.

    Branches: avg-pool + 1x1 conv; plain 1x1 conv; a 1x1 conv that splits
    into parallel 3x1 and 1x3 convs; and a 1x1 -> 1x3 -> 3x1 chain that
    also splits into parallel 3x1 and 1x3 convs. Each output branch ends
    without ReLU; a single BN + ReLU follows the concat. All strides are
    1, so the spatial size of ``data`` is preserved.

    ``name`` prefixes every layer name, ``num_*`` are the per-conv filter
    counts, and ``bn_mom`` is the momentum used by every BatchNorm.
    """
    # Branch 1: 3x3 average pooling (stride 1, padded) followed by 1x1 conv.
    c1 = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type='avg',
                        name=('%s_%s_pool1' % ('avg', name)))
    c1 = Conv(data=c1, num_filter=num_1_1, name=('%s_c_1' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 2: plain 1x1 conv.
    c2 = Conv(data=data, num_filter=num_2_1, name=('%s_c_2' % name), suffix='_conv', withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 3: 1x1 conv splitting into parallel 3x1 and 1x3 convs.
    c3 = Conv(data=data, num_filter=num_3_1, name=('%s_c_3' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    c3_1 = Conv(data=c3, num_filter=num_3_2, kernel=(3, 1), pad=(1, 0), name=('%s_c_3' % name), suffix='_conv_1_1',
                withRelu=False, bn_mom=bn_mom, workspace=256)
    c3_2 = Conv(data=c3, num_filter=num_3_3, kernel=(1, 3), pad=(0, 1), name=('%s_c_3' % name), suffix='_conv_1_2',
                withRelu=False, bn_mom=bn_mom, workspace=256)
    # Branch 4: 1x1 -> 1x3 -> 3x1 chain, then split into 3x1 and 1x3 convs.
    c4 = Conv(data=data, num_filter=num_4_1, name=('%s_c_4' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
    c4 = Conv(data=c4, num_filter=num_4_2, kernel=(1, 3), pad=(0, 1), name=('%s_c_4' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
    c4 = Conv(data=c4, num_filter=num_4_3, kernel=(3, 1), pad=(1, 0), name=('%s_c_4' % name), suffix='_conv_3', bn_mom=bn_mom, workspace=256)
    c4_1 = Conv(data=c4, num_filter=num_4_4, kernel=(3, 1), pad=(1, 0), name=('%s_c_4' % name), suffix='_conv_3_1',
                withRelu=False, bn_mom=bn_mom, workspace=256)
    c4_2 = Conv(data=c4, num_filter=num_4_5, kernel=(1, 3), pad=(0, 1), name=('%s_c_4' % name), suffix='_conv_3_2',
                withRelu=False, bn_mom=bn_mom, workspace=256)
    # Fuse all six outputs, then apply a shared BN + ReLU.
    m = mx.sym.Concat(*[c1, c2, c3_1, c3_2, c4_1, c4_2], name=('%s_c_concat1' % name))
    # Fix: pass momentum=bn_mom so this BatchNorm honours the block's
    # bn_mom argument (it previously fell back to the framework default).
    m = mx.sym.BatchNorm(data=m, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=('%s_c_bn1' % name))
    m = mx.sym.Activation(data=m, act_type='relu', name=('%s_c_relu1' % name))
    return m
# Output Shape is 5*5*1536
# Input Shape is 25*25*384
def ReductionA(name, data,
num_2_1=384,
num_3_1=192, num_3_2=224, num_3_3=256,
bn_mom=0.9):
ra1 = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(2, 2), pad=(0, 0), pool_type='max', name=('%s_%s_pool1' % ('max', name)))
ra2 = Conv(data=data, num_filter=num_2_1, kernel=(3, 3), stride=(2, 2), name=('%s_ra_2' % name), suffix='_conv',
withRelu=False, bn_mom=bn_mom, workspace=256)
ra3 = Conv(data=data, num_filter=num_3_1, name=('%s_ra_3' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
ra3 = Conv(data=ra3, num_filter=num_3_2, kernel=(3, 3), pad=(1, 1), name=('%s_ra_3' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
ra3 = Conv(data=ra3, num_filter=num_3_3, kernel=(3, 3), stride=(2, 2), name=('%s_ra_3' % name), suffix='_conv_3',
withRelu=False, bn_mom=bn_mom, workspace=256)
m = mx.sym.Concat(*[ra1, ra2, ra3], name=('%s_ra_concat1' % name))
m = mx.sym.BatchNorm(data=m, fix_gamma=False, eps=2e-5, name=('%s_ra_bn1' % name))
m = mx.sym.Activation(data=m, act_type='relu', name=('%s_ra_relu1' % name))
return m
# Output Shape is 12*12*1024
# Input Shape is 12*12*1024
def ReductionB(name, data,
num_2_1=192, num_2_2=192,
num_3_1=256, num_3_2=256, num_3_3=320, num_3_4=320,
bn_mom=0.9):
rb1 = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(2, 2), pad=(0, 0), pool_type='max', name=('%s_%s_pool1' % ('max', name)))
rb2 = Conv(data=data, num_filter=num_2_1, name=('%s_rb_2' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
rb2 = Conv(data=rb2, num_filter=num_2_2, kernel=(3, 3), stride=(2, 2), name=('%s_rb_2' % name), suffix='_conv_2',
withRelu=False, bn_mom=bn_mom, workspace=256)
rb3 = Conv(data=data, num_filter=num_3_1, name=('%s_rb_3' % name), suffix='_conv_1', bn_mom=bn_mom, workspace=256)
rb3 = Conv(data=rb3, num_filter=num_3_2, kernel=(1, 7), pad=(0, 3), name=('%s_rb_3' % name), suffix='_conv_2', bn_mom=bn_mom, workspace=256)
rb3 = Conv(data=rb3, num_filter=num_3_3, kernel=(7, 1), pad=(3, 0), name=('%s_rb_3' % name), suffix='_conv_3', bn_mom=bn_mom, workspace=256)
rb3 = Conv(data=rb3, num_filter=num_3_4, kernel=(3, 3), stride=(2, 2), name=('%s_rb_3' % name), suffix='_conv_4',
withRelu=False, bn_mom=bn_mom, workspace=256)
m = mx.sym.Concat(*[rb1, rb2, rb3], name=('%s_rb_concat1' % name))
m = mx.sym.BatchNorm(data=m, fix_gamma=False, eps=2e-5, name=('%s_rb_bn1' % name))
m = mx.sym.Activation(data=m, act_type='relu', name=('%s_rb_relu1' % name))
return m
# Output Shape is 5*5*1536
# Squeeze and excitation block
def squeeze_excitation_block(name, data, num_filter, ratio):
squeeze = mx.sym.Pooling(data=data, global_pool=True, kernel=(7, 7), pool_type='avg', name=name + '_squeeze')
squeeze = mx.symbol.Flatten(data=squeeze, name=name + '_flatten')
excitation = mx.symbol.FullyConnected(data=squeeze, num_hidden=int(num_filter*ratio), name=name + '_excitation1')
excitation = mx.sym.Activation(data=excitation, act_type='relu', name=name + '_excitation1_relu')
excitation = mx.symbol.FullyConnected(data=excitation, num_hidden=num_filter, name=name + '_excitation2')
excitation = mx.sym.Activation(data=excitation, act_type='sigmoid', name=name + '_excitation2_sigmoid')
scale = mx.symbol.broadcast_mul(data, mx.symbol.reshape(data=excitation, shape=(-1, num_filter, 1, 1)))
return scale
def circle_in4a(name, data, ratio,
num_1_1=96,
num_2_1=96,
num_3_1=64, num_3_2=96,
num_4_1=64, num_4_2=96, num_4_3=96,
bn_mom=0.9,
round=4):
in4a = data
for i in xrange(round):
in4a = InceptionA(name + ('_%d' % i),
in4a,
num_1_1,
num_2_1,
num_3_1, num_3_2,
num_4_1, num_4_2, num_4_3,
bn_mom)
_, out_shapes, _ = in4a.infer_shape(data=(1, 3, 224, 224))
# import pdb
# pdb.set_trace()
num_filter = int(out_shapes[0][1])
in4a = squeeze_excitation_block(name + ('_%d' % i), in4a, num_filter, ratio)
return in4a
def circle_in7b(name, data, ratio,
num_1_1=128,
num_2_1=384,
num_3_1=192, num_3_2=224, num_3_3=256,
num_4_1=192, num_4_2=192, num_4_3=224, num_4_4=224, num_4_5=256,
bn_mom=0.9,
round=7):
in7b = data
for i in xrange(round):
in7b = InceptionB(name + ('_%d' % i),
in7b,
num_1_1,
num_2_1,
num_3_1, num_3_2, num_3_3,
num_4_1, num_4_2, num_4_3, num_4_4, num_4_5,
bn_mom)
_, out_shapes, _, = in7b.infer_shape(data=(1, 3, 224, 224))
# import pdb
# pdb.set_trace()
num_filter = int(out_shapes[0][1])
in7b = squeeze_excitation_block(name + ('_%d' % i), in7b, num_filter, ratio)
return in7b
def circle_in3c(name, data, ratio,
num_1_1=256,
num_2_1=256,
num_3_1=384, num_3_2=256, num_3_3=256,
num_4_1=384, num_4_2=448, num_4_3=512, num_4_4=256, num_4_5=256,
bn_mom=0.9,
round=3):
in3c = data
for i in xrange(round):
in3c = InceptionC(name + ('_%d' % i),
in3c,
num_1_1,
num_2_1,
num_3_1, num_3_2, num_3_3,
num_4_1, num_4_2, num_4_3, num_4_4, num_4_5,
bn_mom)
_, out_shapes, _, = in3c.infer_shape(data=(1, 3, 224, 224))
# import pdb
# pdb.set_trace()
num_filter = int(out_shapes[0][1])
in3c = squeeze_excitation_block(name + ('_%d' % i), in3c, num_filter, ratio)
return in3c
# create SE inception-v4
def get_symbol(ratio, num_classes=1000):
# input shape 229*229*3 (old)
# input shape 224*224*3 (new)
data = mx.symbol.Variable(name="data")
bn_mom = 0.9
# import pdb
# pdb.set_trace()
# stage stem
(num_1_1, num_1_2, num_1_3) = (32, 32, 64)
num_2_1 = 96
(num_3_1, num_3_2) = (64, 96)
(num_4_1, num_4_2, num_4_3, num_4_4) = (64, 64, 64, 96)
num_5_1 = 192
in_stem = inception_stem('stem_stage', data,
num_1_1, num_1_2, num_1_3,
num_2_1,
num_3_1, num_3_2,
num_4_1, num_4_2, num_4_3, num_4_4,
num_5_1,
bn_mom)
# stage 4 x InceptionA
num_1_1 = 96
num_2_1 = 96
(num_3_1, num_3_2) = (64, 96)
(num_4_1, num_4_2, num_4_3) = (64, 96, 96)
in4a = circle_in4a('in4a',
in_stem,
ratio,
num_1_1,
num_2_1,
num_3_1, num_3_2,
num_4_1, num_4_2, num_4_3,
bn_mom,
4)
# stage ReductionA
num_1_1 = 384
(num_2_1, num_2_2, num_2_3) = (192, 224, 256)
re_a = ReductionA('re_a', in4a,
num_1_1,
num_2_1, num_2_2, num_2_3,
bn_mom)
# stage 7 x InceptionB
num_1_1 = 128
num_2_1 = 384
(num_3_1, num_3_2, num_3_3) = (192, 224, 256)
(num_4_1, num_4_2, num_4_3, num_4_4, num_4_5) = (192, 192, 224, 224, 256)
in7b = circle_in7b('in7b', re_a, ratio,
num_1_1,
num_2_1,
num_3_1, num_3_2, num_3_3,
num_4_1, num_4_2, num_4_3, num_4_4, num_4_5,
bn_mom,
7)
# stage ReductionB
(num_1_1, num_1_2) = (192, 192)
(num_2_1, num_2_2, num_2_3, num_2_4) = (256, 256, 320, 320)
re_b = ReductionB('re_b', in7b,
num_1_1, num_1_2,
num_2_1, num_2_2, num_2_3, num_2_4,
bn_mom)
# stage 3 x InceptionC
num_1_1 = 256
num_2_1 = 256
(num_3_1, num_3_2, num_3_3) = (384, 256, 256)
(num_4_1, num_4_2, num_4_3, num_4_4, num_4_5) = (384, 448, 512, 256, 256)
in3c = circle_in3c('in3c', re_b, ratio,
num_1_1,
num_2_1,
num_3_1, num_3_2, num_3_3,
num_4_1, num_4_2, num_4_3, num_4_4, num_4_5,
bn_mom,
3)
# stage Average Pooling
#pool = mx.sym.Pooling(data=in3c, kernel=(8, 8), stride=(1, 1), pool_type="avg", name="global_pool")
pool = mx.sym.Pooling(data=in3c, global_pool=True, kernel=(5, 5), stride=(1, 1), pad=(0, 0), pool_type="avg", name="global_pool")
# stage Dropout
#dropout = mx.sym.Dropout(data=pool, p=0.5) #modified for vggface data
dropout = mx.sym.Dropout(data=pool, p=0.2) #original
# dropout = mx.sym.Dropout(data=pool, p=0.8)
flatten = mx.sym.Flatten(data=dropout)
# output
fc1 = mx.symbol.FullyConnected(data=flatten, num_hidden=num_classes, name='fc1')
softmax = mx.symbol.SoftmaxOutput(data=fc1, name='softmax')
return softmax
# if __name__ == '__main__':
# net = get_symbol(1000)
# shape = {'softmax_label': (32, 1000), 'data': (32, 3, 299, 299)}
# mx.viz.plot_network(net, title='inception-v4', format='png', shape=shape).render('inception-v4')
| apache-2.0 | 7,157,415,611,834,751,000 | 48.478908 | 157 | 0.523817 | false | 2.618821 | false | false | false |
osmr/utct | Gluon/trainer.py | 1 | 3553 | import logging
from utct.common.trainer_template import TrainerTemplate
import mxnet as mx
from mxnet import gluon, autograd
class Trainer(TrainerTemplate):
"""
Class, which provides training process under Gluon/MXNet framework.
Parameters:
----------
model : object
instance of Model class with graph of CNN
optimizer : object
instance of Optimizer class with CNN optimizer
data_source : object
instance of DataSource class with training/validation iterators
saver : object
instance of Saver class with information about stored files
ctx : object
instance of MXNet context
"""
def __init__(self,
model,
optimizer,
data_source,
saver,
ctx):
super(Trainer, self).__init__(
model,
optimizer,
data_source,
saver)
self.ctx = ctx
def _hyper_train_target_sub(self, **kwargs):
"""
Calling single training procedure for specific hyper parameters from hyper optimizer.
"""
if self.saver.log_filename:
fh = logging.FileHandler(self.saver.log_filename)
self.logger.addHandler(fh)
self.logger.info("Training with parameters: {}".format(kwargs))
train_loader, val_loader = self.data_source()
net = self.model()
net.initialize(
mx.init.Xavier(magnitude=2.24),
ctx=self.ctx)
trainer = self.optimizer(
params=net.collect_params(),
**kwargs)
metric = mx.metric.Accuracy()
loss = gluon.loss.SoftmaxCrossEntropyLoss()
log_interval = 1
for epoch in range(self.num_epoch):
metric.reset()
for i, (data, label) in enumerate(train_loader):
# Copy data to ctx if necessary
data = data.as_in_context(self.ctx)
label = label.as_in_context(self.ctx)
# Start recording computation graph with record() section.
# Recorded graphs can then be differentiated with backward.
with autograd.record():
output = net(data)
L = loss(output, label)
L.backward()
# take a gradient step with batch_size equal to data.shape[0]
trainer.step(data.shape[0])
# update metric at last.
metric.update([label], [output])
if i % log_interval == 0 and i > 0:
name, acc = metric.get()
print('[Epoch %d Batch %d] Training: %s=%f' % (epoch, i, name, acc))
name, acc = metric.get()
print('[Epoch %d] Training: %s=%f' % (epoch, name, acc))
name, val_acc = self._test(
model=net,
val_data=val_loader,
ctx=self.ctx)
print('[Epoch %d] Validation: %s=%f' % (epoch, name, val_acc))
if self.saver.log_filename:
self.logger.removeHandler(fh)
fh.close()
best_value = 0.0
return best_value
@staticmethod
def _test(model,
val_data,
ctx):
metric = mx.metric.Accuracy()
for data, label in val_data:
data = data.as_in_context(ctx)
label = label.as_in_context(ctx)
output = model(data)
metric.update([label], [output])
return metric.get()
| mit | -2,329,165,689,116,485,600 | 29.895652 | 93 | 0.534478 | false | 4.397277 | false | false | false |
ROIV/ViorCoin-ElectrumWallet | gui/qt/main_window.py | 1 | 90385 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, time, datetime, re, threading
from electrum_vior.i18n import _, set_language
from electrum_vior.util import print_error, print_msg
import os.path, json, ast, traceback
import webbrowser
import shutil
import StringIO
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum_vior.bitcoin import MIN_RELAY_TX_FEE, is_valid
from electrum_vior.plugins import run_hook
import icons_rc
from electrum_vior.wallet import format_satoshis
from electrum_vior import Transaction
from electrum_vior import mnemonic
from electrum_vior import util, bitcoin, commands, Interface, Wallet
from electrum_vior import SimpleConfig, Wallet, WalletStorage
from electrum_vior import bmp, pyqrnative
from amountedit import AmountEdit
from network_dialog import NetworkDialog
from qrcodewidget import QRCodeWidget
from decimal import Decimal
import platform
import httplib
import socket
import webbrowser
import csv
# Pick a platform-appropriate fixed-width font family for list views.
_MONOSPACE_BY_PLATFORM = {
    'Windows': 'Lucida Console',
    'Darwin': 'Monaco',
}
MONOSPACE_FONT = _MONOSPACE_BY_PLATFORM.get(platform.system(), 'monospace')
from electrum_vior import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
    """Small flat icon-only button used in the status bar.

    Parameters:
        icon: QIcon displayed on the button.
        tooltip: hover text describing the action.
        func: zero-argument callable invoked on click or on Return.
    """
    def __init__(self, icon, tooltip, func):
        QPushButton.__init__(self, icon, '')
        self.setToolTip(tooltip)
        self.setFlat(True)
        self.setMaximumWidth(25)
        self.clicked.connect(func)
        self.func = func
        self.setIconSize(QSize(25,25))

    def keyPressEvent(self, e):
        # Activate on Return; other keys are intentionally ignored here.
        # NOTE: the deprecated builtin apply(self.func, ()) was replaced by
        # a direct call -- behavior is identical.
        if e.key() == QtCore.Qt.Key_Return:
            self.func()
default_column_widths = { "history":[40,140,350,140], "contacts":[350,330], "receive": [370,200,130] }
class ElectrumWindow(QMainWindow):
    def __init__(self, config, network, gui_object):
        """Create the main window.

        Builds the tab widget (History/Send/Receive/Contacts/Console),
        status bar, menu bar, keyboard shortcuts and the Qt signal /
        network-callback plumbing.  The wallet itself is attached later
        via load_wallet().
        """
        QMainWindow.__init__(self)
        self.config = config
        self.network = network
        self.gui_object = gui_object
        self.tray = gui_object.tray
        self.go_lite = gui_object.go_lite
        self.lite = None
        self.create_status_bar()
        # Set from network-thread callbacks; polled by timer_actions() so
        # GUI refreshes happen on the Qt thread.
        self.need_update = threading.Event()
        self.decimal_point = config.get('decimal_point', 8)
        self.num_zeros = int(config.get('num_zeros',0))
        set_language(config.get('language'))
        self.funds_error = False
        self.completions = QStringListModel()
        self.tabs = tabs = QTabWidget(self)
        self.column_widths = self.config.get("column_widths_2", default_column_widths )
        tabs.addTab(self.create_history_tab(), _('History') )
        tabs.addTab(self.create_send_tab(), _('Send') )
        tabs.addTab(self.create_receive_tab(), _('Receive') )
        tabs.addTab(self.create_contacts_tab(), _('Contacts') )
        tabs.addTab(self.create_console_tab(), _('Console') )
        tabs.setMinimumSize(600, 400)
        tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        self.setCentralWidget(tabs)
        # Restore window geometry saved from the previous session.
        g = self.config.get("winpos-qt",[100, 100, 840, 400])
        self.setGeometry(g[0], g[1], g[2], g[3])
        if self.config.get("is_maximized"):
            self.showMaximized()
        self.setWindowIcon(QIcon(":icons/electrum-vior.png"))
        self.init_menubar()
        QShortcut(QKeySequence("Ctrl+W"), self, self.close)
        QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
        QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
        QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() - 1 )%tabs.count() ))
        QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() + 1 )%tabs.count() ))
        # Alt+1..Alt+N jump directly to each tab (i=i binds the loop
        # variable to avoid the late-binding closure pitfall).
        for i in range(tabs.count()):
            QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: tabs.setCurrentIndex(i))
        # Qt signals used to marshal work from network threads onto the GUI thread.
        self.connect(self, QtCore.SIGNAL('update_status'), self.update_status)
        self.connect(self, QtCore.SIGNAL('banner_signal'), lambda: self.console.showMessage(self.network.banner) )
        self.connect(self, QtCore.SIGNAL('transaction_signal'), lambda: self.notify_transactions() )
        self.connect(self, QtCore.SIGNAL('send_tx2'), self.send_tx2)
        self.connect(self, QtCore.SIGNAL('send_tx3'), self.send_tx3)
        self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok)
        self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error)
        self.history_list.setFocus(True)
        # network callbacks
        if self.network:
            self.network.register_callback('updated', lambda: self.need_update.set())
            self.network.register_callback('banner', lambda: self.emit(QtCore.SIGNAL('banner_signal')))
            self.network.register_callback('disconnected', lambda: self.emit(QtCore.SIGNAL('update_status')))
            self.network.register_callback('disconnecting', lambda: self.emit(QtCore.SIGNAL('update_status')))
            self.network.register_callback('new_transaction', lambda: self.emit(QtCore.SIGNAL('transaction_signal')))
            # set initial message
            self.console.showMessage(self.network.banner)
        self.wallet = None
def update_account_selector(self):
# account selector
accounts = self.wallet.get_account_names()
self.account_selector.clear()
if len(accounts) > 1:
self.account_selector.addItems([_("All accounts")] + accounts.values())
self.account_selector.setCurrentIndex(0)
self.account_selector.show()
else:
self.account_selector.hide()
    def load_wallet(self, wallet):
        """Attach *wallet* to this window and refresh all views and menus.

        Called once at startup and again whenever another wallet file is
        opened or created.
        """
        import electrum_vior as electrum
        self.wallet = wallet
        self.accounts_expanded = self.wallet.storage.get('accounts_expanded',{})
        self.current_account = self.wallet.storage.get("current_account", None)
        title = 'Electrum-VIOR ' + self.wallet.electrum_version + ' - ' + self.wallet.storage.path
        if self.wallet.is_watching_only(): title += ' [%s]' % (_('watching only'))
        self.setWindowTitle( title )
        self.update_wallet()
        # Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
        self.notify_transactions()
        self.update_account_selector()
        # update menus: enable/disable actions according to the wallet's
        # capabilities (watching-only, deterministic, importable, ...).
        self.new_account_menu.setEnabled(self.wallet.can_create_accounts())
        self.private_keys_menu.setEnabled(not self.wallet.is_watching_only())
        self.password_menu.setEnabled(not self.wallet.is_watching_only())
        self.seed_menu.setEnabled(self.wallet.has_seed())
        self.mpk_menu.setEnabled(self.wallet.is_deterministic())
        self.import_menu.setEnabled(self.wallet.can_import())
        self.update_lock_icon()
        self.update_buttons_on_seed()
        self.update_console()
        run_hook('load_wallet', wallet)
    def open_wallet(self):
        """Prompt for an existing wallet file and switch this window to it."""
        wallet_folder = self.wallet.storage.path
        filename = unicode( QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder) )
        if not filename:
            return
        storage = WalletStorage({'wallet_path': filename})
        if not storage.file_exists:
            self.show_message("file not found "+ filename)
            return
        # Stop the old wallet's background threads before replacing it.
        self.wallet.stop_threads()
        # create new wallet
        wallet = Wallet(storage)
        wallet.start_threads(self.network)
        self.load_wallet(wallet)
    def backup_wallet(self):
        """Copy the current wallet file to a user-chosen filename.

        The copy is written into the wallet's own folder; copying onto the
        original path is silently skipped.
        """
        import shutil
        path = self.wallet.storage.path
        wallet_folder = os.path.dirname(path)
        filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
        if not filename:
            return
        new_path = os.path.join(wallet_folder, filename)
        if new_path != path:
            try:
                # copy2 preserves the original file's metadata (timestamps).
                shutil.copy2(path, new_path)
                QMessageBox.information(None,"Wallet backup created", _("A copy of your wallet file was created in")+" '%s'" % str(new_path))
            except (IOError, os.error), reason:
                QMessageBox.critical(None,"Unable to create backup", _("Electrum was unable to copy your wallet file to the specified location.")+"\n" + str(reason))
    def new_wallet(self):
        """Create (or restore) a wallet in a new file via the install wizard."""
        import installwizard
        wallet_folder = os.path.dirname(self.wallet.storage.path)
        filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a new file name'), wallet_folder) )
        if not filename:
            return
        filename = os.path.join(wallet_folder, filename)
        storage = WalletStorage({'wallet_path': filename})
        if storage.file_exists:
            QMessageBox.critical(None, "Error", _("File exists"))
            return
        wizard = installwizard.InstallWizard(self.config, self.network, storage)
        wallet = wizard.run('new')
        # The wizard returns a falsy value when the user cancels.
        if wallet:
            self.load_wallet(wallet)
    def init_menubar(self):
        """Build the File/Wallet/Tools/Help menu bar.

        Actions whose availability depends on the loaded wallet
        (new_account_menu, password_menu, seed_menu, mpk_menu,
        private_keys_menu, import_menu) are stored on self so that
        load_wallet() can enable/disable them.
        """
        menubar = QMenuBar()
        file_menu = menubar.addMenu(_("&File"))
        file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
        file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
        file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
        file_menu.addAction(_("&Quit"), self.close)
        wallet_menu = menubar.addMenu(_("&Wallet"))
        wallet_menu.addAction(_("&New contact"), self.new_contact_dialog)
        self.new_account_menu = wallet_menu.addAction(_("&New account"), self.new_account_dialog)
        wallet_menu.addSeparator()
        self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
        self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
        self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys)
        wallet_menu.addSeparator()
        labels_menu = wallet_menu.addMenu(_("&Labels"))
        labels_menu.addAction(_("&Import"), self.do_import_labels)
        labels_menu.addAction(_("&Export"), self.do_export_labels)
        self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
        self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
        self.import_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
        self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
        wallet_menu.addAction(_("&Export History"), self.export_history_dialog)
        tools_menu = menubar.addMenu(_("&Tools"))
        # Settings / Preferences are all reserved keywords in OSX using this as work around
        tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
        tools_menu.addAction(_("&Network"), self.run_network_dialog)
        tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
        tools_menu.addSeparator()
        tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
        #tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
        tools_menu.addSeparator()
        csv_transaction_menu = tools_menu.addMenu(_("&Create transaction"))
        csv_transaction_menu.addAction(_("&From CSV file"), self.do_process_from_csv_file)
        csv_transaction_menu.addAction(_("&From CSV text"), self.do_process_from_csv_text)
        raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
        raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
        raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
        raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
        help_menu = menubar.addMenu(_("&Help"))
        help_menu.addAction(_("&About"), self.show_about)
        help_menu.addAction(_("&Official website"), lambda: webbrowser.open("http://electrum-vior.org"))
        help_menu.addSeparator()
        help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://electrum-vior.org/documentation.html")).setShortcut(QKeySequence.HelpContents)
        help_menu.addAction(_("&Report Bug"), self.show_report_bug)
        self.setMenuBar(menubar)
    def show_about(self):
        """Display the About dialog with the wallet version and blurb."""
        QMessageBox.about(self, "Electrum-VIOR",
            _("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("Electrum's focus is speed, with low resource usage and simplifying ViorCoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the ViorCoin system."))
    def show_report_bug(self):
        """Point the user at the github issue tracker."""
        QMessageBox.information(self, "Electrum-VIOR - " + _("Reporting Bugs"),
            _("Please report any bugs as issues on github:")+" <a href=\"https://github.com/pooler/electrum-vior/issues\">https://github.com/pooler/electrum-vior/issues</a>")
    def notify_transactions(self):
        """Show tray notifications for pending incoming transactions.

        Three or more pending transactions are summarized into a single
        message; fewer are announced individually.  Only positive
        (received) amounts are reported.  Consumes
        self.network.pending_transactions_for_notifications.
        """
        if not self.network or not self.network.is_connected():
            return
        print_error("Notifying GUI")
        if len(self.network.pending_transactions_for_notifications) > 0:
            # Combine the transactions if there are more then three
            tx_amount = len(self.network.pending_transactions_for_notifications)
            if(tx_amount >= 3):
                total_amount = 0
                for tx in self.network.pending_transactions_for_notifications:
                    is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                    if(v > 0):
                        total_amount += v
                self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s %(unit)s") \
                            % { 'txs' : tx_amount, 'amount' : self.format_amount(total_amount), 'unit' : self.base_unit()})
                self.network.pending_transactions_for_notifications = []
            else:
                # NOTE(review): removes items from the list while iterating
                # it -- works here because each removal is paired with the
                # current element, but be careful when modifying.
                for tx in self.network.pending_transactions_for_notifications:
                    if tx:
                        self.network.pending_transactions_for_notifications.remove(tx)
                        is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                        if(v > 0):
                            self.notify(_("New transaction received. %(amount)s %(unit)s") % { 'amount' : self.format_amount(v), 'unit' : self.base_unit()})
    def notify(self, message):
        """Show *message* as a system-tray balloon (20 second timeout)."""
        self.tray.showMessage("Electrum-VIOR", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
path = os.path.join( directory, filename )
fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
    def close(self):
        """Close the main window, then let plugins clean up."""
        QMainWindow.close(self)
        run_hook('close_main_window')
    def connect_slots(self, sender):
        """Connect the gui object's periodic 'timersignal' to timer_actions."""
        self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
        self.previous_payto_e=''
    def timer_actions(self):
        """Periodic GUI tick: refresh views when network threads flagged new
        data (need_update), then run plugin timer hooks."""
        if self.need_update.is_set():
            self.update_wallet()
            self.need_update.clear()
        run_hook('timer_actions')
    def format_amount(self, x, is_diff=False, whitespaces=False):
        """Format an integer amount for display, delegating to
        format_satoshis with this window's num_zeros/decimal_point settings."""
        return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)
def read_amount(self, x):
if x in['.', '']: return None
p = pow(10, self.decimal_point)
return int( p * Decimal(x) )
def base_unit(self):
assert self.decimal_point in [5,8]
return "VIOR" if self.decimal_point == 8 else "mVIOR"
    def update_status(self):
        """Refresh the status-bar text and icon from network/wallet state.

        Order of precedence: offline, synchronizing, server lagging, then
        the normal balance display (with optional unconfirmed amount and
        plugin-provided fiat quote).
        """
        if self.network is None or not self.network.is_running():
            text = _("Offline")
            icon = QIcon(":icons/status_disconnected.png")
        elif self.network.is_connected():
            if not self.wallet.up_to_date:
                text = _("Synchronizing...")
                icon = QIcon(":icons/status_waiting.png")
            elif self.network.server_lag > 1:
                text = _("Server is lagging (%d blocks)"%self.network.server_lag)
                icon = QIcon(":icons/status_lagging.png")
            else:
                c, u = self.wallet.get_account_balance(self.current_account)
                text = _( "Balance" ) + ": %s "%( self.format_amount(c) ) + self.base_unit()
                if u: text += " [%s unconfirmed]"%( self.format_amount(u,True).strip() )
                # append fiat balance and price from exchange rate plugin
                r = {}
                run_hook('get_fiat_status_text', c+u, r)
                quote = r.get(0)
                if quote:
                    text += "%s"%quote
                self.tray.setToolTip(text)
                icon = QIcon(":icons/status_connected.png")
        else:
            text = _("Not connected")
            icon = QIcon(":icons/status_disconnected.png")
        self.balance_label.setText(text)
        self.status_button.setIcon( icon )
    def update_wallet(self):
        """Refresh the status bar; when synchronized (or offline), refresh
        all the tab views as well."""
        self.update_status()
        if self.wallet.up_to_date or not self.network or not self.network.is_connected():
            self.update_history_tab()
            self.update_receive_tab()
            self.update_contacts_tab()
            self.update_completions()
    def create_history_tab(self):
        """Build the History tab: a 5-column tree widget with inline label
        editing and a custom right-click menu.  Returns the widget."""
        self.history_list = l = MyTreeWidget(self)
        l.setColumnCount(5)
        for i,width in enumerate(self.column_widths['history']):
            l.setColumnWidth(i, width)
        l.setHeaderLabels( [ '', _('Date'), _('Description') , _('Amount'), _('Balance')] )
        self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), self.tx_label_clicked)
        self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), self.tx_label_changed)
        l.customContextMenuRequested.connect(self.create_history_menu)
        return l
def create_history_menu(self, position):
self.history_list.selectedIndexes()
item = self.history_list.currentItem()
be = self.config.get('block_explorer', 'explorer.viorcoin.net')
if be == 'explorer.viorcoin.net':
block_explorer = 'http://explorer.viorcoin.net/tx/'
elif be == 'block-explorer.com':
block_explorer = 'http://block-explorer.com/tx/'
elif be == 'Blockr.io':
block_explorer = 'https://vior.blockr.io/tx/info/'
if not item: return
tx_hash = str(item.data(0, Qt.UserRole).toString())
if not tx_hash: return
menu = QMenu()
menu.addAction(_("Copy ID to Clipboard"), lambda: self.app.clipboard().setText(tx_hash))
menu.addAction(_("Details"), lambda: self.show_transaction(self.wallet.transactions.get(tx_hash)))
menu.addAction(_("Edit description"), lambda: self.tx_label_clicked(item,2))
menu.addAction(_("View on block explorer"), lambda: webbrowser.open(block_explorer + tx_hash))
menu.exec_(self.contacts_list.viewport().mapToGlobal(position))
    def show_transaction(self, tx):
        """Open the modal transaction-details dialog for *tx*."""
        import transaction_dialog
        d = transaction_dialog.TxDialog(tx, self)
        d.exec_()
    def tx_label_clicked(self, item, column):
        """Start inline editing of a history row's description (column 2).

        self.is_edit guards tx_label_changed() against reacting to the
        programmatic flag changes made here.
        """
        if column==2 and item.isSelected():
            self.is_edit=True
            item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
            self.history_list.editItem( item, column )
            item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
            self.is_edit=False
    def tx_label_changed(self, item, column):
        """Persist an edited history label into the wallet.

        Skipped while tx_label_clicked() is mutating item flags (is_edit).
        An emptied label falls back to the wallet's default label, which is
        displayed in gray.
        """
        if self.is_edit:
            return
        self.is_edit=True
        tx_hash = str(item.data(0, Qt.UserRole).toString())
        tx = self.wallet.transactions.get(tx_hash)
        text = unicode( item.text(2) )
        self.wallet.set_label(tx_hash, text)
        if text:
            item.setForeground(2, QBrush(QColor('black')))
        else:
            text = self.wallet.get_default_label(tx_hash)
            item.setText(2, text)
            item.setForeground(2, QBrush(QColor('gray')))
        self.is_edit=False
    def edit_label(self, is_recv):
        """Begin inline editing of the label column (1) of the current item
        in the receive list (is_recv=True) or the contacts list."""
        l = self.receive_list if is_recv else self.contacts_list
        item = l.currentItem()
        # Items are not editable by default; toggle the flag around editItem().
        item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        l.editItem( item, 1 )
        item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
def address_label_clicked(self, item, column, l, column_addr, column_label):
if column == column_label and item.isSelected():
is_editable = item.data(0, 32).toBool()
if not is_editable:
return
addr = unicode( item.text(column_addr) )
label = unicode( item.text(column_label) )
item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
l.editItem( item, column )
item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
    def address_label_changed(self, item, column, l, column_addr, column_label):
        """Save an edited address label to the wallet and refresh the views
        that display it; plugins are notified via 'item_changed'."""
        if column == column_label:
            addr = unicode( item.text(column_addr) )
            text = unicode( item.text(column_label) )
            # Role 32 marks whether this row's label may be edited at all.
            is_editable = item.data(0, 32).toBool()
            if not is_editable:
                return
            changed = self.wallet.set_label(addr, text)
            if changed:
                self.update_history_tab()
                self.update_completions()
            self.current_item_changed(item)
        run_hook('item_changed', item, column)
    def current_item_changed(self, a):
        """Forward list-selection changes to plugins."""
        run_hook('current_item_changed', a)
    def update_history_tab(self):
        """Rebuild the history list from the wallet's transaction history.

        Each row shows: confirmation icon, timestamp, label, signed amount
        and running balance.  Negative amounts are red; default (derived)
        labels are gray.  Rows are inserted at the top so the newest
        transaction ends up first.
        """
        self.history_list.clear()
        for item in self.wallet.get_tx_history(self.current_account):
            tx_hash, conf, is_mine, value, fee, balance, timestamp = item
            time_str = _("unknown")
            if conf > 0:
                try:
                    time_str = datetime.datetime.fromtimestamp( timestamp).isoformat(' ')[:-3]
                except Exception:
                    time_str = _("error")
            # Icon encodes confirmation state: -1 unverified, 0 pending,
            # 1-5 clock icons, 6+ confirmed.
            if conf == -1:
                time_str = 'unverified'
                icon = QIcon(":icons/unconfirmed.png")
            elif conf == 0:
                time_str = 'pending'
                icon = QIcon(":icons/unconfirmed.png")
            elif conf < 6:
                icon = QIcon(":icons/clock%d.png"%conf)
            else:
                icon = QIcon(":icons/confirmed.png")
            if value is not None:
                v_str = self.format_amount(value, True, whitespaces=True)
            else:
                v_str = '--'
            balance_str = self.format_amount(balance, whitespaces=True)
            if tx_hash:
                label, is_default_label = self.wallet.get_label(tx_hash)
            else:
                label = _('Pruned transaction outputs')
                is_default_label = False
            item = QTreeWidgetItem( [ '', time_str, label, v_str, balance_str] )
            item.setFont(2, QFont(MONOSPACE_FONT))
            item.setFont(3, QFont(MONOSPACE_FONT))
            item.setFont(4, QFont(MONOSPACE_FONT))
            if value < 0:
                item.setForeground(3, QBrush(QColor("#BC1E1E")))
            if tx_hash:
                item.setData(0, Qt.UserRole, tx_hash)
                item.setToolTip(0, "%d %s\nTxId:%s" % (conf, _('Confirmations'), tx_hash) )
            if is_default_label:
                item.setForeground(2, QBrush(QColor('grey')))
            item.setIcon(0, icon)
            self.history_list.insertTopLevelItem(0,item)
        self.history_list.setCurrentItem(self.history_list.topLevelItem(0))
        run_hook('history_tab_update')
    def create_send_tab(self):
        """Build and return the 'Send' tab widget.

        Creates the pay-to / description / amount / fee fields, wires the
        live fee-estimation callback, and lets plugins extend the grid via
        hooks.  Returns the container widget.
        """
        w = QWidget()
        grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnMinimumWidth(3,300)
        grid.setColumnStretch(5,1)
        self.payto_e = QLineEdit()
        grid.addWidget(QLabel(_('Pay to')), 1, 0)
        grid.addWidget(self.payto_e, 1, 1, 1, 3)
        grid.addWidget(HelpButton(_('Recipient of the funds.') + '\n\n' + _('You may enter a ViorCoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a ViorCoin address)')), 1, 4)
        # completer model is filled by update_completions()
        completer = QCompleter()
        completer.setCaseSensitivity(False)
        self.payto_e.setCompleter(completer)
        completer.setModel(self.completions)
        self.message_e = QLineEdit()
        grid.addWidget(QLabel(_('Description')), 2, 0)
        grid.addWidget(self.message_e, 2, 1, 1, 3)
        grid.addWidget(HelpButton(_('Description of the transaction (not mandatory).') + '\n\n' + _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')), 2, 4)
        # "From" list is hidden unless set_pay_from() selects explicit sources
        self.from_label = QLabel(_('From'))
        grid.addWidget(self.from_label, 3, 0)
        self.from_list = QTreeWidget(self)
        self.from_list.setColumnCount(2)
        self.from_list.setColumnWidth(0, 350)
        self.from_list.setColumnWidth(1, 50)
        self.from_list.setHeaderHidden (True)
        self.from_list.setMaximumHeight(80)
        grid.addWidget(self.from_list, 3, 1, 1, 3)
        self.set_pay_from([])
        self.amount_e = AmountEdit(self.base_unit)
        grid.addWidget(QLabel(_('Amount')), 4, 0)
        grid.addWidget(self.amount_e, 4, 1, 1, 2)
        grid.addWidget(HelpButton(
                _('Amount to be sent.') + '\n\n' \
                    + _('The amount will be displayed in red if you do not have enough funds in your wallet. Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') \
                    + '\n\n' + _('Keyboard shortcut: type "!" to send all your coins.')), 4, 3)
        self.fee_e = AmountEdit(self.base_unit)
        grid.addWidget(QLabel(_('Fee')), 5, 0)
        grid.addWidget(self.fee_e, 5, 1, 1, 2)
        grid.addWidget(HelpButton(
                _('ViorCoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
                    + _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
                    + _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')), 5, 3)
        run_hook('exchange_rate_button', grid)
        self.send_button = EnterButton(_("Send"), self.do_send)
        grid.addWidget(self.send_button, 6, 1)
        b = EnterButton(_("Clear"),self.do_clear)
        grid.addWidget(b, 6, 2)
        self.payto_sig = QLabel('')
        grid.addWidget(self.payto_sig, 7, 0, 1, 4)
        QShortcut(QKeySequence("Up"), w, w.focusPreviousChild)
        QShortcut(QKeySequence("Down"), w, w.focusNextChild)
        w.setLayout(grid)
        w2 = QWidget()
        vbox = QVBoxLayout()
        vbox.addWidget(w)
        vbox.addStretch(1)
        w2.setLayout(vbox)
        def entry_changed( is_fee ):
            """Re-estimate fee / validate funds whenever amount or fee edits change."""
            self.funds_error = False
            if self.amount_e.is_shortcut:
                # "!" shortcut: spend the entire sendable balance
                self.amount_e.is_shortcut = False
                sendable = self.get_sendable_balance()
                # there is only one output because we are completely spending inputs
                inputs, total, fee = self.wallet.choose_tx_inputs( sendable, 0, 1, self.get_payment_sources())
                fee = self.wallet.estimated_fee(inputs, 1)
                amount = total - fee
                self.amount_e.setText( self.format_amount(amount) )
                self.fee_e.setText( self.format_amount( fee ) )
                return
            amount = self.read_amount(str(self.amount_e.text()))
            fee = self.read_amount(str(self.fee_e.text()))
            if not is_fee: fee = None
            if amount is None:
                return
            # assume that there will be 2 outputs (one for change)
            inputs, total, fee = self.wallet.choose_tx_inputs(amount, fee, 2, self.get_payment_sources())
            if not is_fee:
                self.fee_e.setText( self.format_amount( fee ) )
            if inputs:
                palette = QPalette()
                palette.setColor(self.amount_e.foregroundRole(), QColor('black'))
                text = ""
            else:
                # not enough (unfrozen) funds: paint red and warn in status bar
                palette = QPalette()
                palette.setColor(self.amount_e.foregroundRole(), QColor('red'))
                self.funds_error = True
                text = _( "Not enough funds" )
                c, u = self.wallet.get_frozen_balance()
                if c+u: text += ' (' + self.format_amount(c+u).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
            self.statusBar().showMessage(text)
            self.amount_e.setPalette(palette)
            self.fee_e.setPalette(palette)
        self.amount_e.textChanged.connect(lambda: entry_changed(False) )
        self.fee_e.textChanged.connect(lambda: entry_changed(True) )
        run_hook('create_send_tab', grid)
        return w2
    def set_pay_from(self, l):
        """Set the explicit list of source addresses for the Send tab.

        An empty list hides the 'From' widgets and reverts to spending from
        the whole current account (see get_payment_sources).
        """
        self.pay_from = l
        self.from_list.clear()
        self.from_label.setHidden(len(self.pay_from) == 0)
        self.from_list.setHidden(len(self.pay_from) == 0)
        for addr in self.pay_from:
            c, u = self.wallet.get_addr_balance(addr)
            balance = self.format_amount(c + u)
            self.from_list.addTopLevelItem(QTreeWidgetItem( [addr, balance] ))
    def update_completions(self):
        """Refresh the pay-to completer with 'label <address>' entries for contacts."""
        l = []
        for addr,label in self.wallet.labels.items():
            if addr in self.wallet.addressbook:
                l.append( label + ' <' + addr + '>')
        run_hook('update_completions', l)
        self.completions.setStringList(l)
def protected(func):
return lambda s, *args: s.do_protect(func, args)
    def do_send(self):
        """Validate the Send-tab fields and hand off to send_tx.

        Outputs come either from a BIP 70 payment request (if one is active)
        or from the pay-to/amount fields.  Large amounts and unusually high
        fees require user confirmation.
        """
        label = unicode( self.message_e.text() )
        if self.gui_object.payment_request:
            outputs = self.gui_object.payment_request.outputs
            amount = self.gui_object.payment_request.get_amount()
        else:
            r = unicode( self.payto_e.text() )
            r = r.strip()
            # label or alias, with address in brackets
            m = re.match('(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>', r)
            to_address = m.group(2) if m else r
            if not is_valid(to_address):
                QMessageBox.warning(self, _('Error'), _('Invalid ViorCoin Address') + ':\n' + to_address, _('OK'))
                return
            try:
                # NOTE(review): read_amount may return None here without an
                # explicit check; Python 2's None comparisons below do not
                # raise -- TODO confirm upstream behavior before changing.
                amount = self.read_amount(unicode( self.amount_e.text()))
            except Exception:
                QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
                return
            outputs = [(to_address, amount)]
        try:
            fee = self.read_amount(unicode( self.fee_e.text()))
        except Exception:
            QMessageBox.warning(self, _('Error'), _('Invalid Fee'), _('OK'))
            return
        confirm_amount = self.config.get('confirm_amount', 100000000)
        if amount >= confirm_amount:
            if not self.question(_("send %(amount)s to %(address)s?")%{ 'amount' : self.format_amount(amount) + ' '+ self.base_unit(), 'address' : to_address}):
                return
        confirm_fee = self.config.get('confirm_fee', 1000000)
        if fee >= confirm_fee:
            if not self.question(_("The fee for this transaction seems unusually high.\nAre you really sure you want to pay %(fee)s in fees?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
                return
        self.send_tx(outputs, fee, label)
    def waiting_dialog(self, message):
        """Show a simple non-modal 'please wait' dialog and return it.

        The caller is responsible for closing it (e.g. via accept()).
        """
        d = QDialog(self)
        d.setWindowTitle('Please wait')
        l = QLabel(message)
        vbox = QVBoxLayout(d)
        vbox.addWidget(l)
        d.show()
        return d
    @protected
    def send_tx(self, outputs, fee, label, password):
        """Create an unsigned transaction and sign it on a worker thread.

        On completion the thread stores (tx, fee, label) in
        self.signed_tx_data and emits 'send_tx2', which continues on the
        GUI thread.  The password argument is injected by @protected.
        """
        # first, create an unsigned tx
        domain = self.get_payment_sources()
        try:
            tx = self.wallet.make_unsigned_transaction(outputs, fee, None, domain)
            tx.error = None
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            self.show_message(str(e))
            return
        # call hook to see if plugin needs gui interaction
        run_hook('send_tx', tx)
        # sign the tx
        def sign_thread():
            """Worker: sign the tx off the GUI thread, then signal send_tx2."""
            time.sleep(0.1)
            keypairs = {}
            try:
                self.wallet.add_keypairs_from_wallet(tx, keypairs, password)
                self.wallet.sign_transaction(tx, keypairs, password)
            except Exception as e:
                # errors are reported back via tx.error, not raised
                tx.error = str(e)
            self.signed_tx_data = (tx, fee, label)
            self.emit(SIGNAL('send_tx2'))
        self.tx_wait_dialog = self.waiting_dialog('Signing..')
        threading.Thread(target=sign_thread).start()
    def send_tx2(self):
        """GUI-thread continuation after signing: validate, then broadcast.

        Checks the signing error and minimum required fee, saves the label,
        and either shows the tx (incomplete / preview mode) or broadcasts it
        on a worker thread that signals 'send_tx3' when done.
        """
        tx, fee, label = self.signed_tx_data
        self.tx_wait_dialog.accept()
        if tx.error:
            self.show_message(tx.error)
            return
        if fee < tx.required_fee(self.wallet.verifier):
            QMessageBox.warning(self, _('Error'), _("This transaction requires a higher fee, or it will not be propagated by the network."), _('OK'))
            return
        if label:
            self.wallet.set_label(tx.hash(), label)
        if not tx.is_complete() or self.config.get('show_before_broadcast'):
            self.show_transaction(tx)
            return
        def broadcast_thread():
            """Worker: ack any BIP 70 request, broadcast, then signal send_tx3."""
            if self.gui_object.payment_request:
                print "sending ack"
                refund_address = self.wallet.addresses()[0]
                self.gui_object.payment_request.send_ack(str(tx), refund_address)
                self.gui_object.payment_request = None
            # note: BIP 70 recommends not broadcasting the tx to the network and letting the merchant do that
            self.tx_broadcast_result =  self.wallet.sendtx(tx)
            self.emit(SIGNAL('send_tx3'))
        self.tx_broadcast_dialog = self.waiting_dialog('Broadcasting..')
        threading.Thread(target=broadcast_thread).start()
    def send_tx3(self):
        """GUI-thread continuation after broadcast: report result to the user."""
        self.tx_broadcast_dialog.accept()
        status, msg = self.tx_broadcast_result
        if status:
            QMessageBox.information(self, '', _('Payment sent.') + '\n' + msg, _('OK'))
            self.do_clear()
        else:
            QMessageBox.warning(self, _('Error'), msg, _('OK'))
    def prepare_for_payment_request(self):
        """Switch to the Send tab in read-only mode while a payment request loads."""
        style = "QWidget { background-color:none;border:none;}"
        self.tabs.setCurrentIndex(1)
        self.payto_e.setReadOnly(True)
        self.payto_e.setStyleSheet(style)
        self.amount_e.setReadOnly(True)
        self.payto_e.setText(_("please wait..."))
        self.amount_e.setStyleSheet(style)
        return True
    def payment_request_ok(self):
        """Fill the Send fields from a successfully fetched payment request."""
        self.payto_e.setText(self.gui_object.payment_request.domain)
        self.amount_e.setText(self.format_amount(self.gui_object.payment_request.get_amount()))
    def payment_request_error(self):
        """Show the payment request's error text in the pay-to field."""
        self.payto_e.setText(self.gui_object.payment_request.error)
    def set_send(self, address, amount, label, message):
        """Pre-fill the Send tab (e.g. from a viorcoin: URI).

        Optionally offers to save the supplied label for the address,
        adding it to the address book if it is not the user's own.
        """
        if label and self.wallet.labels.get(address) != label:
            if self.question('Give label "%s" to address %s ?'%(label,address)):
                if address not in self.wallet.addressbook and not self.wallet.is_mine(address):
                    self.wallet.addressbook.append(address)
                self.wallet.set_label(address, label)
        self.tabs.setCurrentIndex(1)
        label = self.wallet.labels.get(address)
        m_addr = label + '  <'+ address +'>' if label else address
        self.payto_e.setText(m_addr)
        self.message_e.setText(message)
        if amount:
            self.amount_e.setText(amount)
    def do_clear(self):
        """Reset the Send tab: clear fields, unfreeze them, drop explicit sources."""
        self.payto_sig.setVisible(False)
        for e in [self.payto_e, self.message_e, self.amount_e, self.fee_e]:
            e.setText('')
            self.set_frozen(e,False)
            e.setStyleSheet("")
        self.set_pay_from([])
        self.update_status()
    def set_frozen(self,entry,frozen):
        """Toggle a line-edit between read-only (grey) and editable (white)."""
        if frozen:
            entry.setReadOnly(True)
            entry.setFrame(False)
            palette = QPalette()
            palette.setColor(entry.backgroundRole(), QColor('lightgray'))
            entry.setPalette(palette)
        else:
            entry.setReadOnly(False)
            entry.setFrame(True)
            palette = QPalette()
            palette.setColor(entry.backgroundRole(), QColor('white'))
            entry.setPalette(palette)
    def set_addrs_frozen(self,addrs,freeze):
        """Freeze or unfreeze wallet addresses, then refresh the receive tab.

        Only toggles addresses whose state actually differs from *freeze*.
        """
        for addr in addrs:
            if not addr: continue
            if addr in self.wallet.frozen_addresses and not freeze:
                self.wallet.unfreeze(addr)
            elif addr not in self.wallet.frozen_addresses and freeze:
                self.wallet.freeze(addr)
        self.update_receive_tab()
    def create_list_tab(self, headers):
        "generic tab creation method: returns (tree widget, container, button hbox)"
        l = MyTreeWidget(self)
        l.setColumnCount( len(headers) )
        l.setHeaderLabels( headers )
        w = QWidget()
        vbox = QVBoxLayout()
        w.setLayout(vbox)
        vbox.setMargin(0)
        vbox.setSpacing(0)
        vbox.addWidget(l)
        buttons = QWidget()
        vbox.addWidget(buttons)
        hbox = QHBoxLayout()
        hbox.setMargin(0)
        hbox.setSpacing(0)
        buttons.setLayout(hbox)
        return l,w,hbox
    def create_receive_tab(self):
        """Build the 'Receive' (addresses) tab and wire its signals."""
        l,w,hbox = self.create_list_tab([ _('Address'), _('Label'), _('Balance'), _('Tx')])
        l.setContextMenuPolicy(Qt.CustomContextMenu)
        l.customContextMenuRequested.connect(self.create_receive_menu)
        l.setSelectionMode(QAbstractItemView.ExtendedSelection)
        # columns 0/1 = address/label; double-click edits, edits persist via address_label_changed
        self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), lambda a, b: self.address_label_clicked(a,b,l,0,1))
        self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), lambda a,b: self.address_label_changed(a,b,l,0,1))
        self.connect(l, SIGNAL('currentItemChanged(QTreeWidgetItem*, QTreeWidgetItem*)'), lambda a,b: self.current_item_changed(a))
        self.receive_list = l
        self.receive_buttons_hbox = hbox
        hbox.addStretch(1)
        return w
    def save_column_widths(self):
        """Persist column widths of the three list tabs into the config.

        The last column of each list is intentionally skipped (it stretches).
        """
        self.column_widths["receive"] = []
        for i in range(self.receive_list.columnCount() -1):
            self.column_widths["receive"].append(self.receive_list.columnWidth(i))
        self.column_widths["history"] = []
        for i in range(self.history_list.columnCount() - 1):
            self.column_widths["history"].append(self.history_list.columnWidth(i))
        self.column_widths["contacts"] = []
        for i in range(self.contacts_list.columnCount() - 1):
            self.column_widths["contacts"].append(self.contacts_list.columnWidth(i))
        self.config.set_key("column_widths_2", self.column_widths, True)
    def create_contacts_tab(self):
        """Build the 'Contacts' tab, restore column widths, and wire signals."""
        l,w,hbox = self.create_list_tab([_('Address'), _('Label'), _('Tx')])
        l.setContextMenuPolicy(Qt.CustomContextMenu)
        l.customContextMenuRequested.connect(self.create_contact_menu)
        for i,width in enumerate(self.column_widths['contacts']):
            l.setColumnWidth(i, width)
        self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), lambda a, b: self.address_label_clicked(a,b,l,0,1))
        self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), lambda a,b: self.address_label_changed(a,b,l,0,1))
        self.contacts_list = l
        self.contacts_buttons_hbox = hbox
        hbox.addStretch(1)
        return w
    def delete_imported_key(self, addr):
        """After confirmation, remove an imported key and refresh affected tabs."""
        if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
            self.wallet.delete_imported_key(addr)
            self.update_receive_tab()
            self.update_history_tab()
    def edit_account_label(self, k):
        """Prompt for a new name for account *k* and save it as its label."""
        text, ok = QInputDialog.getText(self, _('Rename account'), _('Name') + ':', text = self.wallet.labels.get(k,''))
        if ok:
            label = unicode(text)
            self.wallet.set_label(k,label)
            self.update_receive_tab()
    def account_set_expanded(self, item, k, b):
        """Expand/collapse an account row and remember the state for account *k*."""
        item.setExpanded(b)
        self.accounts_expanded[k] = b
    def create_account_menu(self, position, k, item):
        """Context menu for an account header row in the receive list."""
        menu = QMenu()
        if item.isExpanded():
            menu.addAction(_("Minimize"), lambda: self.account_set_expanded(item, k, False))
        else:
            menu.addAction(_("Maximize"), lambda: self.account_set_expanded(item, k, True))
        menu.addAction(_("Rename"), lambda: self.edit_account_label(k))
        if self.wallet.seed_version > 4:
            menu.addAction(_("View details"), lambda: self.show_account_details(k))
        if self.wallet.account_is_pending(k):
            menu.addAction(_("Delete"), lambda: self.delete_pending_account(k))
        menu.exec_(self.receive_list.viewport().mapToGlobal(position))
    def delete_pending_account(self, k):
        """Delete a pending (not yet funded) account and refresh the receive tab."""
        self.wallet.delete_pending_account(k)
        self.update_receive_tab()
    def create_receive_menu(self, position):
        """Context menu for address rows (supports multi-select freeze/unfreeze).

        Non-address rows (account headers) delegate to create_account_menu
        or simply toggle expansion.
        """
        # fixme: this function apparently has a side effect.
        # if it is not called the menu pops up several times
        #self.receive_list.selectedIndexes()
        selected = self.receive_list.selectedItems()
        multi_select = len(selected) > 1
        addrs = [unicode(item.text(0)) for item in selected]
        if not multi_select:
            item = self.receive_list.itemAt(position)
            if not item: return
            addr = addrs[0]
            if not is_valid(addr):
                # not an address row: role 32 holds the account key, if any
                k = str(item.data(0,32).toString())
                if k:
                    self.create_account_menu(position, k, item)
                else:
                    item.setExpanded(not item.isExpanded())
                return
        menu = QMenu()
        if not multi_select:
            menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(addr))
            menu.addAction(_("QR code"), lambda: self.show_qrcode("viorcoin:" + addr, _("Address")) )
            menu.addAction(_("Edit label"), lambda: self.edit_label(True))
            menu.addAction(_("Public keys"), lambda: self.show_public_keys(addr))
            if not self.wallet.is_watching_only():
                menu.addAction(_("Private key"), lambda: self.show_private_key(addr))
                menu.addAction(_("Sign/verify message"), lambda: self.sign_verify_message(addr))
                #menu.addAction(_("Encrypt/decrypt message"), lambda: self.encrypt_message(addr))
            if self.wallet.is_imported(addr):
                menu.addAction(_("Remove from wallet"), lambda: self.delete_imported_key(addr))
        if any(addr not in self.wallet.frozen_addresses for addr in addrs):
            menu.addAction(_("Freeze"), lambda: self.set_addrs_frozen(addrs, True))
        if any(addr in self.wallet.frozen_addresses for addr in addrs):
            menu.addAction(_("Unfreeze"), lambda: self.set_addrs_frozen(addrs, False))
        if any(addr not in self.wallet.frozen_addresses for addr in addrs):
            menu.addAction(_("Send From"), lambda: self.send_from_addresses(addrs))
        run_hook('receive_menu', menu, addrs)
        menu.exec_(self.receive_list.viewport().mapToGlobal(position))
def get_sendable_balance(self):
return sum(sum(self.wallet.get_addr_balance(a)) for a in self.get_payment_sources())
def get_payment_sources(self):
if self.pay_from:
return self.pay_from
else:
return self.wallet.get_account_addresses(self.current_account)
    def send_from_addresses(self, addrs):
        """Select explicit source addresses and jump to the Send tab."""
        self.set_pay_from( addrs )
        self.tabs.setCurrentIndex(1)
    def payto(self, addr):
        """Open the Send tab with *addr* (prefixed by its label, if any) filled in."""
        if not addr: return
        label = self.wallet.labels.get(addr)
        m_addr = label + ' <' + addr + '>' if label else addr
        self.tabs.setCurrentIndex(1)
        self.payto_e.setText(m_addr)
        self.amount_e.setFocus()
    def delete_contact(self, x):
        """After confirmation, remove contact *x* and its label, then refresh tabs."""
        if self.question(_("Do you want to remove")+" %s "%x +_("from your list of contacts?")):
            self.wallet.delete_contact(x)
            self.wallet.set_label(x, None)
            self.update_history_tab()
            self.update_contacts_tab()
            self.update_completions()
    def create_contact_menu(self, position):
        """Context menu for the contacts list (or 'New contact' on empty space)."""
        item = self.contacts_list.itemAt(position)
        menu = QMenu()
        if not item:
            menu.addAction(_("New contact"), lambda: self.new_contact_dialog())
        else:
            addr = unicode(item.text(0))
            label = unicode(item.text(1))
            is_editable = item.data(0,32).toBool()   # role 32 = label editable
            payto_addr = item.data(0,33).toString()  # role 33 = payto string
            menu.addAction(_("Copy to Clipboard"), lambda: self.app.clipboard().setText(addr))
            menu.addAction(_("Pay to"), lambda: self.payto(payto_addr))
            menu.addAction(_("QR code"), lambda: self.show_qrcode("viorcoin:" + addr, _("Address")))
            if is_editable:
                menu.addAction(_("Edit label"), lambda: self.edit_label(False))
                menu.addAction(_("Delete"), lambda: self.delete_contact(addr))
        run_hook('create_contact_menu', menu, item)
        menu.exec_(self.contacts_list.viewport().mapToGlobal(position))
    def update_receive_item(self, item):
        """Fill one receive-list row: label, balance, frozen highlight."""
        item.setFont(0, QFont(MONOSPACE_FONT))
        address = str(item.data(0,0).toString())
        label = self.wallet.labels.get(address,'')
        item.setData(1,0,label)
        item.setData(0,32, True) # is editable
        run_hook('update_receive_item', address, item)
        if not self.wallet.is_mine(address): return
        c, u = self.wallet.get_addr_balance(address)
        balance = self.format_amount(c + u)
        item.setData(2,0,balance)
        if address in self.wallet.frozen_addresses:
            item.setBackgroundColor(0, QColor('lightblue'))
    def update_receive_tab(self):
        """Rebuild the receive list: accounts > receiving/change > addresses.

        Addresses past the gap limit are flagged red; fully spent ('used')
        addresses are grouped under a 'Used' sub-item.
        """
        l = self.receive_list
        # extend the syntax for consistency
        l.addChild = l.addTopLevelItem
        l.clear()
        for i,width in enumerate(self.column_widths['receive']):
            l.setColumnWidth(i, width)
        accounts = self.wallet.get_accounts()
        if self.current_account is None:
            account_items = sorted(accounts.items())
        else:
            account_items = [(self.current_account, accounts.get(self.current_account))]
        for k, account in account_items:
            if len(accounts) > 1:
                # one header row per account; role 32 carries the account key
                name = self.wallet.get_account_name(k)
                c,u = self.wallet.get_account_balance(k)
                account_item = QTreeWidgetItem( [ name, '', self.format_amount(c+u), ''] )
                l.addTopLevelItem(account_item)
                account_item.setExpanded(self.accounts_expanded.get(k, True))
                account_item.setData(0, 32, k)
            else:
                account_item = l
            sequences = [0,1] if account.has_change() else [0]
            for is_change in sequences:
                if len(sequences) > 1:
                    name = _("Receiving") if not is_change else _("Change")
                    seq_item = QTreeWidgetItem( [ name, '', '', '', ''] )
                    account_item.addChild(seq_item)
                    if not is_change:
                        seq_item.setExpanded(True)
                else:
                    seq_item = account_item
                used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] )
                used_flag = False
                is_red = False
                gap = 0
                for address in account.get_addresses(is_change):
                    h = self.wallet.history.get(address,[])
                    # count consecutive unused addresses; beyond the gap
                    # limit, remaining addresses are marked red
                    if h == []:
                        gap += 1
                        if gap > self.wallet.gap_limit:
                            is_red = True
                    else:
                        gap = 0
                    c, u = self.wallet.get_addr_balance(address)
                    num_tx = '*' if h == ['*'] else "%d"%len(h)
                    item = QTreeWidgetItem( [ address, '', '', num_tx] )
                    self.update_receive_item(item)
                    if is_red:
                        item.setBackgroundColor(1, QColor('red'))
                    # used = has history but zero net balance (c == -u)
                    if len(h) > 0 and c == -u:
                        if not used_flag:
                            seq_item.insertChild(0,used_item)
                            used_flag = True
                        used_item.addChild(item)
                    else:
                        seq_item.addChild(item)
        # we use column 1 because column 0 may be hidden
        l.setCurrentItem(l.topLevelItem(0),1)
    def update_contacts_tab(self):
        """Rebuild the contacts list from the wallet address book."""
        l = self.contacts_list
        l.clear()
        for address in self.wallet.addressbook:
            label = self.wallet.labels.get(address,'')
            n = self.wallet.get_num_tx(address)
            item = QTreeWidgetItem( [ address, label, "%d"%n] )
            item.setFont(0, QFont(MONOSPACE_FONT))
            # 32 = label can be edited (bool)
            item.setData(0,32, True)
            # 33 = payto string
            item.setData(0,33, address)
            l.addTopLevelItem(item)
        run_hook('update_contacts_tab', l)
        l.setCurrentItem(l.topLevelItem(0))
    def create_console_tab(self):
        """Create and return the interactive console widget."""
        from console import Console
        self.console = console = Console()
        return console
    def update_console(self):
        """Populate the console namespace: wallet/network objects plus commands.

        Each public command from commands.Commands is exposed as a callable
        that passes the command name, the args, and the password prompt.
        """
        console = self.console
        console.history = self.config.get("console-history",[])
        console.history_index = len(console.history)
        console.updateNamespace({'wallet' : self.wallet, 'network' : self.network, 'gui':self})
        console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
        c = commands.Commands(self.wallet, self.network, lambda: self.console.set_json(True))
        methods = {}
        def mkfunc(f, method):
            """Bind *method* now so the closure doesn't capture the loop variable late."""
            return lambda *args: apply( f, (method, args, self.password_dialog ))
        for m in dir(c):
            if m[0]=='_' or m in ['network','wallet']: continue
            methods[m] = mkfunc(c._run, m)
        console.updateNamespace(methods)
    def change_account(self,s):
        """Switch the active account by display name *s* and refresh the tabs."""
        if s == _("All accounts"):
            self.current_account = None
        else:
            accounts = self.wallet.get_account_names()
            for k, v in accounts.items():
                if v == s:
                    self.current_account = k
        self.update_history_tab()
        self.update_status()
        self.update_receive_tab()
    def create_status_bar(self):
        """Assemble the status bar: balance, account selector, action buttons."""
        sb = QStatusBar()
        sb.setFixedHeight(35)
        qtVersion = qVersion()
        self.balance_label = QLabel("")
        sb.addWidget(self.balance_label)
        from version_getter import UpdateLabel
        self.updatelabel = UpdateLabel(self.config, sb)
        self.account_selector = QComboBox()
        self.account_selector.setSizeAdjustPolicy(QComboBox.AdjustToContents)
        self.connect(self.account_selector,SIGNAL("activated(QString)"),self.change_account)
        sb.addPermanentWidget(self.account_selector)
        #if (int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7):
        #    sb.addPermanentWidget( StatusBarButton( QIcon(":icons/switchgui.png"), _("Switch to Lite Mode"), self.go_lite ) )
        self.lock_icon = QIcon()
        self.password_button = StatusBarButton( self.lock_icon, _("Password"), self.change_password_dialog )
        sb.addPermanentWidget( self.password_button )
        sb.addPermanentWidget( StatusBarButton( QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
        self.seed_button = StatusBarButton( QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
        sb.addPermanentWidget( self.seed_button )
        self.status_button = StatusBarButton( QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog )
        sb.addPermanentWidget( self.status_button )
        run_hook('create_status_bar', (sb,))
        self.setStatusBar(sb)
    def update_lock_icon(self):
        """Show a locked/unlocked padlock depending on wallet encryption."""
        icon = QIcon(":icons/lock.png") if self.wallet.use_encryption else QIcon(":icons/unlock.png")
        self.password_button.setIcon( icon )
    def update_buttons_on_seed(self):
        """Adapt seed/password buttons and Send label to the wallet type.

        Watching-only wallets cannot sign, so the Send button becomes
        'Create unsigned transaction'.
        """
        if self.wallet.has_seed():
           self.seed_button.show()
        else:
           self.seed_button.hide()
        if not self.wallet.is_watching_only():
           self.password_button.show()
           self.send_button.setText(_("Send"))
        else:
           self.password_button.hide()
           self.send_button.setText(_("Create unsigned transaction"))
    def change_password_dialog(self):
        """Run the password-change dialog, then refresh the lock icon."""
        from password_dialog import PasswordDialog
        d = PasswordDialog(self.wallet, self)
        d.run()
        self.update_lock_icon()
    def new_contact_dialog(self):
        """Prompt for an address (and optional name) and add it as a contact."""
        d = QDialog(self)
        d.setWindowTitle(_("New Contact"))
        vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(_('New Contact')+':'))
        grid = QGridLayout()
        line1 = QLineEdit()
        line2 = QLineEdit()
        grid.addWidget(QLabel(_("Address")), 1, 0)
        grid.addWidget(line1, 1, 1)
        grid.addWidget(QLabel(_("Name")), 2, 0)
        grid.addWidget(line2, 2, 1)
        vbox.addLayout(grid)
        vbox.addLayout(ok_cancel_buttons(d))
        if not d.exec_():
            return
        address = str(line1.text())
        label = unicode(line2.text())
        if not is_valid(address):
            QMessageBox.warning(self, _('Error'), _('Invalid Address'), _('OK'))
            return
        self.wallet.add_contact(address)
        if label:
            self.wallet.set_label(address, label)
        self.update_contacts_tab()
        self.update_history_tab()
        self.update_completions()
        self.tabs.setCurrentIndex(3)
    @protected
    def new_account_dialog(self, password):
        """Prompt for a name and create a pending account (password via @protected)."""
        dialog = QDialog(self)
        dialog.setModal(1)
        dialog.setWindowTitle(_("New Account"))
        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(_('Account name')+':'))
        e = QLineEdit()
        vbox.addWidget(e)
        msg = _("Note: Newly created accounts are 'pending' until they receive viorcoins.") + " " \
            + _("You will need to wait for 2 confirmations until the correct balance is displayed and more addresses are created for that account.")
        l = QLabel(msg)
        l.setWordWrap(True)
        vbox.addWidget(l)
        vbox.addLayout(ok_cancel_buttons(dialog))
        dialog.setLayout(vbox)
        r = dialog.exec_()
        if not r: return
        name = str(e.text())
        if not name: return
        self.wallet.create_pending_account(name, password)
        self.update_receive_tab()
        self.tabs.setCurrentIndex(2)
    def show_master_public_keys(self):
        """Display all master public keys of the wallet in a read-only dialog."""
        dialog = QDialog(self)
        dialog.setModal(1)
        dialog.setWindowTitle(_("Master Public Keys"))
        main_layout = QGridLayout()
        mpk_dict = self.wallet.get_master_public_keys()
        i = 0
        # one label + text box pair per key, stacked two grid rows apart
        for key, value in mpk_dict.items():
            main_layout.addWidget(QLabel(key), i, 0)
            mpk_text = QTextEdit()
            mpk_text.setReadOnly(True)
            mpk_text.setMaximumHeight(170)
            mpk_text.setText(value)
            main_layout.addWidget(mpk_text, i + 1, 0)
            i += 2
        vbox = QVBoxLayout()
        vbox.addLayout(main_layout)
        vbox.addLayout(close_button(dialog))
        dialog.setLayout(vbox)
        dialog.exec_()
    @protected
    def show_seed_dialog(self, password):
        """Decrypt and display the wallet seed (password via @protected)."""
        if not self.wallet.has_seed():
            QMessageBox.information(self, _('Message'), _('This wallet has no seed'), _('OK'))
            return
        try:
            mnemonic = self.wallet.get_mnemonic(password)
        except Exception:
            QMessageBox.warning(self, _('Error'), _('Incorrect Password'), _('OK'))
            return
        from seed_dialog import SeedDialog
        d = SeedDialog(self, mnemonic, self.wallet.imported_keys)
        d.exec_()
    def show_qrcode(self, data, title = _("QR code")):
        """Show *data* as a QR code with Copy/Save/Close buttons.

        Both actions go through a temporary bitmap file in the config dir.
        """
        if not data: return
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(title)
        d.setMinimumSize(270, 300)
        vbox = QVBoxLayout()
        qrw = QRCodeWidget(data)
        vbox.addWidget(qrw, 1)
        vbox.addWidget(QLabel(data), 0, Qt.AlignHCenter)
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        filename = os.path.join(self.config.path, "qrcode.bmp")
        def print_qr():
            """Save the QR bitmap to disk and tell the user where it went."""
            bmp.save_qrcode(qrw.qr, filename)
            QMessageBox.information(None, _('Message'), _("QR code saved to file") + " " + filename, _('OK'))
        def copy_to_clipboard():
            """Render the QR to the temp file, then put the image on the clipboard."""
            bmp.save_qrcode(qrw.qr, filename)
            self.app.clipboard().setImage(QImage(filename))
            QMessageBox.information(None, _('Message'), _("QR code saved to clipboard"), _('OK'))
        b = QPushButton(_("Copy"))
        hbox.addWidget(b)
        b.clicked.connect(copy_to_clipboard)
        b = QPushButton(_("Save"))
        hbox.addWidget(b)
        b.clicked.connect(print_qr)
        b = QPushButton(_("Close"))
        hbox.addWidget(b)
        b.clicked.connect(d.accept)
        b.setDefault(True)
        vbox.addLayout(hbox)
        d.setLayout(vbox)
        d.exec_()
    def do_protect(self, func, args):
        """Collect the wallet password (if encrypted) and invoke *func*.

        Called by the @protected decorator; appends the password as the
        final positional argument.  The (False,) special case passes no
        extra args besides self and password.
        """
        if self.wallet.use_encryption:
            password = self.password_dialog()
            if not password:
                return
        else:
            password = None
        if args != (False,):
            args = (self,) + args + (password,)
        else:
            args = (self,password)
        apply( func, args)
    def show_public_keys(self, address):
        """Display the public key(s) of *address* in a read-only dialog."""
        if not address: return
        try:
            pubkey_list = self.wallet.get_public_keys(address)
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            self.show_message(str(e))
            return
        d = QDialog(self)
        d.setMinimumSize(600, 200)
        d.setModal(1)
        vbox = QVBoxLayout()
        vbox.addWidget( QLabel(_("Address") + ': ' + address))
        vbox.addWidget( QLabel(_("Public key") + ':'))
        keys = QTextEdit()
        keys.setReadOnly(True)
        keys.setText('\n'.join(pubkey_list))
        vbox.addWidget(keys)
        #vbox.addWidget( QRCodeWidget('\n'.join(pk_list)) )
        vbox.addLayout(close_button(d))
        d.setLayout(vbox)
        d.exec_()
    @protected
    def show_private_key(self, address, password):
        """Display the private key(s) of *address* plus a QR code (password via @protected)."""
        if not address: return
        try:
            pk_list = self.wallet.get_private_key(address, password)
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            self.show_message(str(e))
            return
        d = QDialog(self)
        d.setMinimumSize(600, 200)
        d.setModal(1)
        vbox = QVBoxLayout()
        vbox.addWidget( QLabel(_("Address") + ': ' + address))
        vbox.addWidget( QLabel(_("Private key") + ':'))
        keys = QTextEdit()
        keys.setReadOnly(True)
        keys.setText('\n'.join(pk_list))
        vbox.addWidget(keys)
        vbox.addWidget( QRCodeWidget('\n'.join(pk_list)) )
        vbox.addLayout(close_button(d))
        d.setLayout(vbox)
        d.exec_()
    @protected
    def do_sign(self, address, message, signature, password):
        """Sign the message widget's text with *address* and fill the signature box.

        Arguments are the Qt widgets from sign_verify_message; password is
        injected by @protected.
        """
        message = unicode(message.toPlainText())
        message = message.encode('utf-8')
        try:
            sig = self.wallet.sign_message(str(address.text()), message, password)
            signature.setText(sig)
        except Exception as e:
            self.show_message(str(e))
    def do_verify(self, address, message, signature):
        """Verify the signature widget's contents against address + message."""
        message = unicode(message.toPlainText())
        message = message.encode('utf-8')
        if bitcoin.verify_message(address.text(), str(signature.toPlainText()), message):
            self.show_message(_("Signature verified"))
        else:
            self.show_message(_("Error: wrong signature"))
    def sign_verify_message(self, address=''):
        """Open the message sign/verify dialog, optionally pre-filled with *address*."""
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(_('Sign/verify Message'))
        d.setMinimumSize(410, 290)
        layout = QGridLayout(d)
        message_e = QTextEdit()
        layout.addWidget(QLabel(_('Message')), 1, 0)
        layout.addWidget(message_e, 1, 1)
        layout.setRowStretch(2,3)
        address_e = QLineEdit()
        address_e.setText(address)
        layout.addWidget(QLabel(_('Address')), 2, 0)
        layout.addWidget(address_e, 2, 1)
        signature_e = QTextEdit()
        layout.addWidget(QLabel(_('Signature')), 3, 0)
        layout.addWidget(signature_e, 3, 1)
        layout.setRowStretch(3,1)
        hbox = QHBoxLayout()
        b = QPushButton(_("Sign"))
        b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
        hbox.addWidget(b)
        b = QPushButton(_("Verify"))
        b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
        hbox.addWidget(b)
        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)
        layout.addLayout(hbox, 4, 1)
        d.exec_()
    @protected
    def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
        """Decrypt the 'encrypted' box into the message box (password via @protected)."""
        try:
            decrypted = self.wallet.decrypt_message(str(pubkey_e.text()), str(encrypted_e.toPlainText()), password)
            message_e.setText(decrypted)
        except Exception as e:
            self.show_message(str(e))
    def do_encrypt(self, message_e, pubkey_e, encrypted_e):
        """Encrypt the message box's text to the given public key."""
        message = unicode(message_e.toPlainText())
        message = message.encode('utf-8')
        try:
            encrypted = bitcoin.encrypt_message(message, str(pubkey_e.text()))
            encrypted_e.setText(encrypted)
        except Exception as e:
            self.show_message(str(e))
    def encrypt_message(self, address = ''):
        """Open the message encrypt/decrypt dialog.

        If *address* is given, its first public key pre-fills the key field.
        """
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(_('Encrypt/decrypt Message'))
        d.setMinimumSize(610, 490)
        layout = QGridLayout(d)
        message_e = QTextEdit()
        layout.addWidget(QLabel(_('Message')), 1, 0)
        layout.addWidget(message_e, 1, 1)
        layout.setRowStretch(2,3)
        pubkey_e = QLineEdit()
        if address:
            pubkey = self.wallet.getpubkeys(address)[0]
            pubkey_e.setText(pubkey)
        layout.addWidget(QLabel(_('Public key')), 2, 0)
        layout.addWidget(pubkey_e, 2, 1)
        encrypted_e = QTextEdit()
        layout.addWidget(QLabel(_('Encrypted')), 3, 0)
        layout.addWidget(encrypted_e, 3, 1)
        layout.setRowStretch(3,1)
        hbox = QHBoxLayout()
        b = QPushButton(_("Encrypt"))
        b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
        hbox.addWidget(b)
        b = QPushButton(_("Decrypt"))
        b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
        hbox.addWidget(b)
        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)
        layout.addLayout(hbox, 4, 1)
        d.exec_()
    def question(self, msg):
        """Yes/No confirmation dialog; returns True only on Yes (No is default)."""
        return QMessageBox.question(self, _('Message'), msg, QMessageBox.Yes | QMessageBox.No, QMessageBox.No) == QMessageBox.Yes
    def show_message(self, msg):
        """Modal informational message box with a single OK button."""
        QMessageBox.information(self, _('Message'), msg, _('OK'))
    def password_dialog(self ):
        """Prompt for the wallet password; returns it or None when cancelled."""
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(_("Enter Password"))
        pw = QLineEdit()
        pw.setEchoMode(2)
        vbox = QVBoxLayout()
        msg = _('Please enter your password')
        vbox.addWidget(QLabel(msg))
        grid = QGridLayout()
        grid.setSpacing(8)
        grid.addWidget(QLabel(_('Password')), 1, 0)
        grid.addWidget(pw, 1, 1)
        vbox.addLayout(grid)
        vbox.addLayout(ok_cancel_buttons(d))
        d.setLayout(vbox)
        run_hook('password_dialog', pw, grid, 1)
        if not d.exec_(): return
        return unicode(pw.text())
def tx_from_text(self, txt):
"json or raw hexadecimal"
try:
txt.decode('hex')
tx = Transaction(txt)
return tx
except Exception:
pass
try:
tx_dict = json.loads(str(txt))
assert "hex" in tx_dict.keys()
tx = Transaction(tx_dict["hex"])
if tx_dict.has_key("input_info"):
input_info = json.loads(tx_dict['input_info'])
tx.add_input_info(input_info)
return tx
except Exception:
traceback.print_exc(file=sys.stdout)
pass
QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return self.tx_from_text(file_content)
@protected
def sign_raw_transaction(self, tx, input_info, password):
self.wallet.signrawtransaction(tx, input_info, [], password)
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum_vior import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
r = self.network.synchronous_get([ ('blockchain.transaction.get',[str(txid)]) ])[0]
if r:
tx = transaction.Transaction(r)
if tx:
self.show_transaction(tx)
else:
self.show_message("unknown transaction")
def do_process_from_csvReader(self, csvReader):
outputs = []
errors = []
errtext = ""
try:
for position, row in enumerate(csvReader):
address = row[0]
if not is_valid(address):
errors.append((position, address))
continue
amount = Decimal(row[1])
amount = int(100000000*amount)
outputs.append((address, amount))
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return
if errors != []:
for x in errors:
errtext += "CSV Row " + str(x[0]+1) + ": " + x[1] + "\n"
QMessageBox.critical(None, _("Invalid Addresses"), _("ABORTING! Invalid Addresses found:") + "\n\n" + errtext)
return
try:
tx = self.wallet.make_unsigned_transaction(outputs, None, None)
except Exception as e:
self.show_message(str(e))
return
self.show_transaction(tx)
def do_process_from_csv_file(self):
fileName = self.getOpenFileName(_("Select your transaction CSV"), "*.csv")
if not fileName:
return
try:
with open(fileName, "r") as f:
csvReader = csv.reader(f)
self.do_process_from_csvReader(csvReader)
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return
def do_process_from_csv_text(self):
text = text_dialog(self, _('Input CSV'), _("Please enter a list of outputs.") + '\n' \
+ _("Format: address, amount. One output per line"), _("Load CSV"))
if not text:
return
f = StringIO.StringIO(text)
csvReader = csv.reader(f)
self.do_process_from_csvReader(csvReader)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
d = QDialog(self)
d.setWindowTitle(_('Private keys'))
d.setMinimumSize(850, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-vior-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
h, b = ok_cancel_buttons2(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(h)
private_keys = {}
addresses = self.wallet.addresses(True)
done = False
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done:
break
private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password))
d.emit(SIGNAL('computing_privkeys'))
d.emit(SIGNAL('show_privkeys'))
def show_privkeys():
s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error), reason:
export_error_label = _("Electrum was unable to produce a private key-export.")
QMessageBox.critical(None, _("Unable to create csv"), export_error_label + "\n" + str(reason))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
labelsFile = self.getOpenFileName(_("Open labels file"), "*.dat")
if not labelsFile: return
try:
f = open(labelsFile, 'r')
data = f.read()
f.close()
for key, value in json.loads(data).items():
self.wallet.set_label(key, value)
QMessageBox.information(None, _("Labels imported"), _("Your labels were imported from")+" '%s'" % str(labelsFile))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to import labels"), _("Electrum was unable to import your labels.")+"\n" + str(reason))
def do_export_labels(self):
labels = self.wallet.labels
try:
fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum-vior_labels.dat', "*.dat")
if fileName:
with open(fileName, 'w+') as f:
json.dump(labels, f)
QMessageBox.information(None, _("Labels exported"), _("Your labels where exported to")+" '%s'" % str(fileName))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to export labels"), _("Electrum was unable to export your labels.")+"\n" + str(reason))
def export_history_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Export History'))
d.setMinimumSize(400, 200)
vbox = QVBoxLayout(d)
defaultname = os.path.expanduser('~/electrum-vior-history.csv')
select_msg = _('Select file to export your wallet transactions to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
vbox.addStretch(1)
h, b = ok_cancel_buttons2(d, _('Export'))
vbox.addLayout(h)
if not d.exec_():
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_history(self.wallet, filename, csv_button.isChecked())
except (IOError, os.error), reason:
export_error_label = _("Electrum was unable to produce a transaction export.")
QMessageBox.critical(self, _("Unable to export history"), export_error_label + "\n" + str(reason))
return
QMessageBox.information(self,_("History exported"), _("Your wallet history has been successfully exported."))
def do_export_history(self, wallet, fileName, is_csv):
history = wallet.get_tx_history()
lines = []
for item in history:
tx_hash, confirmations, is_mine, value, fee, balance, timestamp = item
if confirmations:
if timestamp is not None:
try:
time_string = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
except [RuntimeError, TypeError, NameError] as reason:
time_string = "unknown"
pass
else:
time_string = "unknown"
else:
time_string = "pending"
if value is not None:
value_string = format_satoshis(value, True)
else:
value_string = '--'
if fee is not None:
fee_string = format_satoshis(fee, True)
else:
fee_string = '0'
if tx_hash:
label, is_default_label = wallet.get_label(tx_hash)
label = label.encode('utf-8')
else:
label = ""
balance_string = format_satoshis(balance, False)
if is_csv:
lines.append([tx_hash, label, confirmations, value_string, fee_string, balance_string, time_string])
else:
lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["transaction_hash","label", "confirmations", "value", "fee", "balance", "timestamp"])
for line in lines:
transaction.writerow(line)
else:
import json
f.write(json.dumps(lines, indent = 4))
def sweep_key_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys")))
keys_e = QTextEdit()
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
h, address_e = address_field(self.wallet.addresses())
vbox.addLayout(h)
vbox.addStretch(1)
hbox, button = ok_cancel_buttons2(d, _('Sweep'))
vbox.addLayout(hbox)
button.setEnabled(False)
def get_address():
addr = str(address_e.text())
if bitcoin.is_address(addr):
return addr
def get_pk():
pk = str(keys_e.toPlainText()).strip()
if Wallet.is_private_key(pk):
return pk.split()
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
if not d.exec_():
return
fee = self.wallet.fee
tx = Transaction.sweep(get_pk(), self.network, get_address(), fee)
self.show_transaction(tx)
@protected
def do_import_privkey(self, password):
if not self.wallet.imported_keys:
r = QMessageBox.question(None, _('Warning'), '<b>'+_('Warning') +':\n</b><br/>'+ _('Imported keys are not recoverable from seed.') + ' ' \
+ _('If you ever need to restore your wallet from its seed, these keys will be lost.') + '<p>' \
+ _('Are you sure you understand what you are doing?'), 3, 4)
if r == 4: return
text = text_dialog(self, _('Import private keys'), _("Enter private keys")+':', _("Import"))
if not text: return
text = str(text).split()
badkeys = []
addrlist = []
for key in text:
try:
addr = self.wallet.import_key(key, password)
except Exception as e:
badkeys.append(key)
continue
if not addr:
badkeys.append(key)
else:
addrlist.append(addr)
if addrlist:
QMessageBox.information(self, _('Information'), _("The following addresses were added") + ':\n' + '\n'.join(addrlist))
if badkeys:
QMessageBox.critical(self, _('Error'), _("The following inputs could not be imported") + ':\n'+ '\n'.join(badkeys))
self.update_receive_tab()
self.update_history_tab()
def settings_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Electrum Settings'))
d.setModal(1)
vbox = QVBoxLayout()
grid = QGridLayout()
grid.setColumnStretch(0,1)
nz_label = QLabel(_('Display zeros') + ':')
grid.addWidget(nz_label, 0, 0)
nz_e = AmountEdit(None,True)
nz_e.setText("%d"% self.num_zeros)
grid.addWidget(nz_e, 0, 1)
msg = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
grid.addWidget(HelpButton(msg), 0, 2)
if not self.config.is_modifiable('num_zeros'):
for w in [nz_e, nz_label]: w.setEnabled(False)
lang_label=QLabel(_('Language') + ':')
grid.addWidget(lang_label, 1, 0)
lang_combo = QComboBox()
from electrum_vior.i18n import languages
lang_combo.addItems(languages.values())
try:
index = languages.keys().index(self.config.get("language",''))
except Exception:
index = 0
lang_combo.setCurrentIndex(index)
grid.addWidget(lang_combo, 1, 1)
grid.addWidget(HelpButton(_('Select which language is used in the GUI (after restart).')+' '), 1, 2)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
fee_label = QLabel(_('Transaction fee') + ':')
grid.addWidget(fee_label, 2, 0)
fee_e = AmountEdit(self.base_unit)
fee_e.setText(self.format_amount(self.wallet.fee).strip())
grid.addWidget(fee_e, 2, 1)
msg = _('Fee per kilobyte of transaction.') + ' ' \
+ _('Recommended value') + ': ' + self.format_amount(100000)
grid.addWidget(HelpButton(msg), 2, 2)
if not self.config.is_modifiable('fee_per_kb'):
for w in [fee_e, fee_label]: w.setEnabled(False)
units = ['VIOR', 'mVIOR']
unit_label = QLabel(_('Base unit') + ':')
grid.addWidget(unit_label, 3, 0)
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
grid.addWidget(unit_combo, 3, 1)
grid.addWidget(HelpButton(_('Base unit of your wallet.')\
+ '\n1VIOR=1000mVIOR.\n' \
+ _(' These settings affects the fields in the Send tab')+' '), 3, 2)
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
grid.addWidget(usechange_cb, 4, 0)
grid.addWidget(HelpButton(_('Using change addresses makes it more difficult for other people to track your transactions.')+' '), 4, 2)
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
block_explorers = ['explorer.viorcoin.net', 'block-explorer.com', 'Blockr.io']
block_ex_label = QLabel(_('Online Block Explorer') + ':')
grid.addWidget(block_ex_label, 5, 0)
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_explorers.index(self.config.get('block_explorer', 'explorer.viorcoin.net')))
grid.addWidget(block_ex_combo, 5, 1)
grid.addWidget(HelpButton(_('Choose which online block explorer to use for functions that open a web browser')+' '), 5, 2)
show_tx = self.config.get('show_before_broadcast', False)
showtx_cb = QCheckBox(_('Show before broadcast'))
showtx_cb.setChecked(show_tx)
grid.addWidget(showtx_cb, 6, 0)
grid.addWidget(HelpButton(_('Display the details of your transactions before broadcasting it.')), 6, 2)
vbox.addLayout(grid)
vbox.addStretch(1)
vbox.addLayout(ok_cancel_buttons(d))
d.setLayout(vbox)
# run the dialog
if not d.exec_(): return
fee = unicode(fee_e.text())
try:
fee = self.read_amount(fee)
except Exception:
QMessageBox.warning(self, _('Error'), _('Invalid value') +': %s'%fee, _('OK'))
return
self.wallet.set_fee(fee)
nz = unicode(nz_e.text())
try:
nz = int( nz )
if nz>8: nz=8
except Exception:
QMessageBox.warning(self, _('Error'), _('Invalid value')+':%s'%nz, _('OK'))
return
if self.num_zeros != nz:
self.num_zeros = nz
self.config.set_key('num_zeros', nz, True)
self.update_history_tab()
self.update_receive_tab()
usechange_result = usechange_cb.isChecked()
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
if showtx_cb.isChecked() != show_tx:
self.config.set_key('show_before_broadcast', not show_tx)
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() != unit_result:
self.decimal_point = 8 if unit_result == 'VIOR' else 5
self.config.set_key('decimal_point', self.decimal_point, True)
self.update_history_tab()
self.update_status()
need_restart = False
lang_request = languages.keys()[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
need_restart = True
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
run_hook('close_settings_dialog')
if need_restart:
QMessageBox.warning(self, _('Success'), _('Please restart Electrum to activate the new GUI settings'), _('OK'))
def run_network_dialog(self):
if not self.network:
return
NetworkDialog(self.wallet.network, self.config, self).do_exec()
def closeEvent(self, event):
self.tray.hide()
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.config.set_key("winpos-qt", [g.left(),g.top(),g.width(),g.height()])
self.save_column_widths()
self.config.set_key("console-history", self.console.history[-50:], True)
self.wallet.storage.put('accounts_expanded', self.accounts_expanded)
event.accept()
def plugins_dialog(self):
from electrum_vior.plugins import plugins
d = QDialog(self)
d.setWindowTitle(_('Electrum Plugins'))
d.setModal(1)
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(len(plugins)*35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
def do_toggle(cb, p, w):
r = p.toggle()
cb.setChecked(r)
if w: w.setEnabled(r)
def mk_toggle(cb, p, w):
return lambda: do_toggle(cb,p,w)
for i, p in enumerate(plugins):
try:
cb = QCheckBox(p.fullname())
cb.setDisabled(not p.is_available())
cb.setChecked(p.is_enabled())
grid.addWidget(cb, i, 0)
if p.requires_settings():
w = p.settings_widget(self)
w.setEnabled( p.is_enabled() )
grid.addWidget(w, i, 1)
else:
w = None
cb.clicked.connect(mk_toggle(cb,p,w))
grid.addWidget(HelpButton(p.description()), i, 2)
except Exception:
print_msg(_("Error: cannot display plugin"), p)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(i+1,1)
vbox.addLayout(close_button(d))
d.exec_()
def show_account_details(self, k):
account = self.wallet.accounts[k]
d = QDialog(self)
d.setWindowTitle(_('Account Details'))
d.setModal(1)
vbox = QVBoxLayout(d)
name = self.wallet.get_account_name(k)
label = QLabel('Name: ' + name)
vbox.addWidget(label)
vbox.addWidget(QLabel(_('Address type') + ': ' + account.get_type()))
vbox.addWidget(QLabel(_('Derivation') + ': ' + k))
vbox.addWidget(QLabel(_('Master Public Key:')))
text = QTextEdit()
text.setReadOnly(True)
text.setMaximumHeight(170)
vbox.addWidget(text)
mpk_text = '\n'.join( account.get_master_pubkeys() )
text.setText(mpk_text)
vbox.addLayout(close_button(d))
d.exec_()
| gpl-3.0 | -2,286,613,206,495,023,000 | 36.707551 | 454 | 0.583415 | false | 3.888029 | true | false | false |
thirdwing/SFrame | oss_src/unity/python/sframe/_scripts/_pylambda_worker.py | 1 | 4845 | import sys
import os
import ctypes
from ctypes import PyDLL, c_char_p, c_int
from os.path import split, abspath, join
from glob import glob
from itertools import chain
def set_windows_dll_path():
"""
Sets the dll load path so that things are resolved correctly.
"""
# Back up to the directory, then to the base directory as this is
# in ./_scripts.
lib_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def errcheck_bool(result, func, args):
if not result:
last_error = ctypes.get_last_error()
if last_error != 0:
raise ctypes.WinError(last_error)
else:
raise OSError
return args
import ctypes.wintypes as wintypes
# Also need to set the dll loading directory to the main
# folder so windows attempts to load all DLLs from this
# directory.
try:
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
kernel32.SetDllDirectoryW.errcheck = errcheck_bool
kernel32.SetDllDirectoryW.argtypes = (wintypes.LPCWSTR,)
kernel32.SetDllDirectoryW(lib_path)
except Exception, e:
sys.stderr.write("Error setting DLL load orders: %s (things may still work).\n" % str(e))
sys.stderr.flush()
if __name__ == "__main__":
if len(sys.argv) == 1:
dry_run = True
else:
dry_run = False
if dry_run or os.environ.get("GRAPHLAB_LAMBDA_WORKER_DRY_RUN") == "1":
_write_out = sys.stderr
else:
_write_out = sys.stdout
_write_out_file_name = os.environ.get("GRAPHLAB_LAMBDA_WORKER_LOG_FILE", "")
_write_out_file = None
def _write_log(s, error = False):
s = s + "\n"
if error:
sys.stderr.write(s)
sys.stderr.flush()
else:
_write_out.write(s)
_write_out.flush()
if _write_out_file is not None:
_write_out_file.write(s)
_write_out_file.flush()
if _write_out_file_name != "":
# Set this to an absolute location to make things worthwhile
_write_out_file_name = abspath(_write_out_file_name)
os.environ["GRAPHLAB_LAMBDA_WORKER_LOG_FILE"] = _write_out_file_name
_write_out_file_name = _write_out_file_name + "-init"
_write_log("Logging initialization routines to %s." % _write_out_file_name)
try:
_write_out_file = open(_write_out_file_name, "w")
except Exception, e:
_write_log("Error opening '%s' for write: %s" % (_write_out_file_name, repr(e)))
_write_out_file = None
if dry_run:
print "PyLambda script called with no IPC information; entering diagnostic mode."
script_path = abspath(sys.modules[__name__].__file__)
main_dir = split(split(script_path)[0])[0]
_write_log("Script directory: %s." % script_path)
_write_log("Main program directory: %s." % main_dir)
for s in sys.argv:
_write_log("Lambda worker args: \n %s" % ("\n ".join(sys.argv)))
# Handle the different library type extensions
pylamda_worker_search_string = join(main_dir, "libpylambda_worker.*")
_write_log("Lambda worker search pattern: %s\n" % pylamda_worker_search_string)
pylambda_workers = glob(join(main_dir, "libpylambda_worker.*"))
_write_log("Found %d candidade pylambda_worker file(s): \n %s."
% (len(pylambda_workers), "\n ".join(pylambda_workers)))
if len(pylambda_workers) > 1:
_write_log("WARNING: multiple pylambda worker libraries.")
if len(pylambda_workers) == 0:
_write_log("ERROR: Cannot find pylambda_worker extension library.", error = True)
sys.exit(202)
_write_log("INFO: Loading pylambda worker library: %s." % pylambda_workers[0])
# Set the dll load path if we are on windows
if sys.platform == 'win32':
set_windows_dll_path()
try:
pylambda_lib = PyDLL(pylambda_workers[0])
except Exception, e:
_write_log("Error loading lambda library %s: %s" % (pylambda_workers[0], repr(e)), error = True)
sys.exit(203)
try:
pylambda_lib.pylambda_worker_main.argtypes = [c_char_p, c_char_p]
pylambda_lib.pylambda_worker_main.restype = c_int
except Exception, e:
_write_log("Error accessing pylambda_worker_main: %s\n" % repr(e), error = True)
sys.exit(204)
if not dry_run:
# This call only returns after the parent process is done.
result = pylambda_lib.pylambda_worker_main(c_char_p(main_dir), c_char_p(sys.argv[1]))
else:
# This version will print out a bunch of diagnostic information and then exit.
result = pylambda_lib.pylambda_worker_main(c_char_p(main_dir), c_char_p("debug"))
_write_log("Lambda process exited with code %d." % result)
sys.exit(0)
| bsd-3-clause | 8,055,003,699,847,656,000 | 34.108696 | 104 | 0.615273 | false | 3.419195 | false | false | false |
Denisolt/IEEE-NYIT-MA | local/lib/python2.7/site-packages/tests/test_base.py | 1 | 21285 | # coding: utf-8
import os
import ntpath
import posixpath
import shutil
from mock import patch
from filebrowser.base import FileObject, FileListing
from filebrowser.sites import site
from filebrowser.settings import VERSIONS
from tests import FilebrowserTestCase as TestCase
class FileObjectPathTests(TestCase):
    """Verify FileObject path resolution under both Windows and POSIX separators."""

    def setUp(self):
        super(FileObjectPathTests, self).setUp()
        # Seed the uploads folder with the static test image.
        shutil.copy(self.STATIC_IMG_PATH, self.FOLDER_PATH)

    @patch('filebrowser.base.os.path', ntpath)
    def test_windows_paths(self):
        """Patch in ntpath so Windows paths are exercised regardless of host OS."""
        file_obj = FileObject('_test\\uploads\\folder\\testfile.jpg', site=site)
        self.assertEqual(file_obj.path_relative_directory, 'folder\\testfile.jpg')
        self.assertEqual(file_obj.dirname, 'folder')

    @patch('filebrowser.base.os.path', posixpath)
    def test_posix_paths(self):
        """Patch in posixpath so POSIX paths are exercised regardless of host OS."""
        file_obj = FileObject('_test/uploads/folder/testfile.jpg', site=site)
        self.assertEqual(file_obj.path_relative_directory, 'folder/testfile.jpg')
        self.assertEqual(file_obj.dirname, 'folder')
class FileObjectUnicodeTests(TestCase):
    """Verify FileObject path resolution for names containing non-ASCII characters."""

    def setUp(self):
        super(FileObjectUnicodeTests, self).setUp()
        # Seed the uploads folder with the static test image.
        shutil.copy(self.STATIC_IMG_PATH, self.FOLDER_PATH)

    @patch('filebrowser.base.os.path', ntpath)
    def test_windows_paths(self):
        """Patch in ntpath so Windows unicode paths are exercised regardless of host OS."""
        file_obj = FileObject('_test\\uploads\\$%^&*\\測試文件.jpg', site=site)
        self.assertEqual(file_obj.path_relative_directory, '$%^&*\\測試文件.jpg')
        self.assertEqual(file_obj.dirname, '$%^&*')

    @patch('filebrowser.base.os.path', posixpath)
    def test_posix_paths(self):
        """Patch in posixpath so POSIX unicode paths are exercised regardless of host OS."""
        file_obj = FileObject('_test/uploads/$%^&*/測試文件.jpg', site=site)
        self.assertEqual(file_obj.path_relative_directory, '$%^&*/測試文件.jpg')
        self.assertEqual(file_obj.dirname, '$%^&*')

    @patch('filebrowser.base.os.path', posixpath)
    @patch('filebrowser.namers.VERSION_NAMER', 'filebrowser.namers.OptionsNamer')
    def test_unicode_options_namer_version(self):
        """OptionsNamer must build a valid version filename for a unicode original."""
        source_path = os.path.join(self.FOLDER_PATH, '測試文件.jpg')
        shutil.copy(self.STATIC_IMG_PATH, source_path)
        file_obj = FileObject(source_path, site=site)
        version = file_obj.version_generate('large')
        self.assertEqual(version.filename, u'測試文件_large--680x0.jpg')
class FileObjectAttributeTests(TestCase):
    def setUp(self):
        # Copy the static test image into the uploads folder so the fixture
        # FileObjects (filesize, dimensions, versions) have real data to read.
        super(FileObjectAttributeTests, self).setUp()
        shutil.copy(self.STATIC_IMG_PATH, self.FOLDER_PATH)
def test_init_attributes(self):
"""
FileObject init attributes
# path
# head
# filename
# filename_lower
# filename_root
# extension
# mimetype
"""
self.assertEqual(self.F_IMAGE.path, "_test/uploads/folder/testimage.jpg")
self.assertEqual(self.F_IMAGE.head, '_test/uploads/folder')
self.assertEqual(self.F_IMAGE.filename, 'testimage.jpg')
self.assertEqual(self.F_IMAGE.filename_lower, 'testimage.jpg')
self.assertEqual(self.F_IMAGE.filename_root, 'testimage')
self.assertEqual(self.F_IMAGE.extension, '.jpg')
self.assertEqual(self.F_IMAGE.mimetype, ('image/jpeg', None))
def test_general_attributes(self):
"""
FileObject general attributes
# filetype
# filesize
# date
# datetime
# exists
"""
self.assertEqual(self.F_IMAGE.filetype, 'Image')
self.assertEqual(self.F_IMAGE.filetype, 'Image')
self.assertEqual(self.F_IMAGE.filesize, 870037)
# FIXME: test date/datetime
self.assertEqual(self.F_IMAGE.exists, True)
def test_path_url_attributes(self):
"""
FileObject path and url attributes
# path (see init)
# path_relative_directory
# path_full
# dirname
# url
"""
# test with image
self.assertEqual(self.F_IMAGE.path, "_test/uploads/folder/testimage.jpg")
self.assertEqual(self.F_IMAGE.path_relative_directory, "folder/testimage.jpg")
self.assertEqual(self.F_IMAGE.path_full, os.path.join(site.storage.location, site.directory, "folder/testimage.jpg"))
self.assertEqual(self.F_IMAGE.dirname, "folder")
self.assertEqual(self.F_IMAGE.url, site.storage.url(self.F_IMAGE.path))
# test with folder
self.assertEqual(self.F_FOLDER.path, "_test/uploads/folder")
self.assertEqual(self.F_FOLDER.path_relative_directory, "folder")
self.assertEqual(self.F_FOLDER.path_full, os.path.join(site.storage.location, site.directory, "folder"))
self.assertEqual(self.F_FOLDER.dirname, "")
self.assertEqual(self.F_FOLDER.url, site.storage.url(self.F_FOLDER.path))
# test with alternative folder
self.assertEqual(self.F_SUBFOLDER.path, "_test/uploads/folder/subfolder")
self.assertEqual(self.F_SUBFOLDER.path_relative_directory, "folder/subfolder")
self.assertEqual(self.F_SUBFOLDER.path_full, os.path.join(site.storage.location, site.directory, "folder/subfolder"))
self.assertEqual(self.F_SUBFOLDER.dirname, "folder")
self.assertEqual(self.F_SUBFOLDER.url, site.storage.url(self.F_SUBFOLDER.path))
def test_image_attributes(self):
"""
FileObject image attributes
# dimensions
# width
# height
# aspectratio
# orientation
"""
self.assertEqual(self.F_IMAGE.dimensions, (1000, 750))
self.assertEqual(self.F_IMAGE.width, 1000)
self.assertEqual(self.F_IMAGE.height, 750)
self.assertEqual(self.F_IMAGE.aspectratio, 1.3333333333333333)
self.assertEqual(self.F_IMAGE.orientation, 'Landscape')
def test_folder_attributes(self):
"""
FileObject folder attributes
# directory (deprecated) > path_relative_directory
# folder (deprecated) > dirname
# is_folder
# is_empty
"""
# test with image
self.assertEqual(self.F_IMAGE.path_relative_directory, "folder/testimage.jpg") # equals path_relative_directory
self.assertEqual(self.F_IMAGE.dirname, "folder") # equals dirname
self.assertEqual(self.F_IMAGE.is_folder, False)
self.assertEqual(self.F_IMAGE.is_empty, False)
# test with folder
self.assertEqual(self.F_FOLDER.path_relative_directory, "folder") # equals path_relative_directory
self.assertEqual(self.F_FOLDER.dirname, "") # equals dirname
self.assertEqual(self.F_FOLDER.is_folder, True)
self.assertEqual(self.F_FOLDER.is_empty, False)
# test with alternative folder
self.assertEqual(self.F_SUBFOLDER.path_relative_directory, "folder/subfolder") # equals path_relative_directory
self.assertEqual(self.F_SUBFOLDER.dirname, "folder") # equals dirname
self.assertEqual(self.F_SUBFOLDER.is_folder, True)
self.assertEqual(self.F_SUBFOLDER.is_empty, True)
@patch('filebrowser.base.ADMIN_VERSIONS', ['large'])
def test_version_attributes_1(self):
"""
FileObject version attributes/methods
without versions_basedir
# is_version
# original
# original_filename
# versions_basedir
# versions
# admin_versions
# version_name(suffix)
# version_path(suffix)
# version_generate(suffix)
"""
# new settings
version_list = sorted(['_test/_versions/folder/testimage_{}.jpg'.format(name) for name in VERSIONS.keys()])
admin_version_list = ['_test/_versions/folder/testimage_large.jpg']
self.assertEqual(self.F_IMAGE.is_version, False)
self.assertEqual(self.F_IMAGE.original.path, self.F_IMAGE.path)
self.assertEqual(self.F_IMAGE.versions_basedir, "_test/_versions/")
self.assertEqual(self.F_IMAGE.versions(), version_list)
self.assertEqual(self.F_IMAGE.admin_versions(), admin_version_list)
self.assertEqual(self.F_IMAGE.version_name("large"), "testimage_large.jpg")
self.assertEqual(self.F_IMAGE.version_path("large"), "_test/_versions/folder/testimage_large.jpg")
# version does not exist yet
f_version = FileObject(os.path.join(site.directory, 'folder', "testimage_large.jpg"), site=site)
self.assertEqual(f_version.exists, False)
# generate version
f_version = self.F_IMAGE.version_generate("large")
self.assertEqual(f_version.path, "_test/_versions/folder/testimage_large.jpg")
self.assertEqual(f_version.exists, True)
self.assertEqual(f_version.is_version, True)
self.assertEqual(f_version.original_filename, "testimage.jpg")
self.assertEqual(f_version.original.path, self.F_IMAGE.path)
# FIXME: versions should not have versions or admin_versions
@patch('filebrowser.base.ADMIN_VERSIONS', ['large'])
def test_version_attributes_2(self):
"""
FileObject version attributes/methods
with versions_basedir
# is_version
# original
# original_filename
# versions_basedir
# versions
# admin_versions
# version_name(suffix)
# version_generate(suffix)
"""
version_list = sorted(['_test/_versions/folder/testimage_{}.jpg'.format(name) for name in VERSIONS.keys()])
admin_version_list = ['_test/_versions/folder/testimage_large.jpg']
self.assertEqual(self.F_IMAGE.is_version, False)
self.assertEqual(self.F_IMAGE.original.path, self.F_IMAGE.path)
self.assertEqual(self.F_IMAGE.versions_basedir, "_test/_versions/")
self.assertEqual(self.F_IMAGE.versions(), version_list)
self.assertEqual(self.F_IMAGE.admin_versions(), admin_version_list)
self.assertEqual(self.F_IMAGE.version_name("large"), "testimage_large.jpg")
self.assertEqual(self.F_IMAGE.version_path("large"), "_test/_versions/folder/testimage_large.jpg")
# version does not exist yet
f_version = FileObject(os.path.join(site.directory, 'folder', "testimage_large.jpg"), site=site)
self.assertEqual(f_version.exists, False)
# generate version
f_version = self.F_IMAGE.version_generate("large")
self.assertEqual(f_version.path, "_test/_versions/folder/testimage_large.jpg")
self.assertEqual(f_version.exists, True)
self.assertEqual(f_version.is_version, True)
self.assertEqual(f_version.original_filename, "testimage.jpg")
self.assertEqual(f_version.original.path, self.F_IMAGE.path)
self.assertEqual(f_version.versions(), [])
self.assertEqual(f_version.admin_versions(), [])
@patch('filebrowser.base.ADMIN_VERSIONS', ['large'])
def test_version_attributes_3(self):
"""
FileObject version attributes/methods
with alternative versions_basedir
# is_version
# original
# original_filename
# versions_basedir
# versions
# admin_versions
# version_name(suffix)
# version_generate(suffix)
"""
# new settings
version_list = sorted(['_test/_versions/folder/testimage_{}.jpg'.format(name) for name in VERSIONS.keys()])
admin_version_list = ['_test/_versions/folder/testimage_large.jpg']
self.assertEqual(self.F_IMAGE.is_version, False)
self.assertEqual(self.F_IMAGE.original.path, self.F_IMAGE.path)
self.assertEqual(self.F_IMAGE.versions_basedir, "_test/_versions/")
self.assertEqual(self.F_IMAGE.versions(), version_list)
self.assertEqual(self.F_IMAGE.admin_versions(), admin_version_list)
self.assertEqual(self.F_IMAGE.version_name("large"), "testimage_large.jpg")
self.assertEqual(self.F_IMAGE.version_path("large"), "_test/_versions/folder/testimage_large.jpg")
# version does not exist yet
f_version = FileObject(os.path.join(site.directory, 'folder', "testimage_large.jpg"), site=site)
self.assertEqual(f_version.exists, False)
# generate version
f_version = self.F_IMAGE.version_generate("large")
self.assertEqual(f_version.path, "_test/_versions/folder/testimage_large.jpg")
self.assertEqual(f_version.exists, True)
self.assertEqual(f_version.is_version, True)
self.assertEqual(f_version.original_filename, "testimage.jpg")
self.assertEqual(f_version.original.path, self.F_IMAGE.path)
self.assertEqual(f_version.versions(), [])
self.assertEqual(f_version.admin_versions(), [])
def test_delete(self):
"""
FileObject delete methods
# delete
# delete_versions
# delete_admin_versions
"""
# version does not exist yet
f_version = FileObject(os.path.join(site.directory, 'folder', "testimage_large.jpg"), site=site)
self.assertEqual(f_version.exists, False)
# generate version
f_version = self.F_IMAGE.version_generate("large")
f_version_thumb = self.F_IMAGE.version_generate("admin_thumbnail")
self.assertEqual(f_version.exists, True)
self.assertEqual(f_version_thumb.exists, True)
self.assertEqual(f_version.path, "_test/_versions/folder/testimage_large.jpg")
self.assertEqual(f_version_thumb.path, "_test/_versions/folder/testimage_admin_thumbnail.jpg")
# delete admin versions (large)
self.F_IMAGE.delete_admin_versions()
self.assertEqual(site.storage.exists(f_version.path), False)
# delete versions (admin_thumbnail)
self.F_IMAGE.delete_versions()
self.assertEqual(site.storage.exists(f_version_thumb.path), False)
class FileListingTests(TestCase):
"""
/_test/uploads/testimage.jpg
/_test/uploads/folder/
/_test/uploads/folder/subfolder/
/_test/uploads/folder/subfolder/testimage.jpg
"""
def setUp(self):
super(FileListingTests, self).setUp()
self.F_LISTING_FOLDER = FileListing(self.DIRECTORY, sorting_by='date', sorting_order='desc')
self.F_LISTING_IMAGE = FileListing(os.path.join(self.DIRECTORY, 'folder', 'subfolder', "testimage.jpg"))
shutil.copy(self.STATIC_IMG_PATH, self.SUBFOLDER_PATH)
shutil.copy(self.STATIC_IMG_PATH, self.DIRECTORY_PATH)
def test_init_attributes(self):
"""
FileListing init attributes
# path
# filter_func
# sorting_by
# sorting_order
"""
self.assertEqual(self.F_LISTING_FOLDER.path, '_test/uploads/')
self.assertEqual(self.F_LISTING_FOLDER.filter_func, None)
self.assertEqual(self.F_LISTING_FOLDER.sorting_by, 'date')
self.assertEqual(self.F_LISTING_FOLDER.sorting_order, 'desc')
def test_listing(self):
"""
FileObject listing
# listing
# files_listing_total
# files_listing_filtered
# results_listing_total
# results_listing_filtered
"""
self.assertEqual(self.F_LISTING_IMAGE.listing(), [])
self.assertEqual(list(self.F_LISTING_FOLDER.listing()), [u'folder', u'testimage.jpg'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_listing_total()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_listing_filtered()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder'])
self.assertEqual(self.F_LISTING_FOLDER.results_listing_total(), 2)
self.assertEqual(self.F_LISTING_FOLDER.results_listing_filtered(), 2)
def test_listing_filtered(self):
"""
FileObject listing
# listing
# files_listing_total
# files_listing_filtered
# results_listing_total
# results_listing_filtered
"""
self.assertEqual(self.F_LISTING_IMAGE.listing(), [])
self.assertEqual(list(self.F_LISTING_FOLDER.listing()), [u'folder', u'testimage.jpg'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_listing_total()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_listing_filtered()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder'])
self.assertEqual(self.F_LISTING_FOLDER.results_listing_total(), 2)
self.assertEqual(self.F_LISTING_FOLDER.results_listing_filtered(), 2)
def test_walk(self):
"""
FileObject walk
# walk
# files_walk_total
# files_walk_filtered
# results_walk_total
# results_walk_filtered
"""
self.assertEqual(self.F_LISTING_IMAGE.walk(), [])
self.assertEqual(list(self.F_LISTING_FOLDER.walk()), [u'folder/subfolder/testimage.jpg', u'folder/subfolder', u'folder', u'testimage.jpg'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_walk_total()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder', u'_test/uploads/folder/subfolder', u'_test/uploads/folder/subfolder/testimage.jpg'])
self.assertEqual(list(f.path for f in self.F_LISTING_FOLDER.files_walk_filtered()), [u'_test/uploads/testimage.jpg', u'_test/uploads/folder', u'_test/uploads/folder/subfolder', u'_test/uploads/folder/subfolder/testimage.jpg'])
self.assertEqual(self.F_LISTING_FOLDER.results_walk_total(), 4)
self.assertEqual(self.F_LISTING_FOLDER.results_walk_filtered(), 4)
class FileObjecNamerTests(TestCase):
PATCH_VERSIONS = {
'thumbnail': {'verbose_name': 'Thumbnail (1 col)', 'width': 60, 'height': 60, 'opts': 'crop'},
'small': {'verbose_name': 'Small (2 col)', 'width': 140, 'height': '', 'opts': ''},
'large': {'verbose_name': 'Large (8 col)', 'width': 680, 'height': '', 'opts': ''},
}
PATCH_ADMIN_VERSIONS = ['large']
def setUp(self):
super(FileObjecNamerTests, self).setUp()
shutil.copy(self.STATIC_IMG_PATH, self.FOLDER_PATH)
@patch('filebrowser.namers.VERSION_NAMER', 'filebrowser.namers.OptionsNamer')
def test_init_attributes(self):
"""
FileObject init attributes
# path
# head
# filename
# filename_lower
# filename_root
# extension
# mimetype
"""
self.assertEqual(self.F_IMAGE.path, "_test/uploads/folder/testimage.jpg")
self.assertEqual(self.F_IMAGE.head, '_test/uploads/folder')
self.assertEqual(self.F_IMAGE.filename, 'testimage.jpg')
self.assertEqual(self.F_IMAGE.filename_lower, 'testimage.jpg')
self.assertEqual(self.F_IMAGE.filename_root, 'testimage')
self.assertEqual(self.F_IMAGE.extension, '.jpg')
self.assertEqual(self.F_IMAGE.mimetype, ('image/jpeg', None))
@patch('filebrowser.namers.VERSION_NAMER', 'filebrowser.namers.OptionsNamer')
@patch('filebrowser.base.VERSIONS', PATCH_VERSIONS)
@patch('filebrowser.base.ADMIN_VERSIONS', PATCH_ADMIN_VERSIONS)
def test_version_attributes_with_options_namer(self):
"""
FileObject version attributes/methods
without versions_basedir
# is_version
# original
# original_filename
# versions_basedir
# versions
# admin_versions
# version_name(suffix)
# version_path(suffix)
# version_generate(suffix)
"""
# new settings
version_list = sorted([
'_test/_versions/folder/testimage_large--680x0.jpg',
'_test/_versions/folder/testimage_small--140x0.jpg',
'_test/_versions/folder/testimage_thumbnail--60x60--opts-crop.jpg'
])
admin_version_list = ['_test/_versions/folder/testimage_large--680x0.jpg']
self.assertEqual(self.F_IMAGE.is_version, False)
self.assertEqual(self.F_IMAGE.original.path, self.F_IMAGE.path)
self.assertEqual(self.F_IMAGE.versions_basedir, "_test/_versions/")
self.assertEqual(self.F_IMAGE.versions(), version_list)
self.assertEqual(self.F_IMAGE.admin_versions(), admin_version_list)
self.assertEqual(self.F_IMAGE.version_name("large"), "testimage_large--680x0.jpg")
self.assertEqual(self.F_IMAGE.version_path("large"), "_test/_versions/folder/testimage_large--680x0.jpg")
# version does not exist yet
f_version = FileObject(os.path.join(site.directory, 'folder', "testimage_large--680x0.jpg"), site=site)
self.assertEqual(f_version.exists, False)
# generate version
f_version = self.F_IMAGE.version_generate("large")
self.assertEqual(f_version.path, "_test/_versions/folder/testimage_large--680x0.jpg")
self.assertEqual(f_version.exists, True)
self.assertEqual(f_version.is_version, True)
self.assertEqual(f_version.original_filename, "testimage.jpg")
self.assertEqual(f_version.original.path, self.F_IMAGE.path)
| gpl-3.0 | -7,885,753,741,644,402,000 | 40.478516 | 234 | 0.650092 | false | 3.699826 | true | false | false |
sschiau/swift | utils/gyb_syntax_support/Node.py | 1 | 3036 | from __future__ import print_function
import sys
from kinds import SYNTAX_BASE_KINDS, kind_to_type, lowercase_first_word
def error(msg):
print('error: ' + msg, file=sys.stderr)
sys.exit(-1)
class Node(object):
"""
A Syntax node, possibly with children.
If the kind is "SyntaxCollection", then this node is considered a Syntax
Collection that will expose itself as a typedef rather than a concrete
subclass.
"""
def __init__(self, name, description=None, kind=None, traits=None,
children=None, element=None, element_name=None,
element_choices=None, omit_when_empty=False):
self.syntax_kind = name
self.swift_syntax_kind = lowercase_first_word(name)
self.name = kind_to_type(self.syntax_kind)
self.description = description
self.traits = traits or []
self.children = children or []
self.base_kind = kind
if self.base_kind == 'SyntaxCollection':
self.base_type = 'Syntax'
else:
self.base_type = kind_to_type(self.base_kind)
if self.base_kind not in SYNTAX_BASE_KINDS:
error("unknown base kind '%s' for node '%s'" %
(self.base_kind, self.syntax_kind))
self.omit_when_empty = omit_when_empty
self.collection_element = element or ""
# For SyntaxCollections make sure that the element_name is set.
assert(not self.is_syntax_collection() or element_name or
(element and element != 'Syntax'))
# If there's a preferred name for the collection element that differs
# from its supertype, use that.
self.collection_element_name = element_name or self.collection_element
self.collection_element_type = kind_to_type(self.collection_element)
self.collection_element_choices = element_choices or []
def is_base(self):
"""
Returns `True` if this node declares one of the base syntax kinds.
"""
return self.syntax_kind in SYNTAX_BASE_KINDS
def is_syntax_collection(self):
"""
Returns `True` if this node is a subclass of SyntaxCollection.
"""
return self.base_kind == "SyntaxCollection"
def requires_validation(self):
"""
Returns `True` if this node should have a `validate` method associated.
"""
return self.is_buildable()
def is_unknown(self):
"""
Returns `True` if this node is an `Unknown` syntax subclass.
"""
return "Unknown" in self.syntax_kind
def is_buildable(self):
"""
Returns `True` if this node should have a builder associated.
"""
return not self.is_base() and \
not self.is_unknown() and \
not self.is_syntax_collection()
def shall_be_omitted_when_empty(self):
"""
Returns 'True' if this node shall not be created while parsing if it
has no children.
"""
return self.omit_when_empty
| apache-2.0 | -8,498,343,167,070,602,000 | 33.11236 | 79 | 0.61166 | false | 4.080645 | false | false | false |
xueqiang41/shopsite | shopsite/shopsite/settings.py | 1 | 4757 | """
Django settings for shopsite project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SETTING_DIR = os.path.dirname(__file__)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xw&!parp-j@3_sdxu^-g_l^g_)-*+o*-n=8%f$0cp3jy!#*fw)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'shopsite.apps.catalog',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'shopsite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(SETTING_DIR,"templates"),],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.media',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'shopsite.utils.context_processors.shopsite',
],
},
},
]
WSGI_APPLICATION = 'shopsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
#配置msql,使用驱动mysql-connector-python
DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
'default':{
'NAME': 'shopsys',
#'ENGINE': 'mysql.connector.django',
'ENGINE': 'django.db.backends.mysql',
'USER': 'shopsys',
'PASSWORD': 'shopsys',
'HOST':'120.25.102.253',
'POST':'3306',
'TEST':{}
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
DATE_FORMAT = 'Y-m-d'
TIME_ZONE = 'Asia/Shanghai'
# 是否开启国际化支持,不开启时可以不加载翻译模块优化性能
USE_I18N = False
# 本地化格式支持,为True使用系统locale设置显示数字、时间等格式
USE_L10N = False
USE_TZ = True
# 是否设置Etag, 设置etag可以降低网络资源开销,但会增加服务器性能开销
USE_ETAGS = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
#在给定的路径寻找静态文件
STATICFILES_DIRS = (
os.path.join(SETTING_DIR,"static"),
)
STATIC_URL = '/static/'
#用户上传的图片
MEDIA_ROOT = os.path.join(BASE_DIR,"media")
MEDIA_URL = "/media/"
# 站点设置
SITE_NAME = '小白购'
META_KEYWORDS = '小白购, 特价男装, 精品女鞋, 计算机图书, 双十一特惠'
META_DESCRIPTION = '''小白购 - 成都最大、最安全的网上交易平台,提供各类服饰、
美容、家居、数码、话费/点卡充值… 2亿优质特价商品,同时提供担保交易(先收货
后付款)、先行赔付、假一赔三、七天无理由退换货、数码免费维修等安全交易保障
服务,让你全面安心享受网上购物乐趣!''' | gpl-2.0 | -2,099,231,296,338,303,500 | 26.856209 | 91 | 0.676132 | false | 2.68494 | false | false | false |
mlperf/training_results_v0.7 | Intel/benchmarks/minigo/8-nodes-32s-cpx-tensorflow/oneoffs/rotate_examples.py | 7 | 5384 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Randomly rotate the examples in a tfrecords.zz file."""
import sys
sys.path.insert(0, '.')
import itertools
import os.path
import multiprocessing as mp
from absl import app, flags
import tensorflow as tf
from tqdm import tqdm
import dual_net
import preprocessing
# This file produces a lot of logging, supress most of it
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
flags.DEFINE_string("in_dir", None, "tfrecord.zz in this dir are converted.")
flags.DEFINE_string("out_dir", None, "Records are writen to this dir.")
flags.DEFINE_bool("compare", False, "Whether to run compare after rotation.")
flags.DEFINE_integer("threads", None, "number of threads, default: num cpus.")
flags.DEFINE_integer("batch_size", 100, "batch_size for rotating.")
FLAGS = flags.FLAGS
OPTS = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
def grouper(n, iterable):
"""Itertools recipe
>>> list(grouper(3, iter('ABCDEFG')))
[['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
"""
return iter(lambda: list(itertools.islice(iterable, n)), [])
def batched_reader(file_path):
reader = tf.python_io.tf_record_iterator(file_path, OPTS)
return grouper(FLAGS.batch_size, reader)
def get_size(path):
return tf.gfile.Stat(path).length
def convert(paths):
position, in_path, out_path = paths
assert tf.gfile.Exists(in_path)
assert tf.gfile.Exists(os.path.dirname(out_path))
in_size = get_size(in_path)
if tf.gfile.Exists(out_path):
# Make sure out_path is about the size of in_path
size = get_size(out_path)
error = (size - in_size) / (in_size + 1)
# 5% smaller to 20% larger
if -0.05 < error < 0.20:
return out_path + " already existed"
return "ERROR on file size ({:.1f}% diff) {}".format(
100 * error, out_path)
num_batches = dual_net.EXAMPLES_PER_GENERATION // FLAGS.batch_size + 1
with tf.python_io.TFRecordWriter(out_path, OPTS) as writer:
record_iter = tqdm(
batched_reader(in_path),
desc=os.path.basename(in_path),
position=position,
total=num_batches)
for record in record_iter:
xs, rs = preprocessing.batch_parse_tf_example(len(record), record)
# Undo cast in batch_parse_tf_example.
xs = tf.cast(xs, tf.uint8)
# map the rotation function.
x_rot, r_rot = preprocessing._random_rotation(xs, rs)
with tf.Session() as sess:
x_rot, r_rot = sess.run([x_rot, r_rot])
tf.reset_default_graph()
pi_rot = r_rot['pi_tensor']
val_rot = r_rot['value_tensor']
for r, x, pi, val in zip(record, x_rot, pi_rot, val_rot):
record_out = preprocessing.make_tf_example(x, pi, val)
serialized = record_out.SerializeToString()
writer.write(serialized)
assert len(r) == len(serialized), (len(r), len(serialized))
def compare(pair):
position, in_path, out_path = pair
num_batches = dual_net.EXAMPLES_PER_GENERATION // FLAGS.batch_size + 1
compare_iter = tqdm(
zip(batched_reader(in_path), batched_reader(out_path)),
desc=os.path.basename(in_path),
position=position,
total=num_batches)
count = 0
equal = 0
results = {}
for a, b in compare_iter:
# a, b are batched records
xa, ra = preprocessing.batch_parse_tf_example(len(a), a)
xb, rb = preprocessing.batch_parse_tf_example(len(b), b)
xa, xb, ra, rb = tf.Session().run([xa, xb, ra, rb])
# NOTE: This relies on python3 deterministic dictionaries.
values = [xa] + list(ra.values()) + [xb] + list(rb.values())
for xa, pa, va, xb, pb, vb in zip(*values):
count += 1
assert va == vb
equal += (xa == xb).all() + (pa == pb).all()
results['equal'] = "{}/{} = {:.3f}".format(equal, count, equal / count)
compare_iter.set_postfix(results)
def main(remaining_argv):
paths = sorted(tf.gfile.ListDirectory(FLAGS.in_dir))
total = len(paths)
pairs = []
for i, path in enumerate(paths):
ext = '.tfrecord.zz'
out_path = path.replace(ext, '_rot' + ext)
pairs.append((
-total + i,
os.path.join(FLAGS.in_dir, path),
os.path.join(FLAGS.out_dir, out_path)))
with mp.Pool(FLAGS.threads) as p:
# NOTE: this keeps tqdm progress bars visible.
print("\n" * (total + 1))
list(tqdm(p.imap(convert, pairs), desc="converting", total=total))
if FLAGS.compare:
print("\n" * (total + 1))
list(tqdm(p.imap(compare, pairs), desc="comparing", total=total))
if __name__ == "__main__":
app.run(main)
| apache-2.0 | -4,926,151,409,267,089,000 | 33.512821 | 79 | 0.613484 | false | 3.429299 | false | false | false |
baffolobill/django-lfs | lfs/mail/utils.py | 3 | 6009 | # django imports
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template import RequestContext
from django.template.base import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
def send_order_sent_mail(order):
try:
_send_order_sent_mail.delay(order)
except AttributeError:
_send_order_sent_mail(order)
def _send_order_sent_mail(order):
"""Sends an order has been sent mail to the shop customer
"""
import lfs.core.utils
shop = lfs.core.utils.get_default_shop()
try:
subject = render_to_string("lfs/mail/order_sent_subject.txt", {"order": order})
except TemplateDoesNotExist:
subject = _(u"Your order has been sent")
from_email = shop.from_email
to = [order.customer_email]
bcc = shop.get_notification_emails()
# text
text = render_to_string("lfs/mail/order_sent_mail.txt", {"order": order})
mail = EmailMultiAlternatives(
subject=subject, body=text, from_email=from_email, to=to, bcc=bcc)
# html
html = render_to_string("lfs/mail/order_sent_mail.html", {
"order": order
})
mail.attach_alternative(html, "text/html")
mail.send(fail_silently=True)
def send_order_paid_mail(order):
try:
_send_order_paid_mail.delay(order)
except AttributeError:
_send_order_paid_mail(order)
def _send_order_paid_mail(order):
"""Sends an order has been paid mail to the shop customer.
"""
import lfs.core.utils
shop = lfs.core.utils.get_default_shop()
try:
subject = render_to_string("lfs/mail/order_paid_subject.txt", {"order": order})
except TemplateDoesNotExist:
subject = _(u"Your order has been paid")
from_email = shop.from_email
to = [order.customer_email]
bcc = shop.get_notification_emails()
# text
text = render_to_string("lfs/mail/order_paid_mail.txt", {"order": order})
mail = EmailMultiAlternatives(
subject=subject, body=text, from_email=from_email, to=to, bcc=bcc)
# html
html = render_to_string("lfs/mail/order_paid_mail.html", {
"order": order
})
mail.attach_alternative(html, "text/html")
mail.send(fail_silently=True)
def send_order_received_mail(request, order):
try:
_send_order_received_mail.delay(request, order)
except AttributeError:
_send_order_received_mail(request, order)
def _send_order_received_mail(request, order):
"""Sends an order received mail to the shop customer.
Customer information is taken from the provided order.
"""
import lfs.core.utils
shop = lfs.core.utils.get_default_shop()
try:
subject = render_to_string("lfs/mail/order_received_subject.txt", {"order": order})
except TemplateDoesNotExist:
subject = _(u"Your order has been received")
from_email = shop.from_email
to = [order.customer_email]
bcc = shop.get_notification_emails()
# text
text = render_to_string("lfs/mail/order_received_mail.txt", RequestContext(request, {"order": order}))
mail = EmailMultiAlternatives(
subject=subject, body=text, from_email=from_email, to=to, bcc=bcc)
# html
html = render_to_string("lfs/mail/order_received_mail.html", RequestContext(request, {
"order": order
}))
mail.attach_alternative(html, "text/html")
mail.send(fail_silently=True)
def send_customer_added(user):
try:
_send_customer_added.delay(user)
except AttributeError:
_send_customer_added(user)
def _send_customer_added(user):
"""Sends a mail to a newly registered user.
"""
import lfs.core.utils
shop = lfs.core.utils.get_default_shop()
from_email = shop.from_email
to = [user.email]
bcc = shop.get_notification_emails()
# text
text = render_to_string("lfs/mail/new_user_mail.txt", {
"user": user, "shop": shop})
# subject
subject = render_to_string("lfs/mail/new_user_mail_subject.txt", {
"user": user, "shop": shop})
mail = EmailMultiAlternatives(
subject=subject, body=text, from_email=from_email, to=to, bcc=bcc)
# html
html = render_to_string("lfs/mail/new_user_mail.html", {
"user": user, "shop": shop,
})
mail.attach_alternative(html, "text/html")
mail.send(fail_silently=True)
def send_review_added(review):
try:
_send_review_added.delay(review)
except AttributeError:
_send_review_added(review)
def _send_review_added(review):
"""Sends a mail to shop admins that a new review has been added
"""
import lfs.core.utils
shop = lfs.core.utils.get_default_shop()
subject = _(u"New review has been added")
from_email = shop.from_email
to = shop.get_notification_emails()
ctype = ContentType.objects.get_for_id(review.content_type_id)
product = ctype.get_object_for_this_type(pk=review.content_id)
# text
text = render_to_string("lfs/mail/review_added_mail.txt", {
"review": review,
"product": product,
})
mail = EmailMultiAlternatives(
subject=subject, body=text, from_email=from_email, to=to)
# html
html = render_to_string("lfs/mail/review_added_mail.html", {
"site": "http://%s" % Site.objects.get(id=settings.SITE_ID),
"review": review,
"product": product,
})
mail.attach_alternative(html, "text/html")
mail.send(fail_silently=True)
# celery
try:
from celery.task import task
except ImportError:
pass
else:
_send_customer_added = task(_send_customer_added)
_send_order_paid_mail = task(_send_order_paid_mail)
_send_order_received_mail = task(_send_order_received_mail)
_send_order_sent_mail = task(_send_order_sent_mail)
_send_review_added = task(_send_review_added)
| bsd-3-clause | 1,145,132,046,764,439,400 | 27.751196 | 106 | 0.659511 | false | 3.439611 | false | false | false |
rebase-helper/rebase-helper | rebasehelper/constants.py | 1 | 1925 | # -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <[email protected]>
# Tomáš Hozza <[email protected]>
# Nikola Forró <[email protected]>
# František Nečas <[email protected]>
import locale
PROGRAM_DESCRIPTION: str = 'Tool to help package maintainers rebase their packages to the latest upstream version'
NEW_ISSUE_LINK: str = 'https://github.com/rebase-helper/rebase-helper/issues/new'
RESULTS_DIR: str = 'rebase-helper-results'
WORKSPACE_DIR: str = 'rebase-helper-workspace'
REBASED_SOURCES_DIR: str = 'rebased-sources'
OLD_BUILD_DIR: str = 'old-build'
NEW_BUILD_DIR: str = 'new-build'
CHECKERS_DIR: str = 'checkers'
LOGS_DIR: str = 'logs'
DEBUG_LOG: str = 'debug.log'
TRACEBACK_LOG: str = 'traceback.log'
VERBOSE_LOG: str = 'verbose.log'
INFO_LOG: str = 'info.log'
REPORT: str = 'report'
CHANGES_PATCH: str = 'changes.patch'
OLD_SOURCES_DIR: str = 'old_sources'
NEW_SOURCES_DIR: str = 'new_sources'
GIT_CONFIG: str = '.gitconfig'
CONFIG_PATH: str = '$XDG_CONFIG_HOME'
CONFIG_FILENAME: str = 'rebase-helper.cfg'
SYSTEM_ENCODING: str = locale.getpreferredencoding()
| gpl-2.0 | 7,541,820,209,842,720,000 | 33.872727 | 114 | 0.728884 | false | 3.175497 | false | false | false |
dfang/odoo | addons/mass_mailing/wizard/mail_compose_message.py | 22 | 2775 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class MailComposeMessage(models.TransientModel):
"""Add concept of mass mailing campaign to the mail.compose.message wizard
"""
_inherit = 'mail.compose.message'
mass_mailing_campaign_id = fields.Many2one('mail.mass_mailing.campaign', string='Mass Mailing Campaign')
mass_mailing_id = fields.Many2one('mail.mass_mailing', string='Mass Mailing', ondelete='cascade')
mass_mailing_name = fields.Char(string='Mass Mailing')
mailing_list_ids = fields.Many2many('mail.mass_mailing.list', string='Mailing List')
@api.multi
def get_mail_values(self, res_ids):
""" Override method that generated the mail content by creating the
mail.mail.statistics values in the o2m of mail_mail, when doing pure
email mass mailing. """
self.ensure_one()
res = super(MailComposeMessage, self).get_mail_values(res_ids)
# use only for allowed models in mass mailing
if self.composition_mode == 'mass_mail' and \
(self.mass_mailing_name or self.mass_mailing_id) and \
self.model in [item[0] for item in self.env['mail.mass_mailing']._get_mailing_model()]:
mass_mailing = self.mass_mailing_id
if not mass_mailing:
reply_to_mode = 'email' if self.no_auto_thread else 'thread'
reply_to = self.reply_to if self.no_auto_thread else False
mass_mailing = self.env['mail.mass_mailing'].create({
'mass_mailing_campaign_id': self.mass_mailing_campaign_id.id,
'name': self.mass_mailing_name,
'template_id': self.template_id.id,
'state': 'done',
'reply_to_mode': reply_to_mode,
'reply_to': reply_to,
'sent_date': fields.Datetime.now(),
'body_html': self.body,
'mailing_model': self.model,
'mailing_domain': self.active_domain,
})
for res_id in res_ids:
res[res_id].update({
'mailing_id': mass_mailing.id,
'statistics_ids': [(0, 0, {
'model': self.model,
'res_id': res_id,
'mass_mailing_id': mass_mailing.id,
})],
# email-mode: keep original message for routing
'notification': mass_mailing.reply_to_mode == 'thread',
'auto_delete': not mass_mailing.keep_archives,
})
return res
| agpl-3.0 | 7,045,370,708,939,977,000 | 49.454545 | 108 | 0.55027 | false | 4.129464 | false | false | false |
marquesarthur/BugAnalysisRecommender | patterny/resources/script/shenanigans.py | 1 | 6080 |
------------------------------------------------------------------------------------------------
def sss_similarity(s1, s2, threshold=0.50, type='relation', corpus='webbase'):
    """Score the semantic similarity of two phrases via the UMBC SSS web API.

    Parameters:
        s1, s2: the two phrases to compare.
        threshold: currently unused here; kept for interface compatibility
            with callers that pass it (filtering happens at the call sites).
        type: similarity type sent to the service ('relation' or 'concept').
            NOTE: shadows the builtin `type`; name kept so keyword callers
            still work.
        corpus: corpus the service should score against.

    Returns:
        float similarity score, or 0 on any request/parse failure.
    """
    sss_url = "http://swoogle.umbc.edu/SimService/GetSimilarity"
    try:
        response = get(sss_url,
                       params={'operation': 'api', 'phrase1': s1, 'phrase2': s2, 'type': type, 'corpus': corpus})
        similarity = float(response.text.strip())
        return similarity
    except Exception as ex:
        # Bug fix: Exception has no `.msg` attribute, so the old
        # `print ex.msg` raised AttributeError and killed the fallback.
        # Printing the exception itself works on Python 2 and 3.
        print(ex)
        return 0
# Probe the similarity service with a few hand-picked sentence pairs and
# print the raw scores (REPL-style experiment).  The loop variables leave
# s1/s2 bound to the last pair, matching the original statement sequence.
_pairs = [
    (u'When I try to build the index for the applications, the KHelpCenter would hang, now it just crashes.',
     u'As I said, I can install fonts - so I know the basic system works.'),
    (u'When I try to build the index for the applications, the KHelpCenter would hang, now it just crashes.',
     u'If it does, then the font installed has an issue, if not then it is an system problem.'),
    (u'When I try to build the index for the applications, the KHelpCenter would hang, now it just crashes.',
     u'Installing (and removing) system fonts is now working for me in KDE 4.14.9, except that I always get prompted for root authentication twice.'),
    (u"I\\'m a newbie to KDE, and it would be very helpful to have an index to the application documents/handbooks.",
     u'So, this does sound like a system issue.'),
]
for s1, s2 in _pairs:
    print(sss_similarity(s1, s2))
# ------------------------------------------------------------------------------------------------
# [sb['id'] for sb in recommended_similar_bugs]
bug_ids = [351900, 347673, 351405, 340147, 347145, 348378, 343961]
# keep only the candidate sentences whose bug id is in the recommended set
sentences = [aux_sentences[idx] for idx, bug in enumerate(ids) if bug in bug_ids]

# score every original-vs-candidate sentence pair; echo the pair itself
# whenever it clears the 0.25 similarity threshold
for s1 in original:
    for s2 in sentences:
        similarity = sss(s1, s2)
        print(similarity)
        if similarity >= 0.25:
            print(s1)
            print('')
            print(s2)
# Bug-report ids whose similarity analysis was manually checked and judged OK.
sample_ok = [
    351724, 350707, 277464, 351405, 269619, 300577, 351869, 351900,
    343772, 335678, 300951, 343961, 351559, 343982, 341951, 344114,
]
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i + n]
batch = chunks(aux, bugs_per_chunk)
for i, current_batch in enumerate(batch):
analysis_similarity_file = 'analysis_similarity_file_{}.json'.format(str(i))
print analysis_similarity_file
with open(analysis_similarity_file, 'wb+') as outfile:
json.dump(current_batch, outfile, indent=4, sort_keys=True)
def count_similar(data, t):
count = 0
for a in data:
if a['value']['similarity'] >= t:
count += 1
return count
with open('analysis_similarity_file.json', 'rb') as data:
aux = json.load(data)
def pairs_in_range(data, lower_bound, upper_bound):
return [r['key'] for r in data if lower_bound <= r['value']['similarity'] < upper_bound]
def percentage_in_range(tuples_in_range, data):
return len(tuples_in_range) / len(data)
semantic_020_025 = pairs_in_range(analyses, 0.20, 0.25)
semantic_025_030 = pairs_in_range(analyses, 0.25, 0.30)
semantic_030_035 = pairs_in_range(analyses, 0.30, 0.35)
semantic_035_040 = pairs_in_range(analyses, 0.35, 0.40)
semantic_040_045 = pairs_in_range(analyses, 0.40, 0.45)
semantic_045_050 = pairs_in_range(analyses, 0.45, 0.50)
semantic_050_055 = pairs_in_range(analyses, 0.50, 0.55)
semantic_065_070 = pairs_in_range(analyses, 0.65, 0.70)
semantic_070_075 = pairs_in_range(analyses, 0.70, 0.75)
percentage_in_range(semantic_020_025, analyses)
percentage_in_range(semantic_030_035, analyses)
percentage_in_range(semantic_035_040, analyses)
percentage_in_range(semantic_040_045, analyses)
percentage_in_range(semantic_045_050, analyses)
percentage_in_range(semantic_050_055, analyses)
percentage_in_range(semantic_065_070, analyses)
percentage_in_range(semantic_070_075, analyses)
list(filter(lambda a: a['key'] == random.choice(semantic_020_025), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_025_030), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_030_035), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_035_040), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_040_045), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_045_050), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_050_055), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_065_070), analyses))
list(filter(lambda a: a['key'] == random.choice(semantic_070_075), analyses))
random.choice(semantic_045_050)
random.choice(semantic_045_050)
random.choice(semantic_050_055)
random.choice(semantic_050_055)
with open('problem_vector_similarity_map.json', 'rb') as data:
aux = json.load(data)
vector_similarity_map = {}
for r in aux:
key = (r['key'][0], r['key'][1])
value = r['value']
vector_similarity_map[key] = value
[ 266290, -- search
122437,
[164308,
,
311799,
101876]
351869, 351627 <- wrong summary
def is_in(key):
return len(list(filter(lambda a: key in a['key'], analyses))) > 0
def threshold_of(a, b):
return list(filter(lambda x: x['key'] == [a, b], analyses))
------------------------------------------------------------------------------------------------
Teaching assistantship is a job, and for scheduling purposes TA duties take precedence over all other UBC-related duties, except for regularly scheduled activities (lectures, labs, etc.) for the courses that the TA is taking for credit.
import os, sys
from PIL import Image
from resizeimage import resizeimage
from os import listdir
from os.path import isfile, join
cwd = os.getcwd()
onlyfiles = [f for f in listdir(cwd) if isfile(join(cwd, f))]
for i, f in enumerate(onlyfiles):
try:
with open(f, 'r+b') as f:
with Image.open(f) as image:
cover = resizeimage.resize_cover(image, [210, 330])
outfile = 'large/portrait_{}_lg.jpeg'.format(str(i))
cover.save(outfile, image.format)
except Exception as ex:
print ex
continue | mit | 870,109,135,793,218,200 | 31.005263 | 236 | 0.630428 | false | 3.156802 | false | false | false |
craws/OpenAtlas-Python | openatlas/models/network.py | 1 | 3821 | from typing import Any, Dict, Iterator, Optional
from flask import flash, g
from flask_wtf import FlaskForm
from psycopg2.extras import NamedTupleCursor
from openatlas.util.display import truncate
class Network:
properties = ['P7', 'P11', 'P14', 'P22', 'P23', 'P24', 'P25', 'P67', 'P74', 'P107', 'OA7',
'OA8', 'OA9']
classes = ['E7', 'E8', 'E9', 'E18', 'E21', 'E31', 'E33', 'E40', 'E53', 'E74', 'E84']
sql_where = """
AND ((e.system_type IS NULL AND e.class_code != 'E53')
OR (e.system_type NOT IN ('feature', 'stratigraphic unit', 'find', 'file',
'source translation')
AND e.system_type NOT LIKE 'external reference%%'))"""
sql_where2 = """
AND ((e2.system_type IS NULL AND e2.class_code != 'E53')
OR (e2.system_type NOT IN ('feature', 'stratigraphic unit', 'find', 'file',
'source translation')
AND e2.system_type NOT LIKE 'external reference%%'))"""
@staticmethod
def get_edges() -> Iterator[NamedTupleCursor.Record]:
sql = """
SELECT l.id, l.domain_id, l.range_id FROM model.link l
JOIN model.entity e ON l.domain_id = e.id
JOIN model.entity e2 ON l.range_id = e2.id
WHERE property_code IN %(properties)s """ + Network.sql_where + Network.sql_where2
g.execute(sql, {'properties': tuple(Network.properties)})
return g.cursor.fetchall()
@staticmethod
def get_entities() -> Iterator[NamedTupleCursor.Record]:
sql = """
SELECT e.id, e.class_code, e.name
FROM model.entity e
WHERE class_code IN %(classes)s """ + Network.sql_where
g.execute(sql, {'classes': tuple(Network.classes)})
return g.cursor.fetchall()
@staticmethod
def get_object_mapping() -> Dict[int, int]:
# Get mapping between location and objects to join them into one entity
sql = """
SELECT e.id, l.range_id
FROM model.entity e
JOIN model.link l ON e.id = domain_id AND l.property_code = 'P53';"""
g.execute(sql)
return {row.range_id: row.id for row in g.cursor.fetchall()}
@staticmethod
def get_network_json(form: FlaskForm,
params: Dict[str, Any],
dimensions: Optional[int]) -> Optional[str]:
mapping = Network.get_object_mapping()
linked_entity_ids = set()
edges = []
for row in Network.get_edges():
domain_id = mapping[row.domain_id] if row.domain_id in mapping else row.domain_id
range_id = mapping[row.range_id] if row.range_id in mapping else row.range_id
linked_entity_ids.add(domain_id)
linked_entity_ids.add(range_id)
edges.append({'id': int(row.id), 'source': domain_id, 'target': range_id})
nodes = []
entities = set()
for row in Network.get_entities():
if row.id in mapping: # pragma: no cover - Locations will be mapped to objects
continue
if not form.orphans.data and row.id not in linked_entity_ids: # Hide orphans
continue
entities.add(row.id)
name = truncate(row.name.replace("'", ""), span=False)
nodes.append({'id': row.id,
'label' if dimensions else 'name': name,
'color': params['classes'][row.class_code]['color']})
if not linked_entity_ids.issubset(entities): # pragma: no cover
flash('Missing nodes for links', 'error')
return ''
return str({'nodes': nodes, 'edges' if dimensions else 'links': edges}) if nodes else None
| gpl-2.0 | 2,012,162,949,420,295,400 | 43.952941 | 98 | 0.556399 | false | 3.828657 | false | false | false |
deepjets/deepjets | deepjets/generate.py | 1 | 2156 | from ._libdeepjets import generate_events as _generate_events
from ._libdeepjets import PythiaInput, HepMCInput
import os
from fnmatch import fnmatch
import logging
from .extern.six import string_types
log = logging.getLogger(__name__)
__all__ = [
'generate_events',
'PythiaInput', 'HepMCInput',
'get_generator_input',
]
def get_generator_input(name, filename, **kwargs):
"""
name may be 'pythia' or 'hepmc'
filename may be the pythia config file or a HepMC file
"""
name = name.lower().strip()
if name == 'pythia':
xmldoc = os.environ.get('PYTHIA8DATA', os.path.join(
os.environ.get('DEEPJETS_SFT_DIR', '/usr/local'),
'share/Pythia8/xmldoc'))
if not os.path.exists(filename):
internal_filename = os.path.join(
os.environ.get('DEEPJETS_DIR'), 'config', 'pythia', filename)
if not os.path.isabs(filename) and os.path.exists(internal_filename):
log.warning("{0} does not exist but using internal "
"config with the same name instead: {1}".format(
filename, internal_filename))
filename = internal_filename
else:
raise IOError("Pythia config not found: {0}".format(filename))
gen_input = PythiaInput(filename, xmldoc, **kwargs)
elif name == 'hepmc':
gen_input = HepMCInput(filename)
if kwargs:
raise ValueError(
"unrecognized parameters in kwargs: {0}".format(kwargs))
else:
raise ValueError(
"no generator input available with name '{0}'".format(name))
return gen_input
def generate_events(gen_input, events=-1, write_to='', ignore_weights=False, **kwargs):
if isinstance(gen_input, string_types):
if fnmatch(os.path.splitext(gen_input)[1], '.hepmc*'):
gen_input = get_generator_input('hepmc', gen_input, **kwargs)
else:
gen_input = get_generator_input('pythia', gen_input, **kwargs)
for event in _generate_events(gen_input, events, write_to, ignore_weights):
yield event
| bsd-3-clause | -5,898,751,006,015,875,000 | 37.5 | 87 | 0.605751 | false | 3.723661 | false | false | false |
madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/functionslocal/vtable/createderivedcolumns.py | 1 | 2211 | import setpath
import functions
import json
import re
registered=True
class createderivedcolumns(functions.vtable.vtbase.VT): #uses + and : for multiplication
def VTiter(self, *parsedArgs,**envars):
largs, dictargs = self.full_parse(parsedArgs)
if 'query' not in dictargs:
raise functions.OperatorError(__name__.rsplit('.')[-1],"No query argument ")
query = dictargs['query']
if 'newSchema' not in dictargs: # einai to neo sxhma pou tha exei o pinakas.
raise functions.OperatorError(__name__.rsplit('.')[-1],"No newSchema ")
newSchema = str(dictargs['newSchema'])
newSchema = re.split(',',newSchema)
newSchema1 =""
for i in xrange(len(newSchema)):
newSchema1 += newSchema[i]+","
newSchema1=newSchema1[:-1]
yield ([newSchema1],)
cur = envars['db'].cursor()
c=cur.execute(query)
currentSchema1 = cur.getdescriptionsafe()
currentSchema =[str(x[0]) for x in currentSchema1]
for myrow in c:
myrowresult =""
for d in xrange(len(newSchema)):
colval = 1.0
if ":" in newSchema[d]:
elements = re.split(":",newSchema[d])
else:
elements = [newSchema[d]]
item=[]
for e in xrange(len(elements)):
colname = elements[e]
myindex = currentSchema.index(str(colname))
colval = colval * float(myrow[myindex])
myrowresult+=str(colval)+","
# print myrow
# print newSchema
# print "result", myrowresult
yield tuple([myrowresult[0:-1]],)
def Source():
return functions.vtable.vtbase.VTGenerator(createderivedcolumns)
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
import setpath
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.tes
| mit | -3,548,538,125,132,672,500 | 26.296296 | 88 | 0.556309 | false | 4.086876 | false | false | false |
plamut/ggrc-core | src/ggrc/migrations/versions/20160707132122_1269660b288b_remove_ca_duplicate_values.py | 7 | 1826 | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
remove ca duplicate values
Create Date: 2016-07-07 13:21:22.732299
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '1269660b288b'
down_revision = '3a0c977a9cb8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Remove duplicate lines and include only the newest ones.
# This relies on the newest lines having the biggest id.
connection = op.get_bind()
good_rows = connection.execute("""
SELECT MAX(id) AS id
FROM custom_attribute_values
GROUP BY custom_attribute_id, attributable_id
""").fetchall()
all_rows = connection.execute(
"SELECT id FROM custom_attribute_values"
).fetchall()
good_ids = set(row[0] for row in good_rows)
all_ids = set(row[0] for row in all_rows)
bad_ids = [str(i) for i in all_ids.difference(good_ids)]
if bad_ids:
op.execute(
"""
DELETE FROM custom_attribute_values
WHERE id IN ({bad_ids})
""".format(bad_ids=",".join(bad_ids))
)
# The unique constraint does not include the attributable_type since that is
# already specified in the custom attribute definition (custom_attribute_id)
# and we should avoid adding string values to indexes.
op.create_unique_constraint(
"uq_custom_attribute_value",
"custom_attribute_values",
["custom_attribute_id", "attributable_id"]
)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint(
"uq_custom_attribute_value",
"custom_attribute_values",
type_="unique"
)
| apache-2.0 | -855,889,480,554,926,200 | 27.53125 | 79 | 0.687295 | false | 3.652 | false | false | false |
odahoda/noisicaa | build_utils/waf/csound.py | 1 | 2382 | # -*- mode: python -*-
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import os.path
import subprocess
import sys
from waflib.Configure import conf
from waflib.Task import Task
from waflib import Utils
def configure(ctx):
ctx.find_program('csound')
class compile_csound(Task):
def __str__(self):
return self.outputs[0].relpath()
def keyword(self):
return 'Generating'
def run(self):
ctx = self.generator.bld
cwd = ctx.srcnode
cmd = [
ctx.env.CSOUND[0],
'-o' + self.outputs[0].path_from(cwd),
self.inputs[0].path_from(cwd),
]
kw = {
'cwd': cwd.abspath(),
'stdout': subprocess.PIPE,
'stderr': subprocess.STDOUT,
}
ctx.log_command(cmd, kw)
rc, out, _ = Utils.run_process(cmd, kw)
if rc:
sys.stderr.write(out.decode('utf-8'))
return rc
@conf
def rendered_csound(ctx, source, install=None, install_to=None, chmod=0o644):
assert source.endswith('.csnd')
wav_path = os.path.splitext(source)[0] + '.wav'
target = ctx.path.get_bld().make_node(wav_path)
task = compile_csound(env=ctx.env)
task.set_inputs(ctx.path.find_resource(source))
task.set_outputs(target)
ctx.add_to_group(task)
if install is None:
install = ctx.in_group(ctx.GRP_BUILD_MAIN)
if install:
if install_to is None:
install_to = os.path.join(
ctx.env.DATADIR, target.parent.path_from(ctx.bldnode.make_node('data')))
ctx.install_files(install_to, target, chmod=chmod)
| gpl-2.0 | -5,177,411,920,902,307,000 | 27.357143 | 88 | 0.645676 | false | 3.549925 | false | false | false |
ldoktor/autotest | client/tests/kvm/tests/migration_with_reboot.py | 2 | 1512 | from autotest.client.shared import utils
def run_migration_with_reboot(test, params, env):
"""
KVM migration test:
1) Get a live VM and clone it.
2) Verify that the source VM supports migration. If it does, proceed with
the test.
3) Reboot the VM
4) Send a migration command to the source VM and wait until it's finished.
5) Kill off the source VM.
6) Log into the destination VM after the migration is finished.
@param test: kvm test object.
@param params: Dictionary with test parameters.
@param env: Dictionary with the test environment.
"""
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
login_timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=login_timeout)
mig_timeout = float(params.get("mig_timeout", "3600"))
mig_protocol = params.get("migration_protocol", "tcp")
mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2
try:
# Reboot the VM in the background
bg = utils.InterruptedThread(vm.reboot, (session,))
bg.start()
try:
while bg.isAlive():
vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay)
except Exception:
# If something bad happened in the main thread, ignore exceptions
# raised in the background thread
bg.join(suppress_exception=True)
raise
else:
session = bg.join()
finally:
session.close()
| gpl-2.0 | -2,831,542,114,939,271,000 | 34.162791 | 78 | 0.625661 | false | 3.978947 | true | false | false |
CMPUT410W15T02/CMPUT410W15-project | testenv/lib/python2.7/site-packages/django/contrib/formtools/tests/tests.py | 53 | 7410 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import unittest
import warnings
from django import http
from django.contrib.formtools import preview, utils
from django.test import TestCase, override_settings
from django.utils._os import upath
from django.contrib.formtools.tests.forms import (
HashTestBlankForm, HashTestForm, TestForm,
)
success_string = "Done was called!"
success_string_encoded = success_string.encode()
class TestFormPreview(preview.FormPreview):
def get_context(self, request, form):
context = super(TestFormPreview, self).get_context(request, form)
context.update({'custom_context': True})
return context
def get_initial(self, request):
return {'field1': 'Works!'}
def done(self, request, cleaned_data):
return http.HttpResponse(success_string)
@override_settings(
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
)
class PreviewTests(TestCase):
urls = 'django.contrib.formtools.tests.urls'
def setUp(self):
super(PreviewTests, self).setUp()
# Create a FormPreview instance to share between tests
self.preview = preview.FormPreview(TestForm)
input_template = '<input type="hidden" name="%s" value="%s" />'
self.input = input_template % (self.preview.unused_name('stage'), "%d")
self.test_data = {'field1': 'foo', 'field1_': 'asdf'}
def test_unused_name(self):
"""
Verifies name mangling to get uniue field name.
"""
self.assertEqual(self.preview.unused_name('field1'), 'field1__')
def test_form_get(self):
"""
Test contrib.formtools.preview form retrieval.
Use the client library to see if we can successfully retrieve
the form (mostly testing the setup ROOT_URLCONF
process). Verify that an additional hidden input field
is created to manage the stage.
"""
response = self.client.get('/preview/')
stage = self.input % 1
self.assertContains(response, stage, 1)
self.assertEqual(response.context['custom_context'], True)
self.assertEqual(response.context['form'].initial, {'field1': 'Works!'})
def test_form_preview(self):
"""
Test contrib.formtools.preview form preview rendering.
Use the client library to POST to the form to see if a preview
is returned. If we do get a form back check that the hidden
value is correctly managing the state of the form.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 1, 'date1': datetime.date(2006, 10, 25)})
response = self.client.post('/preview/', self.test_data)
# Check to confirm stage is set to 2 in output form.
stage = self.input % 2
self.assertContains(response, stage, 1)
def test_form_submit(self):
"""
Test contrib.formtools.preview form submittal.
Use the client library to POST to the form with stage set to 3
to see if our forms done() method is called. Check first
without the security hash, verify failure, retry with security
hash and verify success.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 2, 'date1': datetime.date(2006, 10, 25)})
response = self.client.post('/preview/', self.test_data)
self.assertNotEqual(response.content, success_string_encoded)
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/preview/', self.test_data)
self.assertEqual(response.content, success_string_encoded)
def test_bool_submit(self):
"""
Test contrib.formtools.preview form submittal when form contains:
BooleanField(required=False)
Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
form's hash would be computed with no value for ``bool1``. However, when
the preview form is rendered, the unchecked hidden BooleanField would be
rendered with the string value 'False'. So when the preview form is
resubmitted, the hash would be computed with the value 'False' for
``bool1``. We need to make sure the hashes are the same in both cases.
"""
self.test_data.update({'stage': 2})
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash, 'bool1': 'False'})
with warnings.catch_warnings(record=True):
response = self.client.post('/preview/', self.test_data)
self.assertEqual(response.content, success_string_encoded)
def test_form_submit_good_hash(self):
"""
Test contrib.formtools.preview form submittal, using a correct
hash
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 2})
response = self.client.post('/preview/', self.test_data)
self.assertNotEqual(response.content, success_string_encoded)
hash = utils.form_hmac(TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/preview/', self.test_data)
self.assertEqual(response.content, success_string_encoded)
def test_form_submit_bad_hash(self):
"""
Test contrib.formtools.preview form submittal does not proceed
if the hash is incorrect.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 2})
response = self.client.post('/preview/', self.test_data)
self.assertEqual(response.status_code, 200)
self.assertNotEqual(response.content, success_string_encoded)
hash = utils.form_hmac(TestForm(self.test_data)) + "bad"
self.test_data.update({'hash': hash})
response = self.client.post('/previewpreview/', self.test_data)
self.assertNotEqual(response.content, success_string_encoded)
class FormHmacTests(unittest.TestCase):
def test_textfield_hash(self):
"""
Regression test for #10034: the hash generation function should ignore
leading/trailing whitespace so as to be friendly to broken browsers that
submit it (usually in textareas).
"""
f1 = HashTestForm({'name': 'joe', 'bio': 'Speaking español.'})
f2 = HashTestForm({'name': ' joe', 'bio': 'Speaking español. '})
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
def test_empty_permitted(self):
"""
Regression test for #10643: the security hash should allow forms with
empty_permitted = True, or forms where data has not changed.
"""
f1 = HashTestBlankForm({})
f2 = HashTestForm({}, empty_permitted=True)
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
| gpl-2.0 | 5,029,489,336,572,046,000 | 39.043243 | 81 | 0.649028 | false | 4.07033 | true | false | false |
gabrielfalcao/steadymark | steadymark/__init__.py | 1 | 1948 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <steadymark - markdown-based test runner for python>
# Copyright (C) <2012-2020> Gabriel Falcão <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import imp
from optparse import OptionParser
from steadymark.version import version
from steadymark.runner import Runner
def run(filenames):
for filename in filenames:
runner = Runner(filename)
runner.run()
def main():
parser = OptionParser()
parser.add_option(
"-b",
"--bootstrap",
dest="bootstrap_file",
help="A path to a python file to be loaded before steadymark runs the tests",
)
(options, args) = parser.parse_args()
if options.bootstrap_file:
imp.load_source("steadymark_bootstrap", options.bootstrap_file)
run(args or ["README.md"])
__all__ = ["run", "Runner", "version"]
if __name__ == "__main__":
main()
| mit | -1,674,765,360,954,304,500 | 32 | 85 | 0.714946 | false | 3.965377 | false | false | false |
isb-cgc/ISB-CGC-Webapp | scripts/data_source_etl.py | 1 | 19107 | ###
# Copyright 2015-2020, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import print_function
from builtins import str
import logging
import json
import traceback
import requests
import os
import re
from os.path import join, dirname, exists
from argparse import ArgumentParser
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "isb_cgc.settings")
from isb_cgc import secret_settings, settings
import django
django.setup()
from google_helpers.bigquery.bq_support import BigQuerySupport
from projects.models import Program, Project, Attribute, Attribute_Ranges, Attribute_Display_Values, DataSource, DataVersion, DataNode
from django.contrib.auth.models import User
# Service account that owns the public programs/data versions touched by this
# script; used to filter which Programs get attached to a DataVersion.
isb_superuser = User.objects.get(username="isb")
logger = logging.getLogger('main_logger')
# Attribute names that need numeric range (bucket) definitions, mapped to a
# named bucket scheme (a key of 'ranges' below). A value of None means the
# attribute needs a range but with default parameters (see add_attributes(),
# which creates a bare Attribute_Ranges row when the range list is empty).
ranges_needed = {
    'wbc_at_diagnosis': 'by_200',
    'event_free_survival_time_in_days': 'by_500',
    'days_to_death': 'by_500',
    'days_to_last_known_alive': 'by_500',
    'days_to_last_followup': 'by_500',
    'year_of_diagnosis': 'year',
    'days_to_birth': 'by_negative_3k',
    'year_of_initial_pathologic_diagnosis': 'year',
    'age_at_diagnosis': None,
    'age_at_index': None
}
# Named bucket schemes referenced by 'ranges_needed'. Each dict is passed as
# kwargs to Attribute_Ranges.objects.update_or_create(): first/last bounds,
# gap between buckets, bound inclusivity, whether values past 'last' are still
# collected ('unbounded'), and the value type ('F' float / 'I' integer).
ranges = {
    'by_200': [{'first': "200", "last": "1400", "gap": "200", "include_lower": True, "unbounded": True,
                "include_upper": True, 'type': 'F', 'unit': '0.01'}],
    'by_negative_3k': [{'first': "-15000", "last": "-5000", "gap": "3000", "include_lower": True, "unbounded": True,
                        "include_upper": False, 'type': 'I'}],
    'by_500': [{'first': "500", "last": "6000", "gap": "500", "include_lower": False, "unbounded": True,
                "include_upper": True, 'type': 'I'}],
    'year': [{'first': "1976", "last": "2015", "gap": "5", "include_lower": True, "unbounded": False,
              "include_upper": False, 'type': 'I'}]
}
# BigQuery schema field type -> Solr field type, used when emitting the Solr
# schema JSON in add_data_sources().
SOLR_TYPES = {
    'STRING': "string",
    "FLOAT": "pfloat",
    "INTEGER": "plong",
    "DATE": "pdate"
}
# Fields which must remain single-valued even in aggregated (multi-program)
# Solr cores.
SOLR_SINGLE_VAL = ["case_barcode", "case_gdc_id", "case_pdc_id","program_name","project_short_name"]
# Cross-function accumulators: add_data_sources() fills ATTR_SET from BQ
# schemas; add_attributes() consumes it. DISPLAY_VALS maps attribute name ->
# display-value info and is presumably populated elsewhere in this script
# before add_data_sources() runs -- TODO confirm.
ATTR_SET = {}
DISPLAY_VALS = {}
# Solr connection settings, taken from the Django settings module.
SOLR_URI = settings.SOLR_URI
SOLR_LOGIN = settings.SOLR_LOGIN
SOLR_PASSWORD = settings.SOLR_PASSWORD
SOLR_CERT = settings.SOLR_CERT
def add_data_versions(dv_set):
    """
    Create any DataVersion records from dv_set that don't already exist.

    Each entry of dv_set is a dict with keys 'name', 'type', 'ver', and
    'programs' (a list of Program names the version applies to). A version
    whose name is already present in the DataVersion table is skipped with a
    warning. Failures are logged per entry so one bad version doesn't abort
    processing of the rest.
    """
    for dv in dv_set:
        try:
            try:
                # Versions are keyed by name: a match means this one was
                # already loaded, so skip it.
                DataVersion.objects.get(name=dv['name'])
                logger.warning("[WARNING] Data Version {} already exists! Skipping.".format(dv['name']))
                continue
            except ObjectDoesNotExist:
                pass
            # Only active, public, ISB-owned programs are attached to a version.
            progs = Program.objects.filter(name__in=dv['programs'], active=True, owner=isb_superuser, is_public=True)
            obj, created = DataVersion.objects.update_or_create(name=dv['name'], data_type=dv['type'], version=dv['ver'])
            # Populate the M2M through table in bulk rather than one save per program.
            dv_to_prog = [
                DataVersion.programs.through(dataversion_id=obj.id, program_id=prog.id)
                for prog in progs
            ]
            DataVersion.programs.through.objects.bulk_create(dv_to_prog)
            logger.info("Data Version created: {}".format(obj))
        except Exception as e:
            # The original placed this clause on the same try as the existence
            # check, so errors raised while *creating* the version (inside the
            # ObjectDoesNotExist handler) escaped uncaught and killed the loop.
            # The nested try above ensures any per-entry failure lands here.
            logger.error("[ERROR] Data Version {} may not have been added!".format(dv['name']))
            logger.exception(e)
def add_data_sources(sources, build_attrs=True, link_attr=True):
    """
    Register data sources and wire them to programs, nodes, and attributes.

    sources: list of dicts describing each source ('name', 'version',
        'version_type', 'source_type', 'programs', 'nodes', and, for Solr
        sources, 'schema_source' and 'aggregated').
    build_attrs: when True, fold each BQ schema field into the module-level
        ATTR_SET dict so add_attributes() can create Attribute records later.
    link_attr: when True (and build_attrs is False), link already-existing
        Attribute records to the new source instead of building new ones.
    """
    try:
        # Accumulates Attribute<->DataSource through-rows across ALL sources;
        # bulk-created once at the very end.
        attrs_to_srcs = []
        for src in sources:
            try:
                obj = DataSource.objects.get(name=src['name'])
                logger.warning("[WARNING] Source with the name {} already exists - updating ONLY.".format(src['name']))
            except ObjectDoesNotExist as e:
                # Source not seen before: create it and attach its programs/nodes.
                obj, created = DataSource.objects.update_or_create(
                    name=src['name'], version=DataVersion.objects.get(version=src['version'], data_type=src['version_type']),
                    source_type=src['source_type'],
                )
                progs = Program.objects.filter(name__in=src['programs'])
                src_to_prog = []
                for prog in progs:
                    src_to_prog.append(DataSource.programs.through(datasource_id=obj.id, program_id=prog.id))
                DataSource.programs.through.objects.bulk_create(src_to_prog)
                nodes = DataNode.objects.filter(short_name__in=src['nodes'])
                node_to_src = []
                for node in nodes:
                    node_to_src.append(DataNode.data_sources.through(datasource_id=obj.id, datanode_id=node.id))
                DataNode.data_sources.through.objects.bulk_create(node_to_src)
                logger.info("Data Source created: {}".format(obj.name))
            # Names of attributes already linked to this source, so we don't re-link.
            source_attrs = list(obj.get_source_attr(all=True).values_list('name',flat=True))
            if src['source_type'] == DataSource.SOLR:
                # The Solr collection's fields are derived from a BigQuery table
                # schema; 'schema_source' is a dotted project.dataset.table string.
                schema_src = src['schema_source'].split('.')
                schema = BigQuerySupport.get_table_schema(schema_src[0],schema_src[1],schema_src[2])
                link_attrs = []
                solr_schema = []
                solr_index_strings = []
                for field in schema:
                    if build_attrs:
                        if field['name'] not in ATTR_SET:
                            # IDs/barcodes are plain strings; other strings are
                            # categorical; any non-string BQ type is treated as
                            # continuous numeric.
                            attr_type = Attribute.CATEGORICAL if (not re.search(r'(_id|_barcode)', field['name']) and field['type'] == "STRING") else Attribute.STRING if field['type'] == "STRING" else Attribute.CONTINUOUS_NUMERIC
                            ATTR_SET[field['name']] = {
                                'name': field['name'],
                                "display_name": field['name'].replace("_", " ").title() if re.search(r'_', field['name']) else
                                field['name'],
                                "type": attr_type,
                                'solr_collex': [],
                                'bq_tables': [],
                                # Shown in the UI unless it's an ID-like string field.
                                'display': (
                                    (attr_type == Attribute.STRING and not re.search('_id|_barcode',field['name'].lower())) or attr_type == Attribute.CATEGORICAL or field['name'].lower() in ranges_needed)
                            }
                        attr = ATTR_SET[field['name']]
                        attr['solr_collex'].append(src['name'])
                        if attr['name'] in DISPLAY_VALS:
                            if 'preformatted_values' in DISPLAY_VALS[attr['name']]:
                                attr['preformatted_values'] = True
                            else:
                                if 'display_vals' not in attr:
                                    attr['display_vals'] = []
                                attr['display_vals'].extend(DISPLAY_VALS[attr['name']]['vals'])
                        # NOTE(review): this repeats the check above and overwrites
                        # 'display_vals' (making the extend above moot) -- confirm
                        # which of the two blocks is the intended behavior.
                        if attr['name'] in DISPLAY_VALS:
                            if 'preformatted_values' in DISPLAY_VALS[attr['name']]:
                                attr['preformatted_values'] = True
                            else:
                                attr['display_vals'] = DISPLAY_VALS[attr['name']]['vals']
                        if 'range' not in attr:
                            if attr['name'].lower() in ranges_needed:
                                attr['range'] = ranges.get(ranges_needed.get(attr['name'], ''), [])
                    elif link_attr and field['name'] not in source_attrs:
                        # Not building new attributes: remember the field so the
                        # existing Attribute can be linked to this source below.
                        link_attrs.append(field['name'])
                    solr_schema.append({
                        "name": field['name'], "type": SOLR_TYPES[field['type']],
                        "multiValued": True if src['aggregated'] and field['name'] not in SOLR_SINGLE_VAL else False, "stored": True
                    })
                    if src['aggregated'] and field['name'] not in SOLR_SINGLE_VAL:
                        # Multi-valued fields in aggregated cores are '|'-delimited
                        # at index time.
                        solr_index_strings.append("f.{}.split=true&f.{}.separator=|".format(field['name'],field['name']))
                for la in link_attrs:
                    try:
                        a = Attribute.objects.get(name=la)
                        attrs_to_srcs.append(Attribute.data_sources.through(attribute_id=a.id,datasource_id=obj.id))
                    except Exception as e:
                        if isinstance(e,MultipleObjectsReturned):
                            # Ambiguous name: arbitrarily link the first match.
                            logger.info("More than one attribute with the name {} was found!".format(la))
                            a = Attribute.objects.filter(name=la).first()
                            attrs_to_srcs.append(Attribute.data_sources.through(attribute_id=a.id,datasource_id=obj.id))
                        elif isinstance(e,ObjectDoesNotExist):
                            logger.info("Attribute {} doesn't exist--can't add, skipping!".format(la))
                # Dump the generated Solr schema and index-time parameters to local
                # files so they can be applied to the Solr instance out-of-band
                # (see the commented provisioning steps below).
                with open("{}_solr_schemas.json".format(src['name']), "w") as schema_outfile:
                    json.dump(solr_schema, schema_outfile)
                    schema_outfile.close()
                with open("{}_solr_index_vars.txt".format(src['name']), "w") as solr_index_string:
                    solr_index_string.write("&{}".format("&".join(solr_index_strings)))
                    solr_index_string.close()
                # # add core to Solr
                # # sudo -u solr /opt/bitnami/solr/bin/solr create -c <solr_name> -s 2 -rf 2
                # core_uri = "{}/solr/admin/cores?action=CREATE&name={}".format(settings.SOLR_URI,solr_name)
                # core_create = requests.post(core_uri, auth=(SOLR_LOGIN, SOLR_PASSWORD), verify=SOLR_CERT)
                #
                # # add schema to core
                # schema_uri = "{}/solr/{}/schema".format(settings.SOLR_URI,solr_name)
                # schema_load = requests.post(schema_uri, data=json.dumps({"add-field": solr_schema[src['name']]}),
                #                             headers={'Content-type': 'application/json'}, auth=(SOLR_LOGIN, SOLR_PASSWORD), verify=SOLR_CERT)
                #
                # # query-to-file the table
                # # OR
                # # export from BQ console into GCS
                #
                # # pull file to local
                # # gsutil cp gs://<BUCKET>/<CSV export> ./
                #
                # # POST to Solr core
                # index_uri = "{}/solr/{}/update?commit=yes{}".format(settings.SOLR_URI,solr_name,"&".join(solr_index_vars))
                # index_load = requests.post(index_uri, files={'file': open('export.csv', 'rb')},
                #                            headers={'Content-type': 'application/csv'}, auth=(SOLR_LOGIN, SOLR_PASSWORD), verify=SOLR_CERT)
        Attribute.data_sources.through.objects.bulk_create(attrs_to_srcs)
    except Exception as e:
        # NOTE(review): if a failure occurs before 'obj' is first assigned
        # (e.g. on the first source lookup), this format call will itself raise
        # UnboundLocalError and mask the original exception -- confirm/fix.
        logger.error("[ERROR] Data Source {} may not have been added!".format(obj.name))
        logger.exception(e)
def add_attributes(attr_set):
    """Create or refresh Attribute rows plus their ranges, display values
    and data-source links.

    attr_set: iterable of attribute-definition dicts (keys: name,
    display_name, type, display; optionally preformatted_values,
    cross_collex, range, display_vals, solr_collex, bq_tables).

    A failure while processing one attribute is logged by the outer
    handler; it does not roll back attributes processed before it.
    """
    try:
        for attr in attr_set:
            try:
                obj = Attribute.objects.get(name=attr['name'], data_type=attr['type'])
                logger.info("Attribute {} already located in the database - just updating...".format(attr['name']))
            except ObjectDoesNotExist:
                logger.info("Attribute {} not found - creating".format(attr['name']))
                obj, created = Attribute.objects.update_or_create(
                    name=attr['name'], display_name=attr['display_name'], data_type=attr['type'],
                    preformatted_values='preformatted_values' in attr,
                    is_cross_collex='cross_collex' in attr,
                    default_ui_display=attr['display']
                )
            except MultipleObjectsReturned:
                # Bug fix: the old `except Exception` only *handled*
                # MultipleObjectsReturned, so any other error fell through
                # with `obj` unbound and crashed below with a NameError.
                # Unexpected exceptions now propagate to the outer handler.
                logger.info("More than one attribute with the name {} was found!".format(attr['name']))
                obj = Attribute.objects.filter(name=attr['name'], data_type=attr['type']).first()

            # Ranges are only seeded when none exist yet; an empty 'range'
            # list means a single default range record.
            if 'range' in attr and not len(Attribute_Ranges.objects.select_related('attribute').filter(attribute=obj)):
                if len(attr['range']):
                    for attr_range in attr['range']:
                        Attribute_Ranges.objects.update_or_create(
                            attribute=obj, **attr_range
                        )
                else:
                    Attribute_Ranges.objects.update_or_create(
                        attribute=obj
                    )
            # Display values are likewise only seeded once.
            if 'display_vals' in attr and not len(Attribute_Display_Values.objects.select_related('attribute').filter(attribute=obj)):
                for dv in attr['display_vals']:
                    Attribute_Display_Values.objects.update_or_create(
                        raw_value=dv['raw_value'], display_value=dv['display_value'], attribute=obj
                    )
            # Link the attribute to any Solr collections it is not yet
            # attached to.
            if 'solr_collex' in attr:
                attr_sources = obj.get_data_sources(DataSource.SOLR, all=True)
                missing_sources = [x for x in attr['solr_collex'] if x not in attr_sources]
                if len(missing_sources):
                    sources = DataSource.objects.filter(name__in=missing_sources)
                    attr_to_src = [
                        Attribute.data_sources.through(datasource_id=src.id, attribute_id=obj.id)
                        for src in sources
                    ]
                    Attribute.data_sources.through.objects.bulk_create(attr_to_src)
            # Same for BigQuery tables.
            if 'bq_tables' in attr:
                attr_sources = obj.get_data_sources(DataSource.BIGQUERY, all=True)
                missing_sources = [x for x in attr['bq_tables'] if x not in attr_sources]
                if len(missing_sources):
                    sources = DataSource.objects.filter(name__in=missing_sources)
                    attr_to_src = [
                        Attribute.data_sources.through(datasource_id=src.id, attribute_id=obj.id)
                        for src in sources
                    ]
                    Attribute.data_sources.through.objects.bulk_create(attr_to_src)
    except Exception as e:
        # NOTE(review): `attr` is unbound here if attr_set was empty and an
        # exception still occurred before the loop — same as the original.
        logger.error("[ERROR] Attribute {} may not have been added!".format(attr['name']))
        logger.exception(e)
def copy_attrs(from_data_sources, to_data_sources):
    """Attach to every target data source the attributes found on the
    source data sources but missing from the targets.

    Both arguments are iterables of DataSource names; links are created
    with a single bulk_create at the end.
    """
    target_qs = DataSource.objects.filter(name__in=to_data_sources)
    source_qs = DataSource.objects.filter(name__in=from_data_sources)
    target_attrs = target_qs.get_source_attrs()
    links = []
    for source in source_qs:
        # Attributes on this source that no target already has.
        missing = source.attribute_set.exclude(id__in=target_attrs['ids'])
        logger.info("Copying {} attributes from {} to: {}.".format(
            len(missing.values_list('name',flat=True)),
            source.name, "; ".join(to_data_sources),
        ))
        links.extend(
            Attribute.data_sources.through(attribute_id=attr.id, datasource_id=ds.id)
            for attr in missing
            for ds in target_qs
        )
    Attribute.data_sources.through.objects.bulk_create(links)
def main(config, make_attr=False):
    """Drive the ETL setup from a parsed JSON *config*.

    Creates/updates Programs, Projects and data versions; preloads the
    DISPLAY_VALS map from an optional CSV of display values; registers
    data sources; and, when *make_attr* is True, creates/updates the
    collected attributes.  Any exception is logged and swallowed so the
    script exits cleanly.
    """
    try:
        if 'programs' in config:
            for prog in config['programs']:
                try:
                    obj = Program.objects.get(name=prog['name'], owner=isb_superuser, active=True, is_public=True)
                    logger.info("[STATUS] Program {} found - skipping creation.".format(prog))
                except ObjectDoesNotExist:
                    logger.info("[STATUS] Program {} not found - creating.".format(prog))
                    obj, created = Program.objects.update_or_create(owner=isb_superuser, active=True, is_public=True, **prog)
        if 'projects' in config:
            for proj in config['projects']:
                program = Program.objects.get(name=proj['program'], owner=isb_superuser, active=True)
                try:
                    obj = Project.objects.get(name=proj['name'], owner=isb_superuser, active=True, program=program)
                    logger.info("[STATUS] Project {} found - skipping.".format(proj['name']))
                except ObjectDoesNotExist:
                    logger.info("[STATUS] Project {} not found - creating.".format(proj['name']))
                    obj, created = Project.objects.update_or_create(name=proj['name'], owner=isb_superuser, active=True, program=program)
        if 'versions' in config:
            add_data_versions(config['versions'])

        # Preload all display value information, as we'll want to load it
        # into the attributes while we build that set.  CSV rows are
        # "<attr>,<raw>,<display>"; a raw value of NULL flags the attribute
        # as preformatted.
        if 'display_values' in config and exists(join(dirname(__file__), config['display_values'])):
            with open(join(dirname(__file__), config['display_values']), "r") as attr_vals_file:
                for line in attr_vals_file:
                    line_split = line.strip().split(",")
                    entry = DISPLAY_VALS.setdefault(line_split[0], {})
                    if line_split[1] == 'NULL':
                        entry['preformatted_values'] = True
                    else:
                        # Bug fix: the old code assumed 'vals' already
                        # existed on repeat rows and raised KeyError when
                        # the first row for an attribute was NULL.
                        entry.setdefault('vals', []).append(
                            {'raw_value': line_split[1], 'display_value': line_split[2]})
        if 'data_sources' in config:
            add_data_sources(config['data_sources'])
        if len(ATTR_SET) and make_attr:
            add_attributes([ATTR_SET[x] for x in ATTR_SET])
    except Exception as e:
        # Use the module logger (not the root logger) for consistency.
        logger.exception(e)
if __name__ == "__main__":
cmd_line_parser = ArgumentParser(description="Extract a data source from BigQuery and ETL it into Solr")
cmd_line_parser.add_argument('-j', '--json-config-file', type=str, default='', help="JSON settings file")
cmd_line_parser.add_argument('-a', '--parse_attributes', type=str, default='False', help="Attempt to create/update attributes from the sources")
args = cmd_line_parser.parse_args()
if not len(args.json_config_file):
logger.info("[ERROR] You must supply a JSON settings file!")
cmd_line_parser.print_help()
exit(1)
if not exists(join(dirname(__file__),args.json_config_file)):
logger.info("[ERROR] JSON config file {} not found.".format(args.json_config_file))
exit(1)
f = open(join(dirname(__file__),args.json_config_file), "r")
settings = json.load(f)
main(settings, (args.parse_attributes == 'True'))
| apache-2.0 | -4,132,635,280,405,714,400 | 47.007538 | 229 | 0.558748 | false | 3.989768 | true | false | false |
mstubinis/PaintPartners | PaintPartners/objects/TextField.py | 1 | 5910 | import pygame
from pygame.locals import *
class TextField(pygame.sprite.Sprite):
    """A single-line, clickable text-entry widget.

    The field renders a labelled box; left-clicking it gives it keyboard
    focus, after which typed characters are appended (up to maxChars),
    backspace deletes the last character and enter drops focus.  When
    ``password`` is True the rendered text is masked with '*'.
    """

    def __init__(self, pos, maxChars, buttonName, font, password=False):
        pygame.sprite.Sprite.__init__(self)
        self.pos = pos
        self.selected = False        # has keyboard focus
        self.blink = False           # caret blink phase
        self.password = password
        self.timer = 0               # frames since the caret last toggled
        self.name = buttonName
        self.text_color = (0, 0, 0)
        self.maxChars = maxChars
        self.message = ""            # actual contents
        self.display_message = ""    # what is drawn (masked in password mode)
        self.font = font
        self.text = self.font.render(self.message, 1, self.text_color)
        self._resize_box()
        self._position_box()

    # -- internal helpers --------------------------------------------------

    def _resize_box(self):
        """Recompute the box size so maxChars 'X' characters fit."""
        widest = "X" * self.maxChars
        self.w = self.font.size(widest)[0] + 8
        self.h = self.font.size(widest)[1] + 10
        self.rect = pygame.Rect(0, 0, self.w - 2, self.h - 2)
        self.rect_border = pygame.Rect(0, 0, self.w, self.h)

    def _position_box(self):
        """Centre the box and its label around self.pos."""
        name_w = self.font.size(self.name)[0]
        self.nametext = self.font.render(self.name, 1, self.text_color)
        self.rect_name = pygame.Rect(0, 0, name_w, self.font.size(self.name)[1])
        self.rect.center = (self.pos[0] + name_w / 2, self.pos[1])
        self.rect_border.center = (self.pos[0] + name_w / 2, self.pos[1])
        self.rect_name.center = (self.pos[0] - self.w / 2 - 6, self.pos[1])

    def _refresh_text(self):
        """Re-render the visible text (masked when in password mode)."""
        if self.password:
            self.display_message = "*" * len(self.message)
        else:
            self.display_message = self.message
        self.text = self.font.render(self.display_message, 1, self.text_color)

    # -- public API --------------------------------------------------------

    def is_mouse_over(self, mousePos):
        """Return True when mousePos falls inside the text box."""
        inside_x = self.rect.x <= mousePos[0] <= self.rect.x + self.rect.w
        inside_y = self.rect.y <= mousePos[1] <= self.rect.y + self.rect.h
        return inside_x and inside_y

    def set_pos(self, pos):
        """Move the widget so box and label are centred around pos."""
        self.pos = pos
        self._position_box()

    def set_name(self, name):
        """Change the label text and re-layout around the current position."""
        self.name = name
        self._resize_box()
        self._position_box()

    def set_maxchars(self, maxChars):
        """Change the capacity and resize the box accordingly."""
        self.maxChars = maxChars
        self._resize_box()
        # Bug fix: the original recreated the rects at (0, 0) and never
        # re-centred them, so the field jumped to the top-left corner.
        self._position_box()

    def set_message(self, message):
        """Replace the field contents outright and re-render."""
        self.message = message
        self._refresh_text()

    def update_message(self, message):
        """Apply one typed character: append, backspace (8) or enter (13)."""
        code = ord(message)
        if code == 8:  # backspace
            if len(self.message) > 0:
                self.message = self.message[:-1]
                self._refresh_text()
        elif code == 13:  # enter: drop focus and reset the caret
            self.blink = False
            self.timer = 0
            self.selected = False
        elif len(self.message) < self.maxChars:
            self.message += message
            self._refresh_text()

    def update(self, events, mousePos):
        """Process one frame's events and advance the caret blink."""
        for event in events:
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    # Left click focuses the field when hit, defocuses otherwise.
                    self.selected = self.is_mouse_over(mousePos)
            elif event.type == pygame.KEYDOWN:
                if self.selected:
                    try:
                        self.update_message(str(chr(event.key)))
                    except ValueError:
                        # Non-character keys (arrows, function keys, ...)
                        # have keycodes chr() rejects; ignore them.
                        pass
        if self.selected:
            self.timer += 1
            if self.timer > 20:
                self.timer = 0
                self.blink = not self.blink

    def draw(self, screen):
        """Blit border, box, caret (while blinking), label and contents."""
        pygame.draw.rect(screen, (0, 0, 0), self.rect_border)
        if self.selected:
            pygame.draw.rect(screen, (225, 225, 225), self.rect)
            if self.blink:
                caret = pygame.Rect(
                    self.rect.x + self.font.size(self.display_message)[0] + 8,
                    self.rect.y + 4, 8, self.rect.h - 9)
                pygame.draw.rect(screen, (0, 0, 0), caret)
        else:
            pygame.draw.rect(screen, (255, 255, 255), self.rect)
        screen.blit(self.nametext, self.rect_name)
        screen.blit(self.text, self.rect)
| gpl-3.0 | -8,113,434,477,920,135,000 | 40.328671 | 152 | 0.544332 | false | 3.466276 | false | false | false |
kreitek/metaespacio | metaespacio/contabilidad2/admin.py | 2 | 1244 | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Validacion, Registro, Categoria
@admin.register(Categoria)
class CategoriaAdmin(admin.ModelAdmin):
    """Change-list admin for Categoria: shows space, name and help text."""
    list_display = ('espacio', 'nombre', 'ayuda')
@admin.register(Validacion)
class ValidacionAdmin(admin.ModelAdmin):
    """Change-list admin for Validacion, exposing its boolean classification flags."""
    list_display = ('espacio', 'nombre', 'es_donacion', 'es_metalico', 'es_oficial')
@admin.register(Registro)
class RegistroAdmin(admin.ModelAdmin):
    """Admin for accounting records (Registro).

    Fields are grouped into session data (auto-filled), user-supplied
    invoice data, and accounting-only fields.
    """
    date_hierarchy = 'fecha_factura'
    fieldsets = (
        ('Datos de sesión', {
            'fields': ('espacio', 'miembro', 'fecha_formulario'),
        }),
        ('Datos de usuario', {
            'fields': ('concepto', 'fecha_factura', 'categoria', 'importe', 'es_donado', 'foto', 'factura'),
        }),
        ('Datos de contabilidad', {
            'fields': ('validacion', 'fecha_pago', 'notas'),
        }),
    )
    list_display = ('fecha_', 'concepto', 'categoria_', 'importe', 'miembro')
    list_filter = ('espacio', 'categoria', 'miembro')
    search_fields = ('concepto', )
    readonly_fields = ('fecha_formulario', )
    def fecha_(self, obj):
        # Compact dd/mm/yy invoice date for the change list.
        return obj.fecha_factura.strftime("%d/%m/%y")
    def categoria_(self, obj):
        # Plain category name instead of the model's default repr.
        return obj.categoria.nombre
livni/old-OK | src/knesset/api/handlers.py | 1 | 9520 | from datetime import datetime
import urllib
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.db.models import Count
from piston.handler import BaseHandler
from piston.utils import rc
from knesset.mks.models import Member, Party, Membership
from knesset.laws.models import Vote, VoteAction
from knesset.agendas.models import Agenda
from tagging.models import Tag, TaggedItem
import math
from django.forms import model_to_dict
DEFAULT_PAGE_LEN = 20

def limit_by_request(qs, request):
    """Slice *qs* according to optional ``num``/``page`` GET parameters.

    Without a ``num`` parameter the queryset is returned untouched;
    otherwise page ``page`` (default 0) of ``num`` items is returned.
    """
    params = request.GET
    if 'num' not in params:
        return qs
    per_page = int(params['num'])
    page = int(params['page']) if 'page' in params else 0
    start = page * per_page
    return qs[start:start + per_page]
class MemberHandler(BaseHandler):
    """Read-only API handler exposing Knesset members plus derived
    statistics (voting, bills, presence, committees)."""

    fields = ('id', 'url', 'name', 'party', 'img_url', 'votes_count',
              'votes_per_month', 'service_time', 'discipline',
              'average_weekly_presence', 'committee_meetings_per_month',
              'bills_proposed', 'bills_passed_pre_vote',
              'bills_passed_first_vote', 'bills_approved', 'roles',
              'average_weekly_presence_rank', 'committees', )
    # Bug fix: ('GET') is just the string 'GET'; use a real tuple.
    allowed_methods = ('GET',)
    model = Member
    qs = Member.objects.all()

    @classmethod
    def url(cls, member):
        """Canonical site URL for the member."""
        return member.get_absolute_url()

    @classmethod
    def party(cls, member):
        """Name of the member's current party."""
        return member.current_party.name

    @classmethod
    def votes_count(cls, member):
        return member.voting_statistics.votes_count()

    @classmethod
    def votes_per_month(cls, member):
        """Average votes per month, rounded to one decimal place."""
        return round(member.voting_statistics.average_votes_per_month(), 1)

    @classmethod
    def service_time(cls, member):
        return member.service_time()

    @classmethod
    def discipline(cls, member):
        """Party-discipline score rounded to two decimals, or None."""
        x = member.voting_statistics.discipline()
        if x:
            return round(x, 2)
        return None

    @classmethod
    def bills_proposed(cls, member):
        return member.bills.count()

    @classmethod
    def bills_passed_pre_vote(cls, member):
        # Stages 2-6: the bill survived (at least) the preliminary vote.
        return member.bills.filter(Q(stage='2')|Q(stage='3')|Q(stage='4')|Q(stage='5')|Q(stage='6')).count()

    @classmethod
    def bills_passed_first_vote(cls, member):
        # Stages 4-6: the bill survived (at least) the first vote.
        return member.bills.filter(Q(stage='4')|Q(stage='5')|Q(stage='6')).count()

    @classmethod
    def bills_approved(cls, member):
        # Stage 6 is final approval.
        return member.bills.filter(stage='6').count()

    @classmethod
    def roles(cls, member):
        return member.get_role

    @classmethod
    def average_weekly_presence_rank(cls, member):
        """Place the member on a 1..SCALE scale according to the
        distribution of average weekly presence across all members
        (0 when the member has no presence data).  Results are cached
        for a day, and the cache is warmed for every member at once."""
        SCALE = 5
        rel_location = cache.get('average_presence_location_%d' % member.id)
        if not rel_location:
            # Renamed the lambda argument so it no longer shadows `member`.
            presence_list = sorted(map(lambda m: m.average_weekly_presence(), Member.objects.all()))
            presence_groups = int(math.ceil(len(presence_list) / float(SCALE)))
            # Generate cache for all members
            for mk in Member.objects.all():
                avg = mk.average_weekly_presence()
                if avg:
                    mk_location = 1 + (presence_list.index(avg) / presence_groups)
                else:
                    mk_location = 0
                cache.set('average_presence_location_%d' % mk.id, mk_location, 60*60*24)
                if mk.id == member.id:
                    rel_location = mk_location
        return rel_location

    @classmethod
    def committees(cls, member):
        """Top five committees by meeting count, as (name, url) pairs."""
        temp_list = member.committee_meetings.values("committee", "committee__name").annotate(Count("id")).order_by('-id__count')[:5]
        return (map(lambda item: (item['committee__name'], reverse('committee-detail', args=[item['committee']])), temp_list))

    @classmethod
    def member(cls, member):
        # NOTE(review): this filters the Member queryset by `member=` and
        # reads party/start_date/end_date, which looks like Membership
        # handler logic — verify before relying on it.
        qs = cls.qs.filter(member=member)
        return map(lambda o: dict(url=o.party.get_absolute_url(),
                                  name=o.party.name,
                                  since=o.start_date,
                                  until=o.end_date,
                                  ), qs)

    def read(self, request, **kwargs):
        """Support free-text search via ?q=<pk or name> when no id is
        present in the URL kwargs."""
        # Bug fix: the original tested the *builtin* `id` against kwargs
        # (always true), so ?q= search also hijacked /member/<id> requests.
        if 'id' not in kwargs and 'q' in request.GET:
            q = urllib.unquote(request.GET['q'])
            qs = self.qs
            try:
                return qs.filter(pk=int(q))
            except ValueError:
                return Member.objects.find(q)
        return super(MemberHandler, self).read(request, **kwargs)
class VoteHandler(BaseHandler):
    """Read-only API handler for Knesset plenum votes."""

    fields = ('url', 'title', 'time',
              'summary', 'full_text',
              'for_votes', 'against_votes', 'abstain_votes', 'didnt_vote',
              'agendas',
              )
    # Bug fix: ('member') is just the string 'member'; a one-element tuple
    # needs the trailing comma to be treated as a field list.
    exclude = ('member',)
    allowed_methods = ('GET',)
    model = Vote
    qs = Vote.objects.all()

    def read(self, request, **kwargs):
        """Return a single vote (by id) or a filtered, paged vote list.

        GET parameters: type (title substring filter), order (sort key),
        days_back (restrict to recent votes), page_len / page_num (paging,
        defaulting to DEFAULT_PAGE_LEN items on page 0).
        """
        if 'id' in kwargs:
            return super(VoteHandler, self).read(request, **kwargs)
        qs = self.qs
        # Local renamed from `type` so it no longer shadows the builtin.
        vote_type = request.GET.get('type', None)
        order = request.GET.get('order', None)
        days_back = request.GET.get('days_back', None)
        page_len = int(request.GET.get('page_len', DEFAULT_PAGE_LEN))
        page_num = int(request.GET.get('page_num', 0))
        if vote_type:
            qs = qs.filter(title__contains=vote_type)
        if days_back:
            qs = qs.since(days=int(days_back))
        if order:
            qs = qs.sort(by=order)
        return qs[page_len * page_num:page_len * (page_num + 1)]

    @classmethod
    def url(cls, vote):
        return vote.get_absolute_url()

    @classmethod
    def for_votes(cls, vote):
        return vote.get_voters_id('for')

    @classmethod
    def against_votes(cls, vote):
        return vote.get_voters_id('against')

    @classmethod
    def abstain_votes(cls, vote):
        return vote.get_voters_id('abstain')

    @classmethod
    def didnt_vote(cls, vote):
        return vote.get_voters_id('no-vote')

    @classmethod
    def agendas(cls, vote):
        """Map agenda id -> agenda dict augmented with this vote's
        reasoning and textual score, keyed by id for JavaScript lookup."""
        agendavotes = vote.agenda_vote_set.all()
        agendas = [model_to_dict(av.agenda) for av in agendavotes]
        reasonings = [av.reasoning for av in agendavotes]
        text_scores = [av.get_score_display() for av in agendavotes]
        for i in range(len(agendas)):
            agendas[i].update({'reasoning': reasonings[i], 'text_score': text_scores[i]})
        return dict(zip([a['id'] for a in agendas], agendas))
class PartyHandler(BaseHandler):
    """Read-only API handler for parties, with free-text search via ?q=."""

    fields = ('id', 'name', 'start_date', 'end_date')
    allowed_methods = ('GET',)
    model = Party

    def read(self, request, **kwargs):
        """Search parties by ?q= when no id is present in the URL kwargs.

        Bug fixes: the original tested the *builtin* ``id`` against kwargs
        (always true) and called ``super(MemberHandler, self)`` — a
        copy-paste from MemberHandler that raised TypeError here.
        """
        if 'id' not in kwargs and 'q' in request.GET:
            q = urllib.unquote(request.GET['q'])
            return Party.objects.find(q)
        return super(PartyHandler, self).read(request, **kwargs)
class TagHandler(BaseHandler):
    """Read-only handler for tags, optionally scoped to a tagged object."""

    fields = ('id', 'name', 'number_of_items')
    allowed_methods = ('GET',)
    model = Tag

    def read(self, request, **kwargs):
        """Return one tag by pk, the tags of a given object, or the tags
        in use on votes (the default)."""
        tag_pk = kwargs.get('id')
        if tag_pk:
            return Tag.objects.filter(pk=tag_pk)
        obj_pk = None
        content_type = None
        if 'object_id' in kwargs and 'object_type' in kwargs:
            obj_pk = kwargs['object_id']
            try:
                content_type = ContentType.objects.get(model=kwargs['object_type'])
            except ContentType.DoesNotExist:
                pass
        if obj_pk and content_type:
            tag_pks = TaggedItem.objects.filter(
                object_id=obj_pk, content_type=content_type
            ).values_list('tag', flat=True)
            return Tag.objects.filter(id__in=tag_pks)
        return Tag.objects.usage_for_model(Vote)

    @classmethod
    def number_of_items(cls, tag):
        """Number of items carrying this tag."""
        return tag.items.count()
class AgendaHandler(BaseHandler):
    # TODO: Once we have user authentication over the API,
    # need to expose not only public agendas.
    # See AgendaManager.get_relevant_for_user(user)
    # The is true for both read() and number_of_items() methods
    """Read-only handler for public agendas."""

    fields = ('id', 'name', 'number_of_items')
    allowed_methods = ('GET',)
    model = Agenda

    def read(self, request, **kwargs):
        """Return all public agendas, one by id, or those ascribed to a vote."""
        agendas = Agenda.objects.get_relevant_for_user(user=None)

        # Handle API calls of type /agenda/[agenda_id]
        if 'id' in kwargs and kwargs['id'] is not None:
            return agendas.filter(pk=kwargs['id'])

        # Handle API calls of type /agenda/vote/[vote_id]
        # Used to return the agendas ascribed to a specific vote
        object_id = None
        ctype = None
        if 'object_id' in kwargs and 'object_type' in kwargs:
            object_id = kwargs['object_id']
            try:
                ctype = ContentType.objects.get(model=kwargs['object_type'])
            except ContentType.DoesNotExist:
                pass
        # Bug fix: ctype is a ContentType instance, so the original
        # comparison (ctype == 'vote') was always False and the per-vote
        # filter below never ran.
        if object_id and ctype is not None and ctype.model == 'vote':
            return agendas.filter(votes__id=object_id)
        return agendas

    @classmethod
    def number_of_items(cls, agenda):
        """Number of votes ascribed to this agenda."""
        return agenda.agendavotes.count()
| bsd-3-clause | 5,660,173,439,571,471,000 | 33.618182 | 316 | 0.593382 | false | 3.712949 | false | false | false |
nathanbjenx/cairis | cairis/gui/VulnerabilityEnvironmentPanel.py | 1 | 6188 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
from EnvironmentListCtrl import EnvironmentListCtrl
from DimensionListCtrl import DimensionListCtrl
from cairis.core.VulnerabilityEnvironmentProperties import VulnerabilityEnvironmentProperties
__author__ = 'Shamal Faily'
class VulnerabilityEnvironmentPanel(wx.Panel):
  """Panel for editing a vulnerability's per-environment properties.

  Keeps theEnvironmentDictionary (environment name ->
  VulnerabilityEnvironmentProperties) in sync with the severity combo box
  and the asset list as the user selects/deselects environments.
  """
  def __init__(self,parent,dp):
    # parent: containing window; dp: database proxy used by the list controls.
    wx.Panel.__init__(self,parent,VULNERABILITY_PANELENVIRONMENT_ID)
    self.dbProxy = dp
    self.theVulId = None
    self.theEnvironmentDictionary = {}
    self.theSelectedIdx = -1
    mainSizer = wx.BoxSizer(wx.HORIZONTAL)
    environmentBox = wx.StaticBox(self)
    environmentListSizer = wx.StaticBoxSizer(environmentBox,wx.HORIZONTAL)
    mainSizer.Add(environmentListSizer,0,wx.EXPAND)
    self.environmentList = EnvironmentListCtrl(self,VULNERABILITYENVIRONMENT_LISTENVIRONMENTS_ID,self.dbProxy)
    environmentListSizer.Add(self.environmentList,1,wx.EXPAND)
    environmentDimSizer = wx.BoxSizer(wx.VERTICAL)
    mainSizer.Add(environmentDimSizer,1,wx.EXPAND)
    sevBox = wx.StaticBox(self)
    sevSizer = wx.StaticBoxSizer(sevBox,wx.HORIZONTAL)
    environmentDimSizer.Add(sevSizer,0,wx.EXPAND)
    sevSizer.Add(wx.StaticText(self,-1,'Severity'))
    sevList = ['Negligible','Marginal','Critical','Catastrophic']
    self.sevCtrl = wx.ComboBox(self,VULNERABILITYENVIRONMENT_COMBOSEVERITY_ID,choices=sevList,size=wx.DefaultSize,style=wx.CB_READONLY)
    sevSizer.Add(self.sevCtrl,1,wx.EXPAND)
    aSizer = wx.BoxSizer(wx.HORIZONTAL)
    environmentDimSizer.Add(aSizer,1,wx.EXPAND)
    self.assetList = DimensionListCtrl(self,VULNERABILITYENVIRONMENT_LISTASSETS_ID,wx.DefaultSize,'Asset','asset',self.dbProxy)
    assetBox = wx.StaticBox(self)
    assetSizer = wx.StaticBoxSizer(assetBox,wx.HORIZONTAL)
    assetSizer.Add(self.assetList,1,wx.EXPAND)
    aSizer.Add(assetSizer,1,wx.EXPAND)
    self.SetSizer(mainSizer)
    self.environmentList.Bind(wx.EVT_LIST_INSERT_ITEM,self.OnAddEnvironment)
    self.environmentList.Bind(wx.EVT_LIST_DELETE_ITEM,self.OnDeleteEnvironment)
  def loadControls(self,vulnerability):
    """Populate the panel from an existing vulnerability's properties."""
    # Unbind selection events while loading so the handlers below don't
    # fire on programmatic changes.
    self.environmentList.Unbind(wx.EVT_LIST_ITEM_SELECTED)
    self.environmentList.Unbind(wx.EVT_LIST_ITEM_DESELECTED)
    self.theVulId = vulnerability.id()
    # We load the environment name control before anything else.  Weird stuff happens if we don't do this.  Don't ask me why!!!
    environmentNames = []
    if (len(vulnerability.environmentProperties()) > 0):
      for cp in vulnerability.environmentProperties():
        environmentNames.append(cp.name())
      self.environmentList.load(environmentNames)
    for cp in vulnerability.environmentProperties():
      environmentName = cp.name()
      self.theEnvironmentDictionary[environmentName] = cp
    # Show the first environment's severity and assets by default.
    environmentName = environmentNames[0]
    p = self.theEnvironmentDictionary[environmentName]
    self.sevCtrl.SetStringSelection(p.severity())
    self.assetList.setEnvironment(environmentName)
    self.assetList.load(p.assets())
    self.environmentList.Select(0)
    self.environmentList.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnEnvironmentSelected)
    self.environmentList.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnEnvironmentDeselected)
    self.theSelectedIdx = 0
  def OnEnvironmentSelected(self,evt):
    """Display the stored properties of the newly selected environment."""
    self.theSelectedIdx = evt.GetIndex()
    environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
    p = self.theEnvironmentDictionary[environmentName]
    self.sevCtrl.SetStringSelection(p.severity())
    self.assetList.setEnvironment(environmentName)
    self.assetList.load(p.assets())
  def OnEnvironmentDeselected(self,evt):
    """Persist the edited controls into the dictionary, then clear them."""
    self.theSelectedIdx = evt.GetIndex()
    environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
    self.theEnvironmentDictionary[environmentName] = VulnerabilityEnvironmentProperties(environmentName,self.sevCtrl.GetValue(),self.assetList.dimensions())
    self.sevCtrl.SetValue('')
    self.assetList.setEnvironment('')
    self.assetList.DeleteAllItems()
    self.theSelectedIdx = -1
  def OnAddEnvironment(self,evt):
    """Seed a new environment, inheriting properties when available."""
    self.theSelectedIdx = evt.GetIndex()
    environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
    self.theEnvironmentDictionary[environmentName] = VulnerabilityEnvironmentProperties(environmentName,'',[])
    self.environmentList.Select(self.theSelectedIdx)
    self.assetList.setEnvironment(environmentName)
    # For an existing vulnerability, pre-fill from the inherited environment.
    inheritedEnv = self.environmentList.inheritedEnvironment()
    if (inheritedEnv != '' and self.theVulId != None):
      p = self.dbProxy.inheritedVulnerabilityProperties(self.theVulId,inheritedEnv)
      self.theEnvironmentDictionary[environmentName] = p
      self.sevCtrl.SetStringSelection(p.severity())
      self.assetList.setEnvironment(environmentName)
      self.assetList.load(p.assets())
  def OnDeleteEnvironment(self,evt):
    """Drop the removed environment's stored properties."""
    selectedIdx = evt.GetIndex()
    environmentName = self.environmentList.GetItemText(selectedIdx)
    del self.theEnvironmentDictionary[environmentName]
    self.theSelectedIdx = -1
  def environmentProperties(self):
    """Return all per-environment properties, flushing pending edits first."""
    if (self.theSelectedIdx != -1):
      environmentName = self.environmentList.GetItemText(self.theSelectedIdx)
      self.theEnvironmentDictionary[environmentName] = VulnerabilityEnvironmentProperties(environmentName,self.sevCtrl.GetValue(),self.assetList.dimensions())
    return self.theEnvironmentDictionary.values()
| apache-2.0 | 1,378,462,077,631,647,200 | 45.526316 | 158 | 0.766968 | false | 3.757134 | false | false | false |
williamgilpin/pypdb | pypdb/clients/fasta/fasta_client_test.py | 1 | 2511 | """Tests for RCSB FASTA fetching logic."""
import pytest
import requests
import unittest
from unittest import mock
from pypdb.clients.fasta import fasta_client
class TestFastaLogic(unittest.TestCase):
    """Unit tests for the RCSB FASTA fetch and parse helpers."""
    @mock.patch.object(requests, "get")
    @mock.patch.object(fasta_client, "_parse_fasta_text_to_list")
    def test_get_fasta_file(self, mock_parse_fasta, mock_get):
        """Fetching an entry hits the per-entry RCSB URL and parses the body."""
        mock_response = mock.Mock()
        mock_response.ok = True
        mock_response.text = "fake_fasta_response"
        mock_get.return_value = mock_response
        fasta_client.get_fasta_from_rcsb_entry("6TML")
        # Exactly one HTTP GET against the canonical FASTA endpoint...
        mock_get.assert_called_once_with(
            "https://www.rcsb.org/fasta/entry/6TML")
        # ...and the raw response text goes straight to the parser.
        mock_parse_fasta.assert_called_once_with("fake_fasta_response")
    def test_parse_fasta_file(self):
        """Raw FASTA text is split into one FastaSequence per header line."""
        test_fasta_raw_text = """
        >6TML_1|Chains Q7,Q8,Q9,q7,q8,q9|ATPTG11|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)
MVRNQRYPASPVQEIFLPEPVPFVQFDQTAPSPNSPPAPLPSPSLSQCEEQKDRYR
        >6TML_2|Chain i9|ATPTG7|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)
MPSSSSEDAQGGNRFECVSNSTSPRRKNATKDEAACLQPRRSAVSGPREDVLCIR
        >6TML_32|Chains H1,H2,H3,H4|subunit c|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)
MFFSRLSLSALKAAPAREAL"""
        self.assertEqual(
            fasta_client._parse_fasta_text_to_list(test_fasta_raw_text), [
                fasta_client.FastaSequence(
                    entity_id="6TML_1",
                    chains=["Q7", "Q8", "Q9", "q7", "q8", "q9"],
                    sequence=
                    "MVRNQRYPASPVQEIFLPEPVPFVQFDQTAPSPNSPPAPLPSPSLSQCEEQKDRYR",
                    fasta_header=
                    "6TML_1|Chains Q7,Q8,Q9,q7,q8,q9|ATPTG11|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)"
                ),
                fasta_client.FastaSequence(
                    entity_id="6TML_2",
                    chains=["i9"],
                    sequence=
                    "MPSSSSEDAQGGNRFECVSNSTSPRRKNATKDEAACLQPRRSAVSGPREDVLCIR",
                    fasta_header=
                    "6TML_2|Chain i9|ATPTG7|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)"
                ),
                fasta_client.FastaSequence(
                    entity_id="6TML_32",
                    chains=["H1", "H2", "H3", "H4"],
                    sequence="MFFSRLSLSALKAAPAREAL",
                    fasta_header=
                    "6TML_32|Chains H1,H2,H3,H4|subunit c|Toxoplasma gondii (strain ATCC 50853 / GT1) (507601)"
                )
            ])
| mit | 5,745,887,918,968,113,000 | 41.559322 | 114 | 0.593787 | false | 2.815022 | true | false | false |
OPpuolitaival/django-info-screen | info_screen/views.py | 1 | 2210 | # coding: utf-8
import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.generic import TemplateView, View
from .models import InfoScreen, Page
class ScreenView(TemplateView):
    """Render a full info screen, identified by UUID in the URL."""
    template_name = 'info_screen/screen.html'
    def get_context_data(self, **kwargs):
        """
        Add the screen (404 if the UUID is unknown) and its first visible
        page to the template context.  (The previous docstring about
        "polls" was a copy-paste leftover from the Django tutorial.)
        """
        context = super(ScreenView, self).get_context_data(**kwargs)
        screen = get_object_or_404(InfoScreen, uuid=kwargs['screen_uuid'])
        context.update({
            # None when the screen has no visible pages.
            'page': screen.visible_pages().first(),
            'screen': screen,
        })
        return context
class ImageView(TemplateView):
    """Full-screen display of a single page's image."""

    template_name = 'info_screen/image.html'

    def get_context_data(self, **kwargs):
        """Add the page's image URL to the context, when one exists."""
        ctx = super(ImageView, self).get_context_data(**kwargs)
        page = get_object_or_404(Page, uuid=kwargs['page_uuid'])
        if page.image_file:
            ctx['image_url'] = page.image_file.url
        return ctx
class ScreenJsonView(View):
    """JSON endpoint telling an info screen which page to show next."""

    def get(self, *args, **kwargs):
        """Return the next page for ``screen_uuid`` given an optional
        current ``page_id``; an empty JSON object when there is none."""
        params = self.request.GET
        payload = {}

        current = None
        page_id = params.get('page_id', '')
        if page_id.isdigit():
            current = get_object_or_404(Page, pk=page_id)

        next_page = None
        if 'screen_uuid' in params:
            screen = get_object_or_404(InfoScreen, uuid=params['screen_uuid'])
            if current is not None:
                # Normal case: advance from the current page.
                next_page = current.next_page(screen)
            elif screen.visible_pages().exists():
                # No current page known: restart from the first visible one.
                next_page = screen.visible_pages().first()

        if next_page:
            payload = {
                'id': next_page.id,
                'url': next_page.show_url(),
                'is_slideshow_page': next_page.is_slideshow_page,
                # Protect server load
                'delay_in_sec': max(3, next_page.delay_in_sec),
            }
        return HttpResponse(json.dumps(payload))
| mit | -6,112,927,717,599,500,000 | 29.273973 | 88 | 0.551131 | false | 3.932384 | false | false | false |
titanmonsta/fluorescence | gumps/shardlist.py | 12 | 1457 |
from data import *
from ui import *
import client
import theme
# In case you want to redesign this gump, keep in mind that at this point, you can not
# access any uo data (e.g. art.mul graphics, hues), as the mul files are not loaded yet.
def create(args):
    # Build the shard-selection gump; args["shardlist"] is the list of
    # known shard names.  (No uo data may be touched here — see note above.)
    g = GumpMenu("shardlist", 400, 300)
    g.closable = False
    shardlist = args["shardlist"]
    if len(shardlist) > 0:
        # Pressing enter picks the first shard in the list.
        g.onEnter = selectFirst
        g.store["firstName"] = shardlist[0]
    else:
        g.onEnter = createShard
    g.addImage((0, 0), Texture(TextureSource.THEME, "images/background_250x250.png"))
    scroll = theme.addScrollArea(g, (20, 20, 210, 137))
    # One button per shard, stacked vertically inside the scroll area.
    y = 0
    for shard in shardlist:
        btnShard = theme.addPythonButton(scroll, (0, y, 190, 25), selectShard)
        btnShard.text = shard
        btnShard.store["shard"] = shard
        y += 28
    scroll.updateScrollbars()
    btnCreate = theme.addPythonButton(g, (20, 175, 210, 25), createShard)
    btnCreate.text = "Create shard"
    btnExit = theme.addPythonButton(g, (20, 203, 210, 25), shutdown)
    btnExit.text = "Exit"
def createShard(button):
    # Open the shard-creation gump on top of this one.
    client.openGump("createshard")
    # don't close the gump, shard creation can be cancelled
    return False
def shutdown(button):
    # Exit the client entirely.
    client.shutdown()
    return True
def selectFirst(gump):
    # Enter pressed: connect to the first shard stored on the gump.
    client.setShard(gump.store["firstName"])
    return True
def selectShard(button):
    # Connect to the shard stored on the clicked button.
    client.setShard(button.store["shard"])
    return True
| gpl-3.0 | -7,818,187,446,363,884,000 | 25.981481 | 88 | 0.660947 | false | 3.39627 | false | false | false |
benrudolph/commcare-hq | corehq/apps/fixtures/interface.py | 2 | 3474 | from couchdbkit import ResourceNotFound
from django.contrib import messages
from django.http import HttpResponseRedirect
from corehq.apps.fixtures.views import fixtures_home, FixtureViewMixIn
from corehq.apps.reports.generic import GenericReportView, GenericTabularReport
from corehq.apps.reports.filters.base import BaseSingleOptionFilter
from corehq.apps.fixtures.dispatcher import FixtureInterfaceDispatcher
from corehq.apps.fixtures.models import FixtureDataType, _id_from_doc
from dimagi.utils.decorators.memoized import memoized
from django.utils.translation import ugettext_noop, ugettext as _
class FixtureInterface(FixtureViewMixIn, GenericReportView):
    """Base report view shared by the fixtures (lookup tables) UI pages."""
    # Template that wraps every fixtures report page.
    base_template = 'fixtures/fixtures_base.html'
    # Rendered synchronously (no async report loading).
    asynchronous = False
    dispatcher = FixtureInterfaceDispatcher
    exportable = False
    needs_filters = False
class FixtureSelectFilter(BaseSingleOptionFilter):
    """Single-select dropdown listing the domain's lookup tables by tag."""
    slug = "table_id"
    label = ""
    placeholder = "place"
    default_text = "Select a Table"

    @property
    def selected(self):
        # ko won't display default selected-value as it should, display default_text instead
        return ""

    @property
    @memoized
    def fixtures(self):
        # All fixture data types (lookup tables) defined for this domain.
        fdts = list(FixtureDataType.by_domain(self.domain))
        return fdts

    @property
    @memoized
    def options(self):
        # (value, label) pairs for the dropdown: couch doc id -> table tag.
        return [(_id_from_doc(f), f.tag) for f in self.fixtures]
class FixtureViewInterface(GenericTabularReport, FixtureInterface):
    """Read-only tabular report for browsing the contents of a lookup table."""
    name = ugettext_noop("View Tables")
    slug = "view_lookup_tables"
    report_template_path = 'fixtures/view_table.html'
    fields = ['corehq.apps.fixtures.interface.FixtureSelectFilter']

    @property
    def view_response(self):
        # Nothing to view without tables: redirect back to the fixtures home
        # page with a hint instead of rendering an empty report.
        if not self.has_tables():
            messages.info(self.request, _("You don't have any tables defined yet - create tables to view them."))
            return HttpResponseRedirect(fixtures_home(self.domain))
        else:
            return super(FixtureViewInterface, self).view_response

    @property
    def report_context(self):
        assert self.has_tables()
        if not self.request.GET.get("table_id", None):
            return {"table_not_selected": True}
        try:
            context = super(FixtureViewInterface, self).report_context
        except ResourceNotFound:
            # The requested table no longer exists (e.g. deleted) -- treat it
            # the same as no selection.
            return {"table_not_selected": True}
        context.update({"selected_table": self.table.get("table_id", "")})
        return context

    @memoized
    def has_tables(self):
        """Whether this domain has at least one lookup table defined."""
        # Fixed: previously the unidiomatic `return True if <list> else False`.
        return bool(list(FixtureDataType.by_domain(self.domain)))

    @property
    @memoized
    def table(self):
        """The selected table's data, or an empty headers/rows placeholder."""
        # Imported locally to avoid a circular import with the views module.
        from corehq.apps.fixtures.views import data_table
        if self.has_tables() and self.request.GET.get("table_id", None):
            return data_table(self.request, self.domain)
        else:
            return {"headers": None, "rows": None}

    @property
    def headers(self):
        return self.table["headers"]

    @property
    def rows(self):
        return self.table["rows"]
class FixtureEditInterface(FixtureInterface):
    """Management page for creating/editing the domain's lookup tables."""
    name = ugettext_noop("Manage Tables")
    slug = "edit_lookup_tables"
    report_template_path = 'fixtures/manage_tables.html'

    @property
    def report_context(self):
        # NOTE(review): super() is anchored at FixtureInterface, not
        # FixtureEditInterface, so any report_context defined on
        # FixtureInterface itself is deliberately skipped -- confirm this is
        # intentional before changing it.
        context = super(FixtureInterface, self).report_context
        context.update(types=self.data_types)
        return context

    @property
    @memoized
    def data_types(self):
        # All fixture data types (tables) for the domain; memoized per request.
        return list(FixtureDataType.by_domain(self.domain))
| bsd-3-clause | -2,079,126,917,013,998,000 | 30.87156 | 113 | 0.689119 | false | 4.145585 | false | false | false |
GoogleCloudPlatform/datacatalog-connectors-rdbms | google-datacatalog-rdbms-connector/src/google/datacatalog_connectors/rdbms/scrape/metadata_normalizer.py | 1 | 8545 | #!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import six
class MetadataNormalizer:
    """Normalizes scraped RDBMS metadata (a Pandas dataframe) into the nested
    Table Container (Database/Schema) -> Tables -> Columns dictionary used by
    the Data Catalog sync logic. All functionality is exposed as
    class/static methods; instances carry no state."""

    def __init__(self):
        # Stateless by design; kept for backward compatibility with callers
        # that instantiate the class.
        pass

    @classmethod
    def normalize(cls, metadata, metadata_definition):
        """
        Receives a Pandas dataframe and normalizes it by creating a dictionary
        with Table Container(Database/Schema) -> Tables -> Columns hierarchy.

        :param metadata: the Pandas dataframe (mutated in place: NaN-only rows
            are dropped and indexes are set while processing)
        :param metadata_definition: the Metadata Definition
         the normalized dictionary will be created with the specified
         target keys.

         Example:

         >>> metadata_definition = {
         ...    'table_container_def': {
         ...        'key': 'schemas',
         ...        'type': 'schema',
         ...        'name': 'schema_name',
         ...        'fields': [
         ...            {
         ...                'source': 'schema_created',
         ...                'target': 'create_time'
         ...            }
         ...        ]
         ...    },
         ...    'table_def': {
         ...        'key': 'tables',
         ...        'type': 'table',
         ...        'name': 'table_name',
         ...        'fields': [
         ...            {
         ...                'source': 'table_comments',
         ...                'target': 'desc'
         ...            }
         ...        ]
         ...    },
         ...    'column_def': {
         ...        'key': 'columns',
         ...        'type': 'column',
         ...        'name': 'column_name',
         ...        'fields': [
         ...            {
         ...                'source': 'data_length',
         ...                'target': 'length'
         ...            }
         ...        ]
         ...    }
         ...}

        :return: a normalized dict object
        """
        cls._remove_nan_rows(metadata)

        table_container_def = metadata_definition['table_container_def']

        return {
            table_container_def['key']:
                cls._normalize_objects(
                    metadata=metadata,
                    key_column_name=table_container_def['name'],
                    normalizer_method=cls.__normalize_table_container,
                    metadata_definition=metadata_definition)
        }

    @classmethod
    def _remove_nan_rows(cls, metadata):
        """Drop rows where every field is NaN."""
        # Silence pandas' chained-assignment warning; the in-place slicing
        # below is intentional. NOTE: this toggles a process-wide option.
        pd.options.mode.chained_assignment = None
        metadata.dropna(axis=0, how='all', inplace=True)

    @classmethod
    def _normalize_objects(cls, metadata, key_column_name, normalizer_method,
                           metadata_definition):
        """
        Generic method to normalize a Pandas dataframe
        into an array of dictionary objects.

        :param metadata: the Pandas dataframe
        :param key_column_name: column used to
            distinguish top-level objects from each other
        :param normalizer_method: the method
            used to normalize each top-level object
        :param metadata_definition: the Metadata Definition
        :return: an array of normalized dict objects
        """
        metadata.set_index(key_column_name, inplace=True)

        key_values = metadata.index.unique().tolist()

        array = []

        for key_value in key_values:
            # We use an array with: [key_value] to make sure the dataframe loc
            # always returns a dataframe, and not a Series
            if pd.notnull(key_value):
                metadata_subset = metadata.loc[[key_value]]
                # Drop the processed rows so each key is handled exactly once.
                metadata.drop(key_value, inplace=True)
                array.append(
                    normalizer_method(key_value.strip(), metadata_subset,
                                      metadata_definition))

        return array

    @classmethod
    def _extract_value_from_first_row(cls, df, column_name):
        """Read one column from the first row, stripping string values."""
        value = df.iloc[0][column_name]
        if pd.isna(value):
            return value
        if isinstance(value, six.string_types):
            return value.strip()
        return value

    @classmethod
    def _normalize_timestamp_field(cls, timestamp_field):
        """Coerce a scraped timestamp value into a pandas Timestamp."""
        return pd.Timestamp(timestamp_field)

    @classmethod
    def __normalize_table_container(cls, name, table_container_metadata,
                                    metadata_definition):
        """Build one schema/database dict and recurse into its tables."""
        tables_container_def = metadata_definition['table_container_def']

        fields = tables_container_def['fields']

        normalized_dict = {'name': name}
        normalized_dict.update(
            cls._normalize_fields(fields, table_container_metadata))

        table_def = metadata_definition['table_def']

        # Column slice from the table-name column onward: everything the
        # table/column normalizers need.
        normalized_dict[table_def['key']] = \
            cls._normalize_objects(
                metadata=table_container_metadata.loc[
                         :, table_def['name']:],
                key_column_name=table_def['name'],
                normalizer_method=cls.__normalize_table,
                metadata_definition=metadata_definition
            )

        return normalized_dict

    @classmethod
    def __normalize_table(cls, name, table_metadata, metadata_definition):
        """Build one table dict and recurse into its columns."""
        table_def = metadata_definition['table_def']

        fields = table_def['fields']

        normalized_dict = {'name': name}
        normalized_dict.update(cls._normalize_fields(fields, table_metadata))

        column_def = metadata_definition['column_def']

        normalized_dict[column_def['key']] = cls._normalize_objects(
            metadata=table_metadata.loc[:, column_def['name']:],
            key_column_name=column_def['name'],
            normalizer_method=cls.__normalize_column,
            metadata_definition=metadata_definition)

        return normalized_dict

    @classmethod
    def __normalize_column(cls, name, column_metadata, metadata_definition):
        """Build one column dict (leaf level -- no recursion)."""
        column_def = metadata_definition['column_def']

        fields = column_def['fields']

        normalized_dict = {'name': name}
        normalized_dict.update(cls._normalize_fields(fields, column_metadata))

        return normalized_dict

    @classmethod
    def _normalize_fields(cls, fields, metadata):
        """Map each configured source column to its target key, converting
        timestamp-like targets to pandas Timestamps."""
        fields_dict = {}

        for field in fields:
            source = field['source']
            target = field['target']

            # could be that optional information ('source')
            # is not present in scraped metadata
            if source in metadata:
                value = cls._extract_value_from_first_row(metadata, source)
                if cls._is_timestamp_field(target):
                    value = cls._normalize_timestamp_field(value)
                fields_dict[target] = value

        return fields_dict

    @classmethod
    def _is_timestamp_field(cls, target):
        # [TODO] Improve logic to identify timestamp fields
        # currently using a naming convention
        return '_date' in target or '_time' in target

    @staticmethod
    def get_exact_table_names_from_dataframe(dataframe, metadata_definition):
        """
        Get table names in a form schema_name.table_name
        """
        container_name_col = metadata_definition['table_container_def']['name']
        table_name_col = metadata_definition['table_def']['name']

        container_table_pairs_df = dataframe[[
            container_name_col, table_name_col
        ]]

        # Join "<schema>.<table>" per row, stripping stray whitespace around
        # both parts.
        return [
            ".".join(value.strip() for value in record.values())
            for record in container_table_pairs_df.to_dict(orient='records')
        ]
| apache-2.0 | -5,122,508,654,907,185,000 | 33.595142 | 79 | 0.541603 | false | 4.697636 | false | false | false |
Ledoux/ShareYourSystem | Pythonlogy/draft/Filterer/Drafts/__init__ copy.py | 1 | 1800 | # -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Filterer pick and
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Applyiers.Walker"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
import copy
import collections
#</ImportSpecificModules>
#<DefineClass>
@DecorationClass()
class FiltererClass(BaseClass):
    """Walker that collects ("filters") picked variables from every walked
    node whose conclude-condition evaluates to True, publishing the collected
    list on the top-level node when the walk returns to it."""

    #Definition
    RepresentingKeyStrsList=[
        'FilteredVariablesList'
    ]

    def default_init(self,
                _FilteredVariablesList=None,
                **_KwargVariablesDict):
        """Declare the filtered-variables slot; setup is delegated to the
        SYS base class machinery."""

        #Call the parent __init__ method
        BaseClass.__init__(self,**_KwargVariablesDict)

    def do_filter(self):
        """Per-node walk hook: append this node's picked variables to the
        shared accumulator when the conclude condition holds."""

        #debug
        '''
        self.debug(('self.',self,[
            'WalkingSocketDict',
            'WalkedTopOrderedDict'
        ]))
        '''

        #Init the shared accumulator on the walk's top ordered dict
        if 'FilterVariablesList' not in self.WalkedTopOrderedDict:
            self.WalkedTopOrderedDict['FilterVariablesList']=[]

        #Check whether this node satisfies the conclude condition
        if self.conclude(
            self,
            self.WalkingSocketDict['ConcludeConditionVariable']
        ).ConcludedIsBool:

            #debug
            '''
            self.debug(
                (
                    'self.',self,[
                        'ConcludedConditionIsBoolsList',
                    ]+SYS.unzip(
                        self.WalkingSocketDict[
                            'ConcludeConditionVariable'],[0]
                    )
                )
            )
            '''

            #Pick this node's requested variables into the accumulator
            self.WalkedTopOrderedDict['FilterVariablesList'].append(
                self.pick(
                    self.WalkingSocketDict['PickVariablesList']
                )
            )

        #set the collected result on the top node of the walk
        if self.WalkingSocketDict['TopVariable']==self:
            self.FilteredVariablesList=self.WalkedTopOrderedDict['FilterVariablesList']
            #self.FilteredVariablesList=copy.copy(self.WalkedTopOrderedDict['FilterVariablesList'])
#</DefineClass>
| mit | -6,953,273,175,532,897,000 | 18.78022 | 90 | 0.682222 | false | 3.169014 | false | false | false |
avatar29A/adbook | adbook/examples/find_person_by_email_example.py | 1 | 1158 | # coding=utf-8
from typing import List, Callable
from adbook.orm.entity import Entity
from adbook.addressbook import AddressBook
def convert_person_list(persons: List[Entity]) -> List[str]:
    """Render each person entity via its string representation."""
    return list(map(str, persons))
if __name__ == '__main__':
    """
    * Find person by email address (can supply either the exact string or a prefix string,
     ie. both "[email protected]" and "alex" should work).
    """
    with AddressBook() as ab:
        # Seed the address book with two people; emails may be given at
        # creation time or appended afterwards.
        p1 = ab.persons.create(first_name="Franc", last_name="Kafka", email="[email protected]")
        p2 = ab.persons.create(first_name="Sergey", last_name="Esenin")
        p2.emails.append("[email protected]")
        p2.emails.append("[email protected]")

        ab.persons.add(p1, p2)

        # Prefix search.
        lookingfor_email_1 = "esenin1"
        found1 = ab.persons.find_by_email(lookingfor_email_1)
        print("Find email {}: {}".format(lookingfor_email_1, convert_person_list(found1)))

        # Exact-match search.
        lookingfor_email_2 = "[email protected]"
        found2 = ab.persons.find_by_email(lookingfor_email_2)
        print("Find email {}: {}".format(lookingfor_email_2, convert_person_list(found2)))
| mit | -1,871,371,984,925,062,100 | 33.058824 | 95 | 0.657168 | false | 3.198895 | false | false | false |
mganeva/mantid | qt/python/mantidqt/widgets/workspacedisplay/matrix/table_view_model.py | 1 | 8826 | # coding=utf-8
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantid workbench.
#
#
from __future__ import (absolute_import, division, print_function)
from qtpy import QtGui
from qtpy.QtCore import QVariant, Qt, QAbstractTableModel
from mantid.py3compat import Enum
class MatrixWorkspaceTableViewModelType(Enum):
    """Which workspace data field a table model instance displays."""
    x = 'x'
    y = 'y'
    e = 'e'
class MatrixWorkspaceTableViewModel(QAbstractTableModel):
    """Qt table model exposing one data field (X, Y or E) of a Mantid
    MatrixWorkspace, with background-colour and tooltip decoration for masked
    spectra, monitor spectra and masked bins."""

    # Format strings used by the header helpers below.
    HORIZONTAL_HEADER_DISPLAY_STRING = u"{0}\n{1:0.1f}{2}"
    HORIZONTAL_HEADER_TOOLTIP_STRING = u"index {0}\n{1} {2:0.1f}{3} (bin centre)"

    HORIZONTAL_HEADER_DISPLAY_STRING_FOR_X_VALUES = "{0}"
    HORIZONTAL_HEADER_TOOLTIP_STRING_FOR_X_VALUES = "index {0}"

    VERTICAL_HEADER_DISPLAY_STRING = "{0} {1}"
    VERTICAL_HEADER_TOOLTIP_STRING = "index {0}\nspectra no {1}"

    HORIZONTAL_BINS_VARY_DISPLAY_STRING = "{0}\nbins vary"
    HORIZONTAL_BINS_VARY_TOOLTIP_STRING = "index {0}\nbin centre value varies\nRebin to set common bins"

    # Tooltip fragments, concatenated when multiple conditions apply.
    MASKED_MONITOR_ROW_STRING = "This is a masked monitor spectrum. "
    MASKED_ROW_STRING = "This is a masked spectrum. "
    MONITOR_ROW_STRING = "This is a monitor spectrum. "
    MASKED_BIN_STRING = "This bin is masked. "

    def __init__(self, ws, model_type):
        """
        :param ws: the MatrixWorkspace whose data is displayed
        :param model_type: MatrixWorkspaceTableViewModelType
        :type model_type: MatrixWorkspaceTableViewModelType
        """
        assert model_type in [MatrixWorkspaceTableViewModelType.x, MatrixWorkspaceTableViewModelType.y,
                              MatrixWorkspaceTableViewModelType.e], "The Model type must be either X, Y or E."
        super(MatrixWorkspaceTableViewModel, self).__init__()

        self.ws = ws
        self.ws_spectrum_info = self.ws.spectrumInfo()
        self.row_count = self.ws.getNumberHistograms()
        self.column_count = self.ws.blocksize()

        # Caches so the SpectrumInfo / maskedBins lookups are performed at
        # most once per row (or per row's bins).
        self.masked_rows_cache = []
        self.monitor_rows_cache = []
        self.masked_bins_cache = {}

        self.masked_color = QtGui.QColor(240, 240, 240)

        self.monitor_color = QtGui.QColor(255, 253, 209)

        self.type = model_type
        if self.type == MatrixWorkspaceTableViewModelType.x:
            self.relevant_data = self.ws.readX

            # add another column if the workspace is histogram data
            # this will contain the right boundary for the last bin
            if self.ws.isHistogramData():
                self.column_count += 1

        elif self.type == MatrixWorkspaceTableViewModelType.y:
            self.relevant_data = self.ws.readY
        elif self.type == MatrixWorkspaceTableViewModelType.e:
            self.relevant_data = self.ws.readE
        else:
            raise ValueError("Unknown model type {0}".format(self.type))

    def _makeVerticalHeader(self, section, role):
        """Label (DisplayRole) or tooltip (other roles) for row `section`,
        using the workspace's vertical axis and spectrum numbers."""
        axis_index = 1
        # check that the vertical axis actually exists in the workspace
        if self.ws.axes() > axis_index:
            if role == Qt.DisplayRole:
                return self.VERTICAL_HEADER_DISPLAY_STRING.format(section, self.ws.getAxis(axis_index).label(section))
            else:
                spectrum_number = self.ws.getSpectrum(section).getSpectrumNo()
                return self.VERTICAL_HEADER_TOOLTIP_STRING.format(section, spectrum_number)
        else:
            raise NotImplementedError("What do we do here? Handle if the vertical axis does NOT exist")

    def _makeHorizontalHeader(self, section, role):
        """
        :param section: The workspace index or bin number
        :param role: Qt.DisplayRole - is the label for the header
                      or Qt.TooltipRole - is the tooltip for the header when moused over
        :return: The formatted header string
        """
        # X values get simpler labels
        if self.type == MatrixWorkspaceTableViewModelType.x:
            if role == Qt.DisplayRole:
                return self.HORIZONTAL_HEADER_DISPLAY_STRING_FOR_X_VALUES.format(section)
            else:
                # format for the tooltip
                return self.HORIZONTAL_HEADER_TOOLTIP_STRING_FOR_X_VALUES.format(section)

        if not self.ws.isCommonBins():
            if role == Qt.DisplayRole:
                return self.HORIZONTAL_BINS_VARY_DISPLAY_STRING.format(section)
            else:
                # format for the tooltip
                return self.HORIZONTAL_BINS_VARY_TOOLTIP_STRING.format(section)

        # for the Y and E values, create a label with the units
        axis_index = 0
        x_vec = self.ws.readX(0)
        if self.ws.isHistogramData():
            bin_centre_value = (x_vec[section] + x_vec[section + 1]) / 2.0
        else:
            bin_centre_value = x_vec[section]

        unit = self.ws.getAxis(axis_index).getUnit()
        if role == Qt.DisplayRole:
            return self.HORIZONTAL_HEADER_DISPLAY_STRING.format(section, bin_centre_value, unit.symbol().utf8())
        else:
            # format for the tooltip
            return self.HORIZONTAL_HEADER_TOOLTIP_STRING.format(section, unit.caption(), bin_centre_value,
                                                                unit.symbol().utf8())

    def headerData(self, section, orientation, role=None):
        """Qt hook: delegate to the vertical/horizontal header builders."""
        if not (role == Qt.DisplayRole or role == Qt.ToolTipRole):
            return QVariant()

        if orientation == Qt.Vertical:
            return self._makeVerticalHeader(section, role)
        else:
            return self._makeHorizontalHeader(section, role)

    def rowCount(self, parent=None, *args, **kwargs):
        """Qt hook: number of histograms in the workspace."""
        return self.row_count

    def columnCount(self, parent=None, *args, **kwargs):
        """Qt hook: number of bins (plus one for histogram X data)."""
        return self.column_count

    def data(self, index, role=None):
        """Qt hook: cell text, background colour or tooltip for `index`.

        Note: for BackgroundRole/ToolTipRole cells with no decoration this
        falls through and implicitly returns None, which Qt treats as the
        default.
        """
        row = index.row()
        if role == Qt.DisplayRole:
            # DisplayRole determines the text of each cell
            return str(self.relevant_data(row)[index.column()])
        elif role == Qt.BackgroundRole:
            # BackgroundRole determines the background of each cell

            # Checks if the row is MASKED, if so makes it the specified color for masked
            # The check for masked rows should be first as a monitor row can be masked as well - and we want it to be
            # colored as a masked row, rather than as a monitor row.
            # First do the check in the cache, and only if not present go through SpectrumInfo and cache it. This logic
            # is repeated in the other checks below
            if self.checkMaskedCache(row):
                return self.masked_color

            # Checks if the row is a MONITOR, if so makes it the specified color for monitors
            elif self.checkMonitorCache(row):
                return self.monitor_color

            # Checks if the BIN is MASKED, if so makes it the specified color for masked
            elif self.checkMaskedBinCache(row, index):
                return self.masked_color

        elif role == Qt.ToolTipRole:
            tooltip = QVariant()
            if self.checkMaskedCache(row):
                if self.checkMonitorCache(row):
                    tooltip = self.MASKED_MONITOR_ROW_STRING
                else:
                    tooltip = self.MASKED_ROW_STRING
            elif self.checkMonitorCache(row):
                tooltip = self.MONITOR_ROW_STRING
                if self.checkMaskedBinCache(row, index):
                    tooltip += self.MASKED_BIN_STRING
            elif self.checkMaskedBinCache(row, index):
                tooltip = self.MASKED_BIN_STRING
            return tooltip
        else:
            return QVariant()

    def checkMaskedCache(self, row):
        """True if `row` is a masked spectrum (returns None, i.e. falsy,
        otherwise); caches positive results."""
        if row in self.masked_rows_cache:
            return True
        elif self.ws_spectrum_info.hasDetectors(row) and self.ws_spectrum_info.isMasked(row):
            self.masked_rows_cache.append(row)
            return True

    def checkMonitorCache(self, row):
        """True if `row` is a monitor spectrum (returns None, i.e. falsy,
        otherwise); caches positive results."""
        if row in self.monitor_rows_cache:
            return True
        elif self.ws_spectrum_info.hasDetectors(row) and self.ws_spectrum_info.isMonitor(row):
            self.monitor_rows_cache.append(row)
            return True

    def checkMaskedBinCache(self, row, index):
        """True if the bin at (row, index.column()) is masked; caches the
        row's masked bin indices on first positive lookup."""
        if row in self.masked_bins_cache:
            # retrieve the masked bins IDs from the cache
            if index.column() in self.masked_bins_cache[row]:
                return True

        elif self.ws.hasMaskedBins(row):
            masked_bins = self.ws.maskedBinsIndices(row)
            if index.column() in masked_bins:
                self.masked_bins_cache[row] = masked_bins
                return True
| gpl-3.0 | 4,422,619,221,616,382,000 | 40.242991 | 119 | 0.619873 | false | 4.000907 | false | false | false |
leifos/treconomics | treconomics_project/search/diversify.py | 1 | 6884 | #
# Diversification Algorithm with access to the diversity QRELs
# Mark II -- More complex algorithm, not as rewarding as the first attempt.
# Updated to work with the ifind search objects.
#
# Slightly updated to make it easier to drop into the treconomis environment.
#
# Author: David Maxwell and Leif Azzopardi
# Date: 2018-01-06
#
import copy
from treconomics.experiment_functions import qrels_diversity
# TODO: @leifos
# - What values do we use above?
# - To diversity, you need:
# * a list of results
# * a topic number
# * a lambda value
# * a DIVERSIFY_TO_RANK value
#
# - call diversify(results, topic_num, to_rank, lam)
# This returns a new list, with the diversified set of results according to our algorithm.
# The results object you pass in should be an iterable -- it can be a whoosh.results object or a list.
# The object that is returned is just a Python list -- so there could be an issue down the line if it relies on something whoosh.results provides. Hope not -- I can't create an artifical whoosh.results object (easily, at least).
def convert_results_to_list(results, deep_copy=True):
    """
    Given a Whoosh results object (or any iterable of hits), converts it to a
    list and returns that list.
    Useful, as the Whoosh results object does not permit reassignment of Hit
    objects.
    When deep_copy is True (the default), each hit is copied with copy.copy()
    so the returned hits can be mutated (e.g. their scores) without touching
    the originals. Note: despite the parameter name, this is a shallow copy
    of each hit, not a recursive deep copy.
    """
    if deep_copy:
        return [copy.copy(hit) for hit in results]
    return list(results)
def get_highest_score_index(results_list):
    """
    Given a list of results, returns the index of the hit with the highest
    score. Ties keep the earliest index; if no hit scores above 0.0 the
    first index (0) is returned.
    """
    best_index = 0
    best_score = 0.0

    for position, hit in enumerate(results_list):
        if hit.score > best_score:
            best_score = hit.score
            best_index = position

    return best_index
def get_new_entities(observed_entities, document_entities):
    """
    Given a list of previously seen entities, and a list of document entities,
    returns a list of entities in the document which have not yet been
    previously seen (order unspecified, as with any set difference).
    """
    return list(set(document_entities).difference(observed_entities))
# def get_existing_entities(observed_entities, document_entities):
# """
# Given a list of previously seen entities, and a list of document entities, returns
# the intersection of the two lists -- i.e. the entities that have already been seen.
# """
# return list(set(observed_entities) & set(document_entities))
def get_observed_entities_for_list(topic, rankings_list):
    """
    Given a list of Whoosh Hit objects, returns the distinct entities that are
    mentioned in them, accumulated in ranking order.
    """
    seen_entities = []

    for hit in rankings_list:
        doc_entities = qrels_diversity.get_mentioned_entities_for_doc(topic, hit.docid)
        seen_entities.extend(get_new_entities(seen_entities, doc_entities))

    return seen_entities
def diversify_results(results, topic, to_rank=30, lam=1.0):
    """
    The diversification algorithm.
    Given an ifind results object, returns it with its results list re-ranked
    so that more diverse content sits at the top.
    By diverse, we mean a selection of documents discussing a wider range of
    identified entities.

    :param results: ifind results object; results.results is rebuilt in place.
    :param topic: topic number used to look up entity judgements (QRELs).
    :param to_rank: depth to which the ranking is diversified; None means the
        whole list. Capped at the number of available results.
    :param lam: lambda weight controlling how strongly novel entities boost a
        candidate document's score.
    """
    results_len = len(results.results)

    # Simple sanity check -- no results? Can't diversify anything!
    if results_len == 0:
        return results

    # Normalise the re-ranking depth: None means "everything"; never exceed
    # the number of results we actually have.
    if to_rank is None or results_len < to_rank:
        to_rank = results_len

    # Ensure float arithmetic for the score boost below
    # (was previously guarded by an un-idiomatic type(lam) != float check).
    lam = float(lam)

    ############################
    ### Main algorithm below ###
    ############################

    # As the list of results is probably larger than the depth we re-rank to,
    # take a slice. This is the working list we modify and pop from.
    old_rankings = results.results[:to_rank]

    # For our new rankings, start with the first document -- this won't change.
    new_rankings = [old_rankings.pop(0)]

    for i in range(1, to_rank):
        # Entities already covered by the documents selected so far.
        observed_entities = get_observed_entities_for_list(topic, new_rankings)

        # Boost every remaining candidate by lam * (entities it would newly
        # introduce). Scores are modified in place, so boosts accumulate
        # across iterations -- this is the "Mark II" behaviour.
        for candidate in old_rankings:
            entities = qrels_diversity.get_mentioned_entities_for_doc(topic, candidate.docid)
            new_entities = get_new_entities(observed_entities, entities)
            candidate.score = candidate.score + (lam * len(new_entities))

        # Sort the list in reverse order, so the highest score is first.
        # Then pop from old, push to new.
        old_rankings.sort(key=lambda hit: hit.score, reverse=True)
        new_rankings.append(old_rankings.pop(0))

    # Re-attach everything below the re-ranking depth untouched.
    results.results = new_rankings + results.results[to_rank:]
    return results
# The main algorithm -- only work on the top to_rank documents.
# Leif notes (algorithm): two loops still...
# for first doc, get the mentioned entities. this is outside the loop (set to x).
# for each doc in the rest of the list, what entities are in those docs, and how different are they?
# compute score, sort it, take the first element from that, that becomes the second document
# the key is to sort the sublist scores, and pick the top element from that list.
# take entities from the first two now - replace x with this.
# repeat this until all documents have been observed.
#
# how does lambda influence the performance?
# alpha ndcg -- run the queries from the sigir study from before to see what happens when you change lambda.
# More:
# take all documents that have been judged
# take the non-rel documents in the QRELs for the
# so you update the list of entities with those previously seen (i.e. x) after each document has been observed.
| mit | -5,953,792,841,605,681,000 | 38.563218 | 230 | 0.667635 | false | 3.904708 | false | false | false |
SCSSoftware/BlenderTools | addon/io_scs_tools/unused/import_pmx.py | 1 | 9863 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright (C) 2013-2014: SCS Software
"""
This script manages import of various SCS binary data files into Blender.
"""
import bpy
import os
# from bpy_extras import io_utils
# from . import import_pmg
# from . import import_pit
# from . import import_pic
# from . import import_pip
# from . import import_pis
# from . import import_pia
# from . import io_utils
from .deprecated_utils import Print
if "bpy" in locals():
import imp
# if "import_pmg" in locals():
# imp.reload(import_pmg)
# else:
# from . import import_pmg
# if "import_pit" in locals():
# imp.reload(import_pit)
# else:
# from . import import_pit
# #if "import_pic" in locals():
# #imp.reload(import_pic)
# #else:
# #from . import import_pic
# if "import_pip" in locals():
# imp.reload(import_pip)
# else:
# from . import import_pip
# if "import_pis" in locals():
# imp.reload(import_pis)
# else:
# from . import import_pis
# if "import_pia" in locals():
# imp.reload(import_pia)
# else:
# from . import import_pia
if "io_utils" in locals():
imp.reload(io_utils)
else:
from . import io_utils
def version():
    """Return the version number of this import script.

    Here is where to alter version number of the script.
    """
    return 0.2
def create_lod_empty(name, objects, locators, armature, skeleton):
    """Creates an 'SCS Root Object' (Empty Object) for currently imported
    'SCS Game Object' and parents all imported content to it.

    Relies on Blender's selection state: children are selected, then the
    parent_set operator links them to the newly created empty.
    Returns the created (possibly renamed) root object.
    """
    # Avoid a name clash with an existing object.
    if name in bpy.data.objects:
        name = io_utils.make_unique_name(bpy.data.objects[0], name)

    ## CREATE EMPTY OBJECT
    bpy.ops.object.empty_add(
        type='PLAIN_AXES',
        view_align=False,
        # location=scs_loc,
        # rotation=rot,
    )#, layers=(False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False))

    ## MAKE A PROPER SETTINGS TO THE 'SCS ROOT OBJECT'
    lod_object = bpy.context.active_object
    lod_object.name = name
    lod_object.show_name = True
    lod_object.scs_props.scs_root_object_export_enabled = True
    lod_object.scs_props.empty_object_type = 'SCS_Root'

    ## MAKE ALL CHILDREN SELECTED
    # NOTE(review): when an armature exists only the armature is selected
    # (meshes/locators are assumed parented to it already) -- confirm.
    if armature:
        bpy.ops.object.select_all(action='DESELECT')
        armature.select = True
    else:
        for obj in objects:
            obj.select = True
        for obj in locators:
            obj.select = True

    ## SET PARENT
    bpy.ops.object.parent_set(type='OBJECT', keep_transform=False)

    ## MAKE ONLY 'SCS GAME OBJECT' SELECTED
    bpy.ops.object.select_all(action='DESELECT')
    for obj in bpy.data.objects:
        obj.select = False
    lod_object.select = True
    bpy.context.scene.objects.active = lod_object
    return bpy.data.objects.get(name)
def load(
        operator,
        context,
        filepath,
):
    """Entry point for importing an SCS model: loads the PMG (and, when the
    commented-out sections are re-enabled, the companion PIT/PIC/PIP/PIS/PIA
    files), groups the imported content under an 'SCS Root Object' and sets
    up 3D view draw modes. Most of the companion-file pipeline is currently
    disabled (commented out) in this 'unused' variant of the importer.
    Returns None.
    """
    import time
    t = time.time()
    bpy.context.window.cursor_modal_set('WAIT')
    # import_scale = bpy.data.worlds[0].scs_globals.import_scale
    # load_textures = bpy.data.worlds[0].scs_globals.load_textures
    # mesh_creation_type = bpy.data.worlds[0].scs_globals.mesh_creation_type
    dump_level = int(bpy.data.worlds[0].scs_globals.dump_level)

    prefab_locators = []
    objects = []
    locators = []
    armature = skeleton = None

    # ## NEW SCENE CREATION
    # if bpy.data.worlds[0].scs_globals.scs_lod_definition_type == 'scenes':
    #     if context.scene.name != 'Scene':
    #         bpy.ops.scene.new(type='NEW')

    ## IMPORT PMG (PIM)
    if bpy.data.worlds[0].scs_globals.import_pmg_file or bpy.data.worlds[0].scs_globals.import_pis_file:
        if filepath:
            if os.path.isfile(filepath):
                Print(dump_level, '\nD PMG filepath:\n %s', str(filepath).replace("\\", "/"))
                # result, objects, locators, armature, skeleton = import_pmg.load(operator, context, filepath)
            else:
                Print(dump_level, '\nI No file found at %r!' % str(filepath).replace("\\", "/"))
        else:
            Print(dump_level, '\nI No filepath provided!')

    # ## IMPORT PIT
    # if bpy.data.worlds[0].scs_globals.import_pit_file:
    #     pit_filepath = str(filepath[:-1] + 't')
    #     if os.path.isfile(pit_filepath):
    #         Print(dump_level, '\nD PIT filepath:\n %s', pit_filepath)
    #         # print('PIT filepath:\n %s' % pit_filepath)
    #         result = import_pit.load(operator, context, pit_filepath)
    #     else:
    #         Print(dump_level, '\nI No PIT file.')
    #         # print('INFO - No PIT file.')

    # ## IMPORT PIC
    # if bpy.data.worlds[0].scs_globals.import_pic_file:
    #     pic_filepath = str(filepath[:-1] + 'c')
    #     if os.path.isfile(pic_filepath):
    #         Print(dump_level, '\nD PIC filepath:\n %s', pic_filepath)
    #         # print('PIC filepath:\n %s' % pic_filepath)
    #     else:
    #         Print(dump_level, '\nI No PIC file.')
    #         # print('INFO - No PIC file.')

    # ## IMPORT PIP
    # if bpy.data.worlds[0].scs_globals.import_pip_file:
    #     pip_filepath = str(filepath[:-1] + 'p')
    #     if os.path.isfile(pip_filepath):
    #         Print(dump_level, '\nD PIP filepath:\n %s', pip_filepath)
    #         # print('PIP filepath:\n %s' % pip_filepath)
    #         result, prefab_locators = import_pip.load(operator, context, pip_filepath)
    #     else:
    #         Print(dump_level, '\nI No PIP file.')
    #         # print('INFO - No PIP file.')

    # ## IMPORT PIS
    # if bpy.data.worlds[0].scs_globals.import_pis_file:
    #     pis_filepath = str(filepath[:-1] + 's')
    #     if os.path.isfile(pis_filepath):
    #         Print(dump_level, '\nD PIS filepath:\n %s', pis_filepath)
    #         # print('PIS filepath:\n %s' % pis_filepath)
    #         result, bones = import_pis.load(operator, context, pis_filepath, armature)
    #     else:
    #         bones = None
    #         Print(dump_level, '\nI No PIS file.')
    #         # print('INFO - No PIS file.')

    # ## IMPORT PIA
    # if bpy.data.worlds[0].scs_globals.import_pis_file and bpy.data.worlds[0].scs_globals.import_pia_file:
    #     basepath = os.path.dirname(filepath)
    #     ## Search for PIA files in model's directory and its subdirectiories...
    #     Print(dump_level, '\nI Searching the directory for PIA files:\n %s', str(basepath))
    #     # print('\nSearching the directory for PIA files:\n %s' % str(basepath))
    #     pia_files = []
    #     index = 0
    #     for root, dirs, files in os.walk(basepath):
    #         if not bpy.data.worlds[0].scs_globals.include_subdirs_for_pia:
    #             if index > 0:
    #                 break
    #         # print(' root: %s - dirs: %s - files: %s' % (str(root), str(dirs), str(files)))
    #         for file in files:
    #             if file.endswith(".pia"):
    #                 pia_filepath = os.path.join(root, file)
    #                 pia_files.append(pia_filepath)
    #         index += 1
    #
    #     if len(pia_files) > 0:
    #         if dump_level > 1:
    #             Print(dump_level, 'I PIA files found:')
    #             for pia_filepath in pia_files: Print(dump_level, 'I %r', pia_filepath)
    #         # print('armature: %s\nskeleton: %r\nbones: %s\n' % (str(armature), str(skeleton), str(bones)))
    #         result = import_pia.load(operator, context, pia_files, armature, skeleton, bones)
    #         # print(' result: %s' % str(result))
    #     else:
    #         Print(dump_level, '\nI No PIA files.')

    ## SETUP LODS
    # Prefab locators (populated when PIP import is enabled) join the
    # regular locators before parenting.
    for item in prefab_locators:
        locators.append(item)
    path, file = os.path.split(filepath)
    # print(' path: %r\n file: %r' % (path, file))
    lod_name, ext = os.path.splitext(file)
    # print(' root: %r\n ext: %r' % (root, ext))
    # if bpy.data.worlds[0].scs_globals.scs_lod_definition_type == 'scenes':
    #     print('LODs as Scenes...')
    #     context.scene.name = lod_name
    #     context.scene.scs_props.scene_lod = True
    # else:
    print('LODs as Objects...')
    if objects:
        create_lod_empty(lod_name, objects, locators, armature, skeleton)

    ## SET DRAW MODES
    ## Turn on Textured Solid in 3D view...
    for bl_screen in bpy.data.screens:
        for bl_area in bl_screen.areas:
            for bl_space in bl_area.spaces:
                if bl_space.type == 'VIEW_3D':
                    bl_space.show_textured_solid = True

                    # bl_space.viewport_shade = 'WIREFRAME'
                    # bl_space.show_manipulator = True
                    bl_space.transform_orientation = 'NORMAL'
                    bl_space.transform_manipulators = {'ROTATE'}

    ## Turn on GLSL in 3D view...
    bpy.context.scene.game_settings.material_mode = 'GLSL'

    ## TURN ON SCS TOOLS
    # bpy.context.scene.scs_props.locator_size = 10.0 # TMP: increase locators' size

    bpy.context.window.cursor_modal_restore()
    Print(dump_level, '\nI files imported (in %.3f sec)', time.time() - t)
| gpl-2.0 | 8,172,225,801,110,876,000 | 37.228682 | 164 | 0.589577 | false | 3.195011 | false | false | false |
Fibio/flask-mongoset | examples.py | 1 | 2098 | import flask
import trafaret as t
from flask.ext.mongoset import MongoSet, Model
# Flask application and MongoDB connection settings for this example app.
app = flask.Flask(__name__)
app.config['MONGODB_HOST'] = "localhost"
app.config['MONGODB_PORT'] = 27017
app.config['MONGODB_DATABASE'] = "testdb"
# AUTOREF: resolve document references automatically on load (flask-mongoset option).
app.config['MONGODB_AUTOREF'] = True
app.config['TESTING'] = True
# MongoSet extension instance; models below are registered against it.
mongo = MongoSet(app)
class BaseProduct(Model):
    """Abstract product model declaring the shared validation schema.

    ``__abstract__ = True`` marks this model as a base that is not bound to
    a collection itself.  (Note: presumably intended as a base for concrete
    product models, though ``Product`` below subclasses ``Model`` directly —
    TODO confirm.)
    """
    __abstract__ = True
    # trafaret validation schema; extra keys are permitted via allow_extra('*').
    structure = t.Dict({
        'name': t.String,
        'quantity': t.Int,
        'attrs': t.Mapping(t.String, t.Or(t.Int, t.Float, t.String)),
    }).allow_extra('*')
    # Fields that are stored per-language (translatable).
    i18n = ['name', 'attrs']
    indexes = ['id']
@mongo.register
class Product(Model):
    """Model bound to the "products" collection.

    Declares a translatable list attribute and enables the extension's
    auto-incrementing integer id (``inc_id``).
    """
    __collection__ = "products"
    inc_id = True
    # trafaret validation schema; extra keys are permitted via allow_extra('*').
    structure = t.Dict({
        'list_attrs': t.List(t.String)
    }).allow_extra('*')
    # Fields that are stored per-language (translatable).
    i18n = ['list_attrs']
    indexes = [('quantity', -1), 'name']

    def as_dict(self, api_fields=None, exclude=None):
        """Return the instance as a plain dict in the currently selected language.

        :param api_fields: optional iterable of field names to include;
            defaults to all keys of the document.
        :param exclude: optional iterable of field names to drop from the result.
        """
        keys = api_fields or self.keys()
        if exclude:
            # BUGFIX: the original used set union (| ) here, which *added* the
            # excluded fields to the output instead of removing them.
            keys = list(set(keys) - set(exclude))
        result = dict(map(lambda key: (key, getattr(self, key)), keys))
        # ObjectId is not directly serializable; render it as a string.
        if '_id' in result:
            result['_id'] = str(result['_id'])
        return result
@app.route("/")
def index():
    """Demo view: create a product with English and French translations and
    render both language variants of the same document.
    """
    product = Product.get_or_create({'name': 'Name', 'quantity': 1,
                                     'attrs': {'feature': 'ice', 'revision': 1},
                                     'list_attrs': ['one', 'two']}, _lang='en')
    # Add the French translation for the i18n fields.
    product._lang = 'fr'
    product.update({'name': 'Nom'})
    product.update({'attrs': {'feature': 'glace', 'revision': 1}})
    # get_or_create with the French data must find the same document, not
    # create a duplicate.
    Product.get_or_create({'name': 'Nom', 'quantity': 1,
                           'attrs': {'feature': 'glace', 'revision': 1}},
                           _lang='fr')
    # BUGFIX: the original aliased product_fr and product_en to the *same*
    # object, so after the final `_lang = 'en'` both rendered in English.
    # Snapshot each language's dict while that language is active instead.
    product_fr_dict = product.as_dict()
    product._lang = 'en'
    product_en_dict = product.as_dict()
    total = Product.query.count()
    return "Total: %d. <br> product en is: %s <br> product fr is: %s" % (total,
           product_en_dict, product_fr_dict)
if __name__ == "__main__":
    # Run the Flask development server when executed as a script.
    app.run()
| mit | -3,599,347,163,045,619,700 | 28.138889 | 79 | 0.540515 | false | 3.394822 | false | false | false |
arulalant/UMRider | bsubScripts/ncumeps_global_tigge/tigge_create_tarball_g2files_put_into_ftp.py | 1 | 5917 | #!/usr/bin/env python
## This is call back script which will be executed after created the grib2 files.
##
## Hycom Model Input requires analysis of 06, 09, 12, 15, 18, 21-hours from
## yesterday and 00 & 03-hours from today date. All 3-hourly forecasts from
## today date.
##
## While creating tar ball, all files must be in present directory, so that
## when user extract it, will produce only files instead of entire paths!
##
## And finally putting into their ftp server.
##
## Arulalan.T
## 04-Mar-2016.
import os, subprocess, datetime, getopt, sys, glob, time
# Paths to external compression binaries (pbzip2 is not referenced in the
# visible code of this script — presumably kept for manual use; verify).
pbzip2 = '/gpfs1/home/Libs/GNU/ZIPUTIL/pbzip2'
pigz = '/gpfs1/home/Libs/GNU/ZIPUTIL/pigz'
# grib_api tool that validates grib2 files against the TIGGE standard.
tigge_check = '/gpfs1/home/Libs/GNU/GRIB_API/gribapi-1.21.0/bin/tigge_check'
# Expected number of grib2 files per variable directory (sanity check below).
filesCount = {'ttr': 41, 'lsm': 41, 'orog': 41, '10v': 41, 'tcc': 41, 'gh': 369, 'skt': 41, 'tp': 41, 'msl': 41, 'mx2t6': 40, '2d': 41, '10u': 41, 'mn2t6': 40, 'sshf': 41, 'slhf': 41, 'ssr': 41, '2t': 41, 'sp': 41, 'st': 41, 'q': 328, 'u': 328, 't': 328, 'str': 41, 'v': 328, 'sd': 41}
# Order in which the per-variable directories are concatenated into the
# single merged grib2 file.
dirsOrder = [
    'gh', 'u', 'v', 'q', 't', '10u', '10v', '2t', 'mx2t6', 'mn2t6', 'skt', 'st',
    '2d', 'sp', 'msl', 'tp', 'ttr', 'lsm', 'tcc', 'slhf', 'ssr', 'sshf', 'str', 'sd', 'orog'
]
def createTarBalls(path, today, member):
    """Validate one ensemble member's TIGGE grib2 tree, merge it into a single
    grib2 file, gzip it, and — when processing the control member ('000') —
    clean up the day directory and push the day's files to the ftp server.

    path   : day directory holding one sub-directory per member.
    today  : run date string in 'YYYYMMDD' form.
    member : ensemble member number; zero-padded to 3 digits below.

    Raises ValueError if any variable directory is missing, holds an
    unexpected file count, or still contains NetCDF intermediates.
    """
    member = str(member).zfill(3)
    inpath = os.path.join(path, member)
    if member == '000':
        # control member: merge every variable directory, in dirsOrder order
        catcmd = ['%s/z_tigge_c_dems*%s' % (d,d) for d in dirsOrder]
    else:
        # perturbed members: skip the static fields (land-sea mask, orography)
        catcmd = ['%s/z_tigge_c_dems*%s' % (d,d) for d in dirsOrder if d not in ['lsm', 'orog']]
    # assemble a single shell 'cat' command; the trailing '%s' is filled in
    # later with the merged output file path
    catcmd = ' '.join(catcmd)
    catcmd = 'cat %s ' % catcmd
    catcmd += ' > %s'
    # sanity check: every variable directory must exist, contain exactly the
    # expected number of files, and hold no leftover .nc intermediates
    for var, vlen in filesCount.iteritems():
        vpath = os.path.join(inpath, var)
        if not os.path.exists(vpath):
            raise ValueError("%s Folder doensnt exists" % vpath)
        files = os.listdir(vpath)
        if len(files) != vlen:
            raise ValueError("filesCount do not matches,%s %d, %d" % (vpath, len(files), vlen))
        ncfile = [f for f in files if f.endswith('.nc')]
        if ncfile: raise ValueError("Got nc file %s" % vpath)
    # end of for var, vlen in filesCount.iteritems():
    cdir = os.getcwd()
    os.chdir(inpath)
    # run tigge_check over each variable directory; failures are only warned
    # about, not fatal
    for tgf in os.listdir('.'):
        cmd = tigge_check + ' -v -w %s/*' % tgf
        tigge_check_val = os.system(cmd) # it should return 0 on pass
        if tigge_check_val != 0 :
            print "WARNING : While checking via tigge_check cmd got error!"
            #sys.exit(0)
    # end of for tgf in os.listdir('.'):
    tDay = datetime.datetime.strptime(today, "%Y%m%d")
    # timestamp of the past 6th day (today - 5), used for ftp cleanup below
    y6Day = (tDay - datetime.timedelta(days=5)).strftime('%Y%m%d')
    tardir = '../../TarFiles/%s' % today
    if not os.path.exists(tardir):
        try:
            os.makedirs(tardir)
        except Exception as e:
            # another member's parallel job may have created it first
            print "parallel folder creation", e
    # end of if not os.path.exists(tardir):
    mergedg2file = 'ncmrwf_dems_tigge_%s_%s.grib2' % (today, member)
    mergedg2filepath = os.path.join(tardir, mergedg2file)
    print "currnet path : ", os.getcwd()
    # merge all the params, all the levels, all the time steps of this single
    # member into one (BIG) grib2 file
    catcmd_out = catcmd % mergedg2filepath
    subprocess.call(catcmd_out, shell=True)
    time.sleep(30)
    # compress the merged grib2 file with parallel gzip (pigz, 32 threads)
    os.chdir(tardir)
    gzip_cmd = '%s -9 -p 32 %s' % (pigz, mergedg2file)
    print "gzip_cmd = ", gzip_cmd
    subprocess.call(gzip_cmd, shell=True)
    time.sleep(5)
    print os.getcwd(), member
    # the control member ('000') additionally performs the day's cleanup and
    # the transfer to the ftp server (NOTE(review): this assumes member 000 is
    # processed last — confirm with the submitting job script)
    if member == '000':
        # remove today's working directory tree
        print "path", path
        if os.path.exists(path):
            cmd = "rm -rf %s" % path
            print cmd
            subprocess.call(cmd, shell=True)
        # end of if os.path.exists(path):
        tarpath = os.path.abspath(tardir)
        # expect one .grib2.gz per member (45 in total) before transferring
        if not len(os.listdir(tarpath)) == 45:
            print "45 tar.gz files are expected to transfer ftp site, but we got only %s files." % len(os.listdir(tarpath))
        else:
            # rsync the day's files to the ftp server via the ncmlogin3 host
            cmd = 'ssh ncmlogin3 "rsync --update --ignore-existing -razt %s %s:/data/ftp/pub/outgoing/NCUM_TIGGE/"' % (tarpath, ftp_server)
            print cmd
            subprocess.call(cmd, shell=True)
            time.sleep(5)
            # remove the past 6th day's directory from the ftp server
            cmd = 'ssh ncmlogin3 "ssh %s rm -rf /data/ftp/pub/outgoing/NCUM_TIGGE/%s"' % (ftp_server, y6Day)
            print cmd
            try:
                subprocess.call(cmd, shell=True)
            except Exception as e:
                print "past 6th day tar balls folder has been removed from ftp_server, already", e
    # end of if member == '000':
    # restore the caller's working directory
    os.chdir(cdir)
# end of def createTarBalls(path, today, ...):
if __name__ == '__main__':
ftp_server="prod@ftp"
date = None
member = '000'
outpath = '/gpfs3/home/umeps/EPS/ShortJobs/NCUM_EPS_TIGGE/%s/'
helpmsg = 'tigge_create_tarball_g2files_put_into_ftp.py --date=20160302 --member=001'
try:
opts, args = getopt.getopt(sys.argv[1:], "d:m:", ["date=", "member="])
except getopt.GetoptError:
print helpmsg
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print helpmsg
sys.exit()
elif opt in ("-d", "--date"):
date = arg
elif opt in ("-m", "--member"):
member = arg
# end of for opt, arg in opts:
outpath = outpath % date
createTarBalls(outpath, date, member)
| gpl-2.0 | 2,097,474,215,553,767,000 | 38.18543 | 285 | 0.576475 | false | 3.107668 | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.