repo_name
stringlengths 5
92
| path
stringlengths 4
232
| copies
stringclasses 19
values | size
stringlengths 4
7
| content
stringlengths 721
1.04M
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 15
997
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|
jecki/MetaInductionSim | PyPlotter/qtGfx.py | 3 | 10165 | #!/usr/bin/python
# qtGfx - Implementation of the Gfx.Driver Interface in a
# qt evnironment
"""Implementes Gfx.Driver using the qt GUI toolkit.
"""
import sys, math
try:
from PyQt5.Qt import Qt
from PyQt5.QtCore import pyqtSignal as SIGNAL
from PyQt5.QtCore import QPoint, QObject
from PyQt5.QtWidgets import QApplication, QLabel
import PyQt5.QtGui as qt
QT3 = False
QT5 = True
except ImportError:
QT5 = False
try:
from PyQt4.Qt import Qt, SIGNAL
from PyQt4.QtCore import QPoint, QObject
import PyQt4.QtGui as qt
from PyQt4.QtGui import QApplication, QLabel
QT3 = False
except ImportError:
import qt
from qt import Qt, SIGNAL, QPoint, QObject, QApplication, QLabel
QT3 = True
try:
import Gfx
except ImportError:
from . import Gfx
driverName = "qtGfx"  # public identifier reported for this Gfx driver backend
########################################################################
#
# class Driver
#
########################################################################
class Driver(Gfx.Driver):
    """A graphics driver for qt4.
    For an explanation of the inherited methods see Gfx.py.

    All draw methods flip the y coordinate (``self.h - y - 1``),
    apparently mapping a bottom-left origin (as used by Gfx) onto
    Qt's top-left origin -- see Gfx.py to confirm.
    """
    def __init__(self, paintDevice):
        """Initialize canvas on the QPaintDevice 'paintDevice'."""
        Gfx.Driver.__init__(self)
        self.paintDevice = None
        self.painter = qt.QPainter()
        self.font = qt.QFont("SansSerif", 12, qt.QFont.Normal, False)
        self.pen = qt.QPen()
        # Round caps and joins give smoother-looking polylines.
        self.pen.setCapStyle(Qt.RoundCap)
        self.pen.setJoinStyle(Qt.RoundJoin)
        self.brush = qt.QBrush(Qt.SolidPattern)
        self.color = (0.0, 0.0, 0.0)
        self.w, self. h = 640, 480
        self.changePaintDevice(paintDevice)
        self.reset()
        self.clear()
    def changePaintDevice(self, paintDevice):
        """Use a new QPaintDevice for the following drawing commands."""
        oldPaintDevice = self.paintDevice
        if oldPaintDevice:
            # End painting on the previous device before switching.
            self.painter.end()
        self.paintDevice = paintDevice
        self.painter.begin(self.paintDevice)
        self.resizedGfx()
        self.painter.setPen(self.pen)
        # Brush is only installed temporarily by the fill* methods.
        self.painter.setBrush(Qt.NoBrush)
        self.painter.setBackgroundMode(Qt.TransparentMode)
        if QT3:
            self.painter.setBackgroundColor(qt.QColor(255,255,255))
        else:
            backgroundBrush = qt.QBrush(qt.QColor(255,255,255), Qt.SolidPattern)
            self.painter.setBackground(backgroundBrush)
        self.painter.setFont(self.font)
        return oldPaintDevice
    def getPaintDevice(self):
        """-> QPaintDevice of this graphics drivers object"""
        return self.paintDevice
    def _qtEnd(self):
        """Calls end() method of the QPainter obejct. Before any
        drawing can be done again qtBegin() must be called."""
        self.painter.end()
    def _qtBegin(self):
        """Calls begin() method of the QPainter obejct."""
        # NOTE(review): QPainter.begin() requires a paint device argument;
        # this call looks like it would fail as written -- confirm.
        self.painter.begin()
    def resizedGfx(self):
        """Re-read width and height from the current paint device."""
        self.w, self.h = self.paintDevice.width(), self.paintDevice.height()
    def getSize(self):
        """Return the canvas size as a (width, height) tuple."""
        return self.w, self.h
    def getResolution(self):
        """Return a fixed nominal resolution of 100 (units per inch, presumably)."""
        return 100
    def setColor(self, rgbTuple):
        """Set pen and brush color from an (r, g, b) tuple of floats in [0, 1]."""
        self.color = rgbTuple
        qtCol = qt.QColor(int(round(rgbTuple[0]*255)),
                          int(round(rgbTuple[1]*255)),
                          int(round(rgbTuple[2]*255)))
        self.pen.setColor(qtCol)
        self.brush.setColor(qtCol)
        self.painter.setPen(self.pen)
    def setLineWidth(self, width):
        """Set the pen width; 'width' is one of Gfx.THIN/MEDIUM/THICK."""
        self.lineWidth = width
        if width == Gfx.THIN: tn = 1
        elif width == Gfx.MEDIUM: tn = 2
        elif width == Gfx.THICK: tn = 3
        # NOTE(review): message below is missing the opening quote of 'thick'.
        else: raise ValueError("'thickness' must be 'thin', 'medium' or thick' !")
        self.pen.setWidth(tn)
        self.painter.setPen(self.pen)
    def setLinePattern(self, pattern):
        """Set the pen style; 'pattern' is Gfx.CONTINUOUS/DASHED/DOTTED."""
        self.linePattern = pattern
        if pattern == Gfx.CONTINUOUS: lp = Qt.SolidLine
        elif pattern == Gfx.DASHED: lp = Qt.DashLine
        elif pattern == Gfx.DOTTED: lp = Qt.DotLine
        else: raise ValueError("'pattern' must be 'continuous','dashed' " + \
                               "or 'dotted'")
        self.pen.setStyle(lp)
        self.painter.setPen(self.pen)
    def setFillPattern(self, pattern):
        """Set the brush style; 'pattern' is Gfx.SOLID/PATTERN_A/B/C."""
        self.fillPattern = pattern
        if pattern == Gfx.SOLID: fp = Qt.SolidPattern
        elif pattern == Gfx.PATTERN_A: fp = Qt.BDiagPattern
        elif pattern == Gfx.PATTERN_B: fp = Qt.FDiagPattern
        elif pattern == Gfx.PATTERN_C: fp = Qt.DiagCrossPattern
        else: raise ValueError("'pattern' must be 'solid' or 'patternA', " + \
                               "'patternB', 'patternC' !")
        self.brush.setStyle(fp)
    def setFont(self, ftype, size, weight):
        """Select a font; 'weight' is a string where "i" means italic
        and "b" means bold (italic takes precedence when both appear)."""
        self.fontType = ftype
        self.fontSize = size
        self.fontWeight = weight
        if ftype == Gfx.SANS: ff = "SansSerif"
        elif ftype == Gfx.SERIF: ff = "Serif"
        elif ftype == Gfx.FIXED: ff = "Typewriter"
        else: raise ValueError("'type' must be 'sans', 'serif' or 'fixed' !")
        if size == Gfx.SMALL: fs = 8
        elif size == Gfx.NORMAL: fs = 12
        elif size == Gfx.LARGE: fs = 16
        else: raise ValueError("'size' must be 'small', 'normal' or 'large' !")
        fst = False
        fw = qt.QFont.Normal
        # NOTE(review): "ib" yields italic but NOT bold because of the elif.
        if "i" in weight: fst = True
        elif "b" in weight: fw = qt.QFont.Bold
        self.font = qt.QFont(ff, fs, fw, fst)
        self.painter.setFont(self.font)
    def getTextSize(self, text):
        """Return (width, height) in pixels of 'text' in the current font."""
        fm = self.painter.fontMetrics()
        return fm.width(text), fm.height()
        # except AttributeError:
        # if self.fontSize == Gfx.SMALL: fs = 8
        # elif self.fontSize == Gfx.NORMAL: fs = 12
        # elif self.fontSize == Gfx.LARGE: fs = 16
        # return (len(text) * fs * 2/3, fs) # very inexact
    def drawPoint(self, x, y):
        """Draw a single point at (x, y) (bottom-left coordinates)."""
        self.painter.drawPoint(x, self.h-y-1)
        # if self.lineWidth == Gfx.THIN:
        # self.dc.DrawPoint(x, self.h-y-1)
        # else:
        # self.dc.DrawLine(x, self.h-y-1, x, self.h-y-1)
    def drawLine(self, x1, y1, x2, y2):
        """Draw a line from (x1, y1) to (x2, y2)."""
        self.painter.drawLine(x1, self.h-y1-1, x2, self.h-y2-1)
    def drawRect(self, x, y, w, h):
        """Draw the outline of a w x h rectangle with bottom-left corner (x, y)."""
        self.painter.drawRect(x, self.h-y-h, w-1 ,h-1)
    def drawPoly(self, array):
        """Draw an open polyline through the (x, y) pairs in 'array'."""
        if array:
            points = [QPoint(p[0],self.h-p[1]-1) for p in array]
            if QT3:
                # Qt3 has no QPolygon; build a QPointArray point by point.
                pointArray = qt.QPointArray(len(points))
                for i in range(len(points)):
                    pointArray.setPoint(i, points[i])
                self.painter.drawPolygon(pointArray)
            else:
                self.painter.drawPolyline(qt.QPolygon(points))
    def drawCircle(self, x, y, r):
        """Draw the outline of a circle centered at (x, y) with radius r."""
        self.painter.drawEllipse(x-r, self.h-y-1-r, 2*r, 2*r)
    def fillRect(self, x, y, w, h):
        """Fill a w x h rectangle with bottom-left corner (x, y)."""
        self.painter.fillRect(x, self.h-y-h, w, h, self.brush)
    def fillPoly(self, array):
        """Fill a closed polygon through the (x, y) pairs in 'array'."""
        if array:
            points = [QPoint(p[0],self.h-p[1]-1) for p in array]
            # Temporarily install the brush and disable the pen, then restore.
            self.painter.setBrush(self.brush); self.painter.setPen(Qt.NoPen)
            if QT3:
                pointArray = qt.QPointArray(len(points))
                for i in range(len(points)):
                    pointArray.setPoint(i, points[i])
                self.painter.drawPolygon(pointArray)
            else:
                self.painter.drawPolygon(qt.QPolygon(points))
            self.painter.setPen(self.pen); self.painter.setBrush(Qt.NoBrush)
    def fillCircle(self, x, y, r):
        """Fill a circle centered at (x, y) with radius r."""
        self.painter.setBrush(self.brush); self.painter.setPen(Qt.NoPen)
        self.painter.drawEllipse(x-r, self.h-y-1-r, 2*r, 2*r)
        self.painter.setPen(self.pen); self.painter.setBrush(Qt.NoBrush)
    def writeStr(self, x, y, strg, rotationAngle=0.0):
        """Write string 'strg' at (x, y), optionally rotated by
        'rotationAngle' degrees (counter-clockwise, presumably -- the
        angle is inverted before being passed to Qt)."""
        h = self.getTextSize(strg)[1]
        if rotationAngle == 0.0:
            self.painter.drawText(x, self.h-y-h/4, strg)
        else:
            rotationAngle = 360.0-rotationAngle
            cx = x
            cy = self.h-y
            # Rotate the coordinate system around the text anchor point.
            self.painter.translate(cx, cy)
            self.painter.rotate(rotationAngle)
            self.painter.translate(-cx, -cy)
            self.painter.drawText(x, self.h-y-h/4, strg)
            if QT3:
                self.painter.resetXForm()
            else:
                self.painter.resetTransform()
########################################################################
#
# class Window
#
########################################################################
class Window(Driver, Gfx.Window):
    """Stand-alone top-level window that renders the Driver's pixmap
    through a QLabel and runs its own QApplication if none is supplied."""
    def __init__(self, size=(640, 480), title="qt.Graph", app=None):
        Gfx.Window.__init__(self, size, title)
        # Reuse an existing QApplication when the caller provides one.
        if app != None:
            self.app = app
        else:
            self.app = QApplication(sys.argv)
        # Off-screen pixmap that all drawing goes to; refresh() blits it.
        self.pixmap = qt.QPixmap(size[0], size[1])
        self.pixmap.fill(qt.QColor(255,255,255))
        self.win = QLabel("", None)
        self.win.setPixmap(self.pixmap)
        self.win.show()
        #self.win.setMinimumSize(size[0], size[1])
        #self.win.setMaximum(size[0], size[1])
        self.win.resize(size[0], size[1])
        # End the painter when the last window closes (API differs per Qt version).
        if QT5:
            #self.lastClosedSignal = SIGNAL("lastWindowClosed()")
            self.app.lastWindowClosed.connect(self._qtEnd)
        else:
            QObject.connect(self.app, SIGNAL("lastWindowClosed()"), self._qtEnd)
        Driver.__init__(self, self.pixmap)
    def refresh(self):
        """Copy the off-screen pixmap into the visible window."""
        self.win.setPixmap(self.pixmap)
        self.win.update()
    def quit(self):
        """Stop painting, close the window and quit the application."""
        self._qtEnd()
        self.win.close()
        self.win = None
        self.app.quit()
    def waitUntilClosed(self):
        """Refresh once and block in the Qt event loop until the window closes."""
        self.refresh()
        if QT3:
            self.app.exec_loop()
        else:
            self.app.exec_()
########################################################################
#
# Test
#
########################################################################
if __name__ == "__main__":
    # Manual smoke test: run the project's system test for this driver.
    import systemTest
    systemTest.Test_qtGfx()
| mit | 7,680,707,670,072,934,000 | 32.883333 | 82 | 0.546188 | false |
rwrobe/learning_journal | setup.py | 1 | 1340 | import os
from setuptools import setup, find_packages
# Directory containing this setup.py; used to locate the text files below.
here = os.path.abspath(os.path.dirname(__file__))
# The long description shown on PyPI is README + changelog.
with open(os.path.join(here, 'README.txt')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
    CHANGES = f.read()
# Runtime dependencies of the learning_journal Pyramid application.
requires = [
    'pyramid',
    'pyramid_chameleon',
    'pyramid_jinja2',
    'pyramid_debugtoolbar',
    'pyramid_tm',
    'SQLAlchemy',
    'transaction',
    'zope.sqlalchemy',
    'waitress',
    'wtforms',
    'cryptacular',
    'markdown',
    'pygments',
    ]
setup(name='learning_journal',
      version='0.0',
      description='learning_journal',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
        "Programming Language :: Python",
        "Framework :: Pyramid",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
        ],
      author='',
      author_email='',
      url='',
      keywords='web wsgi bfg pylons pyramid',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='learning_journal',
      install_requires=requires,
      # WSGI entry point plus a console script that initializes the DB.
      entry_points="""\
      [paste.app_factory]
      main = learning_journal:main
      [console_scripts]
      initialize_learning_journal_db = learning_journal.scripts.initializedb:main
      """,
      )
| mit | -2,457,594,420,203,618,300 | 24.769231 | 81 | 0.59403 | false |
AlanProject/day08 | MyFTP_Client/modules/main.py | 1 | 5250 | #-*- coding:utf-8 -*-
#/usr/bin/env python
import sys,os
import pickle
import socket
class ClientArgv(object):
    """Simple FTP-like client (Python 2).

    Parses '-s host -p port' from 'argvs', connects, authenticates
    (pickle-serialized credentials) and enters an interactive command
    loop dispatching get/put/ls/rm/cd/exit by method name.
    """
    def __init__(self,argvs):
        self.argvs = argvs
        self.argvs_parser()
        self.handle()
    def handle(self):
        """Connect, print the banner and start the command loop if login succeeds."""
        self.connect()
        # Receive and print the server's welcome banner.
        server_data = self.client_socket.recv(1024)
        print server_data
        if self.auther():
            self.comm_argv()
    # Parse the command line arguments (-s host, -p port).
    def argvs_parser(self):
        argv_list = ['-s','-p']
        if len(self.argvs) < 5:
            self.help()
            sys.exit()
        for i in argv_list:
            if i not in self.argvs:
                sys.exit('Argv is not found please try again !!!')
        try:
            self.host = self.argvs[self.argvs.index('-s')+1]
            self.port = int(self.argvs[self.argvs.index('-p')+1])
        except (ValueError,IndexError) as e:
            self.help()
            sys.exit()
    # Print command-line usage.
    def help(self):
        print '''
        MyFTP Client command argv
        -s :Server Host Address IP or Domain
        -p :Server Port
        '''
    # Print the interactive command reference.
    def comm_help(self):
        print '''
        get [file] :Download file
        put [file] :Upload file
        cd [path] :change dir path
        rm [path] :delete file
        exit :exit Ftp system
        '''
    # Connect to the server socket.
    def connect(self):
        try:
            self.client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            self.client_socket.connect((self.host,self.port))
        except socket.error as e:
            # NOTE(review): message typo -- 'filed' should read 'failed'.
            sys.exit('connect server filed')
    # User authentication: up to three attempts, then exit.
    def auther(self):
        auther_count = 0
        while auther_count < 3:
            user_name = raw_input('Please input username:')
            if len(user_name) == 0:continue
            user_pass = raw_input('Please input passwd:')
            if len(user_pass) == 0:continue
            # Credentials are pickled; server replies '200' on success.
            data = pickle.dumps({'user_name':user_name,'user_pass':user_pass})
            self.client_socket.send(data)
            server_data = self.client_socket.recv(1024)
            if server_data == '200':
                return True
            else:
                # NOTE(review): message typo -- 'agin' should read 'again'.
                print '%s user name or password error please try agin'%server_data
                auther_count += 1
        else:
            sys.exit('User or Passwd too many mistakes')
    # Command dispatch loop: reads a line and calls the matching method.
    def comm_argv(self):
        while True:
            self.command = raw_input('>>>')
            if len(self.command.split()) == 0:continue
            if hasattr(self,self.command.split()[0]):
                func = getattr(self,self.command.split()[0])
                func()
            else:
                self.comm_help()
    # Download a file ('get <file>'); lock-step recv/send protocol.
    def get(self):
        comm_list = self.command.split()
        if len(comm_list) < 2:
            self.comm_help()
            sys.exit()
        self.client_socket.send(self.command)
        status_coding = self.client_socket.recv(1024)
        if status_coding == '203':
            print 'file is not found'
        else:
            # Handshake: 'start' -> size -> 'ok' -> raw file bytes.
            self.client_socket.send('start')
            file_size = int(self.client_socket.recv(1024))
            self.client_socket.send('ok')
            file_data = 0
            with open(comm_list[1],'wb') as file_write:
                while file_data != file_size:
                    data = self.client_socket.recv(2048)
                    file_write.write(data)
                    file_data += len(data)
                print '%s Transfer ok'%comm_list[1]
            self.client_socket.send('ok')
    # Upload a file ('put <file>').
    def put(self):
        comm_list = self.command.split()
        if len(comm_list) < 2:
            self.comm_help()
            sys.exit()
        # Send the command line.
        self.client_socket.send(self.command)
        # Wait for the server to acknowledge the command.
        self.client_socket.recv(1024)
        if not os.path.isfile(comm_list[1]):
            print 'File is not found'
        else:
            file_size = str(os.path.getsize(comm_list[1]))
            self.client_socket.send(file_size)
            self.client_socket.recv(100)
            file_data = 0
            with open(comm_list[1],'rb') as file_read:
                while file_data != int(file_size):
                    data = file_read.read(2048)
                    file_data += len(data)
                    self.client_socket.sendall(data)
            self.client_socket.recv(1024)
    # List the remote directory: server sends a count, then one name per 'ok'.
    def ls(self):
        self.client_socket.send(self.command)
        file_number = int(self.client_socket.recv(1024))
        self.client_socket.send('OK')
        for i in range(file_number):
            self.client_socket.send('ok')
            file_name = self.client_socket.recv(1024)
            print file_name
    def rm(self):
        """Delete a remote file and print the server's reply."""
        self.client_socket.send(self.command)
        rm_data = self.client_socket.recv(1024)
        print rm_data
    # Change the remote working directory.
    def cd(self):
        comm_list = self.command.split()
        if len(comm_list) < 2:
            self.comm_help()
            sys.exit()
        # NOTE(review): the command is never sent to the server here -- confirm
        # whether this is intentionally unimplemented.
    # Exit the FTP client.
    def exit(self):
        sys.exit('Exiting')
| apache-2.0 | 6,361,247,776,647,331,000 | 33 | 82 | 0.516471 | false |
ctu-yfsg/2015-a-grass-reclass | reclassify/Layout/PreviewPanel.py | 1 | 2207 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Subclass of wx.Panel"""
#-----------------------------------------------------------------------------------------
#Import
try:
#wxPython
import wx
#python std library
import sys
#GRASS modules and packages
from modules.colorrules import BufferedWindow
from core.render import Map
#our modules and packages
except ImportError as err:
print(u"ImportError: {}".format(err))
sys.exit("-1")
#-----------------------------------------------------------------------------------------
class PreviewPanel(wx.Panel):
    """
    Subclass of wx.Panel.
    Represents center part of the window.
    Contains preview of the reclassified mapset.
    """
    def __init__(self, parent, id):
        # NOTE(review): parameter 'id' shadows the builtin; kept for API
        # compatibility with callers.
        wx.Panel.__init__(self, parent, id)
        #PREVIEW
        self.__buildPreviewPanel()
        #LAYOUT
        self.__layout()
        #self.previewPanel.Hide() #hidden when no preview active
    #-----------------------------------------------------------------------------------------
    def __buildPreviewPanel(self):
        """
        Creates preview panel.
        Sets up a 400x300 GRASS render Map and a BufferedWindow
        showing it, initially erased.
        :return: void
        """
        #Output preview
        self.map = Map()
        self.width = self.map.width = 400
        self.height = self.map.height = 300
        self.map.geom = self.width, self.height
        self.preview = BufferedWindow(parent=self,
                                      id=wx.NewId(),
                                      size = (400, 300),
                                      Map=self.map)
        self.preview.EraseMap()
    #-----------------------------------------------------------------------------------------
    def __layout(self):
        """
        Specifies final layout for PreviewPanel.
        Centers the preview inside a "Preview" static box sizer.
        :return: void
        """
        sBox = wx.StaticBox(self, wx.NewId(), "Preview")
        vBox = wx.StaticBoxSizer(sBox, wx.VERTICAL)
        vBox.AddStretchSpacer()
        vBox.Add(self.preview, 0, wx.CENTER)
        vBox.AddStretchSpacer()
        self.SetSizer(vBox)
#-----------------------------------------------------------------------------------------
if __name__ == "__main__":
    # GUI component module; nothing to do when executed directly.
    pass
fabricematrat/py-macaroon-bakery | macaroonbakery/identity.py | 1 | 4152 | # Copyright 2017 Canonical Ltd.
# Licensed under the LGPLv3, see LICENCE file for details.
import abc
import macaroonbakery as bakery
class Identity(object):
    ''' Holds identity information declared in a first party caveat added when
    discharging a third party caveat.
    '''
    # NOTE(review): '__metaclass__' is the Python 2 spelling; under Python 3
    # it has no effect, so abstractness is not enforced there.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def id(self):
        ''' Returns the id of the user.
        May be an opaque blob with no human meaning. An id is only considered
        to be unique with a given domain.
        :return string
        '''
        raise NotImplementedError('id method must be defined in subclass')

    @abc.abstractmethod
    def domain(self):
        '''Return the domain of the user.
        This will be empty if the user was authenticated
        directly with the identity provider.
        :return string
        '''
        raise NotImplementedError('domain method must be defined in subclass')
class ACLIdentity(Identity):
    ''' ACLIdentity may be implemented by Identity implementations
    to report group membership information.
    '''
    # NOTE(review): '__metaclass__' is the Python 2 spelling; under Python 3
    # it has no effect (the base class contract is documentation-only there).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def allow(self, ctx, acls):
        ''' reports whether the user should be allowed to access
        any of the users or groups in the given acl list.
        :param ctx(AuthContext) is the context of the authorization request.
        :param acls array of string acl
        :return boolean
        '''
        raise NotImplementedError('allow method must be defined in subclass')
class SimpleIdentity(ACLIdentity):
    '''Identity of a user represented by a plain user-name string.'''

    def __init__(self, user):
        '''Remember the user name that constitutes this identity.'''
        self._identity = user

    def domain(self):
        '''Return the empty string: a simple identity carries no domain.'''
        return ''

    def id(self):
        '''Return the user name, which doubles as the id.'''
        return self._identity

    def allow(self, ctx, acls):
        '''Report whether the user name appears verbatim in acls.

        A user u is considered a member of group u and of no other group.
        '''
        return any(entry == self._identity for entry in acls)
class IdentityClient(object):
    ''' Represents an abstract identity manager. User identities can be based
    on local information (for example HTTP basic auth) or by reference to an
    external trusted third party (an identity manager).
    '''
    # NOTE(review): '__metaclass__' is the Python 2 spelling; it has no
    # effect under Python 3.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def identity_from_context(self, ctx):
        ''' Returns the identity based on information in the context.
        If it cannot determine the identity based on the context, then it
        should return a set of caveats containing a third party caveat that,
        when discharged, can be used to obtain the identity with
        declared_identity.
        It should only raise an error if it cannot check the identity
        (for example because of a database access error) - it's
        OK to return all zero values when there's
        no identity found and no third party to address caveats to.
        @param ctx an AuthContext
        :return: an Identity and array of caveats
        '''
        raise NotImplementedError('identity_from_context method must be '
                                  'defined in subclass')

    @abc.abstractmethod
    def declared_identity(self, ctx, declared):
        '''Parses the identity declaration from the given declared attributes.
        TODO take the set of first party caveat conditions instead?
        @param ctx (AuthContext)
        @param declared (dict of string/string)
        :return: an Identity
        '''
        raise NotImplementedError('declared_identity method must be '
                                  'defined in subclass')
class NoIdentities(IdentityClient):
    ''' Defines the null identity provider - it never returns any identities.
    '''
    def identity_from_context(self, ctx):
        # No identity can ever be determined and no caveats are issued.
        return None, None

    def declared_identity(self, ctx, declared):
        # Declaring an identity is impossible for the null provider.
        raise bakery.IdentityError('no identity declared or possible')
| lgpl-3.0 | -1,514,180,123,860,796,000 | 31.952381 | 78 | 0.646435 | false |
destijl/grr | grr/lib/rdfvalues/filestore_test.py | 1 | 2451 | #!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
"""These are tests for the file store-related RDFValues implementations."""
from grr.lib.aff4_objects import filestore
from grr.lib.rdfvalues import test_base
class FileStoreHashTest(test_base.RDFValueTestCase):
  """Test the FileStoreHash implementation."""

  rdfvalue_class = filestore.FileStoreHash

  def CheckRDFValue(self, value, sample):
    """Check that the rdfproto is the same as the sample."""
    super(FileStoreHashTest, self).CheckRDFValue(value, sample)
    # Beyond the base checks, the three hash components must match.
    self.assertEqual(value.fingerprint_type, sample.fingerprint_type)
    self.assertEqual(value.hash_type, sample.hash_type)
    self.assertEqual(value.hash_value, sample.hash_value)

  def GenerateSample(self, number=0):
    """Make a sample FileStoreHash instance."""
    # 'number' becomes the final hex digit, giving distinct samples.
    return filestore.FileStoreHash("aff4:/files/hash/pecoff/sha1/"
                                   "eb875812858d27b22cb2b75f992dffadc1b05c6%d" %
                                   number)

  def testHashIsInferredCorrectlyFromTheURN(self):
    """Test we can initialized a hash from the HashFileStore urn."""
    sample = self.GenerateSample()
    self.assertEqual(sample.fingerprint_type, "pecoff")
    self.assertEqual(sample.hash_type, "sha1")
    self.assertEqual(sample.hash_value,
                     "eb875812858d27b22cb2b75f992dffadc1b05c60")

  def testHashIsInitializedFromConstructorArguments(self):
    """Test that we can construct FileStoreHash from keyword arguments."""
    sample = filestore.FileStoreHash(
        fingerprint_type="pecoff",
        hash_type="sha1",
        hash_value="eb875812858d27b22cb2b75f992dffadc1b05c60")
    self.assertEqual(sample, self.GenerateSample())

  def testInitialization(self):
    """Malformed URNs must raise ValueError."""
    # Invalid URN prefix
    self.assertRaises(ValueError, filestore.FileStoreHash,
                      "aff4:/sha1/eb875812858d27b22cb2b75f992dffadc1b05c66")
    # Invalid fingerprint type
    self.assertRaises(
        ValueError, filestore.FileStoreHash,
        "aff4:/files/hash/_/sha1/eb875812858d27b22cb2b75f992dffadc1b05c66")
    # Invalid hash type
    self.assertRaises(
        ValueError, filestore.FileStoreHash,
        "aff4:/files/hash/pecoff/_/eb875812858d27b22cb2b75f992dffadc1b05c66")
    # Additional path components
    self.assertRaises(
        ValueError, filestore.FileStoreHash,
        "aff4:/files/hash/pecoff/sha1/eb875812858d27b22cb2b75f992dffadc1b05c66/"
        "_")
TieWei/nova | nova/tests/virt/vmwareapi/stubs.py | 1 | 2197 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Stubouts for the test suite
"""
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import network_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import vmware_images
def fake_get_vim_object(arg):
    """Stubs out the VMwareAPISession's get_vim_object method."""
    # 'arg' (the session instance) is unused by the stub.
    return fake.FakeVim()
def fake_is_vim_object(arg, module):
    """Stubs out the VMwareAPISession's is_vim_object method."""
    # True when 'module' is the fake Vim class; 'arg' is unused.
    return isinstance(module, fake.FakeVim)
def fake_temp_method_exception():
    """Raise a VimFaultException simulating an unauthenticated session."""
    raise error_util.VimFaultException(
        [error_util.FAULT_NOT_AUTHENTICATED],
        "Session Empty/Not Authenticated")
def fake_temp_session_exception():
    """Raise a SessionConnectionException simulating a connection failure."""
    raise error_util.SessionConnectionException([],
                                                "Session Exception")
def set_stubs(stubs):
    """Set the stubs."""
    # Replace network, image and session plumbing with in-memory fakes so
    # the driver can be exercised without a real vCenter/ESX endpoint.
    stubs.Set(vmops.VMwareVMOps, 'plug_vifs', fake.fake_plug_vifs)
    stubs.Set(network_util, 'get_network_with_the_name',
              fake.fake_get_network)
    stubs.Set(vmware_images, 'fetch_image', fake.fake_fetch_image)
    stubs.Set(vmware_images, 'get_vmdk_size_and_properties',
              fake.fake_get_vmdk_size_and_properties)
    stubs.Set(vmware_images, 'upload_image', fake.fake_upload_image)
    stubs.Set(driver.VMwareAPISession, "_get_vim_object",
              fake_get_vim_object)
    stubs.Set(driver.VMwareAPISession, "_is_vim_object",
              fake_is_vim_object)
saaros/pghoard | pghoard/receivexlog.py | 1 | 2656 | """
pghoard - pg_receivexlog handler
Copyright (c) 2016 Ohmu Ltd
See LICENSE for details
"""
import datetime
import logging
import select
import subprocess
import time
from .common import set_subprocess_stdout_and_stderr_nonblocking, terminate_subprocess
from .pgutil import get_connection_info
from threading import Thread
class PGReceiveXLog(Thread):
    """Thread that runs pg_receivexlog to stream WAL into 'xlog_location'."""
    def __init__(self, config, connection_string, xlog_location, slot, pg_version_server):
        super().__init__()
        self.log = logging.getLogger("PGReceiveXLog")
        self.config = config
        self.connection_string = connection_string
        self.xlog_location = xlog_location
        self.slot = slot
        # Server version as an integer, e.g. 90400 for 9.4.
        self.pg_version_server = pg_version_server
        self.pid = None
        self.running = False
        # Updated whenever the subprocess produces output; lets callers
        # detect a stalled stream.
        self.latest_activity = datetime.datetime.utcnow()
        self.log.debug("Initialized PGReceiveXLog")

    def run(self):
        """Spawn pg_receivexlog and relay its output until stopped."""
        self.running = True
        command = [
            self.config["pg_receivexlog_path"],
            "--status-interval", "1",
            "--verbose",
            "--directory", self.xlog_location,
        ]
        # pg_receivexlog < 9.3 does not accept --dbname; pass user/port/host
        # individually instead. Replication slots require >= 9.4.
        if self.pg_version_server < 90300:
            conn_info = get_connection_info(self.connection_string)
            if "user" in conn_info:
                command.extend(["--user", conn_info["user"]])
            if "port" in conn_info:
                command.extend(["--port", conn_info["port"]])
            if "host" in conn_info:
                command.extend(["--host", conn_info["host"]])
        else:
            command.extend(["--dbname", self.connection_string])
        if self.pg_version_server >= 90400 and self.slot:
            command.extend(["--slot", self.slot])

        self.log.debug("Starting to run: %r", command)
        start_time = time.time()
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        set_subprocess_stdout_and_stderr_nonblocking(proc)
        self.pid = proc.pid
        self.log.info("Started: %r, running as PID: %r", command, self.pid)
        # Poll the child's stdout/stderr, logging output and tracking
        # activity, until the child exits or running is cleared externally.
        while self.running:
            rlist, _, _ = select.select([proc.stdout, proc.stderr], [], [], 1.0)
            for fd in rlist:
                content = fd.read()
                if content:
                    self.log.debug(content)
                    self.latest_activity = datetime.datetime.utcnow()
            if proc.poll() is not None:
                break
        rc = terminate_subprocess(proc, log=self.log)
        self.log.debug("Ran: %r, took: %.3fs to run, returncode: %r",
                       command, time.time() - start_time, rc)
        self.running = False
tinloaf/home-assistant | tests/helpers/test_template.py | 1 | 38585 | """Test Home Assistant template helper methods."""
import asyncio
from datetime import datetime
import unittest
import random
import math
from unittest.mock import patch
from homeassistant.components import group
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
from homeassistant.util.unit_system import UnitSystem
from homeassistant.const import (
LENGTH_METERS,
TEMP_CELSIUS,
MASS_GRAMS,
VOLUME_LITERS,
MATCH_ALL,
)
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
import pytest
class TestHelpersTemplate(unittest.TestCase):
"""Test the Template."""
# pylint: disable=invalid-name
def setUp(self):
"""Set up the tests."""
self.hass = get_test_home_assistant()
self.hass.config.units = UnitSystem('custom', TEMP_CELSIUS,
LENGTH_METERS, VOLUME_LITERS,
MASS_GRAMS)
# pylint: disable=invalid-name
def tearDown(self):
"""Stop down stuff we started."""
self.hass.stop()
def test_referring_states_by_entity_id(self):
"""Test referring states by entity id."""
self.hass.states.set('test.object', 'happy')
assert 'happy' == \
template.Template(
'{{ states.test.object.state }}', self.hass).render()
def test_iterating_all_states(self):
"""Test iterating all states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.temperature', 10)
assert '10happy' == \
template.Template(
'{% for state in states %}{{ state.state }}{% endfor %}',
self.hass).render()
def test_iterating_domain_states(self):
"""Test iterating domain states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.back_door', 'open')
self.hass.states.set('sensor.temperature', 10)
assert 'open10' == \
template.Template("""
{% for state in states.sensor %}{{ state.state }}{% endfor %}
""", self.hass).render()
def test_float(self):
"""Test float."""
self.hass.states.set('sensor.temperature', '12')
assert '12.0' == \
template.Template(
'{{ float(states.sensor.temperature.state) }}',
self.hass).render()
assert 'True' == \
template.Template(
'{{ float(states.sensor.temperature.state) > 11 }}',
self.hass).render()
def test_rounding_value(self):
"""Test rounding value."""
self.hass.states.set('sensor.temperature', 12.78)
assert '12.8' == \
template.Template(
'{{ states.sensor.temperature.state | round(1) }}',
self.hass).render()
assert '128' == \
template.Template(
'{{ states.sensor.temperature.state | multiply(10) | round }}',
self.hass).render()
def test_rounding_value_get_original_value_on_error(self):
"""Test rounding value get original value on error."""
assert 'None' == \
template.Template('{{ None | round }}', self.hass).render()
assert 'no_number' == \
template.Template(
'{{ "no_number" | round }}', self.hass).render()
def test_multiply(self):
"""Test multiply."""
tests = {
None: 'None',
10: '100',
'"abcd"': 'abcd'
}
for inp, out in tests.items():
assert out == \
template.Template('{{ %s | multiply(10) | round }}' % inp,
self.hass).render()
def test_logarithm(self):
"""Test logarithm."""
tests = [
(4, 2, '2.0'),
(1000, 10, '3.0'),
(math.e, '', '1.0'),
('"invalid"', '_', 'invalid'),
(10, '"invalid"', '10.0'),
]
for value, base, expected in tests:
assert expected == \
template.Template(
'{{ %s | log(%s) | round(1) }}' % (value, base),
self.hass).render()
assert expected == \
template.Template(
'{{ log(%s, %s) | round(1) }}' % (value, base),
self.hass).render()
def test_sine(self):
"""Test sine."""
tests = [
(0, '0.0'),
(math.pi / 2, '1.0'),
(math.pi, '0.0'),
(math.pi * 1.5, '-1.0'),
(math.pi / 10, '0.309')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sin | round(3) }}' % value,
self.hass).render()
def test_cos(self):
"""Test cosine."""
tests = [
(0, '1.0'),
(math.pi / 2, '0.0'),
(math.pi, '-1.0'),
(math.pi * 1.5, '-0.0'),
(math.pi / 10, '0.951')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | cos | round(3) }}' % value,
self.hass).render()
def test_tan(self):
"""Test tangent."""
tests = [
(0, '0.0'),
(math.pi, '-0.0'),
(math.pi / 180 * 45, '1.0'),
(math.pi / 180 * 90, '1.633123935319537e+16'),
(math.pi / 180 * 135, '-1.0')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | tan | round(3) }}' % value,
self.hass).render()
def test_sqrt(self):
"""Test square root."""
tests = [
(0, '0.0'),
(1, '1.0'),
(2, '1.414'),
(10, '3.162'),
(100, '10.0'),
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sqrt | round(3) }}' % value,
self.hass).render()
def test_strptime(self):
"""Test the parse timestamp method."""
tests = [
('2016-10-19 15:22:05.588122 UTC',
'%Y-%m-%d %H:%M:%S.%f %Z', None),
('2016-10-19 15:22:05.588122+0100',
'%Y-%m-%d %H:%M:%S.%f%z', None),
('2016-10-19 15:22:05.588122',
'%Y-%m-%d %H:%M:%S.%f', None),
('2016-10-19', '%Y-%m-%d', None),
('2016', '%Y', None),
('15:22:05', '%H:%M:%S', None),
('1469119144', '%Y', '1469119144'),
('invalid', '%Y', 'invalid')
]
for inp, fmt, expected in tests:
if expected is None:
expected = datetime.strptime(inp, fmt)
temp = '{{ strptime(\'%s\', \'%s\') }}' % (inp, fmt)
assert str(expected) == \
template.Template(temp, self.hass).render()
def test_timestamp_custom(self):
"""Test the timestamps to custom filter."""
now = dt_util.utcnow()
tests = [
(None, None, None, 'None'),
(1469119144, None, True, '2016-07-21 16:39:04'),
(1469119144, '%Y', True, '2016'),
(1469119144, 'invalid', True, 'invalid'),
(dt_util.as_timestamp(now), None, False,
now.strftime('%Y-%m-%d %H:%M:%S'))
]
for inp, fmt, local, out in tests:
if fmt:
fil = 'timestamp_custom(\'{}\')'.format(fmt)
elif fmt and local:
fil = 'timestamp_custom(\'{0}\', {1})'.format(fmt, local)
else:
fil = 'timestamp_custom'
assert out == template.Template(
'{{ %s | %s }}' % (inp, fil), self.hass).render()
    def test_timestamp_local(self):
        """Test the timestamps to local filter."""
        # NOTE(review): expected strings assume the test instance's timezone
        # is UTC -- confirm against the hass test fixture configuration.
        tests = {
            None: 'None',
            1469119144: '2016-07-21 16:39:04',
        }

        for inp, out in tests.items():
            assert out == \
                template.Template('{{ %s | timestamp_local }}' % inp,
                                  self.hass).render()

    def test_min(self):
        """Test the min filter."""
        assert '1' == \
            template.Template('{{ [1, 2, 3] | min }}',
                              self.hass).render()

    def test_max(self):
        """Test the max filter."""
        assert '3' == \
            template.Template('{{ [1, 2, 3] | max }}',
                              self.hass).render()
    def test_base64_encode(self):
        """Test the base64_encode filter."""
        # 'aG9tZWFzc2lzdGFudA==' is the base64 encoding of 'homeassistant'.
        self.assertEqual(
            'aG9tZWFzc2lzdGFudA==',
            template.Template('{{ "homeassistant" | base64_encode }}',
                              self.hass).render())

    def test_base64_decode(self):
        """Test the base64_decode filter."""
        self.assertEqual(
            'homeassistant',
            template.Template('{{ "aG9tZWFzc2lzdGFudA==" | base64_decode }}',
                              self.hass).render())

    def test_ordinal(self):
        """Test the ordinal filter."""
        # Integer -> English ordinal suffix ('st', 'nd', 'rd', 'th').
        tests = [
            (1, '1st'),
            (2, '2nd'),
            (3, '3rd'),
            (4, '4th'),
            (5, '5th'),
        ]

        for value, expected in tests:
            self.assertEqual(
                expected,
                template.Template(
                    '{{ %s | ordinal }}' % value,
                    self.hass).render())
    def test_timestamp_utc(self):
        """Test the timestamps to local filter."""
        now = dt_util.utcnow()
        tests = {
            None: 'None',
            1469119144: '2016-07-21 16:39:04',
            # Round-trip: a timestamp derived from `now` must render back
            # to the same second-resolution UTC string.
            dt_util.as_timestamp(now):
                now.strftime('%Y-%m-%d %H:%M:%S')
        }

        for inp, out in tests.items():
            assert out == \
                template.Template('{{ %s | timestamp_utc }}' % inp,
                                  self.hass).render()

    def test_as_timestamp(self):
        """Test the as_timestamp function."""
        # Unparsable strings and missing states both render as 'None'.
        assert "None" == \
            template.Template(
                '{{ as_timestamp("invalid") }}', self.hass).render()

        self.hass.mock = None
        assert "None" == \
            template.Template('{{ as_timestamp(states.mock) }}',
                              self.hass).render()

        # A timezone-aware strptime result converts to an epoch float.
        tpl = '{{ as_timestamp(strptime("2024-02-03T09:10:24+0000", ' \
            '"%Y-%m-%dT%H:%M:%S%z")) }}'
        assert "1706951424.0" == \
            template.Template(tpl, self.hass).render()
    @patch.object(random, 'choice')
    def test_random_every_time(self, test_choice):
        """Ensure the random filter runs every time, not just once."""
        # random.choice is patched, so changing return_value between the two
        # renders proves the filter is evaluated per-render, not cached.
        tpl = template.Template('{{ [1,2] | random }}', self.hass)
        test_choice.return_value = 'foo'
        assert 'foo' == tpl.render()
        test_choice.return_value = 'bar'
        assert 'bar' == tpl.render()

    def test_passing_vars_as_keywords(self):
        """Test passing variables as keywords."""
        assert '127' == \
            template.Template('{{ hello }}', self.hass).render(hello=127)

    def test_passing_vars_as_vars(self):
        """Test passing variables as a dict."""
        assert '127' == \
            template.Template('{{ hello }}', self.hass).render({'hello': 127})
    def test_passing_vars_as_list(self):
        """Test passing variables as list."""
        assert "['foo', 'bar']" == \
            template.render_complex(template.Template('{{ hello }}',
                                    self.hass), {'hello': ['foo', 'bar']})

    def test_passing_vars_as_list_element(self):
        """Test passing variables as list and indexing an element."""
        assert 'bar' == \
            template.render_complex(template.Template('{{ hello[1] }}',
                                    self.hass),
                                    {'hello': ['foo', 'bar']})

    def test_passing_vars_as_dict_element(self):
        """Test passing variables as dict and accessing an element."""
        assert 'bar' == \
            template.render_complex(template.Template('{{ hello.foo }}',
                                    self.hass),
                                    {'hello': {'foo': 'bar'}})

    def test_passing_vars_as_dict(self):
        """Test passing variables as dict."""
        assert "{'foo': 'bar'}" == \
            template.render_complex(template.Template('{{ hello }}',
                                    self.hass), {'hello': {'foo': 'bar'}})
    def test_render_with_possible_json_value_with_valid_json(self):
        """Render with possible JSON value with valid JSON."""
        # Valid JSON input is exposed to the template as `value_json`.
        tpl = template.Template('{{ value_json.hello }}', self.hass)
        assert 'world' == \
            tpl.render_with_possible_json_value('{"hello": "world"}')

    def test_render_with_possible_json_value_with_invalid_json(self):
        """Render with possible JSON value with invalid JSON."""
        # Invalid JSON leaves value_json undefined; rendering it gives ''.
        tpl = template.Template('{{ value_json }}', self.hass)
        assert '' == \
            tpl.render_with_possible_json_value('{ I AM NOT JSON }')

    def test_render_with_possible_json_value_with_template_error_value(self):
        """Render with possible JSON value with template error value."""
        # A template error falls back to the supplied error value ('-').
        tpl = template.Template('{{ non_existing.variable }}', self.hass)
        assert '-' == \
            tpl.render_with_possible_json_value('hello', '-')

    def test_render_with_possible_json_value_with_missing_json_value(self):
        """Render with possible JSON value with unknown JSON object."""
        tpl = template.Template('{{ value_json.goodbye }}', self.hass)
        assert '' == \
            tpl.render_with_possible_json_value('{"hello": "world"}')

    def test_render_with_possible_json_value_valid_with_is_defined(self):
        """Render with possible JSON value with known JSON object."""
        tpl = template.Template('{{ value_json.hello|is_defined }}', self.hass)
        assert 'world' == \
            tpl.render_with_possible_json_value('{"hello": "world"}')

    def test_render_with_possible_json_value_undefined_json(self):
        """Render with possible JSON value with unknown JSON object."""
        # With `is_defined` and no error value, the raw input is returned.
        tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass)
        assert '{"hello": "world"}' == \
            tpl.render_with_possible_json_value('{"hello": "world"}')

    def test_render_with_possible_json_value_undefined_json_error_value(self):
        """Render with possible JSON value with unknown JSON object."""
        tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass)
        assert '' == \
            tpl.render_with_possible_json_value('{"hello": "world"}', '')

    def test_raise_exception_on_error(self):
        """Test raising an exception on error."""
        # '{{ invalid_syntax' is deliberately unterminated.
        with pytest.raises(TemplateError):
            template.Template('{{ invalid_syntax').ensure_valid()
    def test_if_state_exists(self):
        """Test if state exists works."""
        self.hass.states.set('test.object', 'available')
        tpl = template.Template(
            '{% if states.test.object %}exists{% else %}not exists{% endif %}',
            self.hass)
        assert 'exists' == tpl.render()

    def test_is_state(self):
        """Test is_state method."""
        self.hass.states.set('test.object', 'available')
        tpl = template.Template("""
            {% if is_state("test.object", "available") %}yes{% else %}no{% endif %}
            """, self.hass)
        assert 'yes' == tpl.render()

        # Unknown entities compare as False rather than raising.
        tpl = template.Template("""
            {{ is_state("test.noobject", "available") }}
            """, self.hass)
        assert 'False' == tpl.render()

    def test_is_state_attr(self):
        """Test is_state_attr method."""
        self.hass.states.set('test.object', 'available', {'mode': 'on'})
        tpl = template.Template("""
            {% if is_state_attr("test.object", "mode", "on") %}yes{% else %}no{% endif %}
            """, self.hass)
        assert 'yes' == tpl.render()

        tpl = template.Template("""
            {{ is_state_attr("test.noobject", "mode", "on") }}
            """, self.hass)
        assert 'False' == tpl.render()

    def test_state_attr(self):
        """Test state_attr method."""
        self.hass.states.set('test.object', 'available', {'mode': 'on'})
        tpl = template.Template("""
            {% if state_attr("test.object", "mode") == "on" %}yes{% else %}no{% endif %}
            """, self.hass)
        assert 'yes' == tpl.render()

        # Missing entity yields None for any attribute lookup.
        tpl = template.Template("""
            {{ state_attr("test.noobject", "mode") == None }}
            """, self.hass)
        assert 'True' == tpl.render()

    def test_states_function(self):
        """Test using states as a function."""
        self.hass.states.set('test.object', 'available')
        tpl = template.Template('{{ states("test.object") }}', self.hass)
        assert 'available' == tpl.render()

        tpl2 = template.Template('{{ states("test.object2") }}', self.hass)
        assert 'unknown' == tpl2.render()
    @patch('homeassistant.helpers.template.TemplateEnvironment.'
           'is_safe_callable', return_value=True)
    def test_now(self, mock_is_safe):
        """Test now method."""
        # Freeze `now` via patch.dict so isoformat() can be compared exactly.
        now = dt_util.now()
        with patch.dict(template.ENV.globals, {'now': lambda: now}):
            assert now.isoformat() == \
                template.Template('{{ now().isoformat() }}',
                                  self.hass).render()

    @patch('homeassistant.helpers.template.TemplateEnvironment.'
           'is_safe_callable', return_value=True)
    def test_utcnow(self, mock_is_safe):
        """Test utcnow method."""
        now = dt_util.utcnow()
        with patch.dict(template.ENV.globals, {'utcnow': lambda: now}):
            assert now.isoformat() == \
                template.Template('{{ utcnow().isoformat() }}',
                                  self.hass).render()
    def test_regex_match(self):
        """Test regex_match method."""
        # regex_match is anchored at the start of the string (cf. the final
        # 'Another home...' case, which is False), unlike regex_search below.
        tpl = template.Template(r"""
            {{ '123-456-7890' | regex_match('(\\d{3})-(\\d{3})-(\\d{4})') }}
            """, self.hass)
        assert 'True' == tpl.render()

        # Second argument enables case-insensitive matching.
        tpl = template.Template("""
            {{ 'home assistant test' | regex_match('Home', True) }}
            """, self.hass)
        assert 'True' == tpl.render()

        tpl = template.Template("""
            {{ 'Another home assistant test' | regex_match('home') }}
            """, self.hass)
        assert 'False' == tpl.render()

    def test_regex_search(self):
        """Test regex_search method."""
        tpl = template.Template(r"""
            {{ '123-456-7890' | regex_search('(\\d{3})-(\\d{3})-(\\d{4})') }}
            """, self.hass)
        assert 'True' == tpl.render()

        tpl = template.Template("""
            {{ 'home assistant test' | regex_search('Home', True) }}
            """, self.hass)
        assert 'True' == tpl.render()

        # search matches anywhere in the string, hence True here.
        tpl = template.Template("""
            {{ 'Another home assistant test' | regex_search('home') }}
            """, self.hass)
        assert 'True' == tpl.render()

    def test_regex_replace(self):
        """Test regex_replace method."""
        # No replacement argument: the match is deleted (replaced with '').
        tpl = template.Template(r"""
            {{ 'Hello World' | regex_replace('(Hello\\s)',) }}
            """, self.hass)
        assert 'World' == tpl.render()

    def test_regex_findall_index(self):
        """Test regex_findall_index method."""
        tpl = template.Template("""
            {{ 'Flight from JFK to LHR' | regex_findall_index('([A-Z]{3})', 0) }}
            """, self.hass)
        assert 'JFK' == tpl.render()

        tpl = template.Template("""
            {{ 'Flight from JFK to LHR' | regex_findall_index('([A-Z]{3})', 1) }}
            """, self.hass)
        assert 'LHR' == tpl.render()
def test_bitwise_and(self):
"""Test bitwise_and method."""
tpl = template.Template("""
{{ 8 | bitwise_and(8) }}
""", self.hass)
assert str(8 & 8) == tpl.render()
tpl = template.Template("""
{{ 10 | bitwise_and(2) }}
""", self.hass)
assert str(10 & 2) == tpl.render()
tpl = template.Template("""
{{ 8 | bitwise_and(2) }}
""", self.hass)
assert str(8 & 2) == tpl.render()
def test_bitwise_or(self):
"""Test bitwise_or method."""
tpl = template.Template("""
{{ 8 | bitwise_or(8) }}
""", self.hass)
assert str(8 | 8) == tpl.render()
tpl = template.Template("""
{{ 10 | bitwise_or(2) }}
""", self.hass)
assert str(10 | 2) == tpl.render()
tpl = template.Template("""
{{ 8 | bitwise_or(2) }}
""", self.hass)
assert str(8 | 2) == tpl.render()
    def test_distance_function_with_1_state(self):
        """Test distance function with 1 state."""
        # 32.87336/-117.22943 is a fixed point that, per the expected values,
        # lies ~187 km from the default test home coordinates.
        self.hass.states.set('test.object', 'happy', {
            'latitude': 32.87336,
            'longitude': -117.22943,
        })

        tpl = template.Template('{{ distance(states.test.object) | round }}',
                                self.hass)
        assert '187' == tpl.render()

    def test_distance_function_with_2_states(self):
        """Test distance function with 2 states."""
        self.hass.states.set('test.object', 'happy', {
            'latitude': 32.87336,
            'longitude': -117.22943,
        })
        self.hass.states.set('test.object_2', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        tpl = template.Template(
            '{{ distance(states.test.object, states.test.object_2) | round }}',
            self.hass)
        assert '187' == tpl.render()

    def test_distance_function_with_1_coord(self):
        """Test distance function with 1 coord."""
        tpl = template.Template(
            '{{ distance("32.87336", "-117.22943") | round }}', self.hass)
        assert '187' == \
            tpl.render()

    def test_distance_function_with_2_coords(self):
        """Test distance function with 2 coords."""
        assert '187' == \
            template.Template(
                '{{ distance("32.87336", "-117.22943", %s, %s) | round }}'
                % (self.hass.config.latitude, self.hass.config.longitude),
                self.hass).render()

    def test_distance_function_with_1_state_1_coord(self):
        """Test distance function with 1 state 1 coord."""
        self.hass.states.set('test.object_2', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        # Argument order should not matter: coord-first and state-first
        # must both render the same distance.
        tpl = template.Template(
            '{{ distance("32.87336", "-117.22943", states.test.object_2) '
            '| round }}', self.hass)
        assert '187' == tpl.render()

        tpl2 = template.Template(
            '{{ distance(states.test.object_2, "32.87336", "-117.22943") '
            '| round }}', self.hass)
        assert '187' == tpl2.render()
    def test_distance_function_return_None_if_invalid_state(self):
        """Test distance function return None if invalid state."""
        # Entity has latitude but no longitude -> not a valid location.
        self.hass.states.set('test.object_2', 'happy', {
            'latitude': 10,
        })

        tpl = template.Template('{{ distance(states.test.object_2) | round }}',
                                self.hass)
        assert 'None' == \
            tpl.render()

    def test_distance_function_return_None_if_invalid_coord(self):
        """Test distance function return None if invalid coord."""
        assert 'None' == \
            template.Template(
                '{{ distance("123", "abc") }}', self.hass).render()

        assert 'None' == \
            template.Template('{{ distance("123") }}', self.hass).render()

        self.hass.states.set('test.object_2', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })
        tpl = template.Template('{{ distance("123", states.test_object_2) }}',
                                self.hass)
        assert 'None' == \
            tpl.render()

    def test_distance_function_with_2_entity_ids(self):
        """Test distance function with 2 entity ids."""
        self.hass.states.set('test.object', 'happy', {
            'latitude': 32.87336,
            'longitude': -117.22943,
        })
        self.hass.states.set('test.object_2', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        # Entity ids passed as plain strings are resolved to their states.
        tpl = template.Template(
            '{{ distance("test.object", "test.object_2") | round }}',
            self.hass)
        assert '187' == tpl.render()

    def test_distance_function_with_1_entity_1_coord(self):
        """Test distance function with 1 entity_id and 1 coord."""
        self.hass.states.set('test.object', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        tpl = template.Template(
            '{{ distance("test.object", "32.87336", "-117.22943") | round }}',
            self.hass)
        assert '187' == tpl.render()
    def test_closest_function_home_vs_domain(self):
        """Test closest function home vs domain."""
        # Restricting to states.test_domain must exclude the physically
        # closer entity from another domain.
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('not_test_domain.but_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        assert 'test_domain.object' == \
            template.Template('{{ closest(states.test_domain).entity_id }}',
                              self.hass).render()

    def test_closest_function_home_vs_all_states(self):
        """Test closest function home vs all states."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('test_domain_2.and_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        assert 'test_domain_2.and_closer' == \
            template.Template('{{ closest(states).entity_id }}',
                              self.hass).render()

    def test_closest_function_home_vs_group_entity_id(self):
        """Test closest function home vs group entity id."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('not_in_group.but_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        group.Group.create_group(
            self.hass, 'location group', ['test_domain.object'])

        # Only group members are candidates, even if others are closer.
        assert 'test_domain.object' == \
            template.Template(
                '{{ closest("group.location_group").entity_id }}',
                self.hass).render()

    def test_closest_function_home_vs_group_state(self):
        """Test closest function home vs group state."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('not_in_group.but_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })

        group.Group.create_group(
            self.hass, 'location group', ['test_domain.object'])

        assert 'test_domain.object' == \
            template.Template(
                '{{ closest(states.group.location_group).entity_id }}',
                self.hass).render()
    def test_closest_function_to_coord(self):
        """Test closest function to coord."""
        # closest_zone is nearer to the +0.3/+0.3 reference point than
        # closest_home, so it must win when measuring from that coord.
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })

        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })

        tpl = template.Template(
            '{{ closest("%s", %s, states.test_domain).entity_id }}'
            % (self.hass.config.latitude + 0.3,
               self.hass.config.longitude + 0.3), self.hass)

        assert 'test_domain.closest_zone' == \
            tpl.render()

    def test_closest_function_to_entity_id(self):
        """Test closest function to entity id."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })

        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })

        assert 'test_domain.closest_zone' == \
            template.Template(
                '{{ closest("zone.far_away", '
                'states.test_domain).entity_id }}', self.hass).render()

    def test_closest_function_to_state(self):
        """Test closest function to state."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })

        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })

        assert 'test_domain.closest_zone' == \
            template.Template(
                '{{ closest(states.zone.far_away, '
                'states.test_domain).entity_id }}', self.hass).render()
    def test_closest_function_invalid_state(self):
        """Test closest function invalid state."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        # Both a missing state object and a missing entity-id string must
        # render as 'None' rather than raising.
        for state in ('states.zone.non_existing', '"zone.non_existing"'):
            assert 'None' == \
                template.Template('{{ closest(%s, states) }}' % state,
                                  self.hass).render()

    def test_closest_function_state_with_invalid_location(self):
        """Test closest function state with invalid location."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': 'invalid latitude',
            'longitude': self.hass.config.longitude + 0.1,
        })

        assert 'None' == \
            template.Template(
                '{{ closest(states.test_domain.closest_home, '
                'states) }}', self.hass).render()

    def test_closest_function_invalid_coordinates(self):
        """Test closest function invalid coordinates."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })

        assert 'None' == \
            template.Template('{{ closest("invalid", "coord", states) }}',
                              self.hass).render()

    def test_closest_function_no_location_states(self):
        """Test closest function without location states."""
        # No entity has coordinates, so `closest` has no candidates at all.
        assert '' == \
            template.Template('{{ closest(states).entity_id }}',
                              self.hass).render()
    def test_extract_entities_none_exclude_stuff(self):
        """Test extract entities function with none or exclude stuff."""
        assert [] == template.extract_entities(None)

        assert [] == template.extract_entities("mdi:water")

        # Dynamic constructs that cannot be statically resolved fall back
        # to MATCH_ALL (listen to everything).
        assert MATCH_ALL == \
            template.extract_entities(
                '{{ closest(states.zone.far_away, '
                'states.test_domain).entity_id }}')

        assert MATCH_ALL == \
            template.extract_entities(
                '{{ distance("123", states.test_object_2) }}')

    def test_extract_entities_no_match_entities(self):
        """Test extract entities function with none entities stuff."""
        # The missing comma in "'%Y' True" is deliberate: an unparsable
        # template must resolve to MATCH_ALL.
        assert MATCH_ALL == \
            template.extract_entities(
                "{{ value_json.tst | timestamp_custom('%Y' True) }}")

        assert MATCH_ALL == \
            template.extract_entities("""
            {% for state in states.sensor %}
            {{ state.entity_id }}={{ state.state }},d
            {% endfor %}
            """)
    def test_extract_entities_match_entities(self):
        """Test extract entities function with entities stuff."""
        # Statically referenced entity ids must be extracted literally.
        assert ['device_tracker.phone_1'] == \
            template.extract_entities("""
            {% if is_state('device_tracker.phone_1', 'home') %}
            Ha, Hercules is home!
            {% else %}
            Hercules is at {{ states('device_tracker.phone_1') }}.
            {% endif %}
            """)

        assert ['binary_sensor.garage_door'] == \
            template.extract_entities("""
            {{ as_timestamp(states.binary_sensor.garage_door.last_changed) }}
            """)

        assert ['binary_sensor.garage_door'] == \
            template.extract_entities("""
            {{ states("binary_sensor.garage_door") }}
            """)

        assert ['device_tracker.phone_2'] == \
            template.extract_entities("""
            {{ is_state_attr('device_tracker.phone_2', 'battery', 40) }}
            """)

        assert sorted([
            'device_tracker.phone_1',
            'device_tracker.phone_2',
        ]) == \
            sorted(template.extract_entities("""
            {% if is_state('device_tracker.phone_1', 'home') %}
            Ha, Hercules is home!
            {% elif states.device_tracker.phone_2.attributes.battery < 40 %}
            Hercules you power goes done!.
            {% endif %}
            """))

        # Surrounding non-ASCII text must not break entity extraction.
        assert sorted([
            'sensor.pick_humidity',
            'sensor.pick_temperature',
        ]) == \
            sorted(template.extract_entities("""
            {{
            states.sensor.pick_temperature.state ~ „°C (“ ~
            states.sensor.pick_humidity.state ~ „ %“
            }}
            """))

        assert sorted([
            'sensor.luftfeuchtigkeit_mean',
            'input_number.luftfeuchtigkeit',
        ]) == \
            sorted(template.extract_entities(
                "{% if (states('sensor.luftfeuchtigkeit_mean') | int)"
                " > (states('input_number.luftfeuchtigkeit') | int +1.5)"
                " %}true{% endif %}"
            ))
    def test_extract_entities_with_variables(self):
        """Test extract entities function with variables and entities stuff."""
        assert ['input_boolean.switch'] == \
            template.extract_entities(
                "{{ is_state('input_boolean.switch', 'off') }}", {})

        assert ['trigger.entity_id'] == \
            template.extract_entities(
                "{{ is_state(trigger.entity_id, 'off') }}", {})

        # An unresolvable variable means we cannot know the entity: MATCH_ALL.
        assert MATCH_ALL == \
            template.extract_entities(
                "{{ is_state(data, 'off') }}", {})

        # The same template resolves once the variable is supplied.
        assert ['input_boolean.switch'] == \
            template.extract_entities(
                "{{ is_state(data, 'off') }}",
                {'data': 'input_boolean.switch'})

        assert ['input_boolean.switch'] == \
            template.extract_entities(
                "{{ is_state(trigger.entity_id, 'off') }}",
                {'trigger': {'entity_id': 'input_boolean.switch'}})

        # String concatenation of an entity id is not statically resolvable.
        assert MATCH_ALL == \
            template.extract_entities(
                "{{ is_state('media_player.' ~ where , 'playing') }}",
                {'where': 'livingroom'})
    def test_jinja_namespace(self):
        """Test Jinja's namespace command can be used."""
        # namespace() allows mutating a variable across template scopes;
        # the same compiled template must re-read state on each render.
        test_template = template.Template(
            (
                "{% set ns = namespace(a_key='') %}"
                "{% set ns.a_key = states.sensor.dummy.state %}"
                "{{ ns.a_key }}"
            ),
            self.hass
        )

        self.hass.states.set('sensor.dummy', 'a value')
        assert 'a value' == test_template.render()

        self.hass.states.set('sensor.dummy', 'another value')
        assert 'another value' == test_template.render()
# NOTE(review): generator-based coroutine tests; @asyncio.coroutine is
# deprecated in modern Python -- confirm the test runner still supports it
# before migrating to async def.
@asyncio.coroutine
def test_state_with_unit(hass):
    """Test the state_with_unit property helper."""
    hass.states.async_set('sensor.test', '23', {
        'unit_of_measurement': 'beers',
    })
    hass.states.async_set('sensor.test2', 'wow')

    tpl = template.Template(
        '{{ states.sensor.test.state_with_unit }}', hass)

    assert tpl.async_render() == '23 beers'

    # Entities without a unit render the bare state.
    tpl = template.Template(
        '{{ states.sensor.test2.state_with_unit }}', hass)

    assert tpl.async_render() == 'wow'

    tpl = template.Template(
        '{% for state in states %}{{ state.state_with_unit }} {% endfor %}',
        hass)

    assert tpl.async_render() == '23 beers wow'

    # Missing entities render as the empty string.
    tpl = template.Template('{{ states.sensor.non_existing.state_with_unit }}',
                            hass)

    assert tpl.async_render() == ''
@asyncio.coroutine
def test_length_of_states(hass):
    """Test fetching the length of states."""
    hass.states.async_set('sensor.test', '23')
    hass.states.async_set('sensor.test2', 'wow')
    hass.states.async_set('climate.test2', 'cooling')

    # `states` counts every entity; `states.<domain>` only that domain.
    tpl = template.Template('{{ states | length }}', hass)
    assert tpl.async_render() == '3'

    tpl = template.Template('{{ states.sensor | length }}', hass)
    assert tpl.async_render() == '2'
| apache-2.0 | 7,943,343,220,525,600,000 | 35.530303 | 79 | 0.517628 | false |
nsoranzo/tools-iuc | tools/metaphlan/formatoutput.py | 1 | 5572 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import re
from pathlib import Path
# One-letter MetaPhlAn rank prefix -> human-readable level name; the values
# double as the per-level output file names.
taxo_level = {
    'k': 'kingdom',
    'p': 'phylum',
    'c': 'class',
    'o': 'order',
    'f': 'family',
    'g': 'genus',
    's': 'species',
    't': 'strains'}


def split_levels(metaphlan_output_fp, out_dp, legacy_output):
    '''
    Split default MetaPhlAn output into a report for each taxonomic level

    :param metaphlan_output_fp: Path to default MetaPhlAn output
    :param out_dp: Path to output directory
    :param legacy_output: Boolean for legacy (two-column, no taxid) output

    Writes one tab-separated file per taxonomic level (named after the
    level) plus a combined 'all' file with one column pair per level.
    '''
    from contextlib import ExitStack

    levels_number = len(taxo_level)
    # ExitStack guarantees every handle is closed even if parsing fails
    # part-way (the original code leaked all nine handles on error).
    with ExitStack() as stack:
        abund_f = {
            level: stack.enter_context(open(out_dp / Path(name), 'w'))
            for level, name in taxo_level.items()}
        # Per-level headers: "<level>[\t<level>_id]\tabundance"
        for level, level_f in abund_f.items():
            level_f.write("%s\t" % taxo_level[level])
            if not legacy_output:
                level_f.write("%s_id\t" % taxo_level[level])
            level_f.write("abundance\n")

        metaphlan_output_f = stack.enter_context(
            open(metaphlan_output_fp, 'r'))
        all_level_f = stack.enter_context(open(out_dp / Path('all'), 'w'))
        # Header of the combined report: one name/id column pair per level.
        for level in ['k', 'p', 'c', 'o', 'f', 'g', 's', 't']:
            all_level_f.write("%s\t" % taxo_level[level])
            if not legacy_output:
                all_level_f.write("%s_id\t" % taxo_level[level])
        all_level_f.write("abundance\n")

        for line in metaphlan_output_f:
            # skip comment headers
            if line.startswith("#"):
                continue
            # rstrip('\n') instead of line[:-1]: the original chopped the
            # last abundance character when the file lacked a final newline
            split_line = line.rstrip('\n').split('\t')
            taxo_n = split_line[0].split('|')
            if legacy_output:
                abundance = split_line[1]
            else:
                taxo_id = split_line[1].split('|')
                abundance = split_line[2]
            # Combined report: taxon names (and ids) for every present level
            for i in range(len(taxo_n)):
                taxo = taxo_n[i].split('__')[1]
                taxo = taxo.replace("_", " ")
                all_level_f.write("%s\t" % taxo)
                if not legacy_output:
                    all_level_f.write("%s\t" % taxo_id[i])
            # pad missing deeper levels with empty columns
            for i in range(len(taxo_n), levels_number):
                all_level_f.write('\t')
            all_level_f.write("%s\n" % abundance)
            # Per-level report: only the most specific rank of this line
            last_taxo_level = taxo_n[-1].split('__')
            taxo = last_taxo_level[1].replace("_", " ")
            level = last_taxo_level[0]
            abund_f[level].write("%s\t" % taxo)
            if not legacy_output:
                abund_f[level].write("%s\t" % taxo_id[-1])
            abund_f[level].write("%s\n" % abundance)
def format_for_krona(metaphlan_output_fp, krona_out_fp):
    '''
    Reformat default MetaPhlAn output for Krona

    :param metaphlan_output_fp: Path to default MetaPhlAn output
    :param krona_out_fp: Path to output file for Krona

    Only species-level lines (containing "s__") are kept; each becomes
    "<abundance>\t<tab-separated lineage>" with rank prefixes removed and
    underscores turned into spaces.
    '''
    level_prefix = re.compile(r"\w__")
    bar = re.compile(r"\|")
    underscore = re.compile(r"_")
    with open(metaphlan_output_fp, 'r') as src:
        with open(krona_out_fp, 'w') as dst:
            for raw in src.readlines():
                if "s__" not in raw:
                    continue
                fields = raw.rstrip().split('\t')
                lineage = bar.sub('', fields[0])
                lineage = level_prefix.sub('\t', lineage)
                lineage = underscore.sub(' ', lineage)
                dst.write("%s\t%s\n" % (fields[-1], lineage))
if __name__ == '__main__':
    # Command-line entry point: one subcommand per formatting function.
    parser = argparse.ArgumentParser(description='Format MetaPhlAn output')
    subparsers = parser.add_subparsers(dest='function')

    # split_levels: one report file per taxonomic level plus a combined one
    split_levels_parser = subparsers.add_parser('split_levels', help='Split default MetaPhlAn into a report for each taxonomic level')
    split_levels_parser.add_argument('--metaphlan_output', help="Path to default MetaPhlAn output")
    split_levels_parser.add_argument('--outdir', help="Path to output directory")
    split_levels_parser.add_argument('--legacy-output', dest='legacy_output', action='store_true', help="Old MetaPhlAn2 two columns output")
    split_levels_parser.set_defaults(legacy_output=False)

    # format_for_krona: reformat species-level lines for Krona
    # NOTE(review): help text below looks copy-pasted from split_levels --
    # confirm the intended wording.
    format_for_krona_parser = subparsers.add_parser('format_for_krona', help='Split default MetaPhlAn into a report for each taxonomic level')
    format_for_krona_parser.add_argument('--metaphlan_output', help="Path to default MetaPhlAn output")
    format_for_krona_parser.add_argument('--krona_output', help="Path to Krona output directory")

    args = parser.parse_args()
    if args.function == 'split_levels':
        split_levels(
            Path(args.metaphlan_output),
            Path(args.outdir),
            args.legacy_output)
    elif args.function == 'format_for_krona':
        format_for_krona(
            Path(args.metaphlan_output),
            Path(args.krona_output))
| mit | -7,910,039,779,148,934,000 | 37.694444 | 142 | 0.542355 | false |
jjcf89/vt_legislation_bot | vt_legislation_bot.py | 1 | 1798 | #!/usr/bin/python2.7
import argparse
import urllib
from bs4 import BeautifulSoup
#TODO Parse arguments (URL is currently hard-coded to one bill)
URL="http://legislature.vermont.gov/bill/status/2016/H.159"  # bill status page to scrape
def fetch_url(url):
    """Download *url* and return the response body as a string."""
    # FancyURLopener follows redirects and handles some HTTP errors
    # leniently; kept for compatibility with the original behaviour.
    opener = urllib.FancyURLopener({})
    f = opener.open(url)
    try:
        return f.read()
    finally:
        # Always release the underlying socket, even if read() fails
        # (the original leaked the handle).
        f.close()
def fetch_example():
    """Return the contents of the local Example.html fixture."""
    # `with` closes the file deterministically; the original left it open.
    with open("Example.html") as fd:
        return fd.read()
# Get website
#page = fetch_url(URL)
page = fetch_example()
# Feed page into BeautifulSoup parser
soup = BeautifulSoup(page)
# We are going to ignore everything outside of the #main-content div
main_content = soup.select("#main-content")[0]
# Bill number
bill_number = main_content.find("h1").string.strip()
print bill_number
# Bill description contained in the div .charge class
description = main_content.select(".charge")[0].string
print description
print
# Locations and Sponsors are included in the summary-table
summary_table = main_content.select(".summary-table")[0]
# Grab location header and print
location_dt = summary_table.find("dt", text="Location")
print location_dt.string + ":"
# Go over two tags to find location contents
location_dd = location_dt.next_sibling.next_sibling
print location_dd.string
print
# Grab sponsors header and print
sponsors_dt = summary_table.find("dt", text="Sponsor(s)")
print sponsors_dt.string + ":"
# Go over two tags to find sponsors contents
sponsors_dd = sponsors_dt.next_sibling.next_sibling
# Iterate over list of sponsors and print
for li in sponsors_dd.find_all("li"):
print li.string
print
# Detailed status table contains the FULL STATUS information
detailed_status_table = main_content.select("#bill-detailed-status-table")[0]
# FULL STATUS is the fourth column of table
full_status_td = detailed_status_table.find_all("td")[3]
print " ".join(full_status_td.stripped_strings)
print
# Print url
print URL | gpl-2.0 | 4,761,416,767,378,805,000 | 25.850746 | 77 | 0.75139 | false |
jrichte43/ProjectEuler | Problem-0200/solutions.py | 1 | 1266 |
# NOTE(review): the original strings were scraped with raw HTML entities
# (") and dropped superscripts; decoded and restored here.  Confirm
# against https://projecteuler.net/problem=200.
__problem_title__ = ('Find the 200th prime-proof sqube containing the '
                     'contiguous sub-string "200"')
__problem_url___ = "https://projecteuler.net/problem=200"
__problem_description__ = 'We shall define a sqube to be a number of the form, p^2*q^3, where p ' \
                          'and q are distinct primes. For example, 200 = 5^2*2^3 or ' \
                          '120072949 = 23^2*61^3. The first five squbes are 72, 108, 200, 392, ' \
                          'and 500. Interestingly, 200 is also the first number for which you ' \
                          'cannot change any single digit to make a prime; we shall call such ' \
                          'numbers, prime-proof. The next prime-proof sqube which contains the ' \
                          'contiguous sub-string "200" is 1992008. Find the 200th prime-proof ' \
                          'sqube containing the contiguous sub-string "200".'
import timeit
class Solution():
    """Solver scaffold for Project Euler problem 200 (not yet implemented)."""

    @staticmethod
    def solution1():
        # Placeholder: returns None until an algorithm is written.
        pass

    @staticmethod
    def time_solutions():
        # Benchmark a single run of each solution via timeit.
        bench_setup = 'from __main__ import Solution'
        elapsed = timeit.timeit('Solution.solution1()', setup=bench_setup, number=1)
        print('Solution 1:', elapsed)
if __name__ == '__main__':
    # Run and time the (currently unimplemented) solution when invoked
    # directly; prints None until solution1 is written.
    s = Solution()
    print(s.solution1())

    s.time_solutions()
| gpl-3.0 | -4,679,411,115,905,603,000 | 38.5625 | 100 | 0.575039 | false |
DArtagan/teetimer | tracker/migrations/0003_auto__del_field_teetime_people.py | 1 | 5085 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Converts TeeTime.people from a ForeignKey to a many-to-many
        # relation: drop the FK column, then create the join table with a
        # uniqueness constraint on each (teetime, user) pair.
        # Deleting field 'TeeTime.people'
        db.delete_column('tracker_teetime', 'people_id')

        # Adding M2M table for field people on 'TeeTime'
        m2m_table_name = db.shorten_name('tracker_teetime_people')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('teetime', models.ForeignKey(orm['tracker.teetime'], null=False)),
            ('user', models.ForeignKey(orm['accounts.user'], null=False))
        ))
        db.create_unique(m2m_table_name, ['teetime_id', 'user_id'])
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'TeeTime.people'
raise RuntimeError("Cannot reverse this migration. 'TeeTime.people' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration # Adding field 'TeeTime.people'
db.add_column('tracker_teetime', 'people',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.User']),
keep_default=False)
# Removing M2M table for field people on 'TeeTime'
db.delete_table(db.shorten_name('tracker_teetime_people'))
models = {
'accounts.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'related_name': "'user_set'", 'blank': 'True', 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'related_name': "'user_set'", 'blank': 'True', 'symmetrical': 'False'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True', 'symmetrical': 'False'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tracker.teetime': {
'Meta': {'object_name': 'TeeTime'},
'date_edited': ('django.db.models.fields.DateField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'people': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['accounts.User']", 'symmetrical': 'False'}),
'slots': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {})
}
}
complete_apps = ['tracker'] | mit | -6,011,046,772,855,287,000 | 60.277108 | 192 | 0.574041 | false |
Moonshile/fast12306 | src/core/token.py | 1 | 5021 | #coding=utf-8
import re
from fetch import FetchSlice
class Token(object):
def __init__(self, session, base_url):
self.session = session
self.base_url = base_url
self.init_url_pattern = re.compile(r'<script\s+src="/(otn/dynamicJs/.+)"\s+type="text/javascript"\s+xml:space="preserve">\s*</script>\s+</head>')
self.key_pattern = re.compile(r'function\s+gc\(\)\s*{\s*var\s+key\s*=\s*\'(.+)\'\s*;var\s+value\s*=')
def retrieve_key(self, init_url):
"""
:param init_url: URL which contains the link to the js file that contains the token key
:param base_url: URL base
"""
fs = FetchSlice(self.session)
url = fs.fetch(self.init_url_pattern, init_url)[0]
key = fs.fetch(self.key_pattern, self.base_url + url)[0]
return key
def retrieve_value(self, key):
return self.encode32(self.bin216(self.base32('1111', key)))
"""
The following methods are translated from a javascript file from 12306
"""
def text2array(self, text, include_length):
length = len(text)
res = []
for i in range(0, length, 4):
res.append(ord(text[i]) | ord(text[i + 1]) << 8 | ord(text[i + 2]) << 16 | ord(text[i + 3]) << 24)
if include_length:
res.append(length)
return res
def array2text(self, data, include_length):
"""
length = len(data)
n = (length - 1) << 2;
if include_length:
m = data[length - 1]
if m < n - 3 or m > n:
return None
n = m
res = reduce(
lambda res, x: res + x,
map(
lambda x: chr(x & 0xff) + chr(x >> 8 & 0xff) + chr(x >> 16 & 0xff) + chr(x >> 24 & 0xff),
data
),
''
)
if include_length:
return res[:n]
else:
return res
"""
return map(lambda x: ((x&0xff)<<24)|((x>>8&0xff)<<16)|((x>>16&0xff)<<8)|(x>>24&0xff), data)
def base32(self, text, key):
delta = 0x9E3779B8
def rshift(v, n):
return (v % 0x100000000) >> n
def compute_mx(z, y, s, k, p, e):
r1 = rshift(z, 5)
r2 = y << 2 & 0xffffffff
r3 = r1 ^ r2
r4 = rshift(y, 3)
r5 = z << 4 & 0xffffffff
r6 = r4 ^ r5
r7 = r3 + r6 & 0xffffffff
r8 = s ^ y
r9 = k[p & 3 ^ e] ^ z
r10 = r8 + r9 & 0xffffffff
return r7 ^ r10
if text == '':
return ''
v = self.text2array(text, True)
k = self.text2array(key, False)
if len(k) < 4:
for i in range(0, 4 - len(k)):
k.append(0)
n = len(v) - 1
z = v[n]
y = v[0]
mx = None
e = None
p = None
q = int(6 + 52/(n + 1))
s = 0
while 0 < q:
q = q - 1
s = (s + delta & 0xffffffff)
e = rshift(s, 2) & 3
for p in range(0, n):
y = v[p + 1]
mx = compute_mx(z, y, s, k, p, e)
z = v[p] = (v[p] + mx & 0xffffffff)
p = n
y = v[0]
mx = compute_mx(z, y, s, k, p, e)
z = v[n] = (v[n] + mx & 0xffffffff)
return self.array2text(v, False)
def bin216(self, text):
"""
i = None
o = ''
n = None
text = text + ''
l = len(text)
b = ''
for i in range(0, l):
b = ord(text[i])
n = hex(b).replace('0x', '')
o = o + ('0' + n if len(n) < 2 else n)
return o
"""
return reduce(lambda res, x: res + x,
map(
lambda x: hex(0x100000000 | x)[3:],
text
),
''
)
def encode32(self, text):
keyStr = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='
output = ''
chr1 = None
chr2 = None
chr3 = ''
enc1 = None
enc2 = None
enc3 = None
enc4 = ''
i = 0
while True:
chr1 = ord(text[i])
i = i + 1
chr2 = ord(text[i]) if i < len(text) else None
i = i + 1
chr3 = ord(text[i]) if i < len(text) else None
i = i + 1
enc1 = chr1 >> 2
enc2 = ((chr1 & 3) << 4) | ((chr2 >> 4) if chr2 else 0)
enc3 = (((chr2 & 15) << 2) if chr2 else 0) | ((chr3 >> 6) if chr3 else 0)
enc4 = chr3 & 63 if chr3 else 0
if chr2 is None:
enc3 = enc4 = 64
elif chr3 is None:
enc4 = 64
output = output + keyStr[enc1] + keyStr[enc2] + keyStr[enc3] + keyStr[enc4]
chr1 = chr2 = chr3 = ''
enc1 = enc2 = enc3 = enc4 = ''
if i >= len(text):
break
return output
| apache-2.0 | -8,190,554,962,425,597,000 | 29.065868 | 153 | 0.430592 | false |
jonathansick/synthsb | scripts/segmap_wircamsb.py | 1 | 5931 | #!/usr/bin/env python
# encoding: utf-8
"""
Compute WIRCam synthetic surface brightnesses within regions of a segmentation
map.
Accepts segmentation maps and pixel tables made by, e.g. andromass.
2014-11-18 - Created by Jonathan Sick
"""
import argparse
# from collections import defaultdict
# import math
import numpy as np
from astropy import log
import astropy.io.fits as fits
from astropy.wcs import WCS
from astropy.table import Table
# from sqlalchemy import func
from sqlalchemy.orm import aliased
from starplex.database import connect_to_server, Session
from starplex.database import Catalog, Bandpass, CatalogStar, Observation
from androphotsys import wircam_vega_to_ab
# from starplex.utils.timer import Timer
from synthsb.directsb import NoDataError
# from synthsb.directsb import compute_sb
def main():
log.setLevel("INFO")
args = parse_args()
segmap_fits = fits.open(args.seg_path)
segmap = segmap_fits[0].data
wcs = WCS(segmap_fits[0].header)
pixel_table = Table.read(args.pix_table_path,
format='ascii.commented_header')
fluxsum_J = np.full(len(pixel_table), 0, dtype=np.float)
varsum_J = np.full(len(pixel_table), 0, dtype=np.float)
fluxsum_Ks = np.full(len(pixel_table), 0, dtype=np.float)
varsum_Ks = np.full(len(pixel_table), 0, dtype=np.float)
star_count = np.zeros(len(pixel_table), dtype=np.int)
fields = ["M31-{0:d}".format(i) for i in range(1, 28)] + \
["M31-{0:d}".format(i) for i in range(47, 72)]
# fields = ['M31-1']
for field in fields:
print "Processing", field
data = load_photometry(field)
x, y = wcs.wcs_world2pix(data['ra'], data['dec'], 0)
# Round down to pixel indices
x = x.astype(np.int)
y = y.astype(np.int)
# Filter out stars contained inside the image footprint
ny, nx = segmap.shape
s = np.where((x >= 0) & (y >= 0) &
(x < nx) & (y < ny) &
np.isfinite(data['J']) & np.isfinite(data['Ks']) &
np.isfinite(data['J_err']) & np.isfinite(data['Ks_err']) &
(data['cfrac'] > 0.))[0]
data = data[s]
n_stars = data.shape[0]
flux_J, flux_var_J = mag_to_mjy(data['J'], data['J_err'])
flux_Ks, flux_var_Ks = mag_to_mjy(data['Ks'], data['Ks_err'])
for i in xrange(n_stars):
bin_id = segmap[y[i], x[i]]
if bin_id >= 0:
# add light to bin
fluxsum_J[bin_id] += flux_J[i] / data['cfrac'][i]
fluxsum_Ks[bin_id] += flux_Ks[i] / data['cfrac'][i]
varsum_J[bin_id] += flux_var_J[i]
varsum_Ks[bin_id] += flux_var_Ks[i]
star_count[bin_id] += 1
empty = np.where(star_count == 0)[0]
fluxsum_J[empty] = np.nan
fluxsum_Ks[empty] = np.nan
varsum_J[empty] = np.nan
varsum_Ks[empty] = np.nan
flux_err_J = np.sqrt(varsum_J)
flux_err_Ks = np.sqrt(varsum_Ks)
pixel_table['n_stars'] = star_count
pixel_table['synth_J'] = fluxsum_J
pixel_table['synth_Ks'] = fluxsum_Ks
pixel_table['synth_J_err'] = flux_err_J
pixel_table['synth_Ks_err'] = flux_err_Ks
pixel_table.write(args.output_path,
format='ascii.commented_header')
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('pix_table_path')
parser.add_argument('seg_path')
parser.add_argument('output_path')
return parser.parse_args()
def load_photometry(fieldname,
use_vega=False, apply_intercal=False,
server='marvin'):
"""Load WIRCam photometry from Starplex, converted to AB mag.
Filter out MW stars with a rudimentary J-Ks > 0.9 (Vega) color cut.
"""
instrument = "wircam"
connect_to_server(server)
session = Session()
mag1obs = aliased(Observation)
mag2obs = aliased(Observation)
bp1 = aliased(Bandpass)
bp2 = aliased(Bandpass)
catalog = session.query(Catalog).\
filter(Catalog.name == fieldname).\
filter(Catalog.instrument == instrument).\
one()
q = session.query(CatalogStar.cfrac, CatalogStar.ra, CatalogStar.dec,
mag1obs.mag, mag1obs.mag_err,
mag2obs.mag, mag2obs.mag_err).\
join(mag1obs, CatalogStar.observations).\
join(mag2obs, CatalogStar.observations).\
join(Catalog).\
filter(Catalog.name == fieldname).\
filter(Catalog.instrument == instrument).\
join(bp1, mag1obs.bandpass).\
filter(bp1.name == "J").\
join(bp2, mag2obs.bandpass).\
filter(bp2.name == "Ks")
dt = [('cfrac', np.float), ('ra', np.float), ('dec', np.float),
('J', np.float), ('J_err', np.float),
('Ks', np.float), ('Ks_err', np.float)]
data = np.array(q.all(), dtype=np.dtype(dt))
# Filter out MW stars
# FIXME rudimentary
# Using Vega Mag here!
sel = np.where((data['J'] - data['Ks']) > 0.9)[0]
data = data[sel]
# Apply the intercal ZP correction
if apply_intercal:
if 'intercal' in catalog.meta:
for band in ['J', 'Ks']:
if band in catalog.meta['intercal']:
data[band] += catalog.meta['intercal'][band]['zp']
# Convert to AB
if not use_vega:
data['J'] = wircam_vega_to_ab(data['J'], "J")
data['Ks'] = wircam_vega_to_ab(data['Ks'], "Ks")
log.info("Field {0} {2} has {1:d} stars".
format(fieldname, data.shape[0], instrument))
session.close()
if len(data) == 0:
raise NoDataError
return data
def mag_to_mjy(mag, mag_err):
MICROJY_ZP = 10. ** 6. * 10. ** 23. * 10. ** (-48.6 / 2.5)
mjy = MICROJY_ZP * np.power(10., -mag / 2.5)
mjy_err = (mjy * mag_err) / 1.0875
return mjy, mjy_err
if __name__ == '__main__':
main()
| bsd-3-clause | 7,598,660,081,612,174,000 | 32.320225 | 79 | 0.581521 | false |
hjanime/VisTrails | vistrails/db/versions/v0_9_3/translate/v0_9_2.py | 1 | 7218 | ###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
import copy
from vistrails.db.versions.v0_9_3.domain import DBVistrail, DBAction, DBTag, DBModule, \
DBConnection, DBPortSpec, DBFunction, DBParameter, DBLocation, DBAdd, \
DBChange, DBDelete, DBAnnotation, DBPort, DBAbstractionRef, DBGroup
def translateVistrail(_vistrail):
vistrail = DBVistrail()
for _action in _vistrail.db_actions:
ops = []
for op in _action.db_operations:
if op.vtType == 'add':
data = convert_data(op.db_data)
ops.append(DBAdd(id=op.db_id,
what=op.db_what,
objectId=op.db_objectId,
parentObjId=op.db_parentObjId,
parentObjType=op.db_parentObjType,
data=data))
elif op.vtType == 'change':
data = convert_data(op.db_data)
ops.append(DBChange(id=op.db_id,
what=op.db_what,
oldObjId=op.db_oldObjId,
newObjId=op.db_newObjId,
parentObjId=op.db_parentObjId,
parentObjType=op.db_parentObjType,
data=data))
elif op.vtType == 'delete':
ops.append(DBDelete(id=op.db_id,
what=op.db_what,
objectId=op.db_objectId,
parentObjId=op.db_parentObjId,
parentObjType=op.db_parentObjType))
annotations = []
for annotation in _action.db_annotations:
annotations.append(DBAnnotation(id=annotation.db_id,
key=annotation.db_key,
value=annotation.db_value))
session = _action.db_session
if not session:
session = None
else:
session = long(_action.db_session)
action = DBAction(id=_action.db_id,
prevId=_action.db_prevId,
date=_action.db_date,
user=_action.db_user,
prune=_action.db_prune,
session=session,
operations=ops,
annotations=annotations)
vistrail.db_add_action(action)
for _tag in _vistrail.db_tags:
tag = DBTag(id=_tag.db_id,
name=_tag.db_name)
vistrail.db_add_tag(tag)
vistrail.db_version = '0.9.3'
return vistrail
def convert_data(child):
if child.vtType == 'module':
return DBModule(id=child.db_id,
cache=child.db_cache,
name=child.db_name,
namespace=child.db_namespace,
package=child.db_package,
version=child.db_version,
tag=child.db_tag)
elif child.vtType == 'abstractionRef':
return DBAbstractionRef(id=child.db_id,
name=child.db_name,
cache=child.db_cache,
abstraction_id=child.db_abstraction_id,
version=child.db_version)
elif child.vtType == 'connection':
return DBConnection(id=child.db_id)
elif child.vtType == 'portSpec':
return DBPortSpec(id=child.db_id,
name=child.db_name,
type=child.db_type,
spec=child.db_spec)
elif child.vtType == 'function':
return DBFunction(id=child.db_id,
pos=child.db_pos,
name=child.db_name)
elif child.vtType == 'parameter':
return DBParameter(id=child.db_id,
pos=child.db_pos,
name=child.db_name,
type=child.db_type,
val=child.db_val,
alias=child.db_alias)
elif child.vtType == 'location':
return DBLocation(id=child.db_id,
x=child.db_x,
y=child.db_y)
elif child.vtType == 'annotation':
return DBAnnotation(id=child.db_id,
key=child.db_key,
value=child.db_value)
elif child.vtType == 'port':
return DBPort(id=child.db_id,
type=child.db_type,
moduleId=child.db_moduleId,
moduleName=child.db_moduleName,
name=child.db_name,
spec=child.db_spec)
elif child.vtType == 'group':
return DBGroup(id=child.db_id,
workflow=child.db_workflow,
cache=child.db_cache,
name=child.db_name,
namespace=child.db_namespace,
package=child.db_package,
version=child.db_version,
tag=child.db_tag)
| bsd-3-clause | -7,308,370,435,144,763,000 | 44.683544 | 88 | 0.518426 | false |
MuckRock/muckrock | muckrock/message/tasks.py | 1 | 11469 | """
Tasks for the messages application.
"""
# Django
from celery.exceptions import SoftTimeLimitExceeded
from celery.schedules import crontab
from celery.task import periodic_task, task
from django.contrib.auth.models import User
from django.utils import timezone
# Standard Library
import logging
from random import randint
# Third Party
import stripe
from dateutil.relativedelta import relativedelta
from requests.exceptions import RequestException
# MuckRock
from muckrock.accounts.models import RecurringDonation
from muckrock.core.utils import stripe_retry_on_error
from muckrock.crowdfund.models import RecurringCrowdfundPayment
from muckrock.message import digests, receipts
from muckrock.message.email import TemplateEmail
from muckrock.message.notifications import SlackNotification
logger = logging.getLogger(__name__)
@task(
time_limit=600,
soft_time_limit=570,
name="muckrock.message.tasks.send_activity_digest",
)
def send_activity_digest(user_id, subject, preference):
"""Individual task to create and send an activity digest to a user."""
user = User.objects.get(id=user_id)
interval = {
"hourly": relativedelta(hours=1),
"daily": relativedelta(days=1),
"weekly": relativedelta(weeks=1),
"monthly": relativedelta(months=1),
}[preference]
logger.info(
"Starting activity digest at: %s User: %s Subject: %s Interval: %s",
timezone.now(),
user,
subject,
interval,
)
try:
email = digests.ActivityDigest(user=user, subject=subject, interval=interval)
email.send()
except SoftTimeLimitExceeded:
logger.error(
"Send Activity Digest took too long. " "User: %s, Subject: %s, Interval %s",
user,
subject,
interval,
)
def send_digests(preference, subject):
"""Helper to send out timed digests"""
users = User.objects.filter(
profile__email_pref=preference, notifications__read=False
).distinct()
for user in users:
send_activity_digest.delay(user.pk, subject, preference)
# every hour
@periodic_task(
run_every=crontab(hour="*/1", minute=0), name="muckrock.message.tasks.hourly_digest"
)
def hourly_digest():
"""Send out hourly digest"""
send_digests("hourly", "Hourly Digest")
# every day at 10am
@periodic_task(
run_every=crontab(hour=10, minute=0), name="muckrock.message.tasks.daily_digest"
)
def daily_digest():
"""Send out daily digest"""
send_digests("daily", "Daily Digest")
# every Monday at 10am
@periodic_task(
run_every=crontab(day_of_week=1, hour=10, minute=0),
name="muckrock.message.tasks.weekly_digest",
)
def weekly_digest():
"""Send out weekly digest"""
send_digests("weekly", "Weekly Digest")
# first day of every month at 10am
@periodic_task(
run_every=crontab(day_of_month=1, hour=10, minute=0),
name="muckrock.message.tasks.monthly_digest",
)
def monthly_digest():
"""Send out monthly digest"""
send_digests("monthly", "Monthly Digest")
# every day at 9:30am
@periodic_task(
run_every=crontab(hour=9, minute=30), name="muckrock.message.tasks.staff_digest"
)
def staff_digest():
"""Send out staff digest"""
staff_users = User.objects.filter(is_staff=True)
for staff_user in staff_users:
email = digests.StaffDigest(user=staff_user, subject="Daily Staff Digest")
email.send()
@task(name="muckrock.message.tasks.send_invoice_receipt")
def send_invoice_receipt(invoice_id):
"""Send out a receipt for an invoiced charge"""
invoice = stripe_retry_on_error(stripe.Invoice.retrieve, invoice_id)
try:
charge = stripe_retry_on_error(stripe.Charge.retrieve, invoice.charge)
except stripe.error.InvalidRequestError:
# a free subscription has no charge attached
# maybe send a notification about the renewal
# but for now just handle the error
return
try:
customer = stripe_retry_on_error(stripe.Customer.retrieve, invoice.customer)
charge.metadata["email"] = customer.email
except stripe.error.InvalidRequestError:
logger.error("Could not retrieve customer")
return
plan = get_subscription_type(invoice)
if plan == "donate":
receipt_function = receipts.donation_receipt
elif plan.startswith("crowdfund"):
receipt_function = receipts.crowdfund_payment_receipt
charge.metadata["crowdfund_id"] = plan.split("-")[1]
recurring_payment = RecurringCrowdfundPayment.objects.filter(
subscription_id=invoice.subscription
).first()
if recurring_payment:
recurring_payment.log_payment(charge)
else:
logger.error("No recurring crowdfund payment for: %s", invoice.subscription)
else:
# other types are handled by squarelet
return
receipt = receipt_function(None, charge)
receipt.send(fail_silently=False)
@task(name="muckrock.message.tasks.send_charge_receipt")
def send_charge_receipt(charge_id):
"""Send out a receipt for a charge"""
logger.info("Charge Receipt for %s", charge_id)
charge = stripe_retry_on_error(stripe.Charge.retrieve, charge_id)
# if the charge was generated by an invoice, let the invoice handler send the receipt
if charge.invoice:
return
# we should expect charges to have metadata attached when they are made
try:
user_email = charge.metadata["email"]
user_action = charge.metadata["action"]
except KeyError:
# squarelet charges will not have matching metadata
logger.warning("Malformed charge metadata, no receipt sent: %s", charge)
return
# try getting the user based on the provided email
# we know from Checkout purchases that logged in users have their email autofilled
try:
user = User.objects.get(email=user_email)
except User.DoesNotExist:
user = None
logger.info("Charge Receipt User: %s", user)
try:
receipt_functions = {
"crowdfund-payment": receipts.crowdfund_payment_receipt,
"donation": receipts.donation_receipt,
}
receipt_function = receipt_functions[user_action]
except KeyError:
# squarelet charges will be handled on squarelet
logger.warning("Unrecognized charge: %s", user_action)
receipt_function = receipts.generic_receipt
receipt = receipt_function(user, charge)
receipt.send(fail_silently=False)
def get_subscription_type(invoice):
"""Gets the subscription type from the invoice."""
# get the first line of the invoice
if invoice.lines.total_count > 0:
return invoice.lines.data[0].plan.id
else:
return "unknown"
@task(name="muckrock.message.tasks.failed_payment")
def failed_payment(invoice_id):
"""Notify a customer about a failed subscription invoice."""
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
invoice = stripe_retry_on_error(stripe.Invoice.retrieve, invoice_id)
attempt = invoice.attempt_count
subscription_type = get_subscription_type(invoice)
recurring_donation = None
crowdfund = None
email_to = []
if subscription_type == "donate":
recurring_donation = RecurringDonation.objects.filter(
subscription_id=invoice.subscription
).first()
if recurring_donation:
user = recurring_donation.user
if user is None:
email_to = [recurring_donation.email]
recurring_donation.payment_failed = True
recurring_donation.save()
else:
user = None
logger.error("No recurring crowdfund found for %s", invoice.subscription)
elif subscription_type.startswith("crowdfund"):
recurring_payment = RecurringCrowdfundPayment.objects.filter(
subscription_id=invoice.subscription
).first()
if recurring_payment:
user = recurring_payment.user
if user is None:
email_to = [recurring_payment.email]
crowdfund = recurring_payment.crowdfund
recurring_payment.payment_failed = True
recurring_payment.save()
else:
user = None
logger.error("No recurring crowdfund found for %s", invoice.subscription)
else:
# squarelet handles other types
return
subject = "Your payment has failed"
context = {"attempt": attempt, "type": subscription_type, "crowdfund": crowdfund}
if subscription_type.startswith("crowdfund"):
context["type"] = "crowdfund"
if attempt == 4:
# on last attempt, cancel the user's subscription and lower the failed payment flag
if subscription_type == "donate" and recurring_donation:
recurring_donation.cancel()
elif subscription_type.startswith("crowdfund") and recurring_payment:
recurring_payment.cancel()
logger.info("%s subscription has been cancelled due to failed payment", user)
subject = "Your %s subscription has been cancelled" % subscription_type
context["attempt"] = "final"
else:
logger.info("Failed payment by %s, attempt %s", user, attempt)
notification = TemplateEmail(
user=user,
to=email_to,
extra_context=context,
text_template="message/notification/failed_payment.txt",
html_template="message/notification/failed_payment.html",
subject=subject,
)
notification.send(fail_silently=False)
@task(name="muckrock.message.tasks.support")
def support(user_id, message, task_id):
"""Send a response to a user about a flag task."""
# pylint: disable=import-outside-toplevel
from muckrock.task.models import FlaggedTask
user = User.objects.get(id=user_id)
task_ = FlaggedTask.objects.get(id=task_id)
context = {"message": message, "task": task_}
notification = TemplateEmail(
user=user,
extra_context=context,
text_template="message/notification/support.txt",
html_template="message/notification/support.html",
subject="Support #%d" % task_.id,
)
notification.send(fail_silently=False)
@task(name="muckrock.message.tasks.notify_project_contributor")
def notify_project_contributor(user_id, project_id, added_by_id):
"""Notify a user that they were added as a contributor to a project."""
# pylint: disable=import-outside-toplevel
from muckrock.project.models import Project
user = User.objects.get(id=user_id)
project = Project.objects.get(id=project_id)
added_by = User.objects.get(id=added_by_id)
context = {"project": project, "added_by": added_by}
notification = TemplateEmail(
user=user,
extra_context=context,
text_template="message/notification/project.txt",
html_template="message/notification/project.html",
subject="Added to a project",
)
notification.send(fail_silently=False)
@task(name="muckrock.message.tasks.slack")
def slack(payload):
"""Send a Slack notification using the provided payload."""
try:
notification = SlackNotification(payload)
notification.send(fail_silently=False)
except RequestException as exc:
slack.retry(
countdown=2 ** slack.request.retries * 30 + randint(0, 30),
args=[payload],
exc=exc,
)
| agpl-3.0 | -8,486,624,507,151,663,000 | 33.966463 | 91 | 0.671026 | false |
Shouqun/node-gn | tools/depot_tools/tests/gclient_utils_test.py | 1 | 6965 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import StringIO
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support.super_mox import SuperMoxTestBase
from testing_support import trial_dir
import gclient_utils
import subprocess2
class GclientUtilBase(SuperMoxTestBase):
  """Shared mox fixture for gclient_utils tests.

  Stubs out subprocess2 process creation so no real child process is ever
  spawned, and neuters stdout flushing so output capture stays intact.
  """
  def setUp(self):
    super(GclientUtilBase, self).setUp()
    # SuperMoxTestBase captures sys.stdout; make flush() a no-op so code under
    # test cannot touch the real stream.
    gclient_utils.sys.stdout.flush = lambda: None
    # Record/replay mocks: individual tests set the expected Popen/communicate
    # calls before calling self.mox.ReplayAll().
    self.mox.StubOutWithMock(subprocess2, 'Popen')
    self.mox.StubOutWithMock(subprocess2, 'communicate')
class CheckCallAndFilterTestCase(GclientUtilBase):
  """Exercises gclient_utils.CheckCallAndFilterAndHeader() with a mocked
  subprocess2.Popen."""

  class ProcessIdMock(object):
    """Minimal stand-in for the object returned by subprocess2.Popen."""
    def __init__(self, test_string):
      # The code under test reads this stream as the child's stdout.
      self.stdout = StringIO.StringIO(test_string)
      # Arbitrary fake pid; only needs to exist.
      self.pid = 9284
    # pylint: disable=no-self-use
    def wait(self):
      # Always report a zero exit code; failure paths are not tested here.
      return 0

  def _inner(self, args, test_string):
    """Records the expected calls, runs CheckCallAndFilterAndHeader over
    |test_string|, and asserts the filter callback saw every line.

    The manual stdout writes below pre-fill the captured stream with the
    output the real call is expected to produce again, which is why
    checkstdout() in the caller expects the text twice.
    """
    cwd = 'bleh'
    gclient_utils.sys.stdout.write(
        '________ running \'boo foo bar\' in \'bleh\'\n')
    for i in test_string:
      gclient_utils.sys.stdout.write(i)
    # Expected (recorded) mox call: Popen must be invoked with unbuffered,
    # merged stdout/stderr pipes and must yield our fake process.
    # pylint: disable=no-member
    subprocess2.Popen(
        args,
        cwd=cwd,
        stdout=subprocess2.PIPE,
        stderr=subprocess2.STDOUT,
        bufsize=0).AndReturn(self.ProcessIdMock(test_string))
    # The code under test also asks for the current directory once.
    os.getcwd()
    self.mox.ReplayAll()
    compiled_pattern = gclient_utils.re.compile(r'a(.*)b')
    line_list = []
    capture_list = []
    def FilterLines(line):
      # Collect every line handed to the filter and everything matching
      # a(...)b so both can be asserted on afterwards.
      line_list.append(line)
      assert isinstance(line, str), type(line)
      match = compiled_pattern.search(line)
      if match:
        capture_list.append(match.group(1))
    gclient_utils.CheckCallAndFilterAndHeader(
        args, cwd=cwd, always=True, filter_fn=FilterLines)
    self.assertEquals(line_list, ['ahah', 'accb', 'allo', 'addb'])
    self.assertEquals(capture_list, ['cc', 'dd'])

  def testCheckCallAndFilter(self):
    """End-to-end check: header line plus output are echoed twice (manual
    pre-fill in _inner plus the real call)."""
    args = ['boo', 'foo', 'bar']
    test_string = 'ahah\naccb\nallo\naddb\n'
    self._inner(args, test_string)
    self.checkstdout('________ running \'boo foo bar\' in \'bleh\'\n'
        'ahah\naccb\nallo\naddb\n'
        '________ running \'boo foo bar\' in \'bleh\'\nahah\naccb\nallo\naddb'
        '\n')
class SplitUrlRevisionTestCase(GclientUtilBase):
  """Tests for gclient_utils.SplitUrlRevision().

  The original tests repeated the same six-line assertion batch for every
  URL; that batch is now factored into _check_split() so each URL is a
  single, readable line and the plain-URL case is consistently covered.
  """

  def _check_split(self, url, rev):
    """Asserts |url| splits to (url, None) and |url|@|rev| to (url, rev).

    Args:
      url: repository URL without a revision suffix.
      rev: revision (hex sha or branch/tag name) to append after '@'.
    """
    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
    self.assertEquals(out_rev, None)
    self.assertEquals(out_url, url)
    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
    self.assertEquals(out_rev, rev)
    self.assertEquals(out_url, url)

  def testSSHUrl(self):
    # SSH urls can embed '@' (user@host); that separator must not be
    # mistaken for the '@revision' suffix.
    for url in (
        "ssh://[email protected]/test.git",
        "ssh://example.com/test.git",
        "ssh://example.com/git/test.git"):
      self._check_split(url, "ac345e52dc")
    # Non-hexadecimal revisions (branch or tag names) must split too.
    for url in (
        "ssh://example.com/git/test.git",
        "ssh://[email protected]/~/test.git",
        "ssh://[email protected]/~username/test.git",
        "[email protected]:dart-lang/spark.git"):
      self._check_split(url, "test-stable")

  def testSVNUrl(self):
    self._check_split("svn://example.com/test", "ac345e52dc")
class GClientUtilsTest(trial_dir.TestCase):
def testHardToDelete(self):
# Use the fact that tearDown will delete the directory to make it hard to do
# so.
l1 = os.path.join(self.root_dir, 'l1')
l2 = os.path.join(l1, 'l2')
l3 = os.path.join(l2, 'l3')
f3 = os.path.join(l3, 'f3')
os.mkdir(l1)
os.mkdir(l2)
os.mkdir(l3)
gclient_utils.FileWrite(f3, 'foo')
os.chmod(f3, 0)
os.chmod(l3, 0)
os.chmod(l2, 0)
os.chmod(l1, 0)
def testUpgradeToHttps(self):
values = [
['', ''],
[None, None],
['foo', 'https://foo'],
['http://foo', 'https://foo'],
['foo/', 'https://foo/'],
['ssh-svn://foo', 'ssh-svn://foo'],
['ssh-svn://foo/bar/', 'ssh-svn://foo/bar/'],
['codereview.chromium.org', 'https://codereview.chromium.org'],
['codereview.chromium.org/', 'https://codereview.chromium.org/'],
['http://foo:10000', 'http://foo:10000'],
['http://foo:10000/bar', 'http://foo:10000/bar'],
['foo:10000', 'http://foo:10000'],
['foo:', 'https://foo:'],
]
for content, expected in values:
self.assertEquals(
expected, gclient_utils.UpgradeToHttps(content))
def testParseCodereviewSettingsContent(self):
values = [
['# bleh\n', {}],
['\t# foo : bar\n', {}],
['Foo:bar', {'Foo': 'bar'}],
['Foo:bar:baz\n', {'Foo': 'bar:baz'}],
[' Foo : bar ', {'Foo': 'bar'}],
[' Foo : bar \n', {'Foo': 'bar'}],
['a:b\n\rc:d\re:f', {'a': 'b', 'c': 'd', 'e': 'f'}],
['an_url:http://value/', {'an_url': 'http://value/'}],
[
'CODE_REVIEW_SERVER : http://r/s',
{'CODE_REVIEW_SERVER': 'https://r/s'}
],
['VIEW_VC:http://r/s', {'VIEW_VC': 'https://r/s'}],
]
for content, expected in values:
self.assertEquals(
expected, gclient_utils.ParseCodereviewSettingsContent(content))
if __name__ == '__main__':
import unittest
unittest.main()
# vim: ts=2:sw=2:tw=80:et:
| mit | 2,180,982,842,484,420,000 | 34 | 80 | 0.622972 | false |
Connectomics-Classes/hackwicket-silverbacks | postProcessScript/src/postPocess.py | 1 | 1940 | #!/usr/bin/python
#imports
import numpy as np
import cv
import cv2
import sys
import glob
from mayavi import mlab
from mayavi.mlab import *
def morphOps(probMap):
workableMap = probMap.copy()
#apply erosion & dilation filtering, rids photo of stray vescicle detections
workableMap = eroDilFilter(workableMap, 5, 3) #4 and 1 experimentally determined
#restrict array to include only above 70% confidence
restrictedArr = cv2.inRange(workableMap,.7, 1)
#change all remaining values to 100
restrictedArr[restrictedArr > 0] = 100
return restrictedArr
def loadNpyVolume():
print 'loading files from data directory...'
numpyVolume = []
#for all of the numpy arrays in the current directory
for numpyArr in glob.glob('../data/*.np[yz]'):
probMap = np.load(numpyArr)
#if the numpy is 3d
if(len(probMap.shape) == 3):
#split and add sub numpys to volume
#reorder parameters for looping
np.rollaxis(probMap, 2)
for subMap in probMap:
#add all subArrs in the 3d npy to the volume
print subMap.shape
numpyVolume.append(subMap)
#if the numpy is 2d
elif(len(probMap.shape) == 2):
numpyVolume.append(probMap)
#if the numpy doesnt make sense
else:
print 'Error: Npy data format not recognized'
return numpyVolume
def eroDilFilter(array,ero, dil):
kernel = np.ones((ero, ero), np.uint8)
erodedArr = cv2.erode(array,kernel)
kernel = np.ones((dil,dil), np.uint8)
dilatedArr = cv2.dilate(array, kernel)
return dilatedArr
#load the numpy into the program
numpyVolume = loadNpyVolume()
#instantiate list of array for display
stackableList = []
#perform morph ops to clean up data
print 'cleaning up data...'
for probMap in numpyVolume:
displayArr = morphOps(probMap)
#add array to display list
stackableList.append(displayArr)
#stitch arrays together for display
print 'generating 3d volume...'
finalVolume = np.dstack(stackableList)
#display arrays
mlab.contour3d(finalVolume)
mlab.show()
| apache-2.0 | -1,765,742,468,419,555,000 | 27.115942 | 81 | 0.744845 | false |
ethanrublee/ecto-release | python/ecto/__init__.py | 1 | 7057 | #
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import platform, sys
#import inspect
class EctoCellBase(object):
pass
def cell_getitem(self, *args, **kwargs):
if len(args) == 1 and type(args[0]) == slice:
return __getitem_slice__(self.__impl, args[0])
if len(args) == 1 and type(args[0]) == tuple:
return __getitem_tuple__(self.__impl, args[0])
if len(args) == 1 and type(args[0]) == list:
return __getitem_list__(self.__impl, args[0])
return __getitem_str__(self.__impl, args[0])
def cellinit(cpptype):
def impl(self, *args, **kwargs):
if len(args) > 1:
raise RuntimeError("Too many positional args: only one allowed, representing cell instance name")
e = lookup(cpptype)
c = self.__impl = e.construct()
if len(args) == 1:
self.__impl.name(args[0])
e.declare_params(self.__impl.params)
# c.construct(args, kwargs)
#print "c=", c
self.inputs = c.inputs
self.outputs = c.outputs
self.params = c.params
for k, v in kwargs.iteritems():
if k == 'strand':
self.__impl._set_strand(v)
elif isinstance(v, _cell_cpp):
setattr(self.params, k, v.__impl)
else:
setattr(self.params, k, v)
# print "now:", getattr(self.params, k)
e.declare_io(self.params, self.inputs, self.outputs)
try:
self.__impl.verify_params()
except ecto.EctoException as e:
print >>sys.stderr, cpptype
raise type(e)('\nCell Type: %s\nCell Name: %s\nWhat:\n%s'%(cpptype,self.__impl.name(),str(e)))
# self.params.get('k') = v
return impl
def cell_print_tendrils(tendril):
s = ""
for x in tendril:
try:
value = str(x.data().get())
except TypeError, e:
value = "[unprintable]"
s += " - " + x.key() + " [%s]" % x.data().type_name
if x.data().required:
s += " REQUIRED"
if x.data().has_default:
s += " default = " + value
s += "\n"
docstr = str(x.data().doc)
doclines = docstr.splitlines()
if doclines :
for docline in doclines:
s += " " + docline + "\n"
s += "\n"
return s
@classmethod
def cell_inspect(self, *args, **kwargs):
c = self.__factory()
c.declare_params()
c.declare_io()
return c
def cell_process(self):
return self.__impl.process()
def cell_configure(self):
return self.__impl.configure()
def cell_name(self):
return self.__impl.name()
def cell_typename(self):
return self.__impl.typename()
def cell_doc(short_doc, c):
doc =short_doc + "\n\n"
params = cell_print_tendrils(c.params)
inputs = cell_print_tendrils(c.inputs)
outputs = cell_print_tendrils(c.outputs)
if(params):
doc += "Parameters:\n%s"%params
if(inputs):
doc += "Inputs:\n%s"%inputs
if(outputs):
doc += "Outputs:\n%s"%outputs
return doc
def postregister(cellname, cpptypename, short_doc, inmodule):
e = lookup(cpptypename)
c = e.construct()
c.declare_params()
c.declare_io()
thistype = type(cellname, (_cell_cpp,),
dict(__doc__ = cell_doc(short_doc,c),
__module__ = inmodule.__name__,
inputs = c.inputs,
outputs = c.outputs,
params = c.params,
type = c.typename,
short_doc = short_doc,
__init__ = cellinit(cpptypename),
__getitem__ = cell_getitem,
inspect = cell_inspect,
process = cell_process,
configure = cell_configure,
name = cell_name,
type_name = cell_typename,
__factory = e.construct,
__looks_like_a_cell__ = True
))
inmodule.__dict__[cellname] = thistype
if platform.system().startswith('freebsd'):
# C++ modules are extremely fragile when loaded with RTLD_LOCAL,
# which is what Python uses on FreeBSD by default, and maybe other
# systems. Convince it to use RTLD_GLOBAL.
# See thread by Abrahams et al:
# http://mail.python.org/pipermail/python-dev/2002-May/024074.html
sys.setdlopenflags(0x102)
def load_pybindings(name, path):
"""
Merges python bindings from shared library 'name' into module 'name'.
Use when you have a directory structure::
lib/
foo.so
foo/
__init__.py
something.py
Here, inside ``foo/__init__.py`` call ``load_pybindings(__name__, __path__)``
this assumes that the first entry in list ``__path__`` is where
you want the wrapped classes to merge to.
"""
import imp
m = imp.load_dynamic(name, path[0] + ".so") #TODO this is only going to work on unix...
thismod = sys.modules[name]
for (k,v) in m.__dict__.items():
if not k.startswith("_"):
thismod.__dict__[k] = v
load_pybindings(__name__, __path__)
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from doc import *
from cell import *
from blackbox import *
import test
#
# temporary backwards compat measures
#
schedulers.Threadpool = schedulers.Multithreaded
| bsd-3-clause | -9,032,442,430,647,433,000 | 32.604762 | 110 | 0.586652 | false |
excid3/keryx | keryx/unwrapt/Download.py | 1 | 5779 | # Unwrapt - cross-platform package system emulator
# Copyright (C) 2010 Chris Oliver <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import urllib
import httplib
import urlparse
from datetime import datetime
from utils import format_number
#TODO: Add resume support: http://code.activestate.com/recipes/83208-resuming-download-of-a-file/
class InvalidCredentials(Exception):
"""
Exception raised if the proxy credentials are invalid
"""
pass
class ProxyOpener(urllib.FancyURLopener):
"""
Class for handling proxy credentials
"""
def __init__(self, proxy={}, usr=None, pwd=None):
urllib.FancyURLopener.__init__(self, proxy)
self.count = 0
self.proxy = proxy
self.usr = usr
self.pwd = pwd
def prompt_user_passwd(self, host, realm):
"""
Override the FancyURLopener prompt and simply return what was given
Raise an error if there is a problem
"""
self.count += 1
if self.count > 1:
raise InvalidCredentials, "Unable to authenticate to proxy"
return (self.usr, self.pwd)
def textprogress(display, current, total):
"""
Download progress in terminal
"""
percentage = current/float(total) * 100
sys.stdout.write("\r%-56.56s %3i%% [%5sB / %5sB]" % \
(display,
percentage,
format_number(current),
format_number(total)))
if percentage == 100:
sys.stdout.write("\n")
# This makes sure the cursor ends up on the far right
# Without this the cursor constantly jumps around
sys.stdout.flush()
def download_url(url, filename, display=None, progress=textprogress, proxy={}, username=None, password=None):
"""
Downloads a file to ram and returns a string of the contents
"""
if not display:
display = url.rsplit("/", 1)[1]
# Do we already have a file to continue off of?
# modified determines whether the file is outdated or not based on headers
modified = None
downloaded = 0
if os.path.exists(filename):
modified = datetime.utcfromtimestamp(os.stat(filename).st_mtime)
downloaded = os.path.getsize(filename)
# Open up a temporary connection to see if the file we have downloaded
# is still usable (based on modification date)
# format meanings are located http://docs.python.org/library/time.html
opener = ProxyOpener(proxy, username, password)
headers = opener.open(url).headers
if modified and "Last-Modified" in headers:
dt = datetime.strptime(headers["Last-Modified"],
"%a, %d %b %Y %H:%M:%S %Z")
# File is too old so we delete the old file
if modified < dt:
#logging.debug("OLD FILE")
#print "OLD FILE"
downloaded = 0
os.remove(filename)
# Test existing filesize compared to length of download
if "Content-Length" in headers:
length = int(headers["Content-Length"])
# File already downloaded?
if downloaded == length:
progress("Hit: %s" % display, length, length)
return
# File corrupted?
elif downloaded > length:
downloaded = 0
os.remove(filename)
# Open up the real connection for downloading
opener = ProxyOpener(proxy, username, password)
if downloaded:
opener.addheader("Range", "bytes=%s-" % str(downloaded))
page = opener.open(url)
# The file range must have matched the download size
if not "Content-Length" in page.headers:
progress("Hit: %s" % display, downloaded, downloaded)
return
# Finish downloading the file
length = int(page.headers["Content-Length"]) + downloaded
f = open(filename, "ab")
while 1:
data = page.read(8192)
if not data:
break
downloaded += len(data)
f.write(data)
progress(display, downloaded, length)
f.close()
page.close()
return
##Check for Valid URL based on the HTTP response code
def httpExists(url):
host, path = urlparse.urlsplit(url)[1:3]
found = False
connection = httplib.HTTPConnection(host) ## Make HTTPConnection Object
try:
connection.request("HEAD", path)
responseOb = connection.getresponse() ## Grab HTTPResponse Object
if responseOb.status == 200:
found = True
except:
pass
return found
if __name__ == "__main__":
# Successful proxy usage
#download_url("http://launchpad.net/keryx/stable/0.92/+download/keryx_0.92.4.zip",
# "keryx.zip")
#proxy={"http": "http://tank:3128"},
#username="excid3", password="password")
download_url("http://dl.google.com/linux/chrome/deb/dists/stable/main/binary-amd64/Packages.gz", "google.gz")
download_url("http://linux.dropbox.com/ubuntu/dists/maverick/main/binary-amd64/Packages.gz", "dropbox.gz")
| gpl-3.0 | -3,711,110,002,626,798,000 | 30.407609 | 113 | 0.622599 | false |
trevisanj/f311 | f311/collaboration.py | 1 | 8609 | """
Collaboration-related routines
1) Class Catalog -- resources to retrieve File* classes by different criteria
2) Script utilities: collect scripts (standalone applications) across collaborator packages
"""
from collections import OrderedDict
import importlib
import a99
import os
import glob
import copy
__all__ = [
"COLLABORATORS_C", "COLLABORATORS_S",
"classes_txt", "classes_bin", "classes_sp", "classes_file",
"get_suitable_vis_classes", "get_suitable_vis_list_classes",
"get_scripts_path", "get_programs_dict",
"EXTERNAL_COLLABORATORS"
]
# List of Python packages to be considered "external collaborators"
#
# These packages may contribute with:
# - scripts
# - DataFile subclasses
# - Vis subclasses
EXTERNAL_COLLABORATORS = ["pyfant", "aosss", "convmolworks", "ariastro"]
# List of **classes** collaborators packages (**change to add**)
#
COLLABORATORS_C = ["f311"]+EXTERNAL_COLLABORATORS
# List of **script** collaborator packages to look for scripts (**change to add**)
#
__F311 = ["f311"]+["f311."+x for x in a99.get_subpackages_names(os.path.split(__file__)[0])]
COLLABORATORS_S = __F311+EXTERNAL_COLLABORATORS
# ** **** ****** **** ****** ****
# ** ** ****** ****** ** ** ****** ****** ** ** ****** ******
# **** **** **** **** **** ****
#
# Class catalog-related routines
def get_suitable_vis_classes(obj):
"""Retuns a list of Vis classes that can handle obj."""
ret = []
for class_ in classes_vis():
if isinstance(obj, class_.input_classes):
ret.append(class_)
return ret
def get_suitable_vis_list_classes(objs):
"""Retuns a list of VisList classes that can handle a list of objects."""
from f311 import explorer as ex
ret = []
for class_ in classes_vis():
if isinstance(class_, ex.VisList):
flag_can = True
for obj in objs:
if not isinstance(obj, class_.item_input_classes):
flag_can = False
break
if flag_can:
ret.append(class_)
return ret
def classes_txt():
"""Classes to consider when attempts to load a text file (see load_any_file())"""
if __flag_first:
__setup()
return _classes_txt
def classes_bin():
"""Classes to consider when attempts to load a binary file (see load_any_file())"""
if __flag_first:
__setup()
return _classes_bin
def classes_sp():
"""Classes to consider when attempts to load a spectrum file (see load_spectrum())"""
if __flag_first:
__setup()
return _classes_sp
def classes_file(flag_leaf=False):
"""All known File* classes
Args:
flag_leaf: returns only classes that do not have subclasses
("leaf" nodes as in a class tree graph)
"""
if __flag_first:
__setup()
if not flag_leaf:
return _classes_file
return [cls for cls in _classes_file if cls not in _classes_file_superclass]
def classes_vis():
"""All known Vis* classes"""
if __flag_first:
__setup()
return _classes_vis
def _collect_classes(m):
"""
Adds entries to _classes_*
Args:
m: module object that must contain the following sub-modules: datatypes, vis
"""
from f311 import filetypes as ft
from f311 import explorer as ex
def _extend(classes, newclasses):
"""Filters out classes already present in list.
This shouldn't be necessary, but collaborators may accidentally import already loaded
classes into the datatypes namespace"""
classes.extend([class_ for class_ in newclasses if class_ not in classes])
# classes.extend(newclasses)
file_classes = [class_ for class_ in a99.get_classes_in_module(m, ft.DataFile) if class_.flag_collect]
# Classes to consider when attempts to load a text file (see load_any_file())
_extend(_classes_txt, [class_ for class_ in file_classes if class_.flag_txt])
# Classes to consider when attempts to load a binary file (see load_any_file())
_extend(_classes_bin, [class_ for class_ in file_classes if not class_.flag_txt])
# Adds Classes to consider when attempts to load a spectrum file (see load_spectrum())
_extend(_classes_sp, [class_ for class_ in file_classes if issubclass(class_, ft.FileSpectrum)])
# All kwown File* classes
_extend(_classes_file, file_classes)
# All kwnown Vis* classes
_extend(_classes_vis, a99.get_classes_in_module(m, ex.Vis))
global _classes_file_superclass
_classes_file_superclass = [cls.__bases__[0] for cls in _classes_file]
# # List of classes representing all file formats either read or written
# ====================================================================
_classes_txt = []
_classes_bin = []
_classes_sp = []
_classes_file = []
_classes_file_superclass = [] # superclasses of items in _classes_file
_classes_vis = []
__flag_first = True
__collaborators = OrderedDict()
def __setup():
"""Will be executed in the first time someone calls classes_*() """
global __collaborators, __flag_first
import f311
__flag_first = False
for pkgname in f311.COLLABORATORS_C:
try:
pkg = importlib.import_module(pkgname)
a99.get_python_logger().info("Imported collaborator package '{}'".format(pkgname))
try:
if hasattr(pkg, "_setup_filetypes"):
pkg._setup_filetypes()
else:
_collect_classes(pkg)
__collaborators[pkgname] = pkg
except:
a99.get_python_logger().exception(
"Actually, package '{}' gave error".format(pkgname))
raise
except:
a99.get_python_logger().warning("Failed to import package '{}".format(pkgname))
# raise
# ** **** ****** **** ****** ****
# ** ** ****** ****** ** ** ****** ****** ** ** ****** ******
# **** **** **** **** **** ****
#
# Scripts-related routines
def get_scripts_path(packagename):
"""**Convention** Returns full path to scripts directory"""
return os.path.join(packagename, "scripts")
# {"packagename0": {"exeinfo": [ExeInfo00, ...], "description": description0}, ...}
# keys in COLLABORATORS_S
__programs_dict = None
def _get_programs_dict():
"""
Builds and returns programs dictionary
This will have to import the packages in COLLABORATORS_S in order to get their absolute path.
Returns:
dictionary: {"packagename": [ExeInfo0, ...], ...}
"packagename" examples: "f311.explorer", "numpy"
"""
global __programs_dict
if __programs_dict is not None:
return __programs_dict
d = __programs_dict = OrderedDict()
for pkgname in COLLABORATORS_S:
try:
package = importlib.import_module(pkgname)
except ImportError:
# I think it is better to be silent when a collaborator package is not installed
continue
path_ = os.path.join(os.path.split(package.__file__)[0], "scripts")
bulk = a99.get_exe_info(path_, flag_protected=True)
d[pkgname] = {"description": a99.get_obj_doc0(package), "exeinfo": bulk}
return __programs_dict
def get_programs_dict(pkgname_only=None, flag_protected=False):
"""
Scans COLLABORATORS_S packages for scripts, eventually filtering if arguments passed
Args:
pkgname_only: name of single package within COLLABORATORS_S
flag_protected: include scripts starting with "_"?
Returns:
dictionary: {"packagename0": {"exeinfo": [ExeInfo00, ...], "description": description0}, ...}
"""
___ret = _get_programs_dict()
__ret = ___ret if pkgname_only is None else OrderedDict(((pkgname_only, ___ret[pkgname_only]),))
if flag_protected:
_ret = __ret
else:
_ret = copy.deepcopy(__ret)
for value in _ret.values():
value["exeinfo"] = [exeinfo for exeinfo in value["exeinfo"] if not exeinfo.filename.startswith("_")]
# Removes packages that may have gone out of scripts after filtering
ret = _ret if pkgname_only is None and flag_protected is None else \
OrderedDict(((key, value) for key, value in _ret.items() if len(value["exeinfo"]) > 0))
return ret
| gpl-3.0 | 7,911,966,800,209,908,000 | 30.534799 | 112 | 0.588454 | false |
Onirik79/aaritmud | src/enums/MONTH.py | 1 | 1699 | # -*- coding: utf-8 -*-
"""
Enumerazione dei mesi di un anno rpg.
"""
from src.element import EnumElement, finalize_enumeration
#-------------------------------------------------------------------------------
name = __name__[__name__.rfind(".")+1 : ]
elements = []
cycle_on_last = True
#-------------------------------------------------------------------------------
class MonthElement(EnumElement):
def __init__(self, name, description=""):
super(MonthElement, self).__init__(name, description)
#- Fine Inizializzazione -
#-------------------------------------------------------------------------------
NONE = MonthElement("Nessuno")
ONE = MonthElement("[white]Inverno del Lupo[close]", "dell'[white]Inverno del Lupo[close]")
TWO = MonthElement("[cyan]Gigante di Ghiaccio[close]", "del [cyan]Gigante di Ghiaccio[close]")
THREE = MonthElement("[blue]Arcano Passato[close]", "dell'[blue]Arcano Passato[close]")
FOUR = MonthElement("[green]Natura[close]", "della [green]Natura[close]")
FIVE = MonthElement("[red]Grande Lotta[close]", "della [red]Grande Lotta[close]")
SIX = MonthElement("[red]Dragone[close]", "del [red]Dragone[close]")
SEVEN = MonthElement("[red]Battaglia[close]", "della [red]Battaglia[close]")
EIGHT = MonthElement("[dimgray]Lunghe Ombre[close]", "delle [dimgray]Lunghe Ombre[close]")
NINE = MonthElement("[blue]Antica Oscurità[close]", "dell'[blue]Antica Oscurità[close]")
TEN = MonthElement("[dimgray]Grande Male[close]", "del [dimgray]Grande Male[close]")
#-------------------------------------------------------------------------------
finalize_enumeration(__name__)
| gpl-2.0 | 2,097,785,702,521,239,300 | 40.341463 | 96 | 0.533923 | false |
chrsrds/scikit-learn | sklearn/datasets/tests/test_openml.py | 1 | 45496 | """Test the openml loader.
"""
import gzip
import json
import numpy as np
import os
import re
import scipy.sparse
import sklearn
import pytest
from sklearn import config_context
from sklearn.datasets import fetch_openml
from sklearn.datasets.openml import (_open_openml_url,
_get_data_description_by_id,
_download_data_arff,
_get_local_path,
_retry_with_clean_cache,
_feature_to_dtype)
from sklearn.utils.testing import (assert_warns_message,
assert_raise_message)
from sklearn.utils import is_scalar_nan
from sklearn.utils.testing import assert_allclose, assert_array_equal
from urllib.error import HTTPError
from sklearn.datasets.tests.test_common import check_return_X_y
from functools import partial
# Directory containing this test module; the mocked OpenML responses live
# under ``<currdir>/data/openml/<data_id>/``.
currdir = os.path.dirname(os.path.abspath(__file__))
# if True, urlopen will be monkey patched to only use local files
test_offline = True
def _test_features_list(data_id):
    # XXX Test is intended to verify/ensure correct decoding behavior
    # Not usable with sparse data or datasets that have columns marked as
    # {row_identifier, ignore}
    def _decoded_column(bunch, column_index):
        column_name = bunch.feature_names[column_index]
        raw_values = bunch.data[:, column_index]
        if column_name not in bunch.categories:
            # non-nominal attribute: values are already in final form
            return raw_values
        # XXX: This would be faster with np.take, although it does not
        # handle missing values fast (also not with mode='wrap')
        category_values = bunch.categories[column_name]
        decoded = [None if is_scalar_nan(code) else category_values[int(code)]
                   for code in raw_values]
        return np.array(decoded, dtype='O')

    data_bunch = fetch_openml(data_id=data_id, cache=False, target_column=None)

    # also obtain decoded arff
    data_description = _get_data_description_by_id(data_id, None)
    sparse = data_description['format'].lower() == 'sparse_arff'
    if sparse:
        raise ValueError('This test is not intended for sparse data, to keep '
                         'code relatively simple')
    data_arff = _download_data_arff(data_description['file_id'],
                                    sparse, None, False)
    data_downloaded = np.array(list(data_arff['data']), dtype='O')

    for column_index in range(len(data_bunch.feature_names)):
        # XXX: Test per column, as this makes it easier to avoid problems with
        # missing values
        np.testing.assert_array_equal(
            data_downloaded[:, column_index],
            _decoded_column(data_bunch, column_index))
def _fetch_dataset_from_openml(data_id, data_name, data_version,
                               target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               expected_data_dtype, expected_target_dtype,
                               expect_sparse, compare_default_target):
    """Fetch a dataset from OpenML in several ways and validate the result.

    The dataset is retrieved by (name, version), by name alone, and by
    data id, using the ``fetch_openml`` function, and various checks on
    the validity of the returned bunch are performed.  Note that this
    function can be mocked (by invoking
    ``_monkey_patch_webbased_functions`` before invoking this function).
    """
    data_by_name_id = fetch_openml(name=data_name, version=data_version,
                                   cache=False)
    assert int(data_by_name_id.details['id']) == data_id

    # Please note that cache=False is crucial, as the monkey patched files are
    # not consistent with reality
    fetch_openml(name=data_name, cache=False)
    # without specifying the version, there is no guarantee that the data id
    # will be the same

    # fetch with dataset id
    data_by_id = fetch_openml(data_id=data_id, cache=False,
                              target_column=target_column)
    assert data_by_id.details['name'] == data_name
    assert data_by_id.data.shape == (expected_observations, expected_features)
    if isinstance(target_column, str):
        # single target, so target is vector
        assert data_by_id.target.shape == (expected_observations, )
    elif isinstance(target_column, list):
        # multi target, so target is array
        assert data_by_id.target.shape == (expected_observations,
                                           len(target_column))
    # Bug fix: compare against the dtype the caller expects instead of
    # unconditionally asserting np.float64, which left the
    # ``expected_data_dtype`` parameter completely unused.
    assert data_by_id.data.dtype == expected_data_dtype
    assert data_by_id.target.dtype == expected_target_dtype
    assert len(data_by_id.feature_names) == expected_features
    for feature in data_by_id.feature_names:
        assert isinstance(feature, str)

    # TODO: pass in a list of expected nominal features
    for feature, categories in data_by_id.categories.items():
        feature_idx = data_by_id.feature_names.index(feature)
        values = np.unique(data_by_id.data[:, feature_idx])
        values = values[np.isfinite(values)]
        # encoded nominal values must index into the category list
        assert set(values) <= set(range(len(categories)))

    if compare_default_target:
        # check whether the data by id and data by id target are equal
        data_by_id_default = fetch_openml(data_id=data_id, cache=False)
        if data_by_id.data.dtype == np.float64:
            np.testing.assert_allclose(data_by_id.data,
                                       data_by_id_default.data)
        else:
            assert np.array_equal(data_by_id.data, data_by_id_default.data)
        if data_by_id.target.dtype == np.float64:
            np.testing.assert_allclose(data_by_id.target,
                                       data_by_id_default.target)
        else:
            assert np.array_equal(data_by_id.target, data_by_id_default.target)

    if expect_sparse:
        assert isinstance(data_by_id.data, scipy.sparse.csr_matrix)
    else:
        assert isinstance(data_by_id.data, np.ndarray)
        # np.isnan doesn't work on CSR matrix
        assert (np.count_nonzero(np.isnan(data_by_id.data)) ==
                expected_missing)

    # test return_X_y option
    fetch_func = partial(fetch_openml, data_id=data_id, cache=False,
                         target_column=target_column)
    check_return_X_y(data_by_id, fetch_func)
    return data_by_id
def _monkey_patch_webbased_functions(context,
data_id,
gzip_response):
# monkey patches the urlopen function. Important note: Do NOT use this
# in combination with a regular cache directory, as the files that are
# stored as cache should not be mixed up with real openml datasets
url_prefix_data_description = "https://openml.org/api/v1/json/data/"
url_prefix_data_features = "https://openml.org/api/v1/json/data/features/"
url_prefix_download_data = "https://openml.org/data/v1/"
url_prefix_data_list = "https://openml.org/api/v1/json/data/list/"
path_suffix = '.gz'
read_fn = gzip.open
class MockHTTPResponse:
def __init__(self, data, is_gzip):
self.data = data
self.is_gzip = is_gzip
def read(self, amt=-1):
return self.data.read(amt)
def tell(self):
return self.data.tell()
def seek(self, pos, whence=0):
return self.data.seek(pos, whence)
def close(self):
self.data.close()
def info(self):
if self.is_gzip:
return {'Content-Encoding': 'gzip'}
return {}
def _file_name(url, suffix):
return (re.sub(r'\W', '-', url[len("https://openml.org/"):])
+ suffix + path_suffix)
def _mock_urlopen_data_description(url, has_gzip_header):
assert url.startswith(url_prefix_data_description)
path = os.path.join(currdir, 'data', 'openml', str(data_id),
_file_name(url, '.json'))
if has_gzip_header and gzip_response:
fp = open(path, 'rb')
return MockHTTPResponse(fp, True)
else:
fp = read_fn(path, 'rb')
return MockHTTPResponse(fp, False)
def _mock_urlopen_data_features(url, has_gzip_header):
assert url.startswith(url_prefix_data_features)
path = os.path.join(currdir, 'data', 'openml', str(data_id),
_file_name(url, '.json'))
if has_gzip_header and gzip_response:
fp = open(path, 'rb')
return MockHTTPResponse(fp, True)
else:
fp = read_fn(path, 'rb')
return MockHTTPResponse(fp, False)
def _mock_urlopen_download_data(url, has_gzip_header):
assert (url.startswith(url_prefix_download_data))
path = os.path.join(currdir, 'data', 'openml', str(data_id),
_file_name(url, '.arff'))
if has_gzip_header and gzip_response:
fp = open(path, 'rb')
return MockHTTPResponse(fp, True)
else:
fp = read_fn(path, 'rb')
return MockHTTPResponse(fp, False)
def _mock_urlopen_data_list(url, has_gzip_header):
assert url.startswith(url_prefix_data_list)
json_file_path = os.path.join(currdir, 'data', 'openml',
str(data_id), _file_name(url, '.json'))
# load the file itself, to simulate a http error
json_data = json.loads(read_fn(json_file_path, 'rb').
read().decode('utf-8'))
if 'error' in json_data:
raise HTTPError(url=None, code=412,
msg='Simulated mock error',
hdrs=None, fp=None)
if has_gzip_header:
fp = open(json_file_path, 'rb')
return MockHTTPResponse(fp, True)
else:
fp = read_fn(json_file_path, 'rb')
return MockHTTPResponse(fp, False)
def _mock_urlopen(request):
    """Dispatch a mocked urlopen request to the handler for its URL prefix."""
    url = request.get_full_url()
    has_gzip_header = request.get_header('Accept-encoding') == "gzip"
    # Same precedence as the original if/elif chain: data-list first.
    handlers = (
        (url_prefix_data_list, _mock_urlopen_data_list),
        (url_prefix_data_features, _mock_urlopen_data_features),
        (url_prefix_download_data, _mock_urlopen_download_data),
        (url_prefix_data_description, _mock_urlopen_data_description),
    )
    for prefix, handler in handlers:
        if url.startswith(prefix):
            return handler(url, has_gzip_header)
    raise ValueError('Unknown mocking URL pattern: %s' % url)
# XXX: Global variable
if test_offline:
context.setattr(sklearn.datasets.openml, 'urlopen', _mock_urlopen)
@pytest.mark.parametrize('feature, expected_dtype', [
    ({'data_type': 'string', 'number_of_missing_values': '0'}, object),
    ({'data_type': 'string', 'number_of_missing_values': '1'}, object),
    ({'data_type': 'numeric', 'number_of_missing_values': '0'}, np.float64),
    ({'data_type': 'numeric', 'number_of_missing_values': '1'}, np.float64),
    ({'data_type': 'real', 'number_of_missing_values': '0'}, np.float64),
    ({'data_type': 'real', 'number_of_missing_values': '1'}, np.float64),
    ({'data_type': 'integer', 'number_of_missing_values': '0'}, np.int64),
    ({'data_type': 'integer', 'number_of_missing_values': '1'}, np.float64),
    ({'data_type': 'nominal', 'number_of_missing_values': '0'}, 'category'),
    ({'data_type': 'nominal', 'number_of_missing_values': '1'}, 'category'),
])
def test_feature_to_dtype(feature, expected_dtype):
    """Map OpenML feature descriptions to pandas dtypes.

    Note integer columns with missing values promote to float64, since
    NumPy integer dtypes cannot represent NaN.
    """
    assert _feature_to_dtype(feature) == expected_dtype
@pytest.mark.parametrize('feature', [
    {'data_type': 'datatime', 'number_of_missing_values': '0'}
])
def test_feature_to_dtype_error(feature):
    """An unknown data_type ('datatime' is deliberately bogus) must raise."""
    msg = 'Unsupported feature: {}'.format(feature)
    with pytest.raises(ValueError, match=msg):
        _feature_to_dtype(feature)
def test_fetch_openml_iris_pandas(monkeypatch):
    """Iris (id 61) with as_frame=True: dtypes, shapes and column names."""
    # classification dataset with numeric only columns
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype
    data_id = 61
    data_shape = (150, 4)
    target_shape = (150, )
    frame_shape = (150, 5)

    target_dtype = CategoricalDtype(['Iris-setosa', 'Iris-versicolor',
                                     'Iris-virginica'])
    data_dtypes = [np.float64] * 4
    data_names = ['sepallength', 'sepalwidth', 'petallength', 'petalwidth']
    target_name = 'class'

    # network access must be mocked before calling fetch_openml
    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert np.all(data.dtypes == data_dtypes)
    assert data.shape == data_shape
    assert np.all(data.columns == data_names)
    assert np.all(bunch.feature_names == data_names)

    assert isinstance(target, pd.Series)
    assert target.dtype == target_dtype
    assert target.shape == target_shape
    assert target.name == target_name
    assert target.index.is_unique

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
    assert np.all(frame.dtypes == data_dtypes + [target_dtype])
    assert frame.index.is_unique
def test_fetch_openml_iris_pandas_equal_to_no_frame(monkeypatch):
    """as_frame=True must expose the same values as as_frame=False."""
    # as_frame = True returns the same underlying data as as_frame = False
    pytest.importorskip('pandas')
    data_id = 61

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    frame_bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    frame_data = frame_bunch.data
    frame_target = frame_bunch.target

    norm_bunch = fetch_openml(data_id=data_id, as_frame=False, cache=False)
    norm_data = norm_bunch.data
    norm_target = norm_bunch.target

    assert_allclose(norm_data, frame_data)
    assert_array_equal(norm_target, frame_target)
def test_fetch_openml_iris_multitarget_pandas(monkeypatch):
    """Iris with two numeric target columns: target becomes a DataFrame."""
    # classification dataset with numeric only columns
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype
    data_id = 61
    data_shape = (150, 3)
    target_shape = (150, 2)
    frame_shape = (150, 5)
    target_column = ['petalwidth', 'petallength']

    cat_dtype = CategoricalDtype(['Iris-setosa', 'Iris-versicolor',
                                  'Iris-virginica'])
    data_dtypes = [np.float64, np.float64] + [cat_dtype]
    data_names = ['sepallength', 'sepalwidth', 'class']
    target_dtypes = [np.float64, np.float64]
    target_names = ['petalwidth', 'petallength']

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False,
                         target_column=target_column)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert np.all(data.dtypes == data_dtypes)
    assert data.shape == data_shape
    assert np.all(data.columns == data_names)
    assert np.all(bunch.feature_names == data_names)

    assert isinstance(target, pd.DataFrame)
    assert np.all(target.dtypes == target_dtypes)
    assert target.shape == target_shape
    assert np.all(target.columns == target_names)

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
    assert np.all(frame.dtypes == [np.float64] * 4 + [cat_dtype])
def test_fetch_openml_anneal_pandas(monkeypatch):
    """Anneal (id 2) as_frame=True: mixed categorical/float column counts."""
    # classification dataset with numeric and categorical columns
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    data_id = 2
    target_column = 'class'
    # Fixture holds a truncated copy of the dataset (11 rows).
    data_shape = (11, 38)
    target_shape = (11,)
    frame_shape = (11, 39)
    expected_data_categories = 32
    expected_data_floats = 6

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    bunch = fetch_openml(data_id=data_id, as_frame=True,
                         target_column=target_column, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape
    n_categories = len([dtype for dtype in data.dtypes
                        if isinstance(dtype, CategoricalDtype)])
    n_floats = len([dtype for dtype in data.dtypes if dtype.kind == 'f'])
    assert expected_data_categories == n_categories
    assert expected_data_floats == n_floats

    assert isinstance(target, pd.Series)
    assert target.shape == target_shape
    assert isinstance(target.dtype, CategoricalDtype)

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
def test_fetch_openml_cpu_pandas(monkeypatch):
    """CPU (id 561) as_frame=True: regression target with one categorical."""
    # regression dataset with numeric and categorical columns
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype
    data_id = 561
    data_shape = (209, 7)
    target_shape = (209, )
    frame_shape = (209, 8)

    cat_dtype = CategoricalDtype(['adviser', 'amdahl', 'apollo', 'basf',
                                  'bti', 'burroughs', 'c.r.d', 'cdc',
                                  'cambex', 'dec', 'dg', 'formation',
                                  'four-phase', 'gould', 'hp', 'harris',
                                  'honeywell', 'ibm', 'ipl', 'magnuson',
                                  'microdata', 'nas', 'ncr', 'nixdorf',
                                  'perkin-elmer', 'prime', 'siemens',
                                  'sperry', 'sratus', 'wang'])
    data_dtypes = [cat_dtype] + [np.float64] * 6
    feature_names = ['vendor', 'MYCT', 'MMIN', 'MMAX', 'CACH',
                     'CHMIN', 'CHMAX']
    target_name = 'class'

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape
    assert np.all(data.dtypes == data_dtypes)
    assert np.all(data.columns == feature_names)
    assert np.all(bunch.feature_names == feature_names)

    assert isinstance(target, pd.Series)
    assert target.shape == target_shape
    assert target.dtype == np.float64
    assert target.name == target_name

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
def test_fetch_openml_australian_pandas_error_sparse(monkeypatch):
    """as_frame=True must refuse sparse datasets (Australian, id 292)."""
    data_id = 292

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    msg = 'Cannot return dataframe with sparse data'
    with pytest.raises(ValueError, match=msg):
        fetch_openml(data_id=data_id, as_frame=True, cache=False)
def test_convert_arff_data_dataframe_warning_low_memory_pandas(monkeypatch):
    """A tiny working_memory budget triggers the chunking UserWarning."""
    pytest.importorskip('pandas')

    data_id = 1119
    _monkey_patch_webbased_functions(monkeypatch, data_id, True)

    msg = 'Could not adhere to working_memory config.'
    with pytest.warns(UserWarning, match=msg):
        # 1e-6 MiB forces the ARFF converter below any feasible chunk size
        with config_context(working_memory=1e-6):
            fetch_openml(data_id=data_id, as_frame=True, cache=False)
def test_fetch_openml_adultcensus_pandas_return_X_y(monkeypatch):
    """return_X_y=True with as_frame=True yields (DataFrame, Series)."""
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    data_id = 1119
    # Fixture holds a truncated copy of the dataset (10 rows).
    data_shape = (10, 14)
    target_shape = (10, )

    expected_data_categories = 8
    expected_data_floats = 6
    target_column = 'class'

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)
    X, y = fetch_openml(data_id=data_id, as_frame=True, cache=False,
                        return_X_y=True)
    assert isinstance(X, pd.DataFrame)
    assert X.shape == data_shape
    n_categories = len([dtype for dtype in X.dtypes
                        if isinstance(dtype, CategoricalDtype)])
    n_floats = len([dtype for dtype in X.dtypes if dtype.kind == 'f'])
    assert expected_data_categories == n_categories
    assert expected_data_floats == n_floats

    assert isinstance(y, pd.Series)
    assert y.shape == target_shape
    assert y.name == target_column
def test_fetch_openml_adultcensus_pandas(monkeypatch):
    """Adult-census (id 1119) as_frame=True; regression test for #12329."""
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    # Check because of the numeric row attribute (issue #12329)
    data_id = 1119
    data_shape = (10, 14)
    target_shape = (10, )
    frame_shape = (10, 15)

    expected_data_categories = 8
    expected_data_floats = 6
    target_column = 'class'

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)
    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape
    n_categories = len([dtype for dtype in data.dtypes
                        if isinstance(dtype, CategoricalDtype)])
    n_floats = len([dtype for dtype in data.dtypes if dtype.kind == 'f'])
    assert expected_data_categories == n_categories
    assert expected_data_floats == n_floats

    assert isinstance(target, pd.Series)
    assert target.shape == target_shape
    assert target.name == target_column

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
def test_fetch_openml_miceprotein_pandas(monkeypatch):
    """MiceProtein (id 40966): row-id and ignore attributes must be dropped."""
    # JvR: very important check, as this dataset defined several row ids
    # and ignore attributes. Note that data_features json has 82 attributes,
    # and row id (1), ignore attributes (3) have been removed.
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    data_id = 40966
    data_shape = (7, 77)
    target_shape = (7, )
    frame_shape = (7, 78)
    target_column = 'class'
    frame_n_categories = 1
    frame_n_floats = 77

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)
    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape
    assert np.all(data.dtypes == np.float64)

    assert isinstance(target, pd.Series)
    assert isinstance(target.dtype, CategoricalDtype)
    assert target.shape == target_shape
    assert target.name == target_column

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
    n_categories = len([dtype for dtype in frame.dtypes
                        if isinstance(dtype, CategoricalDtype)])
    n_floats = len([dtype for dtype in frame.dtypes if dtype.kind == 'f'])
    assert frame_n_categories == n_categories
    assert frame_n_floats == n_floats
def test_fetch_openml_emotions_pandas(monkeypatch):
    """Emotions (id 40589): natively multi-target dataset with as_frame."""
    # classification dataset with multiple targets (natively)
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    data_id = 40589
    # column names below carry the dataset's own spelling ('suprised', ...)
    target_column = ['amazed.suprised', 'happy.pleased', 'relaxing.calm',
                     'quiet.still', 'sad.lonely', 'angry.aggresive']
    data_shape = (13, 72)
    target_shape = (13, 6)
    frame_shape = (13, 78)
    expected_frame_categories = 6
    expected_frame_floats = 72

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)
    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False,
                         target_column=target_column)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape

    assert isinstance(target, pd.DataFrame)
    assert target.shape == target_shape
    assert np.all(target.columns == target_column)

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
    n_categories = len([dtype for dtype in frame.dtypes
                        if isinstance(dtype, CategoricalDtype)])
    n_floats = len([dtype for dtype in frame.dtypes if dtype.kind == 'f'])
    assert expected_frame_categories == n_categories
    assert expected_frame_floats == n_floats
def test_fetch_openml_titanic_pandas(monkeypatch):
    """Titanic (id 40945): STRING attributes become object-dtype columns."""
    # dataset with strings
    pd = pytest.importorskip('pandas')
    CategoricalDtype = pd.api.types.CategoricalDtype

    data_id = 40945
    data_shape = (1309, 13)
    target_shape = (1309, )
    frame_shape = (1309, 14)
    name_to_dtype = {
        'pclass': np.float64,
        'name': object,
        'sex': CategoricalDtype(['female', 'male']),
        'age': np.float64,
        'sibsp': np.float64,
        'parch': np.float64,
        'ticket': object,
        'fare': np.float64,
        'cabin': object,
        'embarked': CategoricalDtype(['C', 'Q', 'S']),
        'boat': object,
        'body': np.float64,
        'home.dest': object,
        'survived': CategoricalDtype(['0', '1'])
    }

    frame_columns = ['pclass', 'survived', 'name', 'sex', 'age', 'sibsp',
                     'parch', 'ticket', 'fare', 'cabin', 'embarked',
                     'boat', 'body', 'home.dest']
    frame_dtypes = [name_to_dtype[col] for col in frame_columns]
    feature_names = ['pclass', 'name', 'sex', 'age', 'sibsp',
                     'parch', 'ticket', 'fare', 'cabin', 'embarked',
                     'boat', 'body', 'home.dest']
    target_name = 'survived'

    _monkey_patch_webbased_functions(monkeypatch, data_id, True)
    bunch = fetch_openml(data_id=data_id, as_frame=True, cache=False)
    data = bunch.data
    target = bunch.target
    frame = bunch.frame

    assert isinstance(data, pd.DataFrame)
    assert data.shape == data_shape
    assert np.all(data.columns == feature_names)

    assert isinstance(target, pd.Series)
    assert target.shape == target_shape
    assert target.name == target_name
    assert target.dtype == name_to_dtype[target_name]

    assert isinstance(frame, pd.DataFrame)
    assert frame.shape == frame_shape
    assert np.all(frame.dtypes == frame_dtypes)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_iris(monkeypatch, gzip_response):
    """Fetching iris by name warns about multiple active versions."""
    # classification dataset with numeric only columns
    data_id = 61
    data_name = 'iris'
    data_version = 1
    target_column = 'class'
    expected_observations = 150
    expected_features = 4
    expected_missing = 0

    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_warns_message(
        UserWarning,
        "Multiple active versions of the dataset matching the name"
        " iris exist. Versions may be fundamentally different, "
        "returning version 1.",
        _fetch_dataset_from_openml,
        **{'data_id': data_id, 'data_name': data_name,
           'data_version': data_version,
           'target_column': target_column,
           'expected_observations': expected_observations,
           'expected_features': expected_features,
           'expected_missing': expected_missing,
           'expect_sparse': False,
           'expected_data_dtype': np.float64,
           'expected_target_dtype': object,
           'compare_default_target': True}
    )
def test_decode_iris(monkeypatch):
    """Round-trip the iris (id 61) feature list through the decoder."""
    iris_id = 61
    _monkey_patch_webbased_functions(monkeypatch, iris_id, False)
    _test_features_list(iris_id)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_iris_multitarget(monkeypatch, gzip_response):
    """Iris with two numeric target columns in the array (non-frame) path."""
    # classification dataset with numeric only columns
    data_id = 61
    data_name = 'iris'
    data_version = 1
    target_column = ['sepallength', 'sepalwidth']
    expected_observations = 150
    expected_features = 3
    expected_missing = 0

    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               object, np.float64, expect_sparse=False,
                               compare_default_target=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_anneal(monkeypatch, gzip_response):
    """Anneal (id 2) in the array path: mixed dtypes, missing values."""
    # classification dataset with numeric and categorical columns
    data_id = 2
    data_name = 'anneal'
    data_version = 1
    target_column = 'class'
    # Not all original instances included for space reasons
    expected_observations = 11
    expected_features = 38
    expected_missing = 267
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               object, object, expect_sparse=False,
                               compare_default_target=True)
def test_decode_anneal(monkeypatch):
    """Round-trip the anneal (id 2) feature list through the decoder."""
    anneal_id = 2
    _monkey_patch_webbased_functions(monkeypatch, anneal_id, False)
    _test_features_list(anneal_id)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_anneal_multitarget(monkeypatch, gzip_response):
    """Anneal with three homogeneous (nominal) target columns."""
    # classification dataset with numeric and categorical columns
    data_id = 2
    data_name = 'anneal'
    data_version = 1
    target_column = ['class', 'product-type', 'shape']
    # Not all original instances included for space reasons
    expected_observations = 11
    expected_features = 36
    expected_missing = 267
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               object, object, expect_sparse=False,
                               compare_default_target=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_cpu(monkeypatch, gzip_response):
    """CPU (id 561) in the array path: regression with a nominal feature."""
    # regression dataset with numeric and categorical columns
    data_id = 561
    data_name = 'cpu'
    data_version = 1
    target_column = 'class'
    expected_observations = 209
    expected_features = 7
    expected_missing = 0
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               object, np.float64, expect_sparse=False,
                               compare_default_target=True)
def test_decode_cpu(monkeypatch):
    """Round-trip the cpu (id 561) feature list through the decoder."""
    cpu_id = 561
    _monkey_patch_webbased_functions(monkeypatch, cpu_id, False)
    _test_features_list(cpu_id)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_australian(monkeypatch, gzip_response):
    """Australian (id 292): sparse ARFF plus the inactive-dataset warning."""
    # sparse dataset
    # Australian is the only sparse dataset that is reasonably small
    # as it is inactive, we need to catch the warning. Due to mocking
    # framework, it is not deactivated in our tests
    data_id = 292
    data_name = 'Australian'
    data_version = 1
    target_column = 'Y'
    # Not all original instances included for space reasons
    expected_observations = 85
    expected_features = 14
    expected_missing = 0
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_warns_message(
        UserWarning,
        "Version 1 of dataset Australian is inactive,",
        _fetch_dataset_from_openml,
        **{'data_id': data_id, 'data_name': data_name,
           'data_version': data_version,
           'target_column': target_column,
           'expected_observations': expected_observations,
           'expected_features': expected_features,
           'expected_missing': expected_missing,
           'expect_sparse': True,
           'expected_data_dtype': np.float64,
           'expected_target_dtype': object,
           'compare_default_target': False}  # numpy specific check
    )
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_adultcensus(monkeypatch, gzip_response):
    """Adult-census (id 1119) in the array path; regression for #12329."""
    # Check because of the numeric row attribute (issue #12329)
    data_id = 1119
    data_name = 'adult-census'
    data_version = 1
    target_column = 'class'
    # Not all original instances included for space reasons
    expected_observations = 10
    expected_features = 14
    expected_missing = 0
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               np.float64, object, expect_sparse=False,
                               compare_default_target=True)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_miceprotein(monkeypatch, gzip_response):
    """MiceProtein (id 40966) array path: row-id/ignore columns removed."""
    # JvR: very important check, as this dataset defined several row ids
    # and ignore attributes. Note that data_features json has 82 attributes,
    # and row id (1), ignore attributes (3) have been removed (and target is
    # stored in data.target)
    data_id = 40966
    data_name = 'MiceProtein'
    data_version = 4
    target_column = 'class'
    # Not all original instances included for space reasons
    expected_observations = 7
    expected_features = 77
    expected_missing = 7
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               np.float64, object, expect_sparse=False,
                               compare_default_target=True)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_emotions(monkeypatch, gzip_response):
    """Emotions (id 40589) array path: natively multi-target dataset."""
    # classification dataset with multiple targets (natively)
    data_id = 40589
    data_name = 'emotions'
    data_version = 3
    # column names below carry the dataset's own spelling ('suprised', ...)
    target_column = ['amazed.suprised', 'happy.pleased', 'relaxing.calm',
                     'quiet.still', 'sad.lonely', 'angry.aggresive']
    expected_observations = 13
    expected_features = 72
    expected_missing = 0
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)

    _fetch_dataset_from_openml(data_id, data_name, data_version, target_column,
                               expected_observations, expected_features,
                               expected_missing,
                               np.float64, object, expect_sparse=False,
                               compare_default_target=True)
def test_decode_emotions(monkeypatch):
    """Round-trip the emotions (id 40589) feature list through the decoder."""
    emotions_id = 40589
    _monkey_patch_webbased_functions(monkeypatch, emotions_id, False)
    _test_features_list(emotions_id)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_open_openml_url_cache(monkeypatch, gzip_response, tmpdir):
    """A second _open_openml_url call must be served from the local cache."""
    data_id = 61

    _monkey_patch_webbased_functions(
        monkeypatch, data_id, gzip_response)
    openml_path = sklearn.datasets.openml._DATA_FILE.format(data_id)
    cache_directory = str(tmpdir.mkdir('scikit_learn_data'))
    # first fill the cache
    response1 = _open_openml_url(openml_path, cache_directory)
    # assert file exists
    location = _get_local_path(openml_path, cache_directory)
    assert os.path.isfile(location)
    # redownload, to utilize cache
    response2 = _open_openml_url(openml_path, cache_directory)
    assert response1.read() == response2.read()
@pytest.mark.parametrize('gzip_response', [True, False])
@pytest.mark.parametrize('write_to_disk', [True, False])
def test_open_openml_url_unlinks_local_path(
        monkeypatch, gzip_response, tmpdir, write_to_disk):
    """A failed download must not leave a (possibly partial) cache file."""
    data_id = 61
    openml_path = sklearn.datasets.openml._DATA_FILE.format(data_id)
    cache_directory = str(tmpdir.mkdir('scikit_learn_data'))
    location = _get_local_path(openml_path, cache_directory)

    def _mock_urlopen(request):
        # optionally simulate a partial write before the failure
        if write_to_disk:
            with open(location, "w") as f:
                f.write("")
        raise ValueError("Invalid request")

    monkeypatch.setattr(sklearn.datasets.openml, 'urlopen', _mock_urlopen)

    with pytest.raises(ValueError, match="Invalid request"):
        _open_openml_url(openml_path, cache_directory)

    assert not os.path.exists(location)
def test_retry_with_clean_cache(tmpdir):
    """The decorator retries after wiping a corrupted cache entry."""
    data_id = 61
    openml_path = sklearn.datasets.openml._DATA_FILE.format(data_id)
    cache_directory = str(tmpdir.mkdir('scikit_learn_data'))
    location = _get_local_path(openml_path, cache_directory)
    os.makedirs(os.path.dirname(location))

    with open(location, 'w') as f:
        f.write("")

    @_retry_with_clean_cache(openml_path, cache_directory)
    def _load_data():
        # The first call will raise an error since location exists
        if os.path.exists(location):
            raise Exception("File exist!")
        return 1

    warn_msg = "Invalid cache, redownloading file"
    with pytest.warns(RuntimeWarning, match=warn_msg):
        result = _load_data()
    # the retry (after cache removal) succeeds
    assert result == 1
def test_retry_with_clean_cache_http_error(tmpdir):
    """HTTP errors must propagate through the retry decorator unchanged."""
    data_id = 61
    openml_path = sklearn.datasets.openml._DATA_FILE.format(data_id)
    cache_directory = str(tmpdir.mkdir('scikit_learn_data'))

    @_retry_with_clean_cache(openml_path, cache_directory)
    def _load_data():
        raise HTTPError(url=None, code=412,
                        msg='Simulated mock error',
                        hdrs=None, fp=None)

    error_msg = "Simulated mock error"
    with pytest.raises(HTTPError, match=error_msg):
        _load_data()
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_cache(monkeypatch, gzip_response, tmpdir):
    """A cached fetch must not touch the network at all."""
    def _mock_urlopen_raise(request):
        raise ValueError('This mechanism intends to test correct cache'
                         'handling. As such, urlopen should never be '
                         'accessed. URL: %s' % request.get_full_url())
    data_id = 2
    cache_directory = str(tmpdir.mkdir('scikit_learn_data'))
    _monkey_patch_webbased_functions(
        monkeypatch, data_id, gzip_response)
    X_fetched, y_fetched = fetch_openml(data_id=data_id, cache=True,
                                        data_home=cache_directory,
                                        return_X_y=True)

    # after the first fetch any network access becomes a hard failure
    monkeypatch.setattr(sklearn.datasets.openml, 'urlopen',
                        _mock_urlopen_raise)

    X_cached, y_cached = fetch_openml(data_id=data_id, cache=True,
                                      data_home=cache_directory,
                                      return_X_y=True)
    np.testing.assert_array_equal(X_fetched, X_cached)
    np.testing.assert_array_equal(y_fetched, y_cached)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_notarget(monkeypatch, gzip_response):
    """target_column=None keeps all columns in data and yields no target."""
    data_id = 61
    target_column = None
    expected_observations = 150
    expected_features = 5

    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    data = fetch_openml(data_id=data_id, target_column=target_column,
                        cache=False)
    assert data.data.shape == (expected_observations, expected_features)
    assert data.target is None
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_inactive(monkeypatch, gzip_response):
    """Fetching an inactive dataset works (by id and by name+version) but warns."""
    # fetch inactive dataset by id
    data_id = 40675
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    glas2 = assert_warns_message(
        UserWarning, "Version 1 of dataset glass2 is inactive,", fetch_openml,
        data_id=data_id, cache=False)
    # fetch inactive dataset by name and version
    assert glas2.data.shape == (163, 9)
    glas2_by_version = assert_warns_message(
        UserWarning, "Version 1 of dataset glass2 is inactive,", fetch_openml,
        data_id=None, name="glass2", version=1, cache=False)
    assert int(glas2_by_version.details['id']) == data_id
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_nonexiting(monkeypatch, gzip_response):
    """Searching by name with no active version raises ValueError."""
    # there is no active version of glass2
    data_id = 40675
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    # Note that we only want to search by name (not data id)
    assert_raise_message(ValueError, "No active dataset glass2 found",
                         fetch_openml, name='glass2', cache=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_raises_illegal_multitarget(monkeypatch, gzip_response):
    """Mixing numeric and nominal target columns must be rejected."""
    data_id = 61
    targets = ['sepalwidth', 'class']
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    # Note that we only want to search by name (not data id)
    assert_raise_message(ValueError,
                         "Can only handle homogeneous multi-target datasets,",
                         fetch_openml, data_id=data_id,
                         target_column=targets, cache=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_warn_ignore_attribute(monkeypatch, gzip_response):
    """Selecting a row-id or ignore column as target emits a UserWarning."""
    data_id = 40966
    expected_row_id_msg = "target_column={} has flag is_row_identifier."
    expected_ignore_msg = "target_column={} has flag is_ignore."
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    # single column test
    assert_warns_message(UserWarning, expected_row_id_msg.format('MouseID'),
                         fetch_openml, data_id=data_id,
                         target_column='MouseID',
                         cache=False)
    assert_warns_message(UserWarning, expected_ignore_msg.format('Genotype'),
                         fetch_openml, data_id=data_id,
                         target_column='Genotype',
                         cache=False)
    # multi column test
    assert_warns_message(UserWarning, expected_row_id_msg.format('MouseID'),
                         fetch_openml, data_id=data_id,
                         target_column=['MouseID', 'class'],
                         cache=False)
    assert_warns_message(UserWarning, expected_ignore_msg.format('Genotype'),
                         fetch_openml, data_id=data_id,
                         target_column=['Genotype', 'class'],
                         cache=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_string_attribute_without_dataframe(monkeypatch, gzip_response):
    """STRING attributes require as_frame=True; array path must raise."""
    data_id = 40945
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    # single column test
    assert_raise_message(ValueError,
                         ('STRING attributes are not supported for '
                          'array representation. Try as_frame=True'),
                         fetch_openml, data_id=data_id, cache=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_dataset_with_openml_error(monkeypatch, gzip_response):
    """A server-side 'error' status on the dataset surfaces as a warning."""
    data_id = 1
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_warns_message(
        UserWarning,
        "OpenML registered a problem with the dataset. It might be unusable. "
        "Error:",
        fetch_openml, data_id=data_id, cache=False
    )
@pytest.mark.parametrize('gzip_response', [True, False])
def test_dataset_with_openml_warning(monkeypatch, gzip_response):
    """A server-side 'warning' status on the dataset surfaces as a warning."""
    data_id = 3
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_warns_message(
        UserWarning,
        "OpenML raised a warning on the dataset. It might be unusable. "
        "Warning:",
        fetch_openml, data_id=data_id, cache=False
    )
@pytest.mark.parametrize('gzip_response', [True, False])
def test_illegal_column(monkeypatch, gzip_response):
    """An unknown target column name raises KeyError (single and multi)."""
    data_id = 61
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_raise_message(KeyError, "Could not find target_column=",
                         fetch_openml, data_id=data_id,
                         target_column='undefined', cache=False)

    assert_raise_message(KeyError, "Could not find target_column=",
                         fetch_openml, data_id=data_id,
                         target_column=['undefined', 'class'],
                         cache=False)
@pytest.mark.parametrize('gzip_response', [True, False])
def test_fetch_openml_raises_missing_values_target(monkeypatch, gzip_response):
    """A target column containing missing values must be rejected."""
    data_id = 2
    _monkey_patch_webbased_functions(monkeypatch, data_id, gzip_response)
    assert_raise_message(ValueError, "Target column ",
                         fetch_openml, data_id=data_id, target_column='family')
def test_fetch_openml_raises_illegal_argument():
    """data_id is mutually exclusive with name/version; one must be given."""
    assert_raise_message(ValueError, "Dataset data_id=",
                         fetch_openml, data_id=-1, name="name")

    assert_raise_message(ValueError, "Dataset data_id=",
                         fetch_openml, data_id=-1, name=None,
                         version="version")

    assert_raise_message(ValueError, "Dataset data_id=",
                         fetch_openml, data_id=-1, name="name",
                         version="version")

    assert_raise_message(ValueError, "Neither name nor data_id are provided. "
                         "Please provide name or data_id.", fetch_openml)
| bsd-3-clause | 90,765,303,893,162,000 | 38.22069 | 79 | 0.628121 | false |
import json, logging
from collections import OrderedDict
from functools import wraps
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.http import JsonResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404, render
from django.template import Context, Template
from django.views.decorators.csrf import csrf_exempt
from awl.decorators import post_required
from wrench.utils import dynamic_load
from .forms import SurveyForm
from .models import (EditNotAllowedException, Survey, SurveyVersion, Question,
AnswerGroup)
logger = logging.getLogger(__name__)
# ============================================================================
# Security Decorator
# ============================================================================
def permission_hook(target):
    """Decorator: run the optional ``DFORM_PERMISSION_HOOK`` before the view.

    The hook is named by dotted path in settings; it receives the wrapped
    view's name and arguments and may raise to deny access.
    """
    @wraps(target)
    def wrapper(*args, **kwargs):
        if hasattr(settings, 'DFORM_PERMISSION_HOOK'):
            hook = dynamic_load(settings.DFORM_PERMISSION_HOOK)
            hook(target.__name__, *args, **kwargs)

        # everything verified, run the view
        return target(*args, **kwargs)
    return wrapper
# ============================================================================
# Admin Methods
# ============================================================================
@staff_member_required
@post_required(['delta'])
def survey_delta(request, survey_version_id):
    """AJAX endpoint: apply a JSON 'delta' of edits to a survey version.

    A ``survey_version_id`` of '0' creates a brand new survey first.
    Returns JSON ``{'success': bool}`` (plus per-field ``errors`` on
    validation failure); raises 404 for uneditable versions or unknown
    question ids.
    """
    # OrderedDict preserves the question ordering encoded in the delta
    delta = json.loads(request.POST['delta'], object_pairs_hook=OrderedDict)
    if survey_version_id == '0':
        # new survey
        survey = Survey.factory(name=delta['name'])
        version = survey.latest_version
    else:
        version = get_object_or_404(SurveyVersion, id=survey_version_id)

    response = {
        'success':True,
    }
    try:
        version.replace_from_dict(delta)
    except ValidationError as ve:
        # report validation problems back to the editor UI, still HTTP 200
        response['success'] = False
        response['errors'] = ve.params
    except EditNotAllowedException:
        raise Http404('Survey %s is not editable' % version.survey)
    except Question.DoesNotExist as dne:
        raise Http404('Bad question id: %s' % dne)

    # issue a 200 response
    return JsonResponse(response)
@staff_member_required
def survey_editor(request, survey_version_id):
    """Render the JavaScript survey editor for a survey version.

    A ``survey_version_id`` of ``'0'`` creates a brand new survey first.
    The page posts its changes to the ``dform-survey-delta`` view and
    returns to the referring page (admin index as fallback) when done.
    """
    if survey_version_id == '0':
        # new survey
        survey = Survey.factory(name='New Survey')
        version = survey.latest_version
    else:
        version = get_object_or_404(SurveyVersion, id=survey_version_id)
    admin_link = reverse('admin:index')
    return_url = request.META.get('HTTP_REFERER', admin_link)
    save_url = reverse('dform-survey-delta', args=(version.id, ))
    data = {
        'survey_version':json.dumps(version.to_dict()),
        'save_url':save_url,
        'return_url':return_url,
    }
    return render(request, 'dform/edit_survey.html', data)
@staff_member_required
def new_version(request, survey_id):
    """Create a new version of the given Survey, then send the user back
    to wherever they came from (admin index if there is no referrer)."""
    survey = get_object_or_404(Survey, id=survey_id)
    survey.new_version()

    fallback = reverse('admin:index')
    return HttpResponseRedirect(request.META.get('HTTP_REFERER', fallback))
@staff_member_required
def survey_links(request, survey_version_id):
    """Shows links and embedding code for pointing to this survey on an HTML
    page.

    Builds absolute URLs for this specific version, for the "latest
    version" redirects, and for the iframe-embedded variants, plus the
    pym.js helper script used for responsive embedding.
    """
    version = get_object_or_404(SurveyVersion, id=survey_version_id)
    # direct link to this exact version
    survey_url = request.build_absolute_uri(
        reverse('dform-survey', args=(version.id, version.survey.token)))
    embedded_survey_url = request.build_absolute_uri(
        reverse('dform-embedded-survey', args=(version.id,
            version.survey.token)))
    # "latest" links are keyed on the survey, not the version
    survey_latest_url = request.build_absolute_uri(
        reverse('dform-survey-latest', args=(version.survey.id,
            version.survey.token)))
    embedded_survey_latest_url = request.build_absolute_uri(
        reverse('dform-embedded-survey-latest', args=(version.survey.id,
            version.survey.token)))
    pym_url = request.build_absolute_uri(
        staticfiles_storage.url('dform/js/pym.min.js'))
    data = {
        'title':'Links for: %s' % version.survey.name,
        'survey_url':survey_url,
        'embedded_survey_url':embedded_survey_url,
        'survey_latest_url':survey_latest_url,
        'embedded_survey_latest_url':embedded_survey_latest_url,
        'pym_url':pym_url,
        'version':version,
    }
    return render(request, 'dform/links_survey.html', data)
@staff_member_required
def answer_links(request, answer_group_id):
    """Shows links and embedding code for pointing to this AnswerGroup on
    an HTML page so a user could edit their previously submitted data."""
    group = get_object_or_404(AnswerGroup, id=answer_group_id)
    version = group.survey_version

    # edit link requires both the survey token and the answer-group token
    edit_path = reverse('dform-survey-with-answers', args=(
        version.id, version.survey.token, group.id, group.token))

    data = {
        'title':'Answer Links for: %s' % version.survey.name,
        'survey_url':request.build_absolute_uri(edit_path),
    }
    return render(request, 'dform/links_answers.html', data)
# ============================================================================
# Form Views
# ============================================================================
@permission_hook
def sample_survey(request, survey_version_id):
    """Render a demo of a survey version; the submit mechanism does
    nothing (empty ``submit_action``).

    URL name reference for this view: ``dform-sample-survey``

    :param survey_version_id:
        Id of a :class:`SurveyVersion` object
    """
    version = get_object_or_404(SurveyVersion, id=survey_version_id)
    context = {
        'title':'Sample: %s' % version.survey.name,
        'survey_version':version,
        'form':SurveyForm(survey_version=version),
        'submit_action':'',
    }
    return render(request, 'dform/survey.html', context)
# -------------------
def _survey_view(request, survey_version_id, token, is_embedded):
    """General view code for handling a survey, called by survey() or
    embedded_survey()

    Validates the survey token, processes a POSTed SurveyForm (invoking
    the optional ``DFORM_SUBMIT_HOOK`` on success, then redirecting to the
    version's success URL), and otherwise renders the blank form.
    ``is_embedded`` selects the iframe-aware submit URL and template mode.
    """
    version = get_object_or_404(SurveyVersion, id=survey_version_id,
        survey__token=token)
    if request.method == 'POST':
        form = SurveyForm(request.POST, survey_version=version,
            ip_address=request.META['REMOTE_ADDR'])
        if form.is_valid():
            form.save()
            # optional post-submit hook configured as a dotted path
            name = getattr(settings, 'DFORM_SUBMIT_HOOK', '')
            if name:
                fn = dynamic_load(name)
                fn(form)
            return HttpResponseRedirect(version.on_success())
    else:
        form = SurveyForm(survey_version=version)
    try:
        # check if we have an alternate submit mechanism defined
        template = Template(settings.DFORM_SURVEY_SUBMIT)
        context = Context({'survey_version':version})
        submit_action = template.render(context)
    except AttributeError:
        # use our default submit url
        name = 'dform-embedded-survey' if is_embedded else 'dform-survey'
        submit_action = reverse(name, args=(version.id, version.survey.token))
    data = {
        'title':version.survey.name,
        'survey_version':version,
        'form':form,
        'is_embedded':is_embedded,
        'submit_action':submit_action,
    }
    return render(request, 'dform/survey.html', data)
@permission_hook
def survey(request, survey_version_id, token):
    """View for submitting answers to a specific survey version.

    URL name reference for this view: ``dform-survey``
    """
    return _survey_view(request, survey_version_id, token, is_embedded=False)
@permission_hook
@csrf_exempt
def embedded_survey(request, survey_version_id, token):
    """Same as :func:`survey`, with the extra Javascript handling needed
    when the form is embedded in an iframe.

    URL name reference for this view: ``dform-embedded-survey``
    """
    return _survey_view(request, survey_version_id, token, is_embedded=True)
@permission_hook
def survey_latest(request, survey_id, token):
    """View for submitting answers to the newest version of a survey.

    URL name reference for this view: ``dform-survey-latest``
    """
    found = get_object_or_404(Survey, id=survey_id, token=token)
    return _survey_view(request, found.latest_version.id, token,
        is_embedded=False)
@permission_hook
@csrf_exempt
def embedded_survey_latest(request, survey_id, token):
    """Iframe-aware variant of :func:`survey_latest`: submit answers to
    the newest version of a survey from within an embedded frame.

    URL name reference for this view: ``dform-embedded-survey-latest``
    """
    found = get_object_or_404(Survey, id=survey_id, token=token)
    return _survey_view(request, found.latest_version.id, token,
        is_embedded=True)
#------------------
def _survey_with_answers_view(request, survey_version_id, survey_token,
        answer_group_id, answer_token, is_embedded):
    """General view code for editing the answers of a survey.  Called by
    survey_with_answers() and embedded_survey_with_answers().

    Both the survey token and the answer-group token must match, so only
    the answer owner can edit.  On a successful POST the optional
    ``DFORM_EDIT_HOOK`` is invoked, then the user is redirected to the
    version's success URL.  ``is_embedded`` selects the iframe-aware
    submit URL when no ``DFORM_SURVEY_WITH_ANSWERS_SUBMIT`` template is
    configured.
    """
    version = get_object_or_404(SurveyVersion, id=survey_version_id,
        survey__token=survey_token)
    answer_group = get_object_or_404(AnswerGroup, id=answer_group_id,
        token=answer_token)
    if request.method == 'POST':
        form = SurveyForm(request.POST, survey_version=version,
            answer_group=answer_group)
        if form.is_valid():
            form.save()
            # optional post-edit hook configured as a dotted path
            name = getattr(settings, 'DFORM_EDIT_HOOK', '')
            if name:
                fn = dynamic_load(name)
                fn(form)
            return HttpResponseRedirect(version.on_success())
    else:
        form = SurveyForm(survey_version=version, answer_group=answer_group)
    try:
        # check for alternate survey edit handler
        template = Template(settings.DFORM_SURVEY_WITH_ANSWERS_SUBMIT)
        context = Context({
            'survey_version':version,
            'answer_group':answer_group
        })
        submit_action = template.render(context)
    except AttributeError:
        # use default survey edit handler
        # BUG FIX: the embedded / non-embedded URL names were swapped in
        # this conditional (compare the equivalent logic in _survey_view)
        name = 'dform-embedded-survey-with-answers' if is_embedded else \
            'dform-survey-with-answers'
        submit_action = reverse(name, args=(version.id, version.survey.token,
            answer_group.id, answer_group.token))
    data = {
        'title':version.survey.name,
        'survey_version':version,
        'answer_group':answer_group,
        'form':form,
        'is_embedded':is_embedded,
        'submit_action':submit_action,
    }
    return render(request, 'dform/survey.html', data)
@permission_hook
def survey_with_answers(request, survey_version_id, survey_token,
        answer_group_id, answer_token):
    """View for viewing and changing previously submitted survey answers.

    URL name reference for this view: ``dform-survey-with-answers``
    """
    return _survey_with_answers_view(request, survey_version_id, survey_token,
        answer_group_id, answer_token, is_embedded=False)
@permission_hook
@csrf_exempt
def embedded_survey_with_answers(request, survey_version_id, survey_token,
        answer_group_id, answer_token):
    """Iframe-aware variant of :func:`survey_with_answers`: view and edit
    existing answers from within an embedded frame.

    URL name reference for this view: ``dform-embedded-survey-with-answers``
    """
    return _survey_with_answers_view(request, survey_version_id, survey_token,
        answer_group_id, answer_token, is_embedded=True)
| mit | 1,802,551,763,523,872,000 | 32.785714 | 78 | 0.635518 | false |
#!/usr/bin/env python
# encoding: utf-8
from collections import OrderedDict
import sys, os
import waflib
from waflib import Utils
from waflib.Configure import conf
# registry of board name -> Board subclass, populated by BoardMeta
_board_classes = {}
# cached Board instance handed out by get_board()
_board = None
class BoardMeta(type):
    """Metaclass that registers every concrete Board subclass in the
    module-level _board_classes map, keyed by its board name."""
    def __init__(cls, name, bases, dct):
        super(BoardMeta, cls).__init__(name, bases, dct)
        # 'abstract' must appear in the class' own namespace; otherwise it
        # is reset so subclasses of abstract boards still get registered
        if 'abstract' not in cls.__dict__:
            cls.abstract = False
        if cls.abstract:
            return
        if not hasattr(cls, 'toolchain'):
            cls.toolchain = 'native'
        # boards may publish a custom 'name'; default to the class name
        board_name = getattr(cls, 'name', name)
        if board_name in _board_classes:
            raise Exception('board named %s already exists' % board_name)
        _board_classes[board_name] = cls
class Board:
    """Base class for all build targets; holds the flags, defines and
    libraries common to every board."""
    abstract = True
    def __init__(self):
        # whether this board links in the UAVCAN library/sources
        self.with_uavcan = False
    def configure(self, cfg):
        """Top-level configure step: load the toolchain, let the subclass
        fill a fresh env via configure_env(), then merge it into cfg.env."""
        cfg.env.TOOLCHAIN = cfg.options.toolchain or self.toolchain
        cfg.env.ROMFS_FILES = []
        cfg.load('toolchain')
        cfg.load('cxx_checks')
        env = waflib.ConfigSet.ConfigSet()
        self.configure_env(cfg, env)
        d = env.get_merged_dict()
        # Always prepend so that arguments passed in the command line get
        # the priority.
        for k, val in d.items():
            # Dictionaries (like 'DEFINES') are converted to lists to
            # conform to waf conventions.
            if isinstance(val, dict):
                keys = list(val.keys())
                if not isinstance(val, OrderedDict):
                    keys.sort()
                val = ['%s=%s' % (vk, val[vk]) for vk in keys]
            if k in cfg.env and isinstance(cfg.env[k], list):
                cfg.env.prepend_value(k, val)
            else:
                cfg.env[k] = val
        cfg.ap_common_checks()
        cfg.env.prepend_value('INCLUDES', [
            cfg.srcnode.find_dir('libraries/AP_Common/missing').abspath()
        ])
    def configure_env(self, cfg, env):
        """Populate 'env' with the compiler/linker flags, defines and
        libraries shared by all boards; subclasses extend this."""
        # Use a dictionary instead of the conventional list for definitions to
        # make easy to override them. Convert back to list before consumption.
        env.DEFINES = {}
        env.CFLAGS += [
            '-ffunction-sections',
            '-fdata-sections',
            '-fsigned-char',
            '-Wall',
            '-Wextra',
            '-Wformat',
            '-Wpointer-arith',
            '-Wcast-align',
            '-Wundef',
            '-Wno-missing-field-initializers',
            '-Wno-unused-parameter',
            '-Wno-redundant-decls',
            '-Wno-unknown-pragmas',
            '-Wno-trigraphs',
            '-Werror=shadow',
            '-Werror=return-type',
            '-Werror=unused-result',
            '-Werror=narrowing',
            '-Werror=attributes',
        ]
        # optional Lua scripting support with its embedded sandbox script
        if cfg.options.enable_scripting:
            env.DEFINES.update(
                ENABLE_SCRIPTING = 1,
                ENABLE_HEAP = 1,
                LUA_32BITS = 1,
                )
            env.ROMFS_FILES += [
                ('sandbox.lua', 'libraries/AP_Scripting/scripts/sandbox.lua'),
                ]
            env.AP_LIBRARIES += [
                'AP_Scripting',
                'AP_Scripting/lua/src',
                ]
            env.CXXFLAGS += [
                '-DHAL_HAVE_AP_ROMFS_EMBEDDED_H'
                ]
            if cfg.options.scripting_checks:
                env.DEFINES.update(
                    AP_SCRIPTING_CHECKS = 1,
                    )
        if 'clang' in cfg.env.COMPILER_CC:
            env.CFLAGS += [
                '-fcolor-diagnostics',
                '-Wno-gnu-designator',
                '-Wno-inconsistent-missing-override',
                '-Wno-mismatched-tags',
                '-Wno-gnu-variable-sized-type-not-at-end',
            ]
        if cfg.env.DEBUG:
            env.CFLAGS += [
                '-g',
                '-O0',
            ]
        if cfg.options.enable_math_check_indexes:
            env.CXXFLAGS += ['-DMATH_CHECK_INDEXES']
        env.CXXFLAGS += [
            '-std=gnu++11',
            '-fdata-sections',
            '-ffunction-sections',
            '-fno-exceptions',
            '-fsigned-char',
            '-Wall',
            '-Wextra',
            '-Wformat',
            '-Wpointer-arith',
            '-Wcast-align',
            '-Wundef',
            '-Wno-unused-parameter',
            '-Wno-missing-field-initializers',
            '-Wno-reorder',
            '-Wno-redundant-decls',
            '-Wno-unknown-pragmas',
            '-Werror=attributes',
            '-Werror=format-security',
            '-Werror=enum-compare',
            '-Werror=array-bounds',
            '-Werror=uninitialized',
            '-Werror=init-self',
            '-Werror=narrowing',
            '-Werror=return-type',
            '-Werror=switch',
            '-Werror=sign-compare',
            '-Werror=type-limits',
            '-Werror=unused-result',
            '-Werror=shadow',
            '-Werror=unused-variable',
            '-Wfatal-errors',
            '-Wno-trigraphs',
        ]
        if 'clang++' in cfg.env.COMPILER_CXX:
            env.CXXFLAGS += [
                '-fcolor-diagnostics',
                '-Werror=inconsistent-missing-override',
                '-Werror=overloaded-virtual',
                '-Wno-gnu-designator',
                '-Wno-mismatched-tags',
                '-Wno-gnu-variable-sized-type-not-at-end',
            ]
        else:
            env.CXXFLAGS += [
                '-Werror=unused-but-set-variable'
            ]
        if cfg.env.DEBUG:
            env.CXXFLAGS += [
                '-g',
                '-O0',
            ]
        # the macOS linker strips dead code with a different flag spelling
        if cfg.env.DEST_OS == 'darwin':
            env.LINKFLAGS += [
                '-Wl,-dead_strip',
            ]
        else:
            env.LINKFLAGS += [
                '-Wl,--gc-sections',
            ]
        if self.with_uavcan:
            env.AP_LIBRARIES += [
                'AP_UAVCAN',
                'modules/uavcan/libuavcan/src/**/*.cpp'
                ]
            env.CXXFLAGS += [
                '-Wno-error=cast-align',
                ]
            env.DEFINES.update(
                UAVCAN_CPP_VERSION = 'UAVCAN_CPP03',
                UAVCAN_NO_ASSERTIONS = 1,
                UAVCAN_NULLPTR = 'nullptr'
            )
            env.INCLUDES += [
                cfg.srcnode.find_dir('modules/uavcan/libuavcan/include').abspath()
                ]
        # We always want to use PRI format macros
        cfg.define('__STDC_FORMAT_MACROS', 1)
    def pre_build(self, bld):
        '''pre-build hook that gets called before dynamic sources'''
        if bld.env.ROMFS_FILES:
            self.embed_ROMFS_files(bld)
    def build(self, bld):
        """Record the git hash and build date in the version header."""
        bld.ap_version_append_str('GIT_VERSION', bld.git_head_hash(short=True))
        import time
        ltime = time.localtime()
        bld.ap_version_append_int('BUILD_DATE_YEAR', ltime.tm_year)
        bld.ap_version_append_int('BUILD_DATE_MONTH', ltime.tm_mon)
        bld.ap_version_append_int('BUILD_DATE_DAY', ltime.tm_mday)
    def embed_ROMFS_files(self, ctx):
        '''embed some files using AP_ROMFS'''
        import embed
        if ctx.env.USE_NUTTX_IOFW:
            # use fmuv2_IO_NuttX.bin instead of fmuv2_IO.bin
            for i in range(len(ctx.env.ROMFS_FILES)):
                (name,filename) = ctx.env.ROMFS_FILES[i]
                if name == 'io_firmware.bin':
                    filename = 'Tools/IO_Firmware/fmuv2_IO_NuttX.bin'
                    print("Using IO firmware %s" % filename)
                    ctx.env.ROMFS_FILES[i] = (name,filename);
        header = ctx.bldnode.make_node('ap_romfs_embedded.h').abspath()
        if not embed.create_embedded_h(header, ctx.env.ROMFS_FILES):
            ctx.fatal("Failed to created ap_romfs_embedded.h")
# re-create Board with BoardMeta as its metaclass (a py2/py3 compatible way
# of attaching a metaclass) so that subsequent subclasses self-register
Board = BoardMeta('Board', Board.__bases__, dict(Board.__dict__))
def add_dynamic_boards():
    '''add boards based on existance of hwdef.dat in subdirectories for ChibiOS'''
    dirname, dirlist, filenames = next(os.walk('libraries/AP_HAL_ChibiOS/hwdef'))
    for d in dirlist:
        if d in _board_classes.keys():
            continue
        hwdef = os.path.join(dirname, d, 'hwdef.dat')
        if os.path.exists(hwdef):
            # creating the class is sufficient: type() resolves to the most
            # derived metaclass of the bases (BoardMeta, via chibios), whose
            # __init__ registers the new board in _board_classes
            newclass = type(d, (chibios,), {'name': d})
def get_boards_names():
    """Return every registered board name (dynamically discovered ChibiOS
    hwdef boards included), sorted case-insensitively."""
    add_dynamic_boards()
    return sorted(_board_classes, key=str.lower)
def get_removed_boards():
    '''list of boards which have been removed'''
    # these targets were dropped along with NuttX / HAL_PX4 support
    removed = ['px4-v1', 'px4-v2', 'px4-v3', 'px4-v4', 'px4-v4pro']
    return sorted(removed)
@conf
def get_board(ctx):
    """Return (and cache in the module-level _board) the Board instance
    selected by ctx.env.BOARD, failing with a helpful message for removed
    or unknown board names."""
    global _board
    if not _board:
        if not ctx.env.BOARD:
            ctx.fatal('BOARD environment variable must be set before first call to get_board()')
        if ctx.env.BOARD in get_removed_boards():
            ctx.fatal('''
The board target %s has been removed from ArduPilot with the removal of NuttX support and HAL_PX4.
Please use a replacement build as follows:
 px4-v2 Use Pixhawk1 build
 px4-v3 Use Pixhawk1 or CubeBlack builds
 px4-v4 Use Pixracer build
 px4-v4pro Use DrotekP3Pro build
''' % ctx.env.BOARD)
        boards = _board_classes.keys()
        if not ctx.env.BOARD in boards:
            ctx.fatal("Invalid board '%s': choices are %s" % (ctx.env.BOARD, ', '.join(sorted(boards, key=str.lower))))
        # instantiate once; later calls reuse the cached instance
        _board = _board_classes[ctx.env.BOARD]()
    return _board
# NOTE: Keeping all the board definitions together so we can easily
# identify opportunities to simplify common flags. In the future might
# be worthy to keep board definitions in files of their own.
class sitl(Board):
    """Software-in-the-loop simulator build (native toolchain)."""
    def configure_env(self, cfg, env):
        super(sitl, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD = 'HAL_BOARD_SITL',
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_NONE',
            AP_SCRIPTING_CHECKS = 1, # SITL should always do runtime scripting checks
        )
        env.CXXFLAGS += [
            '-Werror=float-equal'
        ]
        if not cfg.env.DEBUG:
            env.CXXFLAGS += [
                '-O3',
            ]
        env.LIB += [
            'm',
        ]
        cfg.check_librt(env)
        env.LINKFLAGS += ['-pthread',]
        env.AP_LIBRARIES += [
            'AP_HAL_SITL',
            'SITL',
        ]
        # optional on-screen-display simulation needs SFML plus the
        # embedded OSD font files
        if cfg.options.enable_sfml:
            if not cfg.check_SFML(env):
                cfg.fatal("Failed to find SFML libraries")
            env.CXXFLAGS += ['-DWITH_SITL_OSD','-DOSD_ENABLED=ENABLED','-DHAL_HAVE_AP_ROMFS_EMBEDDED_H']
            import fnmatch
            for f in os.listdir('libraries/AP_OSD/fonts'):
                if fnmatch.fnmatch(f, "font*bin"):
                    env.ROMFS_FILES += [(f,'libraries/AP_OSD/fonts/'+f)]
        if cfg.options.enable_sfml_audio:
            if not cfg.check_SFML_Audio(env):
                cfg.fatal("Failed to find SFML Audio libraries")
            env.CXXFLAGS += ['-DWITH_SITL_TONEALARM']
        if cfg.options.sitl_flash_storage:
            env.CXXFLAGS += ['-DSTORAGE_USE_FLASH=1']
        if cfg.env.DEST_OS == 'cygwin':
            env.LIB += [
                'winmm',
            ]
        if Utils.unversioned_sys_platform() == 'cygwin':
            env.CXXFLAGS += ['-DCYGWIN_BUILD']
        if 'clang++' in cfg.env.COMPILER_CXX:
            print("Disabling SLP for clang++")
            env.CXXFLAGS += [
                '-fno-slp-vectorize' # compiler bug when trying to use SLP
            ]
class chibios(Board):
    """Abstract base for all ChibiOS (STM32) targets; concrete boards are
    generated dynamically from hwdef.dat files (see add_dynamic_boards)."""
    abstract = True
    toolchain = 'arm-none-eabi'
    def configure_env(self, cfg, env):
        super(chibios, self).configure_env(cfg, env)
        cfg.load('chibios')
        env.BOARD = self.name
        env.DEFINES.update(
            CONFIG_HAL_BOARD = 'HAL_BOARD_CHIBIOS',
            HAVE_OCLOEXEC = 0,
            HAVE_STD_NULLPTR_T = 0,
        )
        env.AP_LIBRARIES += [
            'AP_HAL_ChibiOS',
        ]
        # make board name available for USB IDs
        env.CHIBIOS_BOARD_NAME = 'HAL_BOARD_NAME="%s"' % self.name
        env.CFLAGS += cfg.env.CPU_FLAGS + [
            '-Wno-cast-align',
            '-Wlogical-op',
            '-Wframe-larger-than=1300',
            '-fsingle-precision-constant',
            '-Wno-attributes',
            '-Wno-error=double-promotion',
            '-Wno-error=missing-declarations',
            '-Wno-error=float-equal',
            '-Wno-error=undef',
            '-Wno-error=cpp',
            '-fno-exceptions',
            '-Wall',
            '-Wextra',
            '-Wno-sign-compare',
            '-Wfloat-equal',
            '-Wpointer-arith',
            '-Wmissing-declarations',
            '-Wno-unused-parameter',
            '-Werror=array-bounds',
            '-Wfatal-errors',
            '-Werror=uninitialized',
            '-Werror=init-self',
            '-Wframe-larger-than=1024',
            '-Werror=unused-but-set-variable',
            '-Wno-missing-field-initializers',
            '-Wno-trigraphs',
            '-fno-strict-aliasing',
            '-fomit-frame-pointer',
            '-falign-functions=16',
            '-ffunction-sections',
            '-fdata-sections',
            '-fno-strength-reduce',
            '-fno-builtin-printf',
            '-fno-builtin-fprintf',
            '-fno-builtin-vprintf',
            '-fno-builtin-vfprintf',
            '-fno-builtin-puts',
            '-mno-thumb-interwork',
            '-mthumb',
            '--specs=nano.specs',
            '-specs=nosys.specs',
            '-DCHIBIOS_BOARD_NAME="%s"' % self.name,
        ]
        env.CXXFLAGS += env.CFLAGS + [
            '-fno-rtti',
            '-fno-threadsafe-statics',
        ]
        if Utils.unversioned_sys_platform() == 'cygwin':
            env.CXXFLAGS += ['-DCYGWIN_BUILD']
        bldnode = cfg.bldnode.make_node(self.name)
        env.BUILDROOT = bldnode.make_node('').abspath()
        # bare-metal link: custom ldscript, no startfiles, and forced
        # inclusion of the ChibiOS port/libc glue symbols (-u...)
        env.LINKFLAGS = cfg.env.CPU_FLAGS + [
            '-fomit-frame-pointer',
            '-falign-functions=16',
            '-ffunction-sections',
            '-fdata-sections',
            '-u_port_lock',
            '-u_port_unlock',
            '-u_exit',
            '-u_kill',
            '-u_getpid',
            '-u_errno',
            '-uchThdExit',
            '-fno-common',
            '-nostartfiles',
            '-mno-thumb-interwork',
            '-mthumb',
            '-specs=nano.specs',
            '-specs=nosys.specs',
            '-L%s' % env.BUILDROOT,
            '-L%s' % cfg.srcnode.make_node('modules/ChibiOS/os/common/startup/ARMCMx/compilers/GCC/ld/').abspath(),
            '-L%s' % cfg.srcnode.make_node('libraries/AP_HAL_ChibiOS/hwdef/common/').abspath(),
            '-Wl,--gc-sections,--no-warn-mismatch,--library-path=/ld,--script=ldscript.ld,--defsym=__process_stack_size__=%s,--defsym=__main_stack_size__=%s' % (cfg.env.PROCESS_STACK, cfg.env.MAIN_STACK)
        ]
        if cfg.env.DEBUG:
            env.CFLAGS += [
                '-gdwarf-4',
                '-g3',
            ]
            env.LINKFLAGS += [
                '-gdwarf-4',
                '-g3',
            ]
        if cfg.env.ENABLE_ASSERTS:
            cfg.msg("Enabling ChibiOS asserts", "yes")
            env.CFLAGS += [ '-DHAL_CHIBIOS_ENABLE_ASSERTS' ]
            env.CXXFLAGS += [ '-DHAL_CHIBIOS_ENABLE_ASSERTS' ]
        else:
            cfg.msg("Enabling ChibiOS asserts", "no")
        env.LIB += ['gcc', 'm']
        env.GIT_SUBMODULES += [
            'ChibiOS',
        ]
        # intelhex is optional; without it only .bin firmware is produced
        try:
            import intelhex
            env.HAVE_INTEL_HEX = True
            cfg.msg("Checking for intelhex module:", 'OK')
        except Exception:
            cfg.msg("Checking for intelhex module:", 'disabled', color='YELLOW')
            env.HAVE_INTEL_HEX = False
    def build(self, bld):
        """Add the ChibiOS submodule hash to the version info and load the
        chibios waf tool for the build phase."""
        super(chibios, self).build(bld)
        bld.ap_version_append_str('CHIBIOS_GIT_VERSION', bld.git_submodule_head_hash('ChibiOS', short=True))
        bld.load('chibios')
    def pre_build(self, bld):
        '''pre-build hook that gets called before dynamic sources'''
        super(chibios, self).pre_build(bld)
        from waflib.Context import load_tool
        module = load_tool('chibios', [], with_sys_path=True)
        fun = getattr(module, 'pre_build', None)
        if fun:
            fun(bld)
class linux(Board):
    """Base class for all Linux-hosted boards."""
    def configure_env(self, cfg, env):
        super(linux, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD = 'HAL_BOARD_LINUX',
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_NONE',
        )
        if not cfg.env.DEBUG:
            env.CXXFLAGS += [
                '-O3',
            ]
        env.LIB += [
            'm',
        ]
        # probe for optional host libraries
        cfg.check_librt(env)
        cfg.check_lttng(env)
        cfg.check_libdl(env)
        cfg.check_libiio(env)
        env.LINKFLAGS += ['-pthread',]
        env.AP_LIBRARIES += [
            'AP_HAL_Linux',
        ]
        if self.with_uavcan:
            cfg.define('UAVCAN_EXCEPTIONS', 0)
        if cfg.options.apstatedir:
            cfg.define('AP_STATEDIR', cfg.options.apstatedir)
    def build(self, bld):
        super(linux, self).build(bld)
        if bld.options.upload:
            # upload via the rsync command after the build finishes
            waflib.Options.commands.append('rsync')
            # Avoid infinite recursion
            bld.options.upload = False
# ----------------------------------------------------------------------------
# Concrete Linux board variants.  Each subclass only selects a cross
# toolchain and its CONFIG_HAL_BOARD_SUBTYPE define; boards with a CAN bus
# also enable UAVCAN in __init__ (Board.__init__ only sets with_uavcan, so
# not chaining up to super().__init__ is harmless here).
class erleboard(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(erleboard, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ERLEBOARD',
        )
class navio(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(navio, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_NAVIO',
        )
class navio2(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(navio2, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_NAVIO2',
        )
class edge(linux):
    toolchain = 'arm-linux-gnueabihf'
    def __init__(self):
        # CAN-capable board: build with UAVCAN support
        self.with_uavcan = True
    def configure_env(self, cfg, env):
        super(edge, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_EDGE',
        )
class zynq(linux):
    toolchain = 'arm-xilinx-linux-gnueabi'
    def configure_env(self, cfg, env):
        super(zynq, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ZYNQ',
        )
class ocpoc_zynq(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(ocpoc_zynq, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_OCPOC_ZYNQ',
        )
class bbbmini(linux):
    toolchain = 'arm-linux-gnueabihf'
    def __init__(self):
        # CAN-capable board: build with UAVCAN support
        self.with_uavcan = True
    def configure_env(self, cfg, env):
        super(bbbmini, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BBBMINI',
        )
class blue(linux):
    toolchain = 'arm-linux-gnueabihf'
    def __init__(self):
        # CAN-capable board: build with UAVCAN support
        self.with_uavcan = True
    def configure_env(self, cfg, env):
        super(blue, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BLUE',
        )
class pocket(linux):
    toolchain = 'arm-linux-gnueabihf'
    def __init__(self):
        # CAN-capable board: build with UAVCAN support
        self.with_uavcan = True
    def configure_env(self, cfg, env):
        super(pocket, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_POCKET',
        )
class pxf(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(pxf, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_PXF',
        )
class bebop(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(bebop, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BEBOP',
        )
class disco(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(disco, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_DISCO',
        )
class erlebrain2(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(erlebrain2, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ERLEBRAIN2',
        )
class bhat(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(bhat, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BH',
        )
class dark(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(dark, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_DARK',
        )
class pxfmini(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(pxfmini, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_PXFMINI',
        )
# aero uses the default native toolchain (no 'toolchain' attribute)
class aero(linux):
    def __init__(self):
        # CAN-capable board: build with UAVCAN support
        self.with_uavcan = True
    def configure_env(self, cfg, env):
        super(aero, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_AERO',
        )
class rst_zynq(linux):
    toolchain = 'arm-linux-gnueabihf'
    def configure_env(self, cfg, env):
        super(rst_zynq, self).configure_env(cfg, env)
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_RST_ZYNQ',
        )
class SITL_static(sitl):
    """SITL build variant that links statically."""
    def configure_env(self, cfg, env):
        super(SITL_static, self).configure_env(cfg, env)
        cfg.env.STATIC_LINKING = True
# statically-linked SITL cross builds for specific toolchains
class SITL_x86_64_linux_gnu(SITL_static):
    toolchain = 'x86_64-linux-gnu'
class SITL_arm_linux_gnueabihf(SITL_static):
    toolchain = 'arm-linux-gnueabihf'
| gpl-3.0 | -5,554,313,772,015,345,000 | 28.936842 | 203 | 0.533887 | false |
## Copyright (C) 2011 Stellenbosch University
##
## This file is part of SUCEM.
##
## SUCEM is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## SUCEM is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with SUCEM. If not, see <http://www.gnu.org/licenses/>.
##
## Contact: [email protected]
# Authors:
# Evan Lezar <[email protected]>
"""A simple 2D eigenproblem which calculates the TM modes of a square guide.
Note that this is done by modelling the Magnetic field and as such no dirichlet BCs are used.
Only natural boundary conditions."""
import sys
import numpy as N
import os
import dolfin as dol
# make the in-tree sucemfem package importable, then restore sys.path
sys.path.insert(0, '../../../')
from sucemfem.ProblemConfigurations.EMVectorWaveEigenproblem import EigenProblem
from sucemfem.ProblemConfigurations.EMVectorWaveEigenproblem import DefaultEigenSolver
from sucemfem.Consts import c0
del sys.path[0]
script_path = os.path.dirname(__file__)
# Load the mesh and the material region markers
mesh = dol.UnitSquare ( 5, 5 )
# waveguide cross-section dimensions: the unit square scaled to a x b
a = 1.0
b = 1.0
mesh.coordinates()[:,0] = a*mesh.coordinates()[:,0]
mesh.coordinates()[:,1] = b*mesh.coordinates()[:,1]
# Use 4th order basis functions
order = 4
# Set up the eigen problem
ep = EigenProblem()
ep.set_mesh(mesh)
ep.set_basis_order(order)
ep.init_problem()
# Set up eigen problem solver where sigma is the shift to use in the
# shift-invert process
sigma = 1.5
es = DefaultEigenSolver()
es.set_eigenproblem(ep)
es.set_sigma(sigma)
# Solve the eigenproblem
eigs_w, eigs_v = es.solve_problem(10)
# Output the results
#res = N.array(sorted(eigs_w)[0:])
res = N.array(sorted(1/eigs_w+sigma)[0:]) #HAVE TO CORRECT FOR THE SPECTRUM SHIFT
res = N.sqrt(res)/N.pi
def k_mnl ( abd, m, n, l, normalize = False):
    """
    Return the analytical cutoff wavenumber for a resonant cavity with
    dimensions specified in the tuple abd.

    @param abd: a 3-tuple (a, b, d) of the dimensions in meters of the
        cavity, or a 2-tuple (a, b) for a waveguide cross-section, in
        which case l is forced to 0
    @param m: the index of the mode in the x direction
    @param n: the index of the mode in the y direction
    @param l: the index of the mode in the z direction (ignored for a
        2-tuple abd)
    @param normalize: divide the result by the factor \pi
    @raise ValueError: if abd does not contain 2 or 3 elements
    """
    if len(abd) == 3:
        a, b, d = abd
    elif len(abd) == 2:
        a, b = abd
        d = 1
        l = 0
    else:
        # previously an invalid length fell through to an obscure
        # UnboundLocalError; fail with a clear message instead
        raise ValueError("abd must contain 2 or 3 dimensions, got %d" % len(abd))
    root = N.sqrt ( (m/a)**2 + (n/b)**2 + (l/d)**2 )
    if normalize:
        return root
    return root*N.pi
print '\nreference:'
steps = 5
abd = (a, b)
ids = []
values = []
for m in range(steps):
for n in range(steps):
l = 0
i = (m,n)
if i.count(0) == 0:
ids.append((m,n,l))
values.append(k_mnl ( abd, m, n, l, True ))
import warnings
warnings.simplefilter("ignore", N.ComplexWarning)
r = 0;
errors = N.zeros_like(res)
print "mnl, analytical, calculated, relative error"
for i in N.argsort(values).tolist():
if r < len(res):
errors[r] = N.linalg.norm( res[r] - values[i])/N.linalg.norm( values[i] )
print "%d%d%d, " % (
ids[i]), "%9.3f, %10.3f, %.2e" % ( values[i], res[r], errors[r] )
r += 1
else:
break;
N.testing.assert_array_almost_equal( errors, N.zeros_like(res), 4 ) | gpl-3.0 | 1,550,712,025,160,284,200 | 28.491803 | 108 | 0.66055 | false |
"""Framework for creating tools that employ a robust command-line interface."""
from __future__ import absolute_import
import os
import shutil
# TODO: interface to datagrok.ansicolor ?
class CLIManager(object):
"""Captures the boilerplate involved in making a decent command-line
interface for a multi-function script.
Think about the interface to cvs, svn, git, etc.
Example:
class MyApp(CLIManager):
def cmd_who(self):
"Tells who"
pass
def cmd_what(self)
"Tells what"
pass
...
if __name__=='__main__':
import sys
# Create an instance with arg0
App = MyApp(sys.argv.pop(0))
# Call the instance with command line arguments
App(*sys.argv)
"""
def __init__(self, argv0):
self.argv0 = os.path.basename(argv0)
def __call__(self, *args):
args = list(args)
command = '_default'
if len(args):
command = args.pop(0)
if command == '--help':
command = 'help'
getattr(self, 'cmd_%s' % command, self._cmd_not_found(command))(*args)
if len(args) == 1 and command != 'help':
print
print "See '%s help' for more information." % self.argv0
def _cmd_not_found(self, command):
def error():
print "%s: '%s' is not a known command. see '%s help'" % (self.argv0, command, self.argv0)
return error
def cmd_help(self, *args):
"""Prints the usage information for this program or a command"""
if len(args) == 0:
print "usage: %s COMMAND [ARGS]" % self.argv0
print
print "The most commonly used commands are:"
for command in [x[len('cmd_'):] for x in dir(self) if x.startswith('cmd_') and not x.startswith('cmd__')]:
print " %-10s %s" % (command, getattr(self, 'cmd_' + command).__doc__.splitlines()[0])
print
print "See '%s help COMMAND' for more information on a specific command." % self.argv0
else:
command = list(args).pop(0)
cmd = getattr(self, 'cmd_%s' % command, None)
if cmd:
print "usage: %s %s [ARGS]" % (self.argv0, command)
print cmd.__doc__
else:
self._cmd_not_found(command)(self)
cmd__default = cmd_help
| agpl-3.0 | -7,054,265,450,523,929,000 | 33.236111 | 118 | 0.533469 | false |
# Source: http://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline
# http://people.wku.edu/qi.li/teaching/446/cg14_curve_surface.pdf
import numpy as np
from utils import distance
def CatmullRomSpline(P0, P1, P2, P3, nPoints=100):
    """Return nPoints (x, y) samples of the centripetal Catmull-Rom
    segment between P1 and P2.

    P0, P1, P2, and P3 should be (x,y) point pairs that define the
    Catmull-Rom spline; P0 and P3 only shape the tangents at the ends of
    the segment.  nPoints is the number of points to include in this
    curve segment.
    """
    # Convert the points to numpy so that we can do array multiplication
    P0, P1, P2, P3 = map(np.array, [P0, P1, P2, P3])

    # Knot sequence t0..t3: each knot advances by the distance between
    # successive control points raised to alpha (0.5 == centripetal)
    alpha = 0.5
    def tj(ti, Pi, Pj):
        xi, yi = Pi
        xj, yj = Pj
        return ( ( (xj-xi)**2 + (yj-yi)**2 )**0.5 )**alpha + ti

    # BUG FIX: the knots must be cumulative (t0 = 0 and each subsequent
    # knot derived from the previous one, per the centripetal Catmull-Rom
    # definition).  The original seeded t0 and t3 from the control
    # points' x coordinates, which breaks the parametrization whenever
    # those coordinates don't coincide with the proper knot values.
    t0 = 0
    t1 = tj(t0, P0, P1)
    t2 = tj(t1, P1, P2)
    t3 = tj(t2, P2, P3)

    # Only calculate points between P1 and P2
    t = np.linspace(t1, t2, nPoints)

    # Reshape so that we can multiply by the points P0 to P3
    # and get a point for each value of t.
    t = t.reshape(len(t), 1)
    A1 = (t1-t)/(t1-t0)*P0 + (t-t0)/(t1-t0)*P1
    A2 = (t2-t)/(t2-t1)*P1 + (t-t1)/(t2-t1)*P2
    A3 = (t3-t)/(t3-t2)*P2 + (t-t2)/(t3-t2)*P3
    B1 = (t2-t)/(t2-t0)*A1 + (t-t0)/(t2-t0)*A2
    B2 = (t3-t)/(t3-t1)*A2 + (t-t1)/(t3-t1)*A3
    C = (t2-t)/(t2-t1)*B1 + (t-t1)/(t2-t1)*B2
    return C
def CatmullRomLoop(loop, pointsPerUnitDist=1.):
    """Sample a closed Catmull-Rom spline through ``loop``.

    loop -- list of (x, y) points with loop[0] == loop[-1].
    pointsPerUnitDist -- sampling density: each segment gets a number of
        points proportional to the distance between its end control points.

    Returns a flat list of (x, y) tuples tracing the closed curve.
    Raises ValueError if fewer than 4 points are supplied.
    """
    if len(loop) < 4:
        raise ValueError("Loop must have at least 4 points in it")
    ret = []
    # Wrap control points around the ends so every drawn segment has the
    # two extra neighbours Catmull-Rom needs (relies on loop[0] == loop[-1]).
    loop = [loop[-2],] + loop + [loop[1],]
    # Produce coords for each consecutive four-point window.  range()
    # instead of xrange() behaves identically here and also runs on
    # Python 3.
    for i in range(len(loop) - 3):
        numPoints = int(distance(loop[i+1], loop[i+2]) * pointsPerUnitDist)
        ret.append(CatmullRomSpline(loop[i], loop[i+1], loop[i+2], loop[i+3], nPoints=numPoints))
    # Flatten the list of per-segment arrays into one list of tuples.
    ret = [tuple(coords) for seg in ret for coords in seg]
    return ret
| mit | 1,019,315,799,465,238,700 | 33.210526 | 97 | 0.587692 | false |
VaclavDedik/infinispan-py | tests/unit/test_utils.py | 1 | 1300 | # -*- coding: utf-8 -*-
import pytest
from infinispan import utils
from infinispan.hotrod import TimeUnits
class TestUtils(object):
    """Unit tests for time-string parsing in :mod:`infinispan.utils`."""

    def test_from_pretty_time(self):
        """Each supported suffix maps to the matching TimeUnits member;
        'inf' and 'def' carry no numeric value."""
        assert utils.from_pretty_time('10s') == (10, TimeUnits.SECONDS)
        assert utils.from_pretty_time('10ms') == (10, TimeUnits.MILISECONDS)
        assert utils.from_pretty_time('10ns') == (10, TimeUnits.NANOSECONDS)
        assert utils.from_pretty_time('10us') == (10, TimeUnits.MICROSECONDS)
        assert utils.from_pretty_time('10m') == (10, TimeUnits.MINUTES)
        assert utils.from_pretty_time('10h') == (10, TimeUnits.HOURS)
        assert utils.from_pretty_time('10d') == (10, TimeUnits.DAYS)
        assert utils.from_pretty_time('inf') == (None, TimeUnits.INFINITE)
        assert utils.from_pretty_time('def') == (None, TimeUnits.DEFAULT)

    def test_from_pretty_time_invalid_format(self):
        """Malformed strings (missing number, missing unit, uppercase
        unit, trailing junk) must raise ValueError."""
        with pytest.raises(ValueError):
            utils.from_pretty_time('10')
        with pytest.raises(ValueError):
            utils.from_pretty_time('s')
        with pytest.raises(ValueError):
            utils.from_pretty_time('10S')
        with pytest.raises(ValueError):
            utils.from_pretty_time('10s1')
        with pytest.raises(ValueError):
            utils.from_pretty_time('10ss')
stormi/tsunami | src/secondaires/magie/types/parchemin.py | 1 | 4405 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le type Parchemin."""
from primaires.interpreteur.editeur.uniligne import Uniligne
from bases.objet.attribut import Attribut
from primaires.objet.types.base import BaseType
class Parchemin(BaseType):

    """Object type: scroll (parchemin).

    A scroll holds a number of charges of a single spell; each cast
    consumes one charge.
    """

    nom_type = "parchemin"

    def __init__(self, cle=""):
        """Initialize the scroll and register its two editor fields:
        's' (spell key) and 'c' (number of charges)."""
        BaseType.__init__(self, cle)
        self._cle_sort = ""
        self.charges = 1
        self.etendre_editeur("s", "sort", Uniligne, self, "cle_sort")
        self.etendre_editeur("c", "charges", Uniligne, self, "charges")

    def _get_cle_sort(self):
        # Accessor for the spell key backing the 'cle_sort' property.
        return self._cle_sort
    def _set_cle_sort(self, sort):
        # Only accept keys of spells that actually exist in the magic
        # importer; silently ignore unknown keys.
        sorts = [sort.cle for sort in type(self).importeur.magie.sorts.values()]
        if sort in sorts:
            self._cle_sort = sort
    cle_sort = property(_get_cle_sort, _set_cle_sort)

    @property
    def sort(self):
        """Return the spell object of this scroll, or None if no spell
        key has been set."""
        if self.cle_sort:
            return type(self).importeur.magie.sorts[self.cle_sort]
        else:
            return None

    def travailler_enveloppes(self, enveloppes):
        """Configure the editor envelopes ('s' and 'c') with prompts and
        help text.  The help strings are user-facing French text and must
        stay as-is."""
        sort = enveloppes["s"]
        sort.apercu = "{objet.cle_sort}"
        sort.prompt = "Clé du sort : "
        sort.aide_courte = \
            "Entrez la |ent|clé|ff| du sort contenu dans ce parchemin. Il " \
            "va sans dire que le sort\nen question doit être déjà créé. " \
            "Entrez |cmd|/|ff| pour revenir à la fenêtre parente.\n\n" \
            "Sort actuel : {objet.cle_sort}"
        sort.type = str

        charges = enveloppes["c"]
        charges.apercu = "{objet.charges}"
        charges.prompt = "Nombre de charges : "
        charges.aide_courte = \
            "Entrez le |ent|nombre|ff| de charges du parchemin ; ce nombre " \
            "correspond à la quantité\nde sorts que l'on peut lancer avant " \
            "que le parchemin soit inutilisable.\n" \
            "Entrez |cmd|/|ff| pour revenir à la fenêtre parente.\n\n" \
            "Charges actuelles : {objet.charges}"
        charges.type = int

    @staticmethod
    def regarder(objet, personnage):
        """The character looks at the object: append the remaining
        charges and spell name (with French elision d'/de) to the base
        description."""
        msg = BaseType.regarder(objet, personnage)
        if getattr(objet, "sort", False):
            de = "de"
            # elide before a vowel: "d'eclair" instead of "de eclair"
            if objet.sort.nom[0] in ["a", "e", "i", "o", "u", "y"]:
                de = "d'"
            if objet.charges > 0:
                s = "s" if objet.charges > 1 else ""
                msg += "\nCe parchemin contient " + str(objet.charges)
                msg += " charge" + s + " du sort " + de + " " + objet.sort.nom
                msg += "."
            else:
                msg += "\nCe parchemin ne contient plus aucune charge."
        return msg
| bsd-3-clause | -7,939,619,277,976,388,000 | 39.666667 | 80 | 0.636612 | false |
ericdill/bluesky | bluesky/simple_scans.py | 1 | 14733 | """
These "scans" bundle a Message generator with an instance of the RunEngine,
combining two separate concepts -- instructions and execution -- into one
object. This makes the interface less flexible and somewhat less "Pythonic"
but more condensed.
This module is meant to be run in a namespace where several global
variables have been defined. If some variables are left undefined, the
associated scans will be not usable.
DETS # list of detectors
MASTER_DET # detector to use for tw
MASTER_DET_FIELD # detector field to use for tw
H_MOTOR
K_MOTOR
L_MOTOR
TH_MOTOR
TTH_MOTOR
TEMP_CONTROLLER
Page numbers in the code comments refer to the SPEC manual at
http://www.certif.com/downloads/css_docs/spec_man.pdf
"""
from inspect import signature
import matplotlib.pyplot as plt
from bluesky import scans
from bluesky.callbacks import LiveTable, LivePlot, LiveRaster, _get_obj_fields
from bluesky.scientific_callbacks import PeakStats
from boltons.iterutils import chunked
from bluesky.global_state import gs
from bluesky.utils import normalize_subs_input, Subs, DefaultSubs
from collections import defaultdict
from itertools import filterfalse, chain
# ## Factory functions acting a shim between scans and callbacks ###
# Each factory below inspects a scan object (and the global state `gs`)
# and returns a callback to subscribe to that scan, or None for
# "no subscription applicable".

def table_from_motors(scan):
    "Setup a LiveTable by inspecting a scan and gs."
    # > 1 motor
    return LiveTable(list(scan.motors) + gs.TABLE_COLS)


def table_from_motor(scan):
    "Setup a LiveTable by inspecting a scan and gs."
    # 1 motor
    return LiveTable([scan.motor] + gs.TABLE_COLS)


def table_gs_only(scan):
    "Setup a LiveTable by inspecting a scan and gs."
    # no motors
    return LiveTable(gs.TABLE_COLS)


def plot_first_motor(scan):
    "Setup a LivePlot by inspecting a scan and gs."
    fig_name = 'BlueSky: {} v {}'.format(list(scan.motors)[0].name, gs.PLOT_Y)
    fig = plt.figure(fig_name)
    # gs.OVERPLOT == False means each scan starts with a fresh figure
    if not gs.OVERPLOT:
        fig.clf()
    return LivePlot(gs.PLOT_Y, list(scan.motors)[0].name, fig=fig)


def plot_motor(scan):
    "Setup a LivePlot by inspecting a scan and gs."
    fig_name = 'BlueSky: {} v {}'.format(scan.motor.name, gs.PLOT_Y)
    fig = plt.figure(fig_name)
    if not gs.OVERPLOT:
        fig.clf()
    return LivePlot(gs.PLOT_Y, scan.motor.name, fig=fig)


def raster(scan):
    "Set up a LiveRaster by inspect a scan and gs."
    # rasters only make sense for 2-D scans; None means "no subscription"
    if len(scan.shape) != 2:
        return None
    # first motor is 'slow' -> Y axis
    ylab, xlab = _get_obj_fields(scan.motors)
    # shape goes in (rr, cc)
    # extents go in (x, y)
    return LiveRaster(scan.shape, gs.MASTER_DET_FIELD, xlabel=xlab,
                      ylabel=ylab, extent=list(chain(*scan.extents[::-1])))


def peakstats_first_motor(scan):
    "Set up peakstats"
    ps = PeakStats(_get_obj_fields([list(scan.motors)[0]])[0],
                   gs.MASTER_DET_FIELD, edge_count=3)
    # side effect: stash the PeakStats instance globally so the user can
    # inspect it after the scan
    gs.PS = ps
    return ps


def peakstats(scan):
    "Set up peakstats"
    ps = PeakStats(_get_obj_fields([scan.motor])[0],
                   gs.MASTER_DET_FIELD, edge_count=3)
    # side effect: stash the PeakStats instance globally (see above)
    gs.PS = ps
    return ps
class _BundledScan:
    """Base class bundling a Scan (message generator) with the global
    RunEngine: calling an instance builds the scan from global detectors
    and runs it immediately."""

    default_subs = DefaultSubs({})
    default_sub_factories = DefaultSubs({})
    # These are set to the defaults at init time.
    subs = Subs({})
    sub_factories = Subs({})

    def __init__(self):
        # subs and sub_factories can be set individually per instance
        self.subs = dict(self.default_subs)
        self.sub_factories = dict(self.default_sub_factories)
        # names of the scan constructor's parameters, used in __call__ to
        # split kwargs between the scan and the RunEngine
        self.params = list(signature(self.scan_class).parameters.keys())
        self.configuration = {}
        self.flyers = []

    def __call__(self, *args, subs=None, sub_factories=None, **kwargs):
        """Build the scan from gs.DETS plus *args, assemble subscriptions,
        and hand everything to gs.RE.  Returns whatever the RunEngine
        returns."""
        scan_kwargs = dict()
        # Any kwargs valid for the scan go to the scan, not the RE.
        for k, v in kwargs.copy().items():
            if k in self.params:
                scan_kwargs[k] = kwargs.pop(k)
        from bluesky.global_state import gs

        # a name collision would make the kwarg routing above ambiguous
        RE_params = list(signature(gs.RE.__call__).parameters.keys())
        if set(RE_params) & set(self.params):
            raise AssertionError("The names of the scan's arguments clash "
                                 "the RunEngine arguments. Use different "
                                 "names. Avoid: {0}".format(RE_params))

        global_dets = gs.DETS if gs.DETS is not None else []
        self.scan = self.scan_class(global_dets, *args, **scan_kwargs)
        # Combine subs passed as args and subs set up in subs attribute.
        _subs = defaultdict(list)
        _update_lists(_subs, normalize_subs_input(subs))
        _update_lists(_subs, normalize_subs_input(self.subs))
        # Create a sub from each sub_factory.
        _update_lists(_subs, _run_factories(sub_factories, self.scan))
        _update_lists(_subs, _run_factories(self.sub_factories, self.scan))

        # Set up scan attributes.
        self.scan.configuration = self.configuration
        global_flyers = gs.FLYERS if gs.FLYERS is not None else []
        self.scan.flyers = list(set(list(self.flyers) + list(global_flyers)))

        # Any remainging kwargs go the RE. To be safe, no args are passed
        # to RE; RE args effectively become keyword-only arguments.
        return gs.RE(self.scan, _subs, **kwargs)
def _update_lists(out, inp):
"""Extends dictionary `out` lists with those in `inp`
Assumes dictionaries where all values are lists
"""
for k, v in inp.items():
try:
out[k].extend(v)
except KeyError:
out[k] = list(v)
def _run_factories(factories, scan):
    '''Run sub factory functions for a scan

    Factory functions should return lists, which will be added onto the
    subscription key (e.g., 'all' or 'start') specified in the factory
    definition.

    If the factory function returns None, the list will not be modified.
    '''
    factories = normalize_subs_input(factories)
    # call every factory with the scan and drop the None results
    out = {k: list(filterfalse(lambda x: x is None,
                               (sf(scan) for sf in v)))
           for k, v in factories.items()}
    # side effect: stash the produced subs globally for debugging
    gs._SECRET_STASH = out
    return out
# ## Mid-level base classes ###
# These are responsible for popping off the time arg and adjusting the
# interval. SPEC counts "bonds;" idiomatic Python counts "sites."
class _OuterProductScan(_BundledScan):
    """Mesh-style scans: args come in groups of four per motor
    (motor, start, finish, intervals), optionally followed by a trailing
    acquire time, SPEC-style."""
    default_sub_factories = DefaultSubs({'all': [table_from_motors]})

    def __call__(self, *args, time=None, subs=None, **kwargs):
        args = list(args)
        # an odd leftover argument is the SPEC-style trailing count time
        if len(args) % 4 == 1:
            if time is None:
                time = args.pop(-1)
            else:
                raise ValueError("wrong number of positional arguments")
        original_times = _set_acquire_time(time)
        for i, _ in enumerate(chunked(list(args), 4)):
            # intervals -> intervals + 1
            # (SPEC counts intervals; bluesky counts points)
            args[4*i + 3] += 1
            # never snake; SPEC doesn't know how
            if i != 0:
                args.insert(4*(i + 1), False)
        result = super().__call__(*args, subs=subs, **kwargs)
        _unset_acquire_time(original_times)
        return result
class _InnerProductScan(_BundledScan):
    """a2scan/d2scan-style scans: args come in groups of three per motor
    (motor, start, finish), then a shared intervals count and optionally
    a trailing acquire time, SPEC-style."""
    default_sub_factories = DefaultSubs(
        {'all': [table_from_motors, plot_first_motor,
                 peakstats_first_motor]})

    def __call__(self, *args, time=None, subs=None, **kwargs):
        args = list(args)
        # two leftover args means the last one is the SPEC-style count time
        if len(args) % 3 == 2:
            if time is None:
                time = args.pop(-1)
            else:
                raise ValueError("wrong number of positional arguments")
        # SPEC counts intervals; bluesky counts points, hence the + 1
        intervals = args.pop(-1) + 1
        original_times = _set_acquire_time(time)
        result = super().__call__(intervals, *args, subs=subs, **kwargs)
        _unset_acquire_time(original_times)
        return result
class _StepScan(_BundledScan):
    """One-motor step scans (ascan/dscan family)."""

    default_sub_factories = DefaultSubs(
        {'all': [table_from_motor, plot_motor,
                 peakstats]})

    def __call__(self, motor, start, finish, intervals, time=None,
                 subs=None, **kwargs):
        """Invoke the scan

        Parameters
        ----------
        motor
            The motor object to scan
        start : number
            The start point of the motion
        finish : number
            The finish point of the motion
        intervals : int
            The number of steps between `start` and `finish`
        time : number
            The acquire time of the detector(s); defaults to gs.COUNT_TIME
        subs : dict
            The temporary subscriptions to add to **this scan only**. These
            subscriptions are **not** persistent
        """
        original_times = _set_acquire_time(time)
        # intervals + 1: SPEC counts intervals, bluesky counts points
        result = super().__call__(motor, start, finish, intervals + 1,
                                  subs=subs, **kwargs)
        _unset_acquire_time(original_times)
        return result
class _HardcodedMotorStepScan(_BundledScan):
    """Step scan over a fixed motor supplied by the subclass."""
    # Subclasses must define self.motor as a property.
    default_sub_factories = DefaultSubs(
        {'all': [table_from_motor, plot_motor]})

    def __call__(self, start, finish, intervals, time=None, subs=None,
                 **kwargs):
        """Scan self.motor from start to finish; see _StepScan for the
        meaning of intervals/time/subs."""
        original_times = _set_acquire_time(time)
        # intervals + 1: SPEC counts intervals, bluesky counts points
        result = super().__call__(self.motor, start, finish,
                                  intervals + 1, subs=subs, **kwargs)
        _unset_acquire_time(original_times)
        return result
### Counts (p. 140) ###
class Count(_BundledScan):
    "ct"
    # simple count with no motor motion; 'ct' is the SPEC command name
    scan_class = scans.Count
    default_sub_factories = DefaultSubs({'all': [table_gs_only]})

    def __call__(self, time=None, subs=None, **kwargs):
        """Take a single count with every detector in gs.DETS, using
        `time` (default gs.COUNT_TIME) as the acquire time."""
        original_times = _set_acquire_time(time)
        result = super().__call__(subs=subs, **kwargs)
        _unset_acquire_time(original_times)
        return result
### Motor Scans (p. 146) ###
# Each class below only pairs a SPEC command name (the docstring) with
# the bluesky scan class that implements it; all call logic lives in the
# _StepScan/_OuterProductScan/_InnerProductScan bases.

class AbsScan(_StepScan):
    "ascan"
    scan_class = scans.AbsScan


class OuterProductAbsScan(_OuterProductScan):
    "mesh"
    default_sub_factories = DefaultSubs({'all': [table_from_motors, raster]})
    scan_class = scans.OuterProductAbsScan


class InnerProductAbsScan(_InnerProductScan):
    "a2scan, a3scan, etc."
    scan_class = scans.InnerProductAbsScan


class DeltaScan(_StepScan):
    "dscan (also known as lup)"
    scan_class = scans.DeltaScan


class InnerProductDeltaScan(_InnerProductScan):
    "d2scan, d3scan, etc."
    scan_class = scans.InnerProductDeltaScan
class ThetaTwoThetaScan(_InnerProductScan):
    "th2th"
    scan_class = scans.InnerProductDeltaScan

    def __call__(self, start, finish, intervals, time=None, **kwargs):
        """Scan gs.TTH_MOTOR over [start, finish] while gs.TH_MOTOR
        covers half the range, as a theta/two-theta scan requires.

        time -- acquire time for the detectors (passed through to the
            _InnerProductScan machinery as the SPEC-style trailing arg)
        """
        TTH_MOTOR = gs.TTH_MOTOR
        TH_MOTOR = gs.TH_MOTOR
        original_times = _set_acquire_time(time)
        result = super().__call__(TTH_MOTOR, start, finish,
                                  TH_MOTOR, start/2, finish/2,
                                  intervals, time, **kwargs)
        _unset_acquire_time(original_times)
        # BUG FIX: the result was previously computed but never returned,
        # unlike every sibling scan class in this module.
        return result
### Temperature Scans (p. 148) ###
class _TemperatureScan(_HardcodedMotorStepScan):
    """Step scan over the temperature controller, with an optional
    settle time between points."""

    def __call__(self, start, finish, intervals, time=None, sleep=0,
                 **kwargs):
        """sleep -- settle time (seconds) applied to the controller
        before each acquisition."""
        self._sleep = sleep
        original_times = _set_acquire_time(time)
        self.motor.settle_time = sleep
        # NOTE(review): intervals + 1 here AND again in
        # _HardcodedMotorStepScan.__call__ looks like a double increment
        # (intervals + 2 points total) -- confirm intended behaviour.
        result = super().__call__(start, finish, intervals + 1, **kwargs)
        _unset_acquire_time(original_times)
        return result

    @property
    def motor(self):
        # the hard-coded "motor" is the global temperature controller
        from bluesky.global_state import gs
        return gs.TEMP_CONTROLLER
class AbsTemperatureScan(_TemperatureScan):
    "tscan"
    # absolute temperature set points
    scan_class = scans.AbsScan


class DeltaTemperatureScan(_TemperatureScan):
    "dtscan"
    # set points relative to the current temperature
    scan_class = scans.DeltaScan

### Basic Reciprocal Space Scans (p. 147) ###


class HScan(_HardcodedMotorStepScan):
    "hscan"
    scan_class = scans.AbsScan

    @property
    def motor(self):
        # hard-coded motor: the H reciprocal-space pseudo-motor from gs
        from bluesky.global_state import gs
        return gs.H_MOTOR


class KScan(_HardcodedMotorStepScan):
    "kscan"
    scan_class = scans.AbsScan

    @property
    def motor(self):
        # hard-coded motor: the K reciprocal-space pseudo-motor from gs
        from bluesky.global_state import gs
        return gs.K_MOTOR


class LScan(_HardcodedMotorStepScan):
    "lscan"
    scan_class = scans.AbsScan

    @property
    def motor(self):
        # hard-coded motor: the L reciprocal-space pseudo-motor from gs
        from bluesky.global_state import gs
        return gs.L_MOTOR
class OuterProductHKLScan(_BundledScan):
    "hklmesh"
    scan_class = scans.OuterProductAbsScan

    def __call__(self, Q1, start1, finish1, intervals1, Q2, start2, finish2,
                 intervals2, time=None, **kwargs):
        """Mesh scan over two reciprocal-space axes.

        Q1, Q2 -- 'H', 'K' or 'L', selecting which pseudo-motors to mesh.
        """
        # To be clear, like all other functions in this module, this
        # eye-gouging API is for compatbility with SPEC, not the author's
        # idea of good Python code.
        from bluesky.global_state import gs
        H_MOTOR = gs.H_MOTOR
        K_MOTOR = gs.K_MOTOR
        L_MOTOR = gs.L_MOTOR

        original_times = _set_acquire_time(time)
        _motor_mapping = {'H': H_MOTOR, 'K': K_MOTOR, 'L': L_MOTOR}
        motor1 = _motor_mapping[Q1]
        motor2 = _motor_mapping[Q2]
        # Note that intervals + 1 is handled in the base class.
        # NOTE(review): the direct base here is _BundledScan, which does
        # NOT add + 1 (only _OuterProductScan does) -- confirm the comment
        # above and the point count are actually correct.
        result = super().__call__(motor1, start1, finish1, intervals1,
                                  motor2, start2, finish2, intervals2,
                                  **kwargs)
        _unset_acquire_time(original_times)
        return result
class InnerProductHKLScan(_BundledScan):
    "hklscan"
    scan_class = scans.InnerProductAbsScan

    def __call__(self, start_h, finish_h, start_k, finish_k, start_l,
                 finish_l, intervals, time=None, **kwargs):
        """Scan h, k and l simultaneously over the given ranges with a
        shared number of intervals.

        time -- acquire time for the detectors; defaults to gs.COUNT_TIME
        """
        from bluesky.global_state import gs
        H_MOTOR = gs.H_MOTOR
        K_MOTOR = gs.K_MOTOR
        L_MOTOR = gs.L_MOTOR
        original_times = _set_acquire_time(time)
        # NOTE(review): the motors fetched above are not passed to the
        # underlying scan call -- confirm against the expected signature
        # of scans.InnerProductAbsScan.
        result = super().__call__(intervals, start_h, finish_h, start_k,
                                  finish_k, start_l, finish_l, **kwargs)
        _unset_acquire_time(original_times)
        # BUG FIX: the result was previously computed but never returned,
        # unlike every sibling scan class in this module.
        return result
### Special Reciprocal Space Scans ###
# TODO:
# klradial
# hlradial
# hkradial
# klcircle
# hlcircle
# hkcircle
class Tweak(_BundledScan):
    "tw"
    scan_class = scans.Tweak

    def __call__(self, motor, step, **kwargs):
        """Interactively tweak ``motor`` by ``step`` while watching
        gs.MASTER_DET's gs.MASTER_DET_FIELD."""
        # BUG FIX: 'self' was missing from the signature, so calling an
        # instance bound the instance itself to 'motor' and shifted every
        # argument by one position.
        from bluesky.global_state import gs
        MASTER_DET = gs.MASTER_DET
        MASTER_DET_FIELD = gs.MASTER_DET_FIELD
        return super().__call__(MASTER_DET, MASTER_DET_FIELD, motor,
                                step, **kwargs)
def _set_acquire_time(time):
    """Set ``count_time`` on every detector in gs.DETS that has one.

    time -- acquire time to apply; None means use gs.COUNT_TIME.
    Returns a dict mapping each modified detector to its previous
    count_time, suitable for _unset_acquire_time.
    """
    from bluesky.global_state import gs
    if time is None:
        time = gs.COUNT_TIME
    original_times = {}
    for det in gs.DETS:
        if hasattr(det, 'count_time'):
            # remember the old value so the caller can restore it later
            original_times[det] = det.count_time
            det.count_time = time
    return original_times
def _unset_acquire_time(original_times):
for det, time in original_times.items():
det.count_time = time
| bsd-3-clause | 1,619,961,103,741,633,800 | 30.280255 | 78 | 0.614539 | false |
walterbender/turtleconfusion | TurtleArt/tatype.py | 1 | 15982 | # Copyright (c) 2013 Marion Zepf
# Copyright (c) 2014 Walter Bender
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
""" type system for Primitives and their arguments """
import ast
from tablock import Media
from taconstants import (Color, ColorObj, CONSTANTS, Vector)
class Type(object):
    """A node in the type hierarchy.

    Instances are compared purely by ``value``; ``constant_name`` is only
    used for display.
    """

    def __init__(self, constant_name, value):
        """constant_name -- name of the module-level constant that points
            to this Type object
        value -- an integer different from every other Type's value; the
            ordering of the integers is irrelevant"""
        self.constant_name = constant_name
        self.value = value

    def __eq__(self, other):
        # anything that is not a Type (including None) compares unequal
        return isinstance(other, Type) and self.value == other.value

    def __str__(self):
        return "%s" % (self.constant_name,)

    __repr__ = __str__
class TypeDisjunction(tuple, Type):
    """ Disjunction of two or more Types (from the type hierarchy) """

    def __new__(cls, iterable):
        # BUG FIX: the old __init__ merely rebound its *local* name
        # 'self' to a plain tuple, which had no effect; construction only
        # worked because tuple.__new__ was called implicitly.  Make the
        # tuple construction explicit (tuples are immutable, so members
        # must be installed in __new__).
        return tuple.__new__(cls, iterable)

    def __str__(self):
        # e.g. "(TYPE_INT or TYPE_FLOAT)"; also fixes the degenerate
        # empty case, which previously rendered as ")"
        return "(" + " or ".join(str(disj) for disj in self) + ")"
# individual types
TYPE_OBJECT = Type('TYPE_OBJECT', 0)
TYPE_CHAR = Type('TYPE_CHAR', 1)
TYPE_COLOR = Type('TYPE_COLOR', 2)
TYPE_FLOAT = Type('TYPE_FLOAT', 3)
TYPE_INT = Type('TYPE_INT', 4)
TYPE_BOOL = Type('TYPE_BOOL', 5)
# shortcut to avoid a TypeDisjunction between TYPE_FLOAT and TYPE_INT
TYPE_NUMBER = Type('TYPE_NUMBER', 6)
TYPE_NUMERIC_STRING = Type('TYPE_NUMERIC_STRING', 7)
TYPE_BOX = Type('TYPE_BOX', 8) # special type for the unknown content of a box
TYPE_STRING = Type('TYPE_STRING', 9)
TYPE_MEDIA = Type('TYPE_MEDIA', 10)
# An array of numbers used by the food plugin et al.
TYPE_VECTOR = Type('TYPE_VECTOR', 11)
# groups/ classes of types
TYPES_NUMERIC = (TYPE_FLOAT, TYPE_INT, TYPE_NUMBER)
BOX_AST = ast.Name(id='BOX', ctx=ast.Load)
ACTION_AST = ast.Name(id='ACTION', ctx=ast.Load)
def get_type(x):
    """ Return the most specific type in the type hierarchy that applies to x
    and a boolean indicating whether x is an AST. If the type cannot be
    determined, return TYPE_OBJECT as the type. """

    # non-AST types
    if isinstance(x, (int, long)):
        return (TYPE_INT, False)
    elif isinstance(x, float):
        return (TYPE_FLOAT, False)
    elif isinstance(x, basestring):
        # a 1-character string is a CHAR; a longer string that parses as
        # a float is a NUMERIC_STRING
        if len(x) == 1:
            return (TYPE_CHAR, False)
        try:
            float(x)
        except ValueError:
            return (TYPE_STRING, False)
        else:
            return (TYPE_NUMERIC_STRING, False)
    elif isinstance(x, Color):
        return (TYPE_COLOR, False)
    elif isinstance(x, Media):
        return (TYPE_MEDIA, False)
    elif isinstance(x, Vector):
        return (TYPE_VECTOR, False)
    elif hasattr(x, "return_type"):
        # e.g. a TypedCall/TypedName AST-like object carrying its type
        return (x.return_type, False)

    # AST types
    elif isinstance(x, ast.Num):
        return (get_type(x.n)[0], True)
    elif isinstance(x, ast.Str):
        return (get_type(x.s)[0], True)
    elif isinstance(x, ast.Name):
        try:
            # we need to have imported CONSTANTS for this to work
            # (eval resolves the name against this module's globals)
            value = eval(x.id)
        except NameError:
            return (TYPE_OBJECT, True)
        else:
            return (get_type(value)[0], True)
    elif isinstance(x, ast.Subscript):
        # only BOX[...] subscripts are recognized; any other Subscript
        # falls through to the TYPE_OBJECT default at the bottom
        if x.value == BOX_AST:
            return (TYPE_BOX, True)
    elif isinstance(x, ast.Call):
        # calls to well-known conversion functions have known result types
        if isinstance(x.func, ast.Name):
            if x.func.id == 'float':
                return (TYPE_FLOAT, True)
            elif x.func.id in ('int', 'ord'):
                return (TYPE_INT, True)
            elif x.func.id == 'chr':
                return (TYPE_CHAR, True)
            elif x.func.id in ('repr', 'str', 'unicode'):
                return (TYPE_STRING, True)
            elif x.func.id == 'Color':
                return (TYPE_COLOR, True)
            elif x.func.id == 'Media':
                return (TYPE_MEDIA, True)
    # unary operands never change the type of their argument
    elif isinstance(x, ast.UnaryOp):
        if issubclass(x.op, ast.Not):
            # 'not' always returns a boolean
            return (TYPE_BOOL, True)
        else:
            return get_type(x.operand)

    # boolean and comparison operators always return a boolean
    if isinstance(x, (ast.BoolOp, ast.Compare)):
        return (TYPE_BOOL, True)
    # other binary operators
    elif isinstance(x, ast.BinOp):
        # string concatenation dominates; int op int stays int; anything
        # else is promoted to float
        type_left = get_type(x.left)[0]
        type_right = get_type(x.right)[0]
        if type_left == TYPE_STRING or type_right == TYPE_STRING:
            return (TYPE_STRING, True)
        if type_left == type_right == TYPE_INT:
            return (TYPE_INT, True)
        else:
            return (TYPE_FLOAT, True)

    return (TYPE_OBJECT, isinstance(x, ast.AST))
def is_instancemethod(method):
    """Return True if method is a (Python 2) instancemethod object."""
    # TODO how to access the type `instancemethod` directly?
    return type(method).__name__ == "instancemethod"


def is_bound_method(method):
    """Return True if method is bound to an instance (Python 2 im_self
    or Python 3 __self__ is set)."""
    return ((is_instancemethod(method) and method.im_self is not None) or
            (hasattr(method, '__self__') and method.__self__ is not None))


def is_staticmethod(method):
    """Return True if method is a staticmethod object (i.e. accessed
    through the class __dict__, not an instance)."""
    # TODO how to access the type `staticmethod` directly?
    return type(method).__name__ == "staticmethod"


def identity(x):
    """The do-nothing converter: return x unchanged."""
    return x
TYPE_CONVERTERS = {
# Type hierarchy: If there is a converter A -> B, then A is a subtype of B.
# The converter from A to B is stored under TYPE_CONVERTERS[A][B].
# The relation describing the type hierarchy must be transitive, i.e.
# converting A -> C must yield the same result as converting A -> B -> C.
# TYPE_OBJECT is the supertype of everything.
TYPE_BOX: {
TYPE_COLOR: ColorObj, # FIXME: should be Color.name
TYPE_VECTOR: Vector,
TYPE_FLOAT: float,
TYPE_INT: int,
TYPE_NUMBER: float,
TYPE_STRING: str},
TYPE_CHAR: {
TYPE_INT: ord,
TYPE_STRING: identity},
TYPE_COLOR: {
TYPE_FLOAT: float,
TYPE_INT: int,
TYPE_NUMBER: int,
TYPE_STRING: Color.get_number_string},
TYPE_FLOAT: {
TYPE_INT: int,
TYPE_NUMBER: identity,
TYPE_STRING: str},
TYPE_INT: {
TYPE_FLOAT: float,
TYPE_NUMBER: identity,
TYPE_STRING: str},
TYPE_NUMBER: {
TYPE_FLOAT: float,
TYPE_INT: int,
TYPE_STRING: str},
TYPE_NUMERIC_STRING: {
TYPE_FLOAT: float,
TYPE_STRING: identity}
}
class TATypeError(BaseException):
    """ TypeError expressed in terms of the hierarchy of Type objects
    rather than Python's built-in types """

    def __init__(self, bad_value, bad_type=None, req_type=None, message=''):
        """ bad_value -- the mis-typed value that caused the error
        bad_type -- the type of the bad_value, if known
        req_type -- the type that the value was expected to have
        message -- short statement about the cause of the error. It is
            not shown to the user, but may appear in debugging output. """
        self.bad_value = bad_value
        self.bad_type = bad_type
        self.req_type = req_type
        self.message = message

    def __str__(self):
        # assemble "bad value: ..., bad type: ..., req type: ..." and,
        # when a message is present, wrap it as "message (...)"
        parts = ["bad value: %r" % (self.bad_value,)]
        if self.bad_type is not None:
            parts.append("bad type: %r" % (self.bad_type,))
        if self.req_type is not None:
            parts.append("req type: %r" % (self.req_type,))
        body = ", ".join(parts)
        if self.message:
            return self.message + " (" + body + ")"
        return body

    __repr__ = __str__
def get_converter(old_type, new_type):
    """ If there is a converter old_type -> new_type, return it. Else return
    None. If a chain of converters is necessary, return it as a tuple or
    list (starting with the innermost, first-to-apply converter). """
    # every type can be converted to TYPE_OBJECT
    if new_type == TYPE_OBJECT:
        return identity
    # every type can be converted to itself
    if old_type == new_type:
        return identity
    # is there a converter for this pair of types?
    converters_from_old = TYPE_CONVERTERS.get(old_type)
    if converters_from_old is None:
        return None
    converter = converters_from_old.get(new_type)
    if converter is not None:
        return converter
    else:
        # form the transitive closure of all types that old_type can be
        # converted to, and look for new_type there
        # (breadth-first expansion; backtrace maps each reached type to
        # its predecessor so the path can be reconstructed afterwards)
        backtrace = converters_from_old.copy()
        new_backtrace = backtrace.copy()
        break_all = False
        while True:
            newest_backtrace = {}
            for t in new_backtrace:
                for new_t in TYPE_CONVERTERS.get(t, {}):
                    if new_t not in backtrace:
                        newest_backtrace[new_t] = t
                        backtrace[new_t] = t
                        if new_t == new_type:
                            # target reached; stop expanding
                            break_all = True
                            break
                if break_all:
                    break
            if break_all or not newest_backtrace:
                break
            new_backtrace = newest_backtrace
        # use the backtrace to find the path from old_type to new_type
        if new_type in backtrace:
            converter_chain = []
            t = new_type
            # walk predecessors until we reach a type directly reachable
            # from old_type (its backtrace entry is a converter, not a Type)
            while t in backtrace and isinstance(backtrace[t], Type):
                converter_chain.insert(0, TYPE_CONVERTERS[backtrace[t]][t])
                t = backtrace[t]
            converter_chain.insert(0, TYPE_CONVERTERS[old_type][t])
            return converter_chain
        return None
def convert(x, new_type, old_type=None, converter=None):
    """ Convert x to the new type if possible.

    old_type -- the type of x. If not given, it is computed.
    converter -- optional pre-computed converter (or chain of converters)
        to apply; if None, one is looked up via get_converter.
    Raises TATypeError when no converter exists or conversion fails.
    For AST inputs, returns an AST that performs the conversion at
    runtime instead of converting eagerly. """
    if not isinstance(new_type, Type):
        raise ValueError('%s is not a type in the type hierarchy'
                         % (repr(new_type)))
    # every type can be converted to TYPE_OBJECT
    if new_type == TYPE_OBJECT:
        return x
    if not isinstance(old_type, Type):
        (old_type, is_an_ast) = get_type(x)
    else:
        is_an_ast = isinstance(x, ast.AST)
    # every type can be converted to itself
    if old_type == new_type:
        return x

    # special case: 'box' block (or 'pop' block) as an AST
    # (the content type is unknown until runtime, so emit a runtime
    # convert() call instead of converting now)
    if is_an_ast and old_type == TYPE_BOX:
        new_type_ast = ast.Name(id=new_type.constant_name)
        return get_call_ast('convert', [x, new_type_ast], return_type=new_type)

    # if the converter is not given, try to find one
    if converter is None:
        converter = get_converter(old_type, new_type)
        if converter is None:
            # no converter available
            raise TATypeError(
                bad_value=x,
                bad_type=old_type,
                req_type=new_type,
                message=(
                    "found no converter"
                    " for this type combination"))

    def _apply_converter(converter, y):
        # Apply one converter step; for ASTs, wrap y in a call AST that
        # applies the converter at runtime instead of calling it now.
        try:
            if is_an_ast:
                if converter == identity:
                    return y
                elif is_instancemethod(converter):
                    # e.g. Color.get_number_string -> y.get_number_string()
                    func = ast.Attribute(value=y,
                                         attr=converter.im_func.__name__,
                                         ctx=ast.Load)
                    return get_call_ast(func)
                else:
                    func_name = converter.__name__
                    return get_call_ast(func_name, [y])
            else:
                return converter(y)
        except BaseException:
            # normalize any conversion failure into a TATypeError
            raise TATypeError(bad_value=x, bad_type=old_type,
                              req_type=new_type, message=("error during "
                                                          "conversion"))

    if isinstance(converter, (list, tuple)):
        # apply the converter chain recursively
        result = x
        for conv in converter:
            result = _apply_converter(conv, result)
        return result
    elif converter is not None:
        return _apply_converter(converter, x)
class TypedAST(ast.AST):
    """Mixin for AST nodes that carry a Type from the hierarchy."""

    @property
    def return_type(self):
        # fall back to inferring the type from the callable when no
        # explicit return type was supplied at construction time
        if self._return_type is None:
            return get_type(self.func)[0]
        else:
            return self._return_type


class TypedCall(ast.Call, TypedAST):
    """ Like a Call AST, but with a return type """

    def __init__(self, func, args=None, keywords=None, starargs=None,
                 kwargs=None, return_type=None):

        if args is None:
            args = []
        if keywords is None:
            keywords = []

        ast.Call.__init__(self, func=func, args=args, keywords=keywords,
                          starargs=starargs, kwargs=kwargs)

        self._return_type = return_type


class TypedSubscript(ast.Subscript, TypedAST):
    """ Like a Subscript AST, but with a type """

    def __init__(self, value, slice_, ctx=ast.Load, return_type=None):
        ast.Subscript.__init__(self, value=value, slice=slice_, ctx=ctx)
        self._return_type = return_type


class TypedName(ast.Name, TypedAST):
    """ Like a Name AST, but with a type """

    def __init__(self, id_, ctx=ast.Load, return_type=None):
        ast.Name.__init__(self, id=id_, ctx=ctx)
        self._return_type = return_type
def get_call_ast(func_name, args=None, kwargs=None, return_type=None):
    """ Return an AST representing the call to a function with the name
    func_name, passing it the arguments args (given as a list) and the
    keyword arguments kwargs (given as a dictionary).

    func_name -- either the name of a callable as a string, or an AST
        representing a callable expression
    return_type -- if this is not None, return a TypedCall object with this
        return type instead """
    if args is None:
        args = []
    # convert keyword argument dict to a list of (key, value) pairs
    keywords = []
    if kwargs is not None:
        # items() instead of iteritems(): identical behaviour on
        # Python 2 and also works on Python 3, where iteritems() is gone
        for (key, value) in kwargs.items():
            keywords.append(ast.keyword(arg=key, value=value))
    # get or generate the AST representing the callable
    if isinstance(func_name, ast.AST):
        func_ast = func_name
    else:
        func_ast = ast.Name(id=func_name, ctx=ast.Load)
    # if no return type is given, return a simple Call AST
    if return_type is None:
        return ast.Call(func=func_ast, args=args, keywords=keywords,
                        starargs=None, kwargs=None)
    # if a return type is given, return a TypedCall AST
    else:
        return TypedCall(func=func_ast, args=args, keywords=keywords,
                         return_type=return_type)
| mit | 6,725,969,836,886,066,000 | 33.743478 | 79 | 0.589163 | false |
infrae/mobi.devices | src/mobi/devices/tests/test_doctest_wurfl_parsing.py | 1 | 1866 | # Copyright (c) 2010 Infrae. All rights reserved.
# See also LICENSE.txt.
"""
We will start by initializing the database from wurfl stream.
It should return a tuple (db, index)
>>> from mobi.devices.index.radixtree import NOTSET
>>> from mobi.devices.wurfl.db import initialize_db
>>> db, index = initialize_db(config)
>>> db is not None
True
>>> index #doctest: +ELLIPSIS
<mobi.devices.index.radixtree.RadixTree ...>
Now we'll have a look at what's inside the index.
>>> user_agent = 'Mozilla/5.0 (iPhone; ...'
>>> node, string, pos = index.search(user_agent)
>>> node.value
<class 'mobi.devices.index.radixtree.NOTSET'>
>>> string
u'Mozilla/5.0 (iPhone; '
>>> pos
21
>>> dev_id = node.values().next()
Let's look that up into the database.
>>> from mobi.devices.wurfl.db import Device
>>> device = Device.deserialize(db[dev_id])
>>> device #doctest: +ELLIPSIS
<mobi.devices.wurfl.parser.Device user_agent="Mozilla/5.0 (iPhone; ...
>>> int(device.get_capability('xhtml_support_level'))
4
>>> device.parent_id
u'apple_iphone_ver2'
>>> device.type
<InterfaceClass mobi.interfaces.devices.IAdvancedDeviceType>
>>> device.platform
u'iphone'
"""
import shutil
import os
from mobi.devices.wurfl.parser import Device
# All test artefacts (the wurfl device DB files) live in a throwaway `var`
# directory next to this module; setup()/teardown() manage its lifecycle.
data_dir = os.path.join(os.path.dirname(__file__), 'var')
# Minimal configuration consumed by initialize_db() in the module doctest.
config = {
    'var': data_dir
}
def setup(test):
    """Doctest setUp hook: wipe leftover state, then (re)create the var dir."""
    teardown(test)
    try:
        os.mkdir(data_dir)
    except OSError:
        # the directory already exists -- fine, the doctests only need it present
        pass
def teardown(test):
try:
if Device.db:
Device.db.close()
shutil.rmtree(data_dir)
except:
pass
def test_suite():
    """Assemble this module's doctest suite with setup/teardown wired in."""
    import doctest
    import unittest

    suite = unittest.TestSuite()
    doctests = doctest.DocTestSuite(__name__, setUp=setup, tearDown=teardown)
    suite.addTest(doctests)
    return suite
| bsd-3-clause | 7,810,178,486,091,748,000 | 23.88 | 74 | 0.635048 | false |
cedriclaunay/gaffer | python/GafferUI/PlugWidget.py | 1 | 5374 | ##########################################################################
#
# Copyright (c) 2011, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import warnings
import IECore
import Gaffer
import GafferUI
QtGui = GafferUI._qtImport( "QtGui" )
## The PlugWidget combines a LabelPlugValueWidget with a second PlugValueWidget
## suitable for editing the plug.
## \todo This could provide functionality for arbitrary Widgets to be placed
## on the right, which combined with the ability to find a
## PlugWidget given a Plug could be quite useful for many things.
## \todo Remove deprecated label and description capabilities.
class PlugWidget( GafferUI.Widget ) :
    """Pairs a LabelPlugValueWidget with an editing PlugValueWidget in one row."""

    def __init__( self, plugOrWidget, label=None, description=None, **kw ) :
        # `plugOrWidget` may be a Gaffer.Plug (an editor is created for it) or
        # an existing PlugValueWidget / anything exposing plugValueWidget().
        GafferUI.Widget.__init__( self, QtGui.QWidget(), **kw )
        layout = QtGui.QHBoxLayout()
        layout.setContentsMargins( 0, 0, 0, 0 )
        layout.setSpacing( 4 )
        layout.setSizeConstraint( QtGui.QLayout.SetMinAndMaxSize )
        self._qtWidget().setLayout( layout )
        if isinstance( plugOrWidget, Gaffer.Plug ) :
            self.__valueWidget = GafferUI.PlugValueWidget.create( plugOrWidget )
            plug = plugOrWidget
        else :
            assert( isinstance( plugOrWidget, GafferUI.PlugValueWidget ) or hasattr( plugOrWidget, "plugValueWidget" ) )
            self.__valueWidget = plugOrWidget
            plug = self.plugValueWidget().getPlug()
        # Label on the left, right-aligned so it hugs the editor widget.
        self.__label = GafferUI.LabelPlugValueWidget(
            plug,
            horizontalAlignment = GafferUI.Label.HorizontalAlignment.Right,
            verticalAlignment = GafferUI.Label.VerticalAlignment.Top,
        )
        ## \todo Decide how we allow this sort of tweak using the public
        # interface. Perhaps we should have a SizeableContainer or something?
        self.__label.label()._qtWidget().setFixedWidth( self.labelWidth() )
        if label is not None :
            warnings.warn(
                "The PlugWidget label parameter is deprecated. Use Metadata instead.",
                DeprecationWarning,
                2
            )
            self.__label.label().setText( label )
        if description is not None :
            warnings.warn(
                "The PlugWidget description parameter is deprecated. Use Metadata instead.",
                DeprecationWarning,
                2
            )
            self.__label.label().setToolTip( description )
        layout.addWidget( self.__label._qtWidget() )
        layout.addWidget( self.__valueWidget._qtWidget() )
        # The plugValueWidget() may have smarter drop behaviour than the labelPlugValueWidget(),
        # because it has specialised PlugValueWidget._dropValue(). It's also more meaningful to the
        # user if we highlight the plugValueWidget() on dragEnter rather than the label. So we
        # forward the dragEnter/dragLeave/drop signals from the labelPlugValueWidget() to the plugValueWidget().
        # (connections are stored on self so they stay alive with the widget)
        self.__dragEnterConnection = self.__label.dragEnterSignal().connect( 0, Gaffer.WeakMethod( self.__labelDragEnter ) )
        self.__dragLeaveConnection = self.__label.dragLeaveSignal().connect( 0, Gaffer.WeakMethod( self.__labelDragLeave ) )
        self.__dropConnection = self.__label.dropSignal().connect( 0, Gaffer.WeakMethod( self.__labelDrop ) )

    def plugValueWidget( self ) :
        """Returns the PlugValueWidget that edits the plug's value."""
        if isinstance( self.__valueWidget, GafferUI.PlugValueWidget ) :
            return self.__valueWidget
        else :
            return self.__valueWidget.plugValueWidget()

    def labelPlugValueWidget( self ) :
        """Returns the LabelPlugValueWidget shown to the left of the editor."""
        return self.__label

    @staticmethod
    def labelWidth() :
        """Fixed pixel width shared by all PlugWidget labels, keeping columns aligned."""
        return 150

    # Signal forwarders: delegate the label's drag/drop handling to the value widget.
    def __labelDragEnter( self, label, event ) :
        return self.plugValueWidget().dragEnterSignal()( self.plugValueWidget(), event )

    def __labelDragLeave( self, label, event ) :
        return self.plugValueWidget().dragLeaveSignal()( self.plugValueWidget(), event )

    def __labelDrop( self, label, event ) :
        return self.plugValueWidget().dropSignal()( self.plugValueWidget(), event )
| bsd-3-clause | 1,593,743,067,569,437,400 | 38.514706 | 118 | 0.716971 | false |
iamdober/training | final/task3/task3.py | 1 | 2189 | #
# In lecture, we took the bipartite Marvel graph,
# where edges went between characters and the comics
# books they appeared in, and created a weighted graph
# with edges between characters where the weight was the
# number of comic books in which they both appeared.
#
# In this assignment, determine the weights between
# comic book characters by giving the probability
# that a randomly chosen comic book containing one of
# the characters will also contain the other
#
# from marvel import marvel, characters
def get_books_list(bipartiteG, characters):
    """Return the comic-book nodes of the bipartite graph.

    Every key of `bipartiteG` is either a character or a comic book; the
    books are exactly the keys not listed in `characters`.
    """
    # set for O(1) membership tests; comprehension replaces the append loop
    character_set = set(characters)
    return [node for node in bipartiteG if node not in character_set]
def calc_prob(bipartiteG, books, a, b):
    """Probability that a random book containing `a` or `b` contains both.

    Returns None when the two characters never appear together.
    """
    either = 0
    both = 0
    for book in books:
        cast = bipartiteG[book]
        has_a = a in cast
        has_b = b in cast
        if has_a or has_b:
            either += 1
            if has_a and has_b:
                both += 1
    if both == 0:
        return None
    return float(both) / either
def create_weighted_graph(bipartiteG, characters):
    """Build a character graph weighted by co-occurrence probability.

    G[a][b] is the probability that a randomly chosen comic containing
    `a` or `b` contains both (None when they never co-occur). There are
    no self edges.
    """
    books = get_books_list(bipartiteG, characters)
    G = {}
    for character in characters:
        G[character] = {}
    # calc_prob(a, b) == calc_prob(b, a), so compute each unordered pair
    # once instead of twice as the naive double loop over all pairs did
    for i, a in enumerate(characters):
        for b in characters[i + 1:]:
            prob = calc_prob(bipartiteG, books, a, b)
            G[a][b] = prob
            G[b][a] = prob
    return G
######
#
# Test
def test():
    """Smoke-test create_weighted_graph on a tiny hand-built bipartite graph."""
    bipartiteG = {
        'charA': {'comicB': 1, 'comicC': 1},
        'charB': {'comicB': 1, 'comicD': 1},
        'charC': {'comicD': 1},
        'comicB': {'charA': 1, 'charB': 1},
        'comicC': {'charA': 1},
        'comicD': {'charC': 1, 'charB': 1},
    }
    G = create_weighted_graph(bipartiteG, ['charA', 'charB', 'charC'])
    # three comics contain charA or charB; they share exactly one of them
    assert G['charA']['charB'] == 1.0 / 3
    # no self edges, and never-co-occurring pairs carry None
    assert G['charA'].get('charA') is None
    assert G['charA'].get('charC') is None
# def test2():
# G = create_weighted_graph(marvel, characters)
test() | gpl-3.0 | 2,350,563,976,907,062,000 | 29.416667 | 70 | 0.604842 | false |
evanbrumley/psmove-restful | utils.py | 1 | 8083 | import sys, os, time
from threading import Thread
import requests
# If PSMOVEAPI_BUILD_DIR is set, prefer the locally built psmove bindings
# over any installed copy by putting the build dir first on sys.path.
PSMOVEAPI_BUILD_DIR = os.environ.get('PSMOVEAPI_BUILD_DIR')
if PSMOVEAPI_BUILD_DIR:
    sys.path.insert(0, os.environ['PSMOVEAPI_BUILD_DIR'])
import psmove
class Controller(object):
    """Wraps a psmove.PSMove, mirroring its state via a background daemon thread.

    The loop pushes `red`/`green`/`blue` and `rumble` to the hardware (unless
    `read_only`) and polls buttons/sensors into plain attributes, so callers
    can simply read e.g. `self.btn_cross` at any time.
    """

    _active = False       # loop runs while True; cleared by terminate()
    _loop_thread = None
    controller = None     # underlying psmove.PSMove instance
    read_only = False     # True => never write LEDs/rumble to the hardware
    red = 0               # LED colour pushed to the controller by the loop
    green = 0
    blue = 0
    ax = 0                # accelerometer reading (latest polled frame)
    ay = 0
    az = 0
    gx = 0                # gyroscope reading (latest polled frame)
    gy = 0
    gz = 0
    btn_triangle = False  # button states, refreshed by update_state()
    btn_circle = False
    btn_cross = False
    btn_square = False
    btn_select = False
    btn_start = False
    btn_move = False
    btn_t = False
    btn_ps = False
    battery = 0
    trigger = 0
    rumble = 0            # rumble strength pushed to the controller by the loop

    def __init__(self, controller, read_only=False):
        """Start mirroring `controller` (a psmove.PSMove) immediately."""
        self.controller = controller
        self.read_only = read_only
        self.start_loop()

    def terminate(self):
        """Stop the polling loop and wait for the background thread to exit."""
        self._active = False
        if self._loop_thread:
            self._loop_thread.join()

    def _loop(self):
        # Runs on the daemon thread: push outputs, then refresh inputs.
        while(self._active):
            if not self.read_only:
                self.controller.set_leds(self.red, self.green, self.blue)
                self.controller.update_leds()
                self.controller.set_rumble(self.rumble)
            self.update_state()
            time.sleep(0.01)

    def start_loop(self):
        """Spawn the daemon thread that keeps hardware and attributes in sync."""
        self._active = True
        self._loop_thread = Thread(target=self._loop)
        self._loop_thread.daemon = True
        self._loop_thread.start()

    def update_state(self):
        """Poll the controller once and mirror its state into attributes."""
        result = self.controller.poll()
        if result:
            buttons = self.controller.get_buttons()
            # NOTE(review): the return value is unused, but the call itself
            # presumably consumes pending button events -- confirm against
            # the psmove API before removing it.
            button_events_on, button_events_off = self.controller.get_button_events()
            self.btn_triangle = bool(buttons & psmove.Btn_TRIANGLE)
            self.btn_circle = bool(buttons & psmove.Btn_CIRCLE)
            self.btn_cross = bool(buttons & psmove.Btn_CROSS)
            self.btn_square = bool(buttons & psmove.Btn_SQUARE)
            self.btn_select = bool(buttons & psmove.Btn_SELECT)
            self.btn_start = bool(buttons & psmove.Btn_START)
            self.btn_move = bool(buttons & psmove.Btn_MOVE)
            self.btn_t = bool(buttons & psmove.Btn_T)
            self.btn_ps = bool(buttons & psmove.Btn_PS)
            self.battery = self.controller.get_battery()
            self.trigger = self.controller.get_trigger()
            self.ax, self.ay, self.az = self.controller.get_accelerometer_frame(psmove.Frame_SecondHalf)
            self.gx, self.gy, self.gz = self.controller.get_gyroscope_frame(psmove.Frame_SecondHalf)

    def state_as_dict(self):
        """Return a plain-dict snapshot of the controller state.

        For read-only controllers the output-only keys (colour, rumble) are
        omitted, because there is currently no way to read them back from
        the hardware.
        """
        state_dict = {
            'ax': self.ax,
            'ay': self.ay,
            'az': self.az,
            'gx': self.gx,
            'gy': self.gy,
            'gz': self.gz,
            'btn_triangle': self.btn_triangle,
            'btn_circle': self.btn_circle,
            'btn_cross': self.btn_cross,
            'btn_square': self.btn_square,
            'btn_select': self.btn_select,
            'btn_start': self.btn_start,
            'btn_move': self.btn_move,
            'btn_t': self.btn_t,
            'btn_ps': self.btn_ps,
            'battery': self.battery,
            'trigger': self.trigger,
            'red': self.red,
            'green': self.green,
            'blue': self.blue,
            'rumble': self.rumble,
        }
        # There's currently no way to get color
        # or rumble directly from the controller
        if self.read_only:
            del state_dict['red']
            del state_dict['green']
            del state_dict['blue']
            del state_dict['rumble']
        return state_dict

    def set_color(self, red=None, green=None, blue=None):
        """Update the LED colour; channels passed as None are left unchanged."""
        if red is not None:
            self.red = red
        if green is not None:
            self.green = green
        if blue is not None:
            self.blue = blue

    def set_rumble(self, rumble):
        """Set the rumble strength pushed on the next loop iteration."""
        self.rumble = rumble

    def on_btn_triangle(self, fn, *args, **kwargs):
        """Register fn(*args, **kwargs) as a triangle-button callback.

        Returns the Callback object so the caller can keep or run it.
        """
        callback = Callback(fn, *args, **kwargs)
        # BUG FIX: this used to append to an undefined module-level name
        # (_btn_triangle_callbacks), raising NameError on every call. Keep a
        # per-instance registry instead, created lazily so subclasses that
        # skip Controller.__init__ (e.g. RemoteController) still work.
        if not hasattr(self, '_btn_triangle_callbacks'):
            self._btn_triangle_callbacks = []
        self._btn_triangle_callbacks.append(callback)
        # NOTE(review): nothing in this class invokes these callbacks yet;
        # dispatching them on button events appears to be unimplemented.
        return callback
class RemoteController(Controller):
_red = 0
_green = 0
_blue = 0
_rumble = 0
_dirty = True # Default to True so values get cleared on startup
def __init__(self, url):
self.url = url
self.start_loop()
def _loop(self):
while(self._active):
self.update_state()
time.sleep(0.02)
if self._dirty:
self.update_remote_state()
self._dirty = False
def terminate(self):
# Let the loop do its thing until
# we're not dirty any more
while(self._dirty):
time.sleep(0.02)
self._active = False
if self._loop_thread:
self._loop_thread.join()
def update_remote_state(self):
data = {
'red': self.red,
'green': self.green,
'blue': self.blue,
'rumble': self.rumble,
}
try:
response = requests.put(self.url, data)
except requests.ConnectionError:
print "Could not connect to controller at %s" % self.url
self._active = False
return
if response.status_code == 404:
print "Controller not found at %s" % self.url
self._active = False
return
elif not response.ok:
print "Encountered error updating controller: %s (%s)" % (response.status_code, response.reason)
self._active = False
return
@property
def red(self):
return self._red
@red.setter
def red(self, val):
self._red = val
self._dirty = True
@property
def green(self):
return self._green
@green.setter
def green(self, val):
self._green = val
self._dirty = True
@property
def blue(self):
return self._blue
@blue.setter
def blue(self, val):
self._blue = val
self._dirty = True
@property
def rumble(self):
return self._rumble
@rumble.setter
def rumble(self, val):
self._rumble = val
self._dirty = True
def update_state(self):
try:
response = requests.get(self.url)
except requests.ConnectionError:
print "Could not connect to controller at %s" % self.url
self._active = False
return
if response.status_code == 404:
print "Controller not found at %s" % self.url
self._active = False
return
elif not response.ok:
print "Encountered error updating controller: %s (%s)" % (response.status_code, response.reason)
self._active = False
return
result = response.json()
self.btn_triangle = result.get('btn_triangle')
self.btn_circle = result.get('btn_circle')
self.btn_cross = result.get('btn_cross')
self.btn_square = result.get('btn_square')
self.btn_select = result.get('btn_select')
self.btn_start = result.get('btn_start')
self.btn_move = result.get('btn_move')
self.btn_t = result.get('btn_t')
self.btn_ps = result.get('btn_ps')
self.battery = result.get('battery')
self.ax = result.get('ax')
self.ay = result.get('ay')
self.az = result.get('ax')
self.gx = result.get('gx')
self.gy = result.get('gy')
self.gz = result.get('gz')
class Callback(object):
    """Bundles a callable together with the arguments it should be invoked with."""

    def __init__(self, fn, *args, **kwargs):
        self.fn = fn
        self.args = args
        self.kwargs = kwargs

    def run(self):
        """Invoke the stored callable with the stored positional/keyword args."""
        self.fn(*self.args, **self.kwargs)
def get_controllers(read_only=False):
    """Wrap every Bluetooth-connected PS Move in a Controller instance."""
    moves = (psmove.PSMove(i) for i in range(psmove.count_connected()))
    return [
        Controller(move, read_only)
        for move in moves
        if move.connection_type == psmove.Conn_Bluetooth
    ]
def get_remote_controller(url):
    """Return a RemoteController proxying the controller REST API at `url`."""
    return RemoteController(url)
| mit | -3,089,800,069,205,572,600 | 26.776632 | 108 | 0.550662 | false |
ronin13/pyvolume | docs/conf.py | 1 | 8596 | # -*- coding: utf-8 -*-
# !/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pyvolume documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import absolute_import
import os
import sys
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import pyvolume
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"PyVolume"
copyright = u"2016, Raghavendra Prabhu"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = pyvolume.__version__
# The full version, including alpha/beta/rc tags.
release = pyvolume.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
# keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "pyvolumedoc"
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
(
"index",
"pyvolume.tex",
u"PyVolume Documentation",
u"Raghavendra Prabhu",
"manual",
),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
("index", "pyvolume", u"PyVolume Documentation", [u"Raghavendra Prabhu"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"pyvolume",
u"PyVolume Documentation",
u"Raghavendra Prabhu",
"pyvolume",
"One line description of project.",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
| mit | -2,895,800,814,160,401,000 | 29.590747 | 80 | 0.695905 | false |
thorwhalen/ut | parse/venere.py | 1 | 4355 | __author__ = 'thorwhalen'
import ut.parse.util as parse_util
import re
import ut.pstr.trans as pstr_trans
import ut.parse.bsoup as parse_bsoup
# Matches just the POI name in lines like ". Louvre - 0.3 km / 0.2 mi";
# the lookbehind/lookahead limit matches to sub-kilometre (0.x) distances.
pois_near_hotel_exp_0 = re.compile("(?<=\. )[\w ]+(?=- 0\.\d km / 0\.\d mi)")
# General "<name> - <km> km / <mi> mi" line; captures (name, km, mi).
pois_near_hotel_exp = re.compile("(.+)- (\d+[\.\d]*) km / (\d+[\.\d]*) mi")
def get_pois_near_hotel_location(html):
    """Extract nearby POI names (entries within 0.x km) from a hotel page's
    "location-distances" section."""
    soup = parse_util.x_to_soup(html)
    section = soup.find('div', attrs={'id': "location-distances"}).renderContents()
    # prefix each fragment with ". " so the regex's lookbehind can anchor on it
    entries = ['. ' + chunk for chunk in section.split('<br/>')]
    matches = (re.search(pois_near_hotel_exp_0, entry) for entry in entries)
    return [match.group(0).strip() for match in matches if match]
def parse_hotel_info_page(html):
    """Scrape a Venere hotel information page into a flat dict.

    Keys are filled only when the corresponding element is present:
    hotel_name, hotel_address (+street/locality), currency, average_price,
    facebook_likes, num_of_photos, hotel_description, average_venere_rating,
    facilities, alternate_names, overview_reviews, poi_and_distances.
    """
    html = parse_util.x_to_soup(html)
    d = dict()
    # hotel name
    d = parse_bsoup.add_text_to_parse_dict(soup=html, parse_dict=d,
        key='hotel_name', name='h1', attrs={'property': 'v:name'}, text_transform=parse_util.strip_spaces)
    # hotel address (full text plus RDFa street-address/locality sub-fields)
    tag = html.find(name='p', attrs={'id': 'property-address'})
    if tag:
        d['hotel_address'] = pstr_trans.strip(tag.text)
        d = parse_bsoup.add_text_to_parse_dict(soup=tag, parse_dict=d,
            key='hotel_street_address', name='span', attrs={'property': "v:street-address"},
            text_transform=parse_util.strip_spaces)
        d = parse_bsoup.add_text_to_parse_dict(soup=tag, parse_dict=d,
            key='hotel_locality', name='span', attrs={'property': "v:locality"},
            text_transform=parse_util.strip_spaces)
    # average price: whole part and optional decimals are separate elements
    d = parse_bsoup.add_text_to_parse_dict(soup=html, parse_dict=d,
        key='currency', name='span', attrs={'id': 'currency-symbol'}, text_transform=parse_util.strip_spaces)
    avgPriceEl0 = html.find(name='span', attrs={'id': 'avgPriceEl0'})
    avgPriceDecimals = html.find(name='sup', attrs={'id': 'avgPriceDecimals'})
    if avgPriceEl0:
        d['average_price'] = avgPriceEl0.text
        if avgPriceDecimals:
            d['average_price'] = d['average_price'] + avgPriceDecimals.text
        d['average_price'] = float(d['average_price'])
    # facebook likes
    d = parse_bsoup.add_text_to_parse_dict(soup=html, parse_dict=d,
        key='facebook_likes', name='span', attrs={'class': 'pluginCountTextDisconnected'}, text_transform=float)
    # num_of_photos: one <li> per photo in the gallery
    tag = html.find(name='div', attrs={'id': 'photo_gallery'})
    if tag:
        d['num_of_photos'] = len(tag.findAll(name='li'))
    # hotel description
    d = parse_bsoup.add_text_to_parse_dict(soup=html, parse_dict=d,
        key='hotel_description', name='div', attrs={'id': 'hotel-description-body'}, text_transform=parse_util.strip_spaces)
    # average_venere_rating
    tag = html.find(name='div', attrs={'id': 'avg_guest_rating'})
    if tag:
        d['average_venere_rating'] = float(tag.find(name='b', attrs={'property': 'v:rating'}).text)
    # facilities
    tag = html.find(name='div', attrs={'id': 'facilities'})
    if tag:
        facilities = tag.findAll(name='li')
        if facilities:
            d['facilities'] = [parse_util.strip_spaces(x.text) for x in facilities]
    # alternate names: <br>-separated list inside the "also known as" block
    tag = html.find(name='div', attrs={'id': 'also_known_as'})
    if tag:
        tag = tag.find(name='p')
        if tag:
            t = [parse_util.strip_spaces(x) for x in tag.renderContents().split('<br>')]
            t = [parse_util.strip_tags(x) for x in t]
            d['alternate_names'] = t
    # overview_reviews: {category name: score} taken from the rating bars
    tag = html.find(name='div', attrs={'id': 'reviews-overview-hbar-box'})
    if tag:
        tagg = tag.findAll(name='div', attrs={'class': 'reviews-overview-horizzontalbar'})
        if tagg:
            d['overview_reviews'] = dict()
            for t in tagg:
                d['overview_reviews'][t.find(name='p').text] = float(t.find(name='b').text)
    # location_distances: strip the <h2> header, then parse each
    # "<name> - <km> km / <mi> mi" line into a dict
    tag = html.find(name='div', attrs={'id': 'location-distances'})
    if tag:
        t = re.sub("^[^<]+<h2>.+</h2>","", tag.renderContents()).split('<br/>')
        tt = [re.findall(pois_near_hotel_exp, x) for x in t]
        tt = [x[0] for x in tt if x]
        d['poi_and_distances'] = [{'poi': parse_util.strip_spaces(x[0].replace('"', '')), 'km': float(x[1]), 'mi': float(x[2])} for x in tt]
    return d
| mit | -6,565,722,863,570,062,000 | 39.700935 | 140 | 0.586912 | false |
auto-mat/django-webmap-corpus | webmap/models.py | 1 | 18853 | # -*- coding: utf-8 -*-
from author.decorators import with_author
from colorful.fields import RGBColorField
from constance.admin import config
import django
from django import forms
from django.contrib.gis.db import models
from django.contrib.gis.geos import GeometryCollection
from django.core.cache import cache
from django.db.models.signals import m2m_changed, post_delete, post_save
from django.forms import ModelForm
from django.utils.translation import gettext_lazy as _
from django_gpxpy import gpx_parse
from easy_thumbnails.files import get_thumbnailer
import fgp
from . import admin_image_widget
from .utils import SlugifyFileSystemStorage
def get_default_status():
    """Return the site-configured default Status pk, or 0 when unavailable."""
    try:
        return config.DEFAULT_STATUS_ID
    except Exception:
        # narrowed from a bare `except:`; constance may fail in several ways
        # (unconfigured setting, missing DB table during migrations)
        return 0
class Status(models.Model):
    """Display status of an object, layer, etc. -- e.g. active, proposed, cancelled."""
    # (docstring translated from the original Czech)
    name = models.CharField(unique=True, max_length=255, verbose_name=_(u"name"), help_text=_(u"Status name"))
    desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Status description."))
    # which audiences see objects carrying this status
    show = models.BooleanField(help_text=_(u"Show to map user"), default=False, verbose_name=_("show"))
    show_to_mapper = models.BooleanField(help_text=_(u"Show to mapper"), default=False, verbose_name=_("show to mapper"))

    class Meta:
        verbose_name = _(u"status")
        verbose_name_plural = _("statuses")

    def __str__(self):
        return self.name
class Layer(models.Model):
    """Layers that are shown on a particular map."""
    # (docstring translated from the original Czech)
    name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"Name of the layer"), default="")
    slug = models.SlugField(unique=True, verbose_name=_(u"name in URL"))
    desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_("Layer description."))
    status = models.ForeignKey(Status, verbose_name=_("status"), on_delete=models.PROTECT)
    order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False)
    remark = models.TextField(null=True, blank=True, help_text=_(u"Internal information about layer."), verbose_name=_("internal remark"))
    enabled = models.BooleanField(verbose_name=_(u"Enabled by defalut"), help_text=_(u"True = the layer is enabled on map load"), default=True)
    # icon dimensions are maintained automatically via height_field/width_field
    icon_height = models.IntegerField(default=20)
    icon_width = models.IntegerField(default=20)
    icon = models.ImageField(
        null=True,
        blank=True,
        upload_to='layer_icons',
        storage=SlugifyFileSystemStorage(),
        verbose_name=_("layer icon"),
        height_field='icon_height',
        width_field='icon_width',
    )

    class Meta:
        verbose_name = _(u"layer")
        verbose_name_plural = _(u"layers")
        ordering = ['order']

    def __init__(self, *args, **kwargs):
        # Refresh the dynamic default for `status` from the constance setting;
        # it must be looked up at instantiation time, not baked in at import.
        try:
            self._meta.get_field('status').default = get_default_status()
        except django.db.utils.ProgrammingError:
            # raised when DB tables don't exist yet (e.g. during the initial
            # migrate); fall back to the field's declared default
            pass
        return super(Layer, self).__init__(*args, **kwargs)

    def __str__(self):
        return self.name
class OverlayLayer(Layer):
    """A Layer subclass representing overlay layers."""
    # NOTE(review): a concrete (multi-table) subclass adding no fields;
    # presumably it exists only to carry distinct verbose names -- confirm
    # before considering a proxy model instead.
    class Meta:
        verbose_name = _(u"overlay layer")
        verbose_name_plural = _(u"overlay layers")
class Marker(models.Model):
    """Map markers with display style definition (icon, zoom range, line style)."""
    name = models.CharField(unique=True, max_length=255, verbose_name=_(u"name"), help_text=_("Name of the marker."))
    slug = models.SlugField(unique=True, verbose_name=_(u"name in URL"), null=True)
    # Relationships
    layer = models.ForeignKey(Layer, verbose_name=_("layer"), on_delete=models.PROTECT)
    status = models.ForeignKey(Status, verbose_name=_("status"), on_delete=models.PROTECT)
    # content
    # BUG FIX: user-visible help text had a typo ("descrption")
    desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Detailed marker description."))
    # NOTE(review): help text says "about layer" but this is the marker model --
    # likely a copy/paste slip; left unchanged to avoid invalidating translations.
    remark = models.TextField(null=True, blank=True, help_text=_(u"Internal information about layer."), verbose_name=_("internal remark"))
    # Base icon and zoom dependent display range
    # (icon dimensions are maintained automatically via height_field/width_field)
    default_icon_height = models.IntegerField(default=20)
    default_icon_width = models.IntegerField(default=20)
    default_icon = models.ImageField(
        null=True,
        blank=True,
        upload_to='icons',
        storage=SlugifyFileSystemStorage(),
        verbose_name=_("default icon"),
        height_field='default_icon_height',
        width_field='default_icon_width',
    )
    menu_icon_height = models.IntegerField(default=20)
    menu_icon_width = models.IntegerField(default=20)
    menu_icon = models.ImageField(
        null=True,
        blank=True,
        upload_to='icons/marker/menu',
        storage=SlugifyFileSystemStorage(),
        verbose_name=_("menu icon"),
        height_field='menu_icon_height',
        width_field='menu_icon_width',
    )
    minzoom = models.PositiveIntegerField(default=1, verbose_name=_("Minimal zoom"), help_text=_(u"Minimal zoom in which the POIs of this marker will be shown on the map."))
    maxzoom = models.PositiveIntegerField(default=10, verbose_name=_("Maximal zoom"), help_text=_(u"Maximal zoom in which the POIs of this marker will be shown on the map."))
    # Linear elements style
    line_width = models.FloatField(verbose_name=_(u"line width"), default=2,)
    line_color = RGBColorField(default="#ffc90e", verbose_name=_("line color"))
    created_at = models.DateTimeField(auto_now_add=True, verbose_name=_("created at"))
    last_modification = models.DateTimeField(auto_now=True, verbose_name=_("last modification at"))
    order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False)

    def line_color_kml(self):
        """Return line_color as a KML colour string (aabbggrr, fixed 0x88 alpha).

        KML orders channels alpha, blue, green, red -- the reverse of the
        stored #rrggbb value.
        """
        # idiom fix: first parameter renamed from `this` to the conventional `self`
        color = self.line_color[1:]
        return "88" + color[4:6] + color[2:4] + color[0:2]

    def __init__(self, *args, **kwargs):
        # Refresh the dynamic default for `status` from the constance setting;
        # ProgrammingError covers the initial-migrate case where the Status
        # table does not exist yet.
        try:
            self._meta.get_field('status').default = get_default_status()
        except django.db.utils.ProgrammingError:
            pass
        return super(Marker, self).__init__(*args, **kwargs)

    class Meta:
        permissions = [
            ("can_only_view", "Can only view"),
        ]
        verbose_name = _(u"marker")
        verbose_name_plural = _(u"markers")
        ordering = ['order', ]

    def __str__(self):
        return self.name
class VisibleManager(models.Manager):
"Manager that will return objects visible on the map"
def get_queryset(self):
return super(VisibleManager, self).get_queryset().filter(status__show=True, marker__status__show=True, marker__layer__status__show=True)
class Sector(models.Model):
"Map sector"
name = models.CharField(max_length=255, verbose_name=_(u"name"))
slug = models.SlugField(unique=True, verbose_name=_(u"name in URL"))
geom = models.PolygonField(verbose_name=_(u"area"), srid=4326, help_text=_(u"Sector area"))
objects = models.Manager()
class Meta:
verbose_name = _(u"sector")
verbose_name_plural = _(u"sectors")
def __str__(self):
return self.name
@with_author
@fgp.guard('importance', 'status', name='can_edit_advanced_fields')
class Poi(models.Model):
"Place in map"
name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"Exact place name"))
# Relationships
marker = models.ForeignKey(Marker, limit_choices_to={'status__show_to_mapper': 'True', 'layer__status__show_to_mapper': 'True'}, verbose_name=_(u"marker"), help_text=_("Select icon, that will be shown in map"), related_name="pois", on_delete=models.PROTECT)
status = models.ForeignKey(Status, default=0, help_text=_("POI status, determine if it will be shown in map"), verbose_name=_(u"status"), on_delete=models.SET_DEFAULT)
properties = models.ManyToManyField('Property', blank=True, help_text=_("POI properties"), verbose_name=_("properties"), limit_choices_to={'status__show_to_mapper': 'True'})
importance = models.SmallIntegerField(
default=0,
verbose_name=_(u"importance"),
help_text=_(u"""Minimal zoom modificator (use 20+ to show always).<br/>"""),
)
# Geographical intepretation
geom = models.GeometryCollectionField(
verbose_name=_(u"place geometry"),
default=None,
srid=4326,
help_text=_(u"""Add point: Select pencil with plus sign icon and place your point to the map.<br/>
Add line: Select line icon and by clicking to map draw the line. Finish drawing with double click.<br/>
Add area: Select area icon and by clicking to mapy draw the area. Finish drawing with double click.<br/>
Object edition: Select the first icon and then select object in map. Draw points in map to move them, use points in the middle of sections to add new edges."""),
)
objects = models.Manager()
# Own content (facultative)
desc = models.TextField(null=True, blank=True, verbose_name=_(u"description"), help_text=_(u"Text that will be shown after selecting POI."))
desc_extra = models.TextField(null=True, blank=True, verbose_name=_(u"detailed description"), help_text=_("Text that extends the description."))
url = models.URLField(null=True, blank=True, verbose_name=_("URL"), help_text=_(u"Link to the web page of the place."))
address = models.CharField(max_length=255, null=True, blank=True, verbose_name=_(u"adress"), help_text=_(u"Poi address (street, house number)"))
remark = models.TextField(null=True, blank=True, verbose_name=_(u"Internal remark"), help_text=_(u"Internal information about POI."))
# zde se ulozi slugy vsech vlastnosti, aby se pri renederovani kml
# nemusel delat db dotaz pro kazde Poi na jeho vlastnosti
properties_cache = models.CharField(max_length=255, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True, verbose_name=_("created at"))
last_modification = models.DateTimeField(auto_now=True, verbose_name=_("last modification at"))
visible = VisibleManager()
class Meta:
permissions = [
("can_only_own_data_only", "Can only edit his own data"),
]
verbose_name = _("place")
verbose_name_plural = _("places")
def __str__(self):
if self.name:
return self.name
return str(self.marker)
def save_properties_cache(self):
self.properties_cache = u",".join([v.slug for v in self.properties.filter(status__show=True)])
self.save()
def get_absolute_url(self):
return "/misto/%i/" % self.id
def properties_list(self):
return u", ".join([p.name for p in self.properties.all()])
def __init__(self, *args, **kwargs):
try:
self._meta.get_field('status').default = get_default_status()
except django.db.utils.ProgrammingError:
pass
return super(Poi, self).__init__(*args, **kwargs)
def update_properties_cache(sender, instance, action, reverse, model, pk_set, **kwargs):
"Property cache actualization at POI save. It will not work yet after property removal."
if action == 'post_add':
instance.save_properties_cache()
m2m_changed.connect(update_properties_cache, Poi.properties.through)
class GpxPoiForm(ModelForm):
gpx_file = forms.FileField(required=False, help_text=_(u"Upload geometry by GPX file"))
class Meta:
model = Marker
exclude = ('geom',)
def clean(self):
cleaned_data = super(GpxPoiForm, self).clean()
if 'gpx_file' in self.cleaned_data:
gpx_file = self.cleaned_data['gpx_file']
if gpx_file:
cleaned_data['geom'] = GeometryCollection(gpx_parse.parse_gpx_filefield(gpx_file))
class Legend(models.Model):
"map legend items of underlay"
name = models.CharField(unique=True, max_length=255, verbose_name=_(u"name"))
en_name = models.CharField(unique=True, max_length=255, null=True, verbose_name=_(u"English name"))
slug = models.SlugField(unique=True, verbose_name=_(u"name in URL"))
desc = models.TextField(null=True, blank=True, verbose_name=_(u"description"))
image = models.ImageField(upload_to='ikony', storage=SlugifyFileSystemStorage(), verbose_name=_(u"image"))
class Meta:
verbose_name = _(u"legend item")
verbose_name_plural = _(u"legend items")
def __str__(self):
return self.name
def image_tag(self):
return admin_image_widget.list_display(self.image)
image_tag.allow_tags = True
image_tag.short_description = _(u"image")
class LegendAdminForm(ModelForm):
class Meta:
model = Legend
exclude = {}
widgets = {
'image': admin_image_widget.AdminImageWidget,
}
def invalidate_cache(sender, instance, **kwargs):
if sender in [Status, Layer, Marker, Poi, Property, Legend, Sector]:
cache.clear()
post_save.connect(invalidate_cache)
post_delete.connect(invalidate_cache)
class Property(models.Model):
"Place properties"
name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"Status name"))
status = models.ForeignKey(Status, verbose_name=_("status"), on_delete=models.PROTECT)
as_filter = models.BooleanField(verbose_name=_("as filter?"), default=False, help_text=_(u"Show as a filter in right map menu?"))
order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False)
# content
slug = models.SlugField(unique=True, verbose_name=_("Name in URL"))
desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Property description."))
remark = models.TextField(null=True, blank=True, verbose_name=_(u"Internal remark"), help_text=_(u"Internal information about the property."))
default_icon_height = models.IntegerField(default=20)
default_icon_width = models.IntegerField(default=20)
default_icon = models.ImageField(
null=True,
blank=True,
upload_to='icons',
storage=SlugifyFileSystemStorage(),
verbose_name=_("default icon"),
height_field='default_icon_height',
width_field='default_icon_width',
)
class Meta:
verbose_name = _(u"property")
verbose_name_plural = _(u"properties")
ordering = ['order']
def __str__(self):
return self.name
def icon_tag(self):
return admin_image_widget.list_display(self.default_icon)
icon_tag.allow_tags = True
icon_tag.short_description = _(u"icon")
def __init__(self, *args, **kwargs):
try:
self._meta.get_field('status').default = get_default_status()
except django.db.utils.ProgrammingError:
pass
return super(Property, self).__init__(*args, **kwargs)
class License(models.Model):
name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"License name"))
desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"License description."))
class Meta:
verbose_name = _(u"license")
verbose_name_plural = _(u"licenses")
def __str__(self):
return self.name
class BaseLayer(Layer):
url = models.URLField(null=True, blank=True, verbose_name=_("URL"), help_text=_(u"Base layer tiles url. e.g. "))
class Meta:
verbose_name = _(u"base layer")
verbose_name_plural = _(u"base layers")
def __str__(self):
return self.name
class MapPreset(models.Model):
class Meta:
verbose_name = _(u"map preset")
verbose_name_plural = _(u"map presets")
ordering = ['order', ]
name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"Name of preset"))
desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Map preset description."))
status = models.ForeignKey(Status, verbose_name=_("status"), default=None, null=True, on_delete=models.SET_NULL)
base_layer = models.ForeignKey(BaseLayer, verbose_name=_("base layer"), on_delete=models.PROTECT)
overlay_layers = models.ManyToManyField(OverlayLayer, blank=True, verbose_name=_("overlay layers"), limit_choices_to={'status__show_to_mapper': 'True'})
order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False)
icon = models.ImageField(
null=False,
blank=False,
upload_to='preset_icons',
storage=SlugifyFileSystemStorage(),
verbose_name=_(u"preset icon"),
)
def overlay_layers_slugs(self):
return [l.slug for l in self.overlay_layers.all()]
@with_author
class Photo(models.Model):
poi = models.ForeignKey(Poi, related_name="photos", verbose_name=_("poi"), on_delete=models.PROTECT)
name = models.CharField(max_length=255, verbose_name=_(u"name"), help_text=_(u"Photo name"), blank=True)
desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Photo description."))
license = models.ForeignKey(License, verbose_name=_("license"), on_delete=models.PROTECT)
order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False)
photographer = models.CharField(max_length=255, verbose_name=_(u"Photography author"), blank=True, help_text=_(u"Full name of the author of the photography"))
status = models.ForeignKey(Status, default=None, help_text=_("Status, determine if the photo will be shown in map"), blank=False, null=True, verbose_name=_(u"status"), on_delete=models.SET_DEFAULT)
photo = models.ImageField(
null=False,
blank=False,
upload_to='photo',
storage=SlugifyFileSystemStorage(),
verbose_name=_(u"photo"),
help_text=_(u"Upload photo in full resolution."),
)
created_at = models.DateTimeField(auto_now_add=True, null=True, blank=True, verbose_name=_("created at"))
last_modification = models.DateTimeField(auto_now=True, null=True, blank=True, verbose_name=_("last modification at"))
def thumb_url(self):
return get_thumbnailer(self.photo)['photo_thumb'].url
# if we want to filter photos by poi position
objects = models.Manager()
def image_tag(self):
return admin_image_widget.list_display(self.photo)
image_tag.short_description = _(u"image")
image_tag.allow_tags = True
def __str__(self):
if self.name:
return self.name
return self.poi.name
class Meta:
permissions = [
("can_view_photo_list", "Can view photo list"),
]
verbose_name = _(u"photo")
verbose_name_plural = _(u"photographies")
ordering = ['order', ]
class PhotoAdminForm(ModelForm):
class Meta:
model = Photo
exclude = {}
widgets = {
'photo': admin_image_widget.AdminImageWidget,
}
| mit | -2,574,332,993,582,529,000 | 39.631466 | 261 | 0.657667 | false |
pitunti/alfaPitunti | plugin.video.alfa/channels/hentaienespanol.py | 1 | 1882 | # -*- coding: utf-8 -*-
import re
from core import httptools
from core import scrapertools
from core import servertools
from core.item import Item
from platformcode import logger
host = 'http://www.xn--hentaienespaol-1nb.net/'
headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0'],
['Referer', host]]
def mainlist(item):
logger.info()
itemlist = []
itemlist.append(Item(channel=item.channel, title="Todos", action="todas", url=host, thumbnail='', fanart=''))
itemlist.append(
Item(channel=item.channel, title="Sin Censura", action="todas", url=host + 'hentai/sin-censura/', thumbnail='',
fanart=''))
return itemlist
def todas(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
patron = '<div class="box-peli" id="post-.*?">.<h2 class="title">.<a href="([^"]+)">([^<]+)<\/a>.*?'
patron += 'height="170px" src="([^"]+)'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedtitle, scrapedthumbnail in matches:
url = scrapedurl
title = scrapedtitle # .decode('utf-8')
thumbnail = scrapedthumbnail
fanart = ''
itemlist.append(
Item(channel=item.channel, action="findvideos", title=title, url=url, thumbnail=thumbnail, fanart=fanart))
# Paginacion
title = ''
siguiente = scrapertools.find_single_match(data, 'class="nextpostslink" rel="next" href="([^"]+)">')
title = 'Pagina Siguiente >>> '
fanart = ''
itemlist.append(Item(channel=item.channel, action="todas", title=title, url=siguiente, fanart=fanart))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
item.url = item.url + texto
if texto != '':
return todas(item)
else:
return []
| gpl-3.0 | -2,298,668,226,065,378,000 | 28.873016 | 119 | 0.623804 | false |
EIREXE/SpaceDock | SpaceDock/ckan.py | 2 | 2394 | from SpaceDock.config import _cfg
from github import Github
from flask import url_for
import subprocess
import json
import os
import re
# TODO(Thomas): Make this modular
def send_to_ckan(mod):
if not _cfg("netkan_repo_path"):
return
if not mod.ckan:
return
json_blob = {
'spec_version': 'v1.4',
'identifier': re.sub(r'\W+', '', mod.name),
'$kref': '#/ckan/spacedock/' + str(mod.id),
'license': mod.license,
'x_via': 'Automated ' + _cfg('site-name') + ' CKAN submission'
}
wd = _cfg("netkan_repo_path")
path = os.path.join(wd, 'NetKAN', json_blob['identifier'] + '.netkan')
if os.path.exists(path):
# If the file is already there, then chances are this mod has already been indexed
return
with open(path, 'w') as f:
f.write(json.dumps(json_blob, indent=4))
subprocess.call(['git', 'fetch', 'upstream'], cwd=wd)
subprocess.call(['git', 'checkout', '-b', 'add-' + json_blob['identifier'], 'upstream/master'], cwd=wd)
subprocess.call(['git', 'add', '-A'], cwd=wd)
subprocess.call(['git', 'commit', '-m', 'Add {0} from '.format(mod.name) + _cfg('site-name') + '\n\nThis is an automated commit on behalf of {1}'\
.format(mod.name, mod.user.username), '--author={0} <{1}>'.format(mod.user.username, mod.user.email)], cwd=wd)
subprocess.call(['git', 'push', '-u', 'origin', 'add-' + json_blob['identifier']], cwd=wd)
g = Github(_cfg('github_user'), _cfg('github_pass'))
r = g.get_repo("KSP-CKAN/NetKAN")
r.create_pull(title="Add {0} from ".format(mod.name) + _cfg('site-name'), base=r.default_branch, head=_cfg('github_user') + ":add-" + json_blob['identifier'], body=\
"""\
This pull request was automatically generated by """ + _cfg('site-name') + """ on behalf of {0}, to add [{1}]({4}{2}) to CKAN.
Mod details:
name = {2}
author = {0}
description = {5}
abstract = {6}
license = {7}
Homepage = {8}
Please direct questions about this pull request to [{0}]({4}{3}).
""".format(mod.user.username, mod.name,\
url_for('mods.mod', mod_name=mod.name, id=mod.id),\
url_for("profile.view_profile", username=mod.user.username),\
_cfg("protocol") + "://" + _cfg("domain"),\
mod.description, mod.short_description,\
mod.license, mod.external_link))
| mit | -2,606,136,009,649,760,000 | 40 | 169 | 0.587719 | false |
clchiou/garage | py/garage/garage/multiprocessing/backport.py | 1 | 1205 | __all__ = [
'BoundedSemaphore',
'UnlimitedSemaphore',
'Timeout',
]
import threading
import time
# NOTE: This module is Python 2 compatible.
class Timeout(Exception):
pass
# Because Python 2 semaphore does not support timeout...
class BoundedSemaphore(object):
def __init__(self, value):
if value < 0:
raise ValueError('semaphore initial value must be >= 0')
self._cond = threading.Condition(threading.Lock())
self._initial_value = value
self._value = value
def acquire(self, timeout):
with self._cond:
endtime = time.time() + timeout
while self._value == 0:
timeout = endtime - time.time()
if timeout <= 0:
raise Timeout
self._cond.wait(timeout)
self._value -= 1
def release(self):
with self._cond:
if self._value >= self._initial_value:
raise ValueError('semaphore is released too many times')
self._value += 1
self._cond.notify()
class UnlimitedSemaphore(object):
def acquire(self, timeout):
pass
def release(self):
pass
| mit | 3,029,463,247,928,132,600 | 22.173077 | 72 | 0.561826 | false |
hvdwolf/Navigator-traffic_lights | traffic_lights_csv.py | 1 | 7684 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Version 1.0, 201607, Harry van der Wolf
# Version 1.1, 201701, Harry van der Wolf; urllib -> do not read large files into memory)
# Version 1.3, 201701, Harry van der Wolf; use csv as intermediate format as gpsbabel osm to gpx only copies about 10%
import os, sys, platform, subprocess, shutil
if sys.version_info<(3,0,0):
# Fall back to Python 2's urllib2
from urllib2 import urlopen
else:
# For Python 3.0 and later
from urllib.request import urlopen
# Use dictionary for our variables, makes it easier to structure this script into functions (if I feel it is necessary :) )
var_dirs = {}
###################################################
###################################################
# Between the double lines you need to modify some data. Outside is not necessary
# These two lines are for windows. Note: Even on windows you need to use FORWARD slashes
var_dirs['gpsbabel'] = "C:/Users/387640/Downloads/GPSBabel/gpsbabel.exe"
var_dirs['DIGGERPATH'] = "C:/Users/387640/Downloads/digger_console"
# Now select a combination of a region with its countries/states
# europe or north-america, asia, south-america, africa, australia-oceania, central-america
### Europe
# Note: Even though the script allows you to download and process all countris of europe in one go, I had to do it in 3 times.
# Simply because the download.geofabrik.de server stopped me after a number of countries with "too many requests in this
# time period from this user"
region = "europe"
countries = ["albania", "andorra", "austria", "azores", "belarus", "belgium", "bosnia-herzegovina", "bulgaria", "croatia", "cyprus", "czech-republic", "denmark", "estonia", "faroe-islands", "finland", "france", "georgia", "germany", "great-britain", "greece", "hungary", "iceland", "ireland-and-northern-ireland", "isle-of-man", "italy", "kosovo", "latvia", "liechtenstein", "lithuania", "luxembourg", "macedonia", "malta", "moldova", "monaco", "montenegro", "netherlands", "norway", "poland", "portugal", "romania", "serbia", "slovakia", "slovenia", "spain", "sweden", "switzerland", "turkey", "ukraine"]
### Russia" -> Comes without region
#region = ""
#countries = ["russia"]
### North-America
#region="north-america"
# In this case I choose for the USA sub-regions instead of separated states
#countries = ["canada", "greenland", "mexico", "us-midwest", "us-northeast", "us-pacific", "us-south", "us-west"]
### South-America
#region = "south-america"
#countries = ["argentina", "bolivia", "brazil", "chile", "colombia", "ecuador", "paraguay", "peru", "suriname", "uruguay"]
### Asia
#region = "asia"
#countries = ["afghanistan", "azerbaijan", "bangladesh", "cambodia", "china", "gcc-states", "india", "indonesia", "iran", "iraq", "israel-and-palestine", "japan", "jordan", "kazakhstan", "kyrgyzstan", "lebanon", "malaysia-singapore-brunei", "maldives", "mongolia", "myanmar", "nepal", "north-korea", "pakistan", "philippines", "south-korea", "sri-lanka", "syria", "taiwan", "tajikistan", "thailand", "turkmenistan", "uzbekistan", "vietnam", "yemen"]
# small test country
#region = "europe"
#countries = ["luxembourg"]
# Below these double hashtags line you should not have to change anything
###################################################
###################################################
######################################################################
# Now create some base directories and the variables for it
var_dirs['CUR_DIR'] = os.path.dirname(os.path.abspath(__file__))
var_dirs['WORKDIR'] = os.path.join(var_dirs['CUR_DIR'], "Workdir")
if not os.path.exists(var_dirs['WORKDIR']):
os.mkdir(var_dirs['WORKDIR'])
var_dirs['OutputDir'] = os.path.join(var_dirs['CUR_DIR'], "OutputDir")
if not os.path.exists(var_dirs['OutputDir']):
os.mkdir(var_dirs['OutputDir'])
# set for windows
var_dirs['DIGGER'] = os.path.join(var_dirs['DIGGERPATH'], "DiggerConsole.exe")
var_dirs['TOOLDIR'] = os.path.join(var_dirs['CUR_DIR'], "tools")
OSplatform = platform.system()
for country in countries:
print("\n\n== Downloading and processing " + country + " ==")
print("\n== Downloading")
map_url = "http://download.geofabrik.de/" + region + "/" + country + "-latest.osm.pbf"
mapfile = urlopen( map_url )
filesprefix = os.path.join(var_dirs['WORKDIR'], country)
with open( filesprefix + "-latest.osm.pbf", 'wb') as output:
while True:
tmp = mapfile.read(1024)
if not tmp:
break
output.write(tmp)
print("\n== Converting " + country + " to .o5m format")
if OSplatform == "Windows":
os.system(os.path.join(var_dirs['TOOLDIR'], "osmconvert.exe") + " -v " + filesprefix + "-latest.osm.pbf" + " --drop-author --out-o5m -o=" + filesprefix + "-latest.o5m")
else:
os.system("osmconvert -v " + filesprefix + "-latest.osm.pbf" + " --drop-author --out-o5m > " + filesprefix + "-latest.o5m")
print("\n\n== Filtering the traffic signals out of " + country + " ==")
# on any pc/server with more than 2GB memory remove the --hash-memory=400-50-2 parameter
if OSplatform == "Windows":
os.system(os.path.join(var_dirs['TOOLDIR'], "osmfilter.exe") + " " + filesprefix + "-latest.o5m" + " --hash-memory=400-50-2 --parameter-file=" + os.path.join(var_dirs['CUR_DIR'], 'traffic_signals.txt') + " -o=" + filesprefix + "-latest.osm")
else:
os.system("osmfilter " + filesprefix + "-latest.o5m" + " --hash-memory=400-50-2 --parameter-file=traffic_signals.txt > " + filesprefix + "-latest.osm")
print("\n\n== run gpsbabel on our filtered osm file for " + country + " to convert to csv ==")
if OSplatform == "Windows":
os.system('"' + var_dirs['gpsbabel'] + '"' + " -i osm -f " + filesprefix + "-latest.osm -o unicsv,fields=lat+lon+osm_id -F " + filesprefix + "-latest.csv")
#subprocess.call(gpsbabel + " -i osm -f " + country + "-latest.osm -o gpx -F " + country + "-latest.gpx")
else:
os.system("gpsbabel -i osm -f " + filesprefix + "-latest.osm -o unicsv,fields=lat+lon+osm_id -F " + filesprefix + "-latest.csv")
print("Removing our downloaded files and intermediate files to clean up")
os.remove(filesprefix + "-latest.osm.pbf")
os.remove(filesprefix + "-latest.osm")
os.remove(filesprefix + "-latest.o5m")
#os.remove(filesprefix + "-latest.csv")
print("###############################################")
print("Now creating the mca file")
# Create tmp dir for digger_config and gpx
TMPworkDir = os.path.join(var_dirs['CUR_DIR'], country.upper() + "-TrafficSignals")
if not os.path.exists(TMPworkDir):
os.mkdir(TMPworkDir)
shutil.copyfile(filesprefix + "-latest.csv", os.path.join(TMPworkDir, country.upper() + "-TrafficSignals.csv"))
# Create digger_config file
f = open(os.path.join(var_dirs['CUR_DIR'],"digger_config_csv.xml"),'r')
filedata = f.read()
f.close()
newdata = filedata.replace("COUNTRY", country.upper())
newdata = newdata.replace("TMPWORKDIR", TMPworkDir.replace('\\', '/'))
newdata = newdata.replace("CUR_DIR", var_dirs['CUR_DIR'].replace("\\","/"))
f = open(os.path.join(TMPworkDir, "digger_config.xml"),'w')
f.write(newdata)
f.close()
# Switch to digger console directory
os.chdir(var_dirs['DIGGERPATH'])
print("Calling diggerconsole to create the mca")
# do a simple system call
os.system(var_dirs['DIGGER'] + " " + os.path.join(TMPworkDir, "digger_config.xml"))
# copy mca to output folder
shutil.move(os.path.join(var_dirs['CUR_DIR'], country.upper() + "-TrafficSignals.mca"), os.path.join(var_dirs['OutputDir'], country.upper() + "-TrafficSignals.mca"))
print("###############################################")
print("###############################################")
print("Your mca file(s) should now be available in " + var_dirs['OutputDir'])
| gpl-3.0 | 2,394,400,697,680,552,000 | 51.272109 | 605 | 0.645497 | false |
garvenshen/swquota | tests/test_swquota.py | 1 | 6671 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
try:
from swift.common.swob import Request
except ImportError:
from webob.exc import Request
from swquota.middleware import Swquota
class FakeCache(object):
def __init__(self, val):
self.val = val
def get(self, *args):
return self.val
def set(self, *args, **kwargs):
pass
class FakeApp(object):
def __init__(self, headers=[]):
self.headers = headers
def __call__(self, env, start_response):
start_response('200 OK', self.headers)
return []
def start_response(*args):
pass
class TestAccountQuota(unittest.TestCase):
def test_unauthorized(self):
headers = [('x-account-bytes-used', 1000), ]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
#Response code of 200 because authentication itself is not done here
self.assertEquals(res.status_int, 200)
def test_no_quotas(self):
headers = [('x-account-bytes-used', 1000), ]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'REMOTE_USER': 'a'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota(self):
headers = [('x-account-bytes-used', 1000),
('x-account-meta-bytes-limit', 0)]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'REMOTE_USER': 'a'})
res = req.get_response(app)
self.assertEquals(res.status_int, 413)
def test_exceed_bytes_quota_reseller(self):
headers = [('x-account-bytes-used', 1000),
('x-account-meta-bytes-limit', 0)]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'REMOTE_USER': 'a.,.reseller_admin'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota_reseller_keystone(self):
headers = [('x-account-bytes-used', 1000),
('x-account-meta-bytes-limit', 0)]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'HTTP_X_ROLES': 'a,reseller'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_not_exceed_bytes_quota(self):
headers = [('x-account-bytes-used', 1000),
('x-account-meta-bytes-limit', 2000)]
app = Swquota(FakeApp(headers), {})
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'REMOTE_USER': 'a'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_invalid_quotas(self):
headers = [('x-account-bytes-used', 0), ]
app = Swquota(FakeApp(headers), {})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_BYTES_LIMIT': 'abc',
'REMOTE_USER': 'a,.reseller_admin'})
res = req.get_response(app, {})
self.assertEquals(res.status_int, 400)
def test_valid_quotas_admin(self):
headers = [('x-account-bytes-used', 0), ]
app = Swquota(FakeApp(headers), {})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_BYTES_LIMIT': '100',
'REMOTE_USER': 'a'})
res = req.get_response(app, {})
self.assertEquals(res.status_int, 403)
def test_valid_quotas_reseller(self):
headers = [('x-account-bytes-used', 0), ]
app = Swquota(FakeApp(headers), {})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_BYTES_LIMIT': 100,
'REMOTE_USER': 'a.,.reseller_admin'})
res = req.get_response(app, {})
self.assertEquals(res.status_int, 200)
def test_delete_quotas(self):
headers = [('x-account-bytes-used', 0), ]
app = Swquota(FakeApp(headers), {})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_BYTES_LIMIT': None,
'REMOTE_USER': 'a'})
res = req.get_response(app, {})
self.assertEquals(res.status_int, 403)
def test_delete_quotas_reseller(self):
headers = [('x-account-bytes-used', 0), ]
app = Swquota(FakeApp(headers), {})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_BYTES_LIMIT': None,
'REMOTE_USER': 'a.,.reseller_admin'})
res = req.get_response(app, {})
self.assertEquals(res.status_int, 200)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 6,304,735,773,008,333,000 | 37.33908 | 78 | 0.510418 | false |
pablocarderam/genetargeter | gRNAScores/azimuth/metrics.py | 1 | 20467 | """
from https://gist.github.com/bwhite/3726239
Information Retrieval metrics
Useful Resources:
http://www.cs.utexas.edu/~mooney/ir-course/slides/Evaluation.ppt
http://www.nii.ac.jp/TechReports/05-014E.pdf
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
http://hal.archives-ouvertes.fr/docs/00/72/67/60/PDF/07-busa-fekete.pdf
Learning to Rank for Information Retrieval (Tie-Yan Liu)
"""
from time import time
import numpy as np
from scipy.stats.mstats import rankdata
from .elevation.metrics import spearman_weighted_swap_perm_test
def mean_reciprocal_rank(relevance_scores) -> float:
    """Compute the mean reciprocal rank (MRR) over a set of rankings.

    For each ranking, the score is the reciprocal of the rank of the first
    relevant item (the first element is rank 1); a ranking with no relevant
    item contributes 0. Relevance is binary (nonzero is relevant).

    Example from http://en.wikipedia.org/wiki/Mean_reciprocal_rank

    >>> rs = [[0, 0, 1], [0, 1, 0], [1, 0, 0]]
    >>> mean_reciprocal_rank(rs)
    0.61111111111111105
    >>> rs = np.array([[0, 0, 0], [0, 1, 0], [1, 0, 0]])
    >>> mean_reciprocal_rank(rs)
    0.5
    >>> rs = [[0, 0, 0, 1], [1, 0, 0], [1, 0, 0]]
    >>> mean_reciprocal_rank(rs)
    0.75

    Args:
        relevance_scores: Iterable of relevance-score sequences (list or
            numpy), each in rank order (first element is the first item).

    Returns:
        Mean reciprocal rank as a scalar float.
    """
    # For each ranking, the 0-based positions of its relevant (nonzero) items.
    hit_positions = (np.asarray(r).nonzero()[0] for r in relevance_scores)
    # Rank of the first hit is its index + 1; no hits at all scores 0.
    return np.mean(
        [1.0 / (hits[0] + 1) if hits.size else 0.0 for hits in hit_positions]
    )
def r_precision(relevance: list) -> np.ndarray:
"""Score is precision after all relevant documents have been retrieved
Relevance is binary (nonzero is relevant).
> r = [0, 0, 1]
> r_precision(r)
0.33333333333333331
> r = [0, 1, 0]
> r_precision(r)
0.5
> r = [1, 0, 0]
> r_precision(r)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
R Precision
"""
relevance = np.asarray(relevance) != 0
z = relevance.nonzero()[0]
if not z.size:
return 0.0
return np.mean(relevance[: z[-1] + 1])
def precision_at_k(r, k):
    """Score is precision @ k for a single ranking.

    Relevance is binary (nonzero is relevant).

    >>> r = [0, 0, 1]
    >>> precision_at_k(r, 1)
    0.0
    >>> precision_at_k(r, 2)
    0.0
    >>> precision_at_k(r, 3)
    0.33333333333333331

    Args:
        r: Relevance scores (list or numpy) in rank order
            (first element is the first item).
        k: Number of leading results to consider; must be >= 1.

    Returns:
        Precision @ k (fraction of the first k items that are relevant).

    Raises:
        AssertionError: If k < 1.
        ValueError: If len(r) < k.
    """
    if k < 1:
        # Same exception type as before, now with an explanatory message.
        raise AssertionError("k must be >= 1")
    r = np.asarray(r)[:k] != 0
    if r.size != k:
        raise ValueError("Relevance score length < k")
    return np.mean(r)
def average_precision(r):
"""Score is average precision (area under PR curve)
Relevance is binary (nonzero is relevant).
> r = [1, 1, 0, 1, 0, 1, 0, 0, 0, 1]
> delta_r = 1. / sum(r)
> sum([sum(r[:x + 1]) / (x + 1.) * delta_r for x, y in enumerate(r) if y])
0.7833333333333333
> average_precision(r)
0.78333333333333333
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Average precision
"""
r = np.asarray(r) != 0
out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]]
if not out:
return 0.0
return np.mean(out)
def mean_average_precision(rs):
"""Score is mean average precision
Relevance is binary (nonzero is relevant).
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1]]
>>> mean_average_precision(rs)
0.78333333333333333
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1], [0]]
>>> mean_average_precision(rs)
0.39166666666666666
Args:
rs: Iterator of relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Mean average precision
"""
return np.mean([average_precision(r) for r in rs])
def dcg_at_k(r, k, method=0):
"""Score is discounted cumulative gain (dcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
> dcg_at_k(r, 1)
3.0
> dcg_at_k(r, 1, method=1)
3.0
> dcg_at_k(r, 2)
5.0
> dcg_at_k(r, 2, method=1)
4.2618595071429155
> dcg_at_k(r, 10)
9.6051177391888114
> dcg_at_k(r, 11)
9.6051177391888114
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Discounted cumulative gain
"""
r = np.asfarray(r)[:k]
if r.size:
if method == 0:
return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
elif method == 1:
return np.sum(r / np.log2(np.arange(2, r.size + 2)))
else:
raise ValueError("method must be 0 or 1.")
return 0.0
def ndcg_at_k(r, k, method=0):
"""Score is normalized discounted cumulative gain (ndcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
>>> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
>>> ndcg_at_k(r, 1)
1.0
>>> r = [2, 1, 2, 0]
>>> ndcg_at_k(r, 4)
0.9203032077642922
>>> ndcg_at_k(r, 4, method=1)
0.96519546960144276
>>> ndcg_at_k([0], 1)
0.0
>>> ndcg_at_k([1], 2)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Normalized discounted cumulative gain
"""
dcg_max = dcg_at_k(sorted(r, reverse=True), k, method)
if not dcg_max:
return 0.0
return dcg_at_k(r, k, method) / dcg_max
# ------------------------------------------------------------------------------------
# custom stuff from us to avoid problem with ties
def ndcg_at_k_ties(
labels: list,
predictions: list,
k: int,
method: int = 0,
normalize_from_below_too: bool = False,
theta=None,
) -> float:
"""
See 2008 McSherry et al on how to efficiently compute NDCG with ties
labels are ground truth
if k=None then k gets set to len(labels)
labels and predictions get flattened here
set normalize_from_below_too=False for conventional
ndcg_at_k_ties, but note this will only
ensure the max is 1, not that the min is zero.
to get that added guarantee, set this argument to True
"""
if isinstance(labels, list):
labels = np.array(labels)
if isinstance(predictions, list):
predictions = np.array(predictions)
if len(labels.shape) != 1 and np.min(labels.shape) != 1:
raise AssertionError("should be 1D array or equivalent")
if len(predictions.shape) != 1 and np.min(predictions.shape) != 1:
raise AssertionError("should be 1D array or equivalent")
labels = labels.flatten()
predictions = predictions.flatten()
if np.any(labels.shape != predictions.shape):
raise AssertionError("labels and predictions should have the same shape")
if k is None:
k = len(labels)
labels = labels.copy()
dcg = dcg_at_k_ties(labels, predictions, k, method=method, theta=theta)
dcg_max = dcg_at_k_ties(labels, labels, k, method, theta=theta)
# NOTE: I have checked that dcg_at_k_ties and dcg_at_k match when there are no ties,
# or ties in the labels
if normalize_from_below_too:
dcg_min = dcg_at_k_ties(
np.sort(labels)[::-1], np.sort(predictions), k, method, theta=theta
)
else:
dcg_min = 0
numerator = dcg - dcg_min
if numerator <= -1e-5:
raise AssertionError()
numerator = np.max((0, numerator))
ndcg = numerator / (dcg_max - dcg_min)
if not 1.0 >= ndcg >= 0.0:
raise AssertionError(f"ndcg={ndcg} should be in [0,1]")
if not dcg_max:
ndcg = 0.0
return ndcg
def dcg_helper(discount_factors, gain, k, labels, method, predictions):
# step through, in current order (of decreasing predictions), accumulating tied gains
# (which may be singletons)
ii = 0
dcg = 0.0
while ii < k:
current_pred = predictions[ii]
current_gain = gain(labels[ii], method)
# intializing the tied cumulative variables
cum_tied_gain = current_gain
cum_tied_disc = discount_factors[ii]
num_ties = 1
ii += 1
# count number of ties in predictions
while ii < len(predictions) and predictions[ii] == current_pred: # while tied
num_ties += 1.0
cum_tied_gain += gain(labels[ii], method)
if ii < k:
cum_tied_disc += discount_factors[ii]
ii += 1
avg_gain = cum_tied_gain / num_ties
dcg += avg_gain * cum_tied_disc
if np.isnan(dcg):
raise AssertionError("found nan dcg")
return dcg
def dcg_at_k_ties(labels, predictions, k, method=0, theta=None):
"""
See 2008 McSherry et al on how to efficiently compute NDCG (method=0 here) with ties
(in the predictions)
'labels' are what the "ground truth" judges assign
'predictions' are the algorithm predictions corresponding to each label
Also, http://en.wikipedia.org/wiki/Discounted_cumulative_gain for basic defns
"""
if not isinstance(predictions, np.ndarray):
raise AssertionError()
if len(labels) != len(predictions):
raise AssertionError("labels and predictions should be of same length")
if k > len(labels):
raise AssertionError("k should be <= len(labels)")
# order both labels and preds so that they are in order of decreasing predictive score
sorted_ind = np.argsort(predictions)[::-1]
predictions = predictions[sorted_ind]
labels = labels[sorted_ind]
def gain(label, method):
if method == 0:
return label
elif method == 1:
return 2 ** label - 1.0
elif method == 2 or method == 3 or method == 4:
return label
else:
raise NotImplementedError()
if method == 0:
discount_factors = get_discount_factors(len(labels), discount="log2")
elif method == 1:
raise Exception("need to implement: log_2(i+1)")
elif method == 2:
discount_factors = get_discount_factors(len(labels), discount="linear")
elif method == 3:
discount_factors = get_discount_factors(len(labels), discount="combination")
elif method == 4:
if theta is None:
raise AssertionError("need to specify theta or theta")
discount_factors = get_discount_factors(
len(labels), discount="1/rtheta", theta=theta
)
else:
raise NotImplementedError()
if len(discount_factors) != len(labels):
raise AssertionError("discount factors has wrong length")
dcg = dcg_helper(discount_factors, gain, k, labels, method, predictions)
if np.isnan(dcg):
raise AssertionError("found nan dcg")
return dcg
def get_discount_factors(num_labels, discount="log2", theta=None):
ii_range = np.arange(num_labels) + 1
if discount == "log2":
discount_factors = np.concatenate(
(np.array([1.0]), 1.0 / np.log2(ii_range[1:]))
)
elif discount == "linear":
discount_factors = -ii_range / float(num_labels) + 1.0
elif discount == "combination":
l2 = np.concatenate((np.array([1.0]), 1.0 / np.log2(ii_range[1:])))
linear = -ii_range / float(num_labels) + 1.0
discount_factors = np.max((l2, linear), axis=0)
elif discount == "1/rtheta":
discount_factors = 1.0 / (ii_range ** theta)
else:
raise NotImplementedError
return discount_factors
def rank_data(r, rground):
# we checked this heavily, and is correct, e.g. rground will go from largest rank to smallest
r = rankdata(r)
rground = rankdata(rground)
if np.sum(r) != np.sum(rground):
raise AssertionError("ranks should add up to the same")
return r, rground
def dcg_alt(relevances, rank=20):
relevances = np.asarray(relevances)[:rank]
n_relevances = len(relevances)
if n_relevances == 0:
return 0.0
discounts = np.log2(np.arange(n_relevances) + 2)
return np.sum(relevances / discounts)
def ndcg_alt(relevances, rank=20):
best_dcg = dcg_alt(sorted(relevances, reverse=True), rank)
if best_dcg == 0:
return 0.0
return dcg_alt(relevances, rank) / best_dcg
def ndcg_at_k_swap_perm_test(
preds1, preds2, true_labels, nperm, method, k, normalize_from_below_too, theta=None
):
# pVal is the probability that we would observe as big an AUC diff as we
# did if the ROC curves were drawn from the null hypothesis (which is that
# one model does not perform better than the other)
#
# null hypothesis is that the prediction ranking are the same, so we exchange a random
# number of them with each other.
#
# see ndcg_at_k_ties for all but the first four parameters
#
# balance_zeros = True means that when we swap a zero for a non-zero value, we will also do
# a reverse swap
#
# this is a two-sided test, but since it is a symmetric null distribution, one should
# be able to divide the p-value by 2 to get the one-sided version (but think this through
# before using)
if isinstance(preds1, list):
preds1 = np.array(preds1)
else:
preds1 = preds1.flatten()
if isinstance(preds2, list):
preds2 = np.array(preds2)
else:
preds2 = preds2.flatten()
if isinstance(true_labels, list):
true_labels = np.array(true_labels)
else:
true_labels = true_labels.flatten()
if len(preds1) != len(preds2):
raise AssertionError("need same number of preditions from each model")
if len(preds1) != len(true_labels):
raise AssertionError("need same number of preditions in truth and predictions")
N = len(preds1)
# re-sort all by truth ordering so that when swap they are aligned
sorted_ind = np.argsort(true_labels)[::-1]
true_labels = true_labels[sorted_ind]
preds1 = preds1[sorted_ind]
preds2 = preds2[sorted_ind]
ranks1 = rankdata(preds1)
ranks2 = rankdata(preds2)
ndcg1 = ndcg_at_k_ties(
true_labels,
ranks1,
k=k,
method=method,
normalize_from_below_too=normalize_from_below_too,
theta=theta,
)
ndcg2 = ndcg_at_k_ties(
true_labels,
ranks2,
k=k,
method=method,
normalize_from_below_too=normalize_from_below_too,
theta=theta,
)
real_ndcg_diff = np.abs(ndcg1 - ndcg2)
perm_ndcg_diff = np.nan * np.zeros(nperm)
if np.all(preds1 == preds2):
pval = 1.0
else:
zero_ind = true_labels == 0
if np.sum(zero_ind) >= len(zero_ind):
raise AssertionError("balancing assumes there are more zeros than ones")
for _ in range(nperm):
pair_ind_to_swap = np.random.rand(N) < 0.5
ranks1_perm = ranks1.copy()
ranks1_perm[pair_ind_to_swap] = ranks2[pair_ind_to_swap]
ranks2_perm = ranks2.copy()
ranks2_perm[pair_ind_to_swap] = ranks1[pair_ind_to_swap]
ndcg1_perm = ndcg_at_k_ties(
true_labels,
ranks1_perm,
k=k,
method=method,
normalize_from_below_too=normalize_from_below_too,
theta=theta,
)
ndcg2_perm = ndcg_at_k_ties(
true_labels,
ranks2_perm,
k=k,
method=method,
normalize_from_below_too=normalize_from_below_too,
theta=theta,
)
for thing in theta:
tmp_diff = np.abs(ndcg1_perm[thing] - ndcg2_perm[thing])
perm_ndcg_diff[thing][_] = tmp_diff
num_stat_greater = np.max((((perm_ndcg_diff > real_ndcg_diff).sum() + 1), 1.0))
pval = num_stat_greater / nperm
return pval, real_ndcg_diff, perm_ndcg_diff, ndcg1, ndcg2
if __name__ == "__main__":
simulated_data = True
permute_real_data = True
T = 1000
nperm = 100
weights = np.array([0.001])
theta_range = weights # just to make life easier
# only for simulated data
N = 100
frac_zeros = 0
k = None
allp = np.nan * np.zeros((len(theta_range) + 1, T))
if not simulated_data:
# print(
# "loading up saved data..."
# ) # two-fold CV data from CRISPR off-target GUIDE-SEQ
# with open(r"\\nerds5\kevin\from_nicolo\gs.pickle", "rb") as f:
# predictions, truth_all = pickle.load(f)
# print("done.")
# N = len(truth_all[0])
pass # that gs.pickle file was not in the source repo
for t in range(T):
# totally simulated
if simulated_data:
truth = np.random.rand(N)
zero_ind = np.random.rand(N) < frac_zeros
truth[zero_ind] = 0
pred1 = np.random.rand(N)
pred2 = np.random.rand(N)
# this all refers to stuff from that unavailable gs.pickle from above
# else:
# fold = 0
# truth = truth_all[fold]
# pred1 = predictions["CFD"][fold]
# pred2 = predictions["product"][fold]
# if permute_real_data:
# truth = np.random.permutation(truth)
t0 = time()
for i, w in enumerate(weights):
weights_array = truth.copy()
weights_array += w
pvaltmp, real_corr_diff, perm_corr_diff, corr1, corr2 = spearman_weighted_swap_perm_test(
pred1, pred2, truth, nperm, weights_array
)
allp[i, t] = pvaltmp
t1 = time()
truth = np.array([3, 4, 2, 1, 0, 0, 0])
pred1 = np.array([3, 4, 2, 1, 0, 0, 0])
pred2 = np.array([2, 1, 3, 4, 5, 6, 7])
truth3 = np.array([3, 4, 2, 1, 0, 0, 0])
truth4 = np.zeros(7)
truth4[0] = 1
pred3 = np.array([2, 1, 3, 4, 5, 6, 7]) * 10
pred4 = np.array([4, 3, 2, 1, 0, 0, 0])
pred5 = np.array([4, 3, 1, 2, 0, 0, 0])
nperm = 1000
method = 4
theta = 0.5
normalize_from_below_too = True
k = len(pred3)
pval, real_ndcg_diff, perm_ndcg_diff, ndcg1, ndcg2 = ndcg_at_k_swap_perm_test(
pred1, pred2, truth, nperm, method, k, normalize_from_below_too, theta=theta
)
print(f"ndcg1={ndcg1}, ndcg2={ndcg2}, ndcg_diff={real_ndcg_diff}, p={pval}")
pval, real_ndcg_diff, perm_ndcg_diff, ndcg1, ndcg2 = ndcg_at_k_swap_perm_test(
pred1, pred1, truth, nperm, method, k, normalize_from_below_too, theta=theta
)
print(f"ndcg1={ndcg1}, ndcg2={ndcg2}, ndcg_diff={real_ndcg_diff}, p={pval}")
pval, real_ndcg_diff, perm_ndcg_diff, ndcg1, ndcg2 = ndcg_at_k_swap_perm_test(
pred1, pred4, truth, nperm, method, k, normalize_from_below_too, theta=theta
)
print(f"ndcg1={ndcg1}, ndcg2={ndcg2}, ndcg_diff={real_ndcg_diff}, p={pval}")
pval, real_ndcg_diff, perm_ndcg_diff, ndcg1, ndcg2 = ndcg_at_k_swap_perm_test(
pred1, pred5, truth, nperm, method, k, normalize_from_below_too, theta=theta
)
print(f"ndcg1={ndcg1}, ndcg2={ndcg2}, ndcg_diff={real_ndcg_diff}, p={pval}")
print(ndcg_at_k_ties(truth4, pred2, k, method=3, normalize_from_below_too=True))
print(ndcg_alt(truth[np.argsort(pred2)[::-1]], 5))
print(ndcg_at_k(truth[np.argsort(pred2)[::-1]], 5, method=1))
print(ndcg_at_k(truth[np.argsort(pred2)[::-1]], 5, method=0))
print(ndcg_at_k_ties(truth, pred2, 5, method=1))
print(ndcg_at_k_ties(truth, pred2, 5, method=0))
| mit | 6,929,932,319,144,185,000 | 30.010606 | 101 | 0.591391 | false |
TheVirtualLtd/bda.plone.orders | src/bda/plone/orders/upgrades.py | 1 | 14529 | # -*- coding: utf-8 -*-
from bda.plone.cart import get_object_by_uid
from bda.plone.orders import message_factory as _
from bda.plone.orders.common import acquire_vendor_or_shop_root
from bda.plone.orders.common import calculate_order_salaried
from bda.plone.orders.common import calculate_order_state
from bda.plone.orders.common import get_bookings_soup
from bda.plone.orders.common import get_order
from bda.plone.orders.common import get_orders_soup
from bda.plone.orders.common import OrderData
from bda.plone.orders.contacts import get_contacts_soup
from bda.plone.orders.interfaces import ITrading
from bda.plone.payment import Payments
from bda.plone.shipping.interfaces import IShippingItem
from decimal import Decimal
from node.ext.zodb.utils import reset_odict
from plone.uuid.interfaces import IUUID
from zope.component.hooks import getSite
import logging
import uuid
logger = logging.getLogger('bda.plone.orders UPGRADE')
def fix_bookings_vendor_uid(ctx=None):
"""Add vendor_uid attribute to booking records.
"""
portal = getSite()
soup = get_bookings_soup(portal)
data = soup.storage.data
need_rebuild = False
for item in data.values():
update = False
try:
item.attrs['vendor_uid']
if not isinstance(item.attrs['vendor_uid'], uuid.UUID):
update = True
except KeyError:
update = True
if not update:
continue
buyable_uid = item.attrs['buyable_uid']
obj = get_object_by_uid(portal, buyable_uid)
if not obj:
shop = acquire_vendor_or_shop_root(portal)
else:
shop = acquire_vendor_or_shop_root(obj)
vendor_uid = uuid.UUID(IUUID(shop))
item.attrs['vendor_uid'] = vendor_uid
need_rebuild = True
logging.info(
u"Added vendor_uid to booking {0}".format(item.attrs['uid'])
)
if need_rebuild:
soup.rebuild()
logging.info("Rebuilt bookings catalog")
def fix_orders_vendor_uids(ctx=None):
"""Add vendor_uids attribute to order records.
"""
portal = getSite()
soup = get_orders_soup(portal)
data = soup.storage.data
need_rebuild = False
for item in data.values():
update = False
try:
item.attrs['vendor_uids']
if not isinstance(item.attrs['vendor_uids'], list)\
or not item.attrs['vendor_uids']:
update = True
except KeyError:
update = True
if not update:
continue
order_data = OrderData(portal, order=item)
vendor_uids = set()
for booking in order_data.bookings:
vendor_uids.add(booking.attrs['vendor_uid'])
item.attrs['vendor_uids'] = list(vendor_uids)
need_rebuild = True
logging.info(
u"Added vendor_uids to order {0}".format(item.attrs['uid'])
)
if need_rebuild:
soup.rebuild()
logging.info("Rebuilt orders catalog")
def fix_bookings_state_salaried_tid(ctx=None):
portal = getSite()
soup = get_orders_soup(portal)
data = soup.storage.data
need_rebuild = False
for item in data.values():
order_data = OrderData(portal, order=item)
try:
state = item.attrs['state']
state_exists = True
except KeyError:
state = None
state_exists = False
try:
salaried = item.attrs['salaried']
salaried_exists = True
except KeyError:
salaried = None
salaried_exists = False
try:
tid = item.attrs['tid']
tid_exists = True
except KeyError:
tid = 'none' # tid default in b.p.payment
tid_exists = False
for booking in order_data.bookings:
# add too booking node
try:
booking.attrs['state']
except KeyError:
booking.attrs['state'] = state
need_rebuild = True
logging.info(
u"Added state {0} to booking {1}".format(
state, item.attrs['uid']
)
)
try:
booking.attrs['salaried']
except KeyError:
booking.attrs['salaried'] = salaried
need_rebuild = True
logging.info(
u"Added salaried {0} to booking {1}".format(
salaried, item.attrs['uid']
)
)
try:
booking.attrs['tid']
except KeyError:
booking.attrs['tid'] = tid
need_rebuild = True
logging.info(
u"Added tid {0} to booking {1}".format(
tid, item.attrs['uid']
)
)
# now, delete from order node
if state_exists:
del item.attrs['state']
if salaried_exists:
del item.attrs['salaried']
if tid_exists:
del item.attrs['tid']
if need_rebuild:
bookings_soup = get_bookings_soup(portal)
bookings_soup.rebuild()
logging.info("Rebuilt bookings catalog")
def fix_discount_attrs(ctx=None):
portal = getSite()
# discount attrs on order
orders_soup = get_orders_soup(portal)
need_rebuild = False
data = orders_soup.storage.data
for item in data.values():
try:
item.attrs['cart_discount_net']
except KeyError:
need_rebuild = True
item.attrs['cart_discount_net'] = Decimal(0)
logging.info(
u"Added cart_discount_net to order {0}".format(
item.attrs['uid']
)
)
try:
item.attrs['cart_discount_vat']
except KeyError:
need_rebuild = True
item.attrs['cart_discount_vat'] = Decimal(0)
logging.info(
u"Added cart_discount_vat to order {0}".format(
item.attrs['uid']
)
)
if need_rebuild:
orders_soup.rebuild()
logging.info("Rebuilt orders catalog")
# discount attrs on bookings
bookings_soup = get_bookings_soup(portal)
need_rebuild = False
data = bookings_soup.storage.data
for item in data.values():
try:
item.attrs['discount_net']
except KeyError:
need_rebuild = True
item.attrs['discount_net'] = Decimal(0)
logging.info(
u"Added discount_net to booking {0}".format(item.attrs['uid'])
)
if need_rebuild:
bookings_soup.rebuild()
logging.info("Rebuilt bookings catalog")
def fix_shipping_attrs(ctx=None):
portal = getSite()
orders_soup = get_orders_soup(portal)
data = orders_soup.storage.data
for item in data.values():
try:
item.attrs['shipping_method']
except KeyError:
item.attrs['shipping_method'] = 'unknown'
logging.info(
u"Added shipping_method {0} to booking {1}".format(
'unknown', item.attrs['uid']
)
)
try:
item.attrs['shipping_label']
except KeyError:
item.attrs['shipping_label'] = _('unknown', default=u'Unknown')
logging.info(
u"Added shipping_label {0} to booking {1}".format(
'unknown', item.attrs['uid']
)
)
try:
item.attrs['shipping_description']
except KeyError:
item.attrs['shipping_description'] = \
_('unknown', default=u'Unknown')
logging.info(
u"Added shipping_description {0} to booking {1}".format(
'unknown', item.attrs['uid']
)
)
try:
item.attrs['shipping_net']
except KeyError:
item.attrs['shipping_net'] = item.attrs['shipping']
logging.info(
u"Added shipping_net {0} to booking {1}".format(
item.attrs['shipping'], item.attrs['uid']
)
)
try:
item.attrs['shipping_vat']
except KeyError:
item.attrs['shipping_vat'] = Decimal(0)
logging.info(
u"Added shipping_vat {0} to booking {1}".format(
Decimal(0), item.attrs['uid']
)
)
def fix_payment_attrs(ctx=None):
portal = getSite()
payments = Payments(portal)
orders_soup = get_orders_soup(portal)
data = orders_soup.storage.data
for item in data.values():
try:
item.attrs['payment_method']
item.attrs['payment_label']
continue
except KeyError:
payment_method = item.attrs['payment_selection.payment']
payment = payments.get(payment_method)
if payment:
payment_label = payment.label
else:
payment_label = _('unknown', default=u'Unknown')
item.attrs['payment_method'] = payment_method
logging.info(
u"Added payment_method {0} to booking {1}".format(
payment_method, item.attrs['uid']
)
)
item.attrs['payment_label'] = payment_label
logging.info(
u"Added payment_label {0} to booking {1}".format(
payment_label, item.attrs['uid']
)
)
def fix_bookings_shippable(ctx=None):
portal = getSite()
soup = get_bookings_soup(portal)
data = soup.storage.data
need_rebuild = False
for booking in data.values():
try:
booking.attrs['shippable']
except KeyError:
obj = get_object_by_uid(portal, booking.attrs['buyable_uid'])
shippable = True
if obj:
shippable = IShippingItem(obj).shippable
booking.attrs['shippable'] = shippable
need_rebuild = True
logging.info(
u"Added shippable {0} to booking {1}".format(
shippable, booking.attrs['uid']
)
)
if need_rebuild:
bookings_soup = get_bookings_soup(portal)
bookings_soup.rebuild()
logging.info("Rebuilt bookings catalog")
def fix_bookings_trading(ctx=None):
portal = getSite()
soup = get_bookings_soup(portal)
data = soup.storage.data
need_rebuild = False
for booking in data.values():
try:
booking.attrs['item_number']
except KeyError:
obj = get_object_by_uid(portal, booking.attrs['buyable_uid'])
if obj:
trading = ITrading(obj)
item_number = trading.item_number
gtin = trading.gtin
else:
item_number = ''
gtin = ''
need_rebuild = True
booking.attrs['item_number'] = item_number
logging.info(
u"Added item_number {0} to booking {1}".format(
item_number, booking.attrs['uid']
)
)
booking.attrs['gtin'] = gtin
logging.info(
u"Added gtin {0} to booking {1}".format(
gtin, booking.attrs['uid']
)
)
if need_rebuild:
bookings_soup = get_bookings_soup(portal)
bookings_soup.rebuild()
logging.info("Rebuilt bookings catalog")
def reset_records(ctx=None):
def ignore_key(key):
return key.startswith('____')
portal = getSite()
soup = get_orders_soup(portal)
data = soup.storage.data
for order in data.values():
reset_odict(order.attrs.storage, ignore_key=ignore_key)
logging.info(
u'Reset attributes storage on order {0}'.format(
order.attrs['uid'],
)
)
soup = get_bookings_soup(portal)
data = soup.storage.data
for booking in data.values():
reset_odict(booking.attrs.storage, ignore_key=ignore_key)
logging.info(
u"Reset attributes storage on booking {0}".format(
booking.attrs['uid']
)
)
def fix_bookings_email(ctx=None):
"""Add email attribute to booking records from the corresponding order.
"""
portal = getSite()
soup = get_bookings_soup(portal)
data = soup.storage.data
need_rebuild = False
for item in data.values():
update = False
try:
item.attrs['email']
except KeyError:
update = True
if not update:
continue
order = get_order(portal, item.attrs['order_uid'])
email = order.attrs.get('personal_data.email', 'n/a')
item.attrs['email'] = email
need_rebuild = True
logging.info(
u"Added email to booking {0}".format(item.attrs['uid'])
)
if need_rebuild:
soup.rebuild()
logging.info("Rebuilt bookings catalog")
def fix_contacts_email(ctx=None):
"""Add email attribute to contact records.
"""
portal = getSite()
soup = get_contacts_soup(portal)
data = soup.storage.data
need_rebuild = False
for item in data.values():
update = False
try:
item.attrs['email']
except KeyError:
update = True
if not update:
continue
email = item.attrs.get('personal_data.email', 'n/a')
item.attrs['email'] = email
need_rebuild = True
logging.info(
u"Added email to contact {0}".format(item.attrs['uid'])
)
if need_rebuild:
soup.rebuild()
logging.info("Rebuilt contacts catalog")
def fix_order_state_and_salaried(ctx=None):
"""Re-add state and salaried on order, needed for sorting in orders table
"""
portal = getSite()
soup = get_orders_soup(portal)
data = soup.storage.data
for order in data.values():
order_data = OrderData(portal, uid=order.attrs['uid'])
bookings = order_data.bookings
order.attrs['state'] = calculate_order_state(bookings)
order.attrs['salaried'] = calculate_order_salaried(bookings)
soup.rebuild()
| bsd-3-clause | -2,279,739,217,219,872,300 | 31.286667 | 78 | 0.542157 | false |
pydoit/doit | doit/runner.py | 1 | 20369 | """Task runner"""
from multiprocessing import Process, Queue as MQueue
from threading import Thread
import pickle
import queue
import cloudpickle
from .exceptions import InvalidTask, CatchedException
from .exceptions import TaskFailed, SetupError, DependencyError, UnmetDependency
from .task import Stream, DelayedLoaded
# execution result codes, stored in Runner.final_result:
# SUCCESS - every selected task executed successfully or was up-to-date
# FAILURE - at least one task failed (TaskFailed) and no error occurred
# ERROR   - an unexpected error happened (setup/dependency/runtime error)
SUCCESS = 0
FAILURE = 1
ERROR = 2
class Runner():
    """Task runner: decide which tasks need to run and execute them.

    Overall control flow::

        run_all()
            run_tasks():
                for each task:
                    select_task()
                    execute_task()
                    process_task_result()
            finish()
    """
    def __init__(self, dep_manager, reporter, continue_=False,
                 always_execute=False, stream=None):
        """
        @param dep_manager: DependencyBase
        @param reporter: reporter object to be used
        @param continue_: (bool) execute all tasks even after a task failure
        @param always_execute: (bool) execute even if up-to-date or ignored
        @param stream: (task.Stream) global verbosity
        """
        self.dep_manager = dep_manager
        self.reporter = reporter
        self.continue_ = continue_
        self.always_execute = always_execute
        self.stream = stream if stream else Stream(0)
        self.teardown_list = []  # list of tasks to be teardown
        self.final_result = SUCCESS  # until something fails
        # set to True to abort the run loop (after a failure w/o continue_)
        self._stop_running = False

    def _handle_task_error(self, node, catched_excp):
        """handle all task failures/errors

        called whenever there is an error before executing a task or
        its execution is not successful.

        @param node: execution node of the failed task (has `task`
                     and `run_status` attributes)
        @param catched_excp: (CatchedException) the failure/error to record
        """
        assert isinstance(catched_excp, CatchedException)
        node.run_status = "failure"
        # make sure a failed task is not considered up-to-date on the next run
        self.dep_manager.remove_success(node.task)
        self.reporter.add_failure(node.task, catched_excp)
        # only return FAILURE if no errors happened.
        if isinstance(catched_excp, TaskFailed) and self.final_result != ERROR:
            self.final_result = FAILURE
        else:
            self.final_result = ERROR
        if not self.continue_:
            self._stop_running = True

    def _get_task_args(self, task, tasks_dict):
        """get values from other tasks (`getargs`) and set them as options

        @param task: (Task) task about to be executed
        @param tasks_dict: (dict) task-name -> Task, used to resolve getargs
        """
        task.init_options()

        def get_value(task_id, key_name):
            """get single value or dict from task's saved values"""
            if key_name is None:
                return self.dep_manager.get_values(task_id)
            return self.dep_manager.get_value(task_id, key_name)

        # selected just need to get values from other tasks
        for arg, value in task.getargs.items():
            task_id, key_name = value
            if tasks_dict[task_id].has_subtask:
                # if a group task, pass values from all sub-tasks
                arg_value = {}
                # length of base name string (used to strip the
                # "<base-name>:" prefix from each sub-task id)
                base_len = len(task_id) + 1
                for sub_id in tasks_dict[task_id].task_dep:
                    name = sub_id[base_len:]
                    arg_value[name] = get_value(sub_id, key_name)
            else:
                arg_value = get_value(task_id, key_name)
            task.options[arg] = arg_value

    def select_task(self, node, tasks_dict):
        """Return bool indicating whether the task should be executed.

         * side-effect: set task.options

        Tasks should be executed if they are not up-to-date.

        Tasks that contain setup-tasks must be selected twice,
        so it gives a chance for dependency tasks to be executed after
        checking it is not up-to-date.
        """
        task = node.task

        # if run_status is not None, it was already calculated
        if node.run_status is None:
            self.reporter.get_status(task)

            # overwrite with effective verbosity
            task.overwrite_verbosity(self.stream)

            # check if task should be ignored (user controlled)
            if node.ignored_deps or self.dep_manager.status_is_ignore(task):
                node.run_status = 'ignore'
                self.reporter.skip_ignore(task)
                return False

            # check task_deps
            if node.bad_deps:
                bad_str = " ".join(n.task.name for n in node.bad_deps)
                self._handle_task_error(node, UnmetDependency(bad_str))
                return False

            # check if task is up-to-date
            res = self.dep_manager.get_status(task, tasks_dict)
            if res.status == 'error':
                msg = "ERROR: Task '{}' checking dependencies: {}".format(
                    task.name, res.get_error_message())
                self._handle_task_error(node, DependencyError(msg))
                return False

            # set node.run_status
            if self.always_execute:
                node.run_status = 'run'
            else:
                node.run_status = res.status

            # if task is up-to-date skip it
            if node.run_status == 'up-to-date':
                self.reporter.skip_uptodate(task)
                task.values = self.dep_manager.get_values(task.name)
                return False

            if task.setup_tasks:
                # don't execute now, execute setup first...
                return False
        else:
            # second visit (only for tasks with setup-tasks): sanity checks
            assert node.run_status == 'run', \
                "%s:%s" % (task.name, node.run_status)
            assert task.setup_tasks

        try:
            self._get_task_args(task, tasks_dict)
        except Exception as exception:
            msg = ("ERROR getting value for argument\n" + str(exception))
            self._handle_task_error(node, DependencyError(msg))
            return False

        return True

    def execute_task(self, task):
        """execute task's actions

        @return: None on success, or a CatchedException on failure/error
        """
        # register cleanup/teardown
        if task.teardown:
            self.teardown_list.append(task)

        # finally execute it!
        self.reporter.execute_task(task)
        return task.execute(self.stream)

    def process_task_result(self, node, catched_excp):
        """handles result: record success or dispatch to error handling"""
        task = node.task
        # save execution successful
        if catched_excp is None:
            task.save_extra_values()
            try:
                self.dep_manager.save_success(task)
            except FileNotFoundError as exception:
                # a declared dependency/target vanished before it could
                # be fingerprinted -> treat as dependency error
                msg = ("ERROR: Task '{}' saving success: " \
                       "Dependent file '{}' does not exist".format(
                           task.name, exception.filename))
                catched_excp = DependencyError(msg)
            else:
                node.run_status = "successful"
                self.reporter.add_success(task)
                return
        # task error
        self._handle_task_error(node, catched_excp)

    def run_tasks(self, task_dispatcher):
        """This will actually run/execute the tasks.
        It will check file dependencies to decide if task should be executed
        and save info on successful runs.
        It also deals with output to stdout/stderr.

        @param task_dispatcher: L{TaskDispatcher}
        """
        node = None
        while True:
            if self._stop_running:
                break

            # the dispatcher generator yields the next node to be processed;
            # the previously processed node is sent back so the dispatcher
            # can update its dependency bookkeeping
            try:
                node = task_dispatcher.generator.send(node)
            except StopIteration:
                break

            if not self.select_task(node, task_dispatcher.tasks):
                continue

            catched_excp = self.execute_task(node.task)
            self.process_task_result(node, catched_excp)

    def teardown(self):
        """run teardown from all tasks"""
        # teardown in reverse registration order (LIFO)
        for task in reversed(self.teardown_list):
            self.reporter.teardown_task(task)
            catched = task.execute_teardown(self.stream)
            if catched:
                msg = "ERROR: task '%s' teardown action" % task.name
                error = SetupError(msg, catched)
                self.reporter.cleanup_error(error)

    def finish(self):
        """finish running tasks

        @return: (int) final result code (SUCCESS/FAILURE/ERROR)
        """
        # flush update dependencies
        self.dep_manager.close()
        self.teardown()

        # report final results
        self.reporter.complete_run()
        return self.final_result

    def run_all(self, task_dispatcher):
        """entry point to run tasks

        @param task_dispatcher: (TaskDispatcher)
        @return: (int) final result code (SUCCESS/FAILURE/ERROR)
        """
        try:
            if hasattr(self.reporter, 'initialize'):
                self.reporter.initialize(task_dispatcher.tasks,
                                         task_dispatcher.selected_tasks)
            self.run_tasks(task_dispatcher)
        except InvalidTask as exception:
            self.reporter.runtime_error(str(exception))
            self.final_result = ERROR
        finally:
            self.finish()
        return self.final_result
# JobXXX objects sent from the main process to sub-processes for execution
class JobHold(object):
    """Indicates there is no task ready to be executed"""
    # unique sentinel identifying this job kind (compared with `is`
    # on the receiving side, surviving pickling of the job instance)
    type = object()
class JobTask(object):
    """Contains a Task object"""
    # unique sentinel identifying this job kind
    type = object()

    def __init__(self, task):
        """Pickle *task* with cloudpickle for transfer to a sub-process.

        Raises InvalidTask if the task holds an unpicklable attribute.
        """
        self.name = task.name
        try:
            # cloudpickle handles closures/lambdas that plain pickle cannot
            self.task_pickle = cloudpickle.dumps(task)
        except pickle.PicklingError as excp:
            msg = """Error on Task: `{}`.
Task created at execution time that has an attribute than can not be pickled,
so not feasible to be used with multi-processing. To fix this issue make sure
the task is pickable or just do not use multi-processing execution.

Original exception {}: {}
"""
            raise InvalidTask(msg.format(self.name, excp.__class__, excp))
class JobTaskPickle(object):
    """dict of Task object excluding attributes that might be unpicklable"""
    # unique sentinel identifying this job kind
    type = object()

    def __init__(self, task):
        # keep only the pickle-safe subset of the task's attributes;
        # this dict is what actually crosses the process boundary
        self.task_dict = task.pickle_safe_dict()

    @property
    def name(self):
        """Task name, read from the safe dict."""
        return self.task_dict['name']
class MReporter(object):
    """send reported messages to master process

    puts a dictionary {'name': <task-name>,
                       'reporter': <reporter-method-name>}
    on runner's 'result_q'
    """
    def __init__(self, runner, reporter_cls):
        self.runner = runner
        self.reporter_cls = reporter_cls

    def __getattr__(self, method_name):
        """substitute any reporter method with a dispatching method"""
        # only proxy names the real reporter class actually defines
        if not hasattr(self.reporter_cls, method_name):
            raise AttributeError(method_name)
        def rep_method(task):
            # forward the call as a message; the master process replays it
            # on its real reporter instance
            self.runner.result_q.put({'name': task.name,
                                      'reporter': method_name})
        return rep_method

    def complete_run(self):
        """ignore this on MReporter"""
        # the master process produces the final report, not the worker
        pass
class MRunner(Runner):
    """MultiProcessing Runner """
    # factories for the queue/worker primitives; MThreadRunner overrides
    # these with thread-based equivalents
    Queue = staticmethod(MQueue)
    Child = staticmethod(Process)

    @staticmethod
    def available():
        """check if multiprocessing module is available"""
        # see: https://bitbucket.org/schettino72/doit/issue/17
        #      http://bugs.python.org/issue3770
        # not available on BSD systens
        try:
            import multiprocessing.synchronize
            multiprocessing # pyflakes
        except ImportError: # pragma: no cover
            return False
        else:
            return True

    def __init__(self, dep_manager, reporter,
                 continue_=False, always_execute=False,
                 stream=None, num_process=1):
        Runner.__init__(self, dep_manager, reporter, continue_=continue_,
                        always_execute=always_execute, stream=stream)
        self.num_process = num_process
        self.free_proc = 0   # number of free process
        self.task_dispatcher = None # TaskDispatcher retrieve tasks
        self.tasks = None    # dict of task instances by name
        self.result_q = None

    def __getstate__(self):
        # multiprocessing on Windows will try to pickle self.
        # These attributes are actually not used by spawend process so
        # safe to be removed.
        pickle_dict = self.__dict__.copy()
        pickle_dict['reporter'] = None
        pickle_dict['task_dispatcher'] = None
        pickle_dict['dep_manager'] = None
        return pickle_dict

    def get_next_job(self, completed):
        """get next task to be dispatched to sub-process

        On MP needs to check if the dependencies finished its execution
        @returns : - None -> no more tasks to be executed
                   - JobXXX
        """
        if self._stop_running:
            return None # gentle stop
        node = completed
        while True:
            # get next task from controller
            try:
                node = self.task_dispatcher.generator.send(node)
                # "hold on" means dependencies are still running elsewhere
                if node == "hold on":
                    self.free_proc += 1
                    return JobHold()
            # no more tasks from controller...
            except StopIteration:
                # ... terminate one sub process if no other task waiting
                return None
            # send a task to be executed
            if self.select_task(node, self.tasks):
                # If sub-process already contains the Task object send
                # only safe pickle data, otherwise send whole object.
                task = node.task
                if task.loader is DelayedLoaded and self.Child == Process:
                    return JobTask(task)
                else:
                    return JobTaskPickle(task)

    def _run_tasks_init(self, task_dispatcher):
        """initialization for run_tasks"""
        self.task_dispatcher = task_dispatcher
        self.tasks = task_dispatcher.tasks

    def _run_start_processes(self, job_q, result_q):
        """create and start sub-processes

        @param job_q: (multiprocessing.Queue) tasks to be executed
        @param result_q: (multiprocessing.Queue) collect task results
        @return list of Process
        """
        # #### DEBUG PICKLE ERRORS
        # class MyPickler (pickle._Pickler):
        #     def save(self, obj):
        #         print('pickling object  {} of type {}'.format(obj, type(obj)))
        #         try:
        #             Pickler.save(self, obj)
        #         except:
        #             print('error. skipping...')
        # from io import BytesIO
        # pickler = MyPickler(BytesIO())
        # pickler.dump(self)
        # ### END DEBUG
        proc_list = []
        for _ in range(self.num_process):
            # pre-load one job per process so each worker starts busy
            next_job = self.get_next_job(None)
            if next_job is None:
                break # do not start more processes than tasks
            job_q.put(next_job)
            process = self.Child(
                target=self.execute_task_subprocess,
                args=(job_q, result_q, self.reporter.__class__))
            process.start()
            proc_list.append(process)
        return proc_list

    def _process_result(self, node, task, result):
        """process result received from sub-process"""
        catched_excp = result.get('failure')
        # copy the worker-side task state back onto the master-side task
        task.update_from_pickle(result['task'])
        for action, output in zip(task.actions, result['out']):
            action.out = output
        for action, output in zip(task.actions, result['err']):
            action.err = output
        self.process_task_result(node, catched_excp)

    def run_tasks(self, task_dispatcher):
        """controls subprocesses task dispatching and result collection
        """
        # result queue - result collected from sub-processes
        result_q = self.Queue()
        # task queue - tasks ready to be dispatched to sub-processes
        job_q = self.Queue()
        self._run_tasks_init(task_dispatcher)
        proc_list = self._run_start_processes(job_q, result_q)

        # wait for all processes terminate
        proc_count = len(proc_list)
        try:
            while proc_count:
                # wait until there is a result to be consumed
                result = result_q.get()

                # a worker blew up: re-raise its exception in the master
                if 'exit' in result:
                    raise result['exit'](result['exception'])
                node = task_dispatcher.nodes[result['name']]
                task = node.task
                # pure reporter message forwarded by MReporter
                if 'reporter' in result:
                    getattr(self.reporter, result['reporter'])(task)
                    continue
                self._process_result(node, task, result)

                # update num free process
                free_proc = self.free_proc + 1
                self.free_proc = 0
                # tries to get as many tasks as free process
                completed = node
                for _ in range(free_proc):
                    next_job = self.get_next_job(completed)
                    completed = None
                    if next_job is None:
                        proc_count -= 1
                    job_q.put(next_job)
                # check for cyclic dependencies
                assert len(proc_list) > self.free_proc
        except (SystemExit, KeyboardInterrupt, Exception):
            if self.Child == Process:
                for proc in proc_list:
                    proc.terminate()
            raise

        # we are done, join all process
        for proc in proc_list:
            proc.join()

        # get teardown results
        while not result_q.empty(): # safe because subprocess joined
            result = result_q.get()
            assert 'reporter' in result
            task = task_dispatcher.tasks[result['name']]
            getattr(self.reporter, result['reporter'])(task)

    def execute_task_subprocess(self, job_q, result_q, reporter_class):
        """executed on child processes

        @param job_q: task queue,
            * None elements indicate process can terminate
            * JobHold indicate process should wait for next task
            * JobTask / JobTaskPickle task to be executed
        """
        self.result_q = result_q
        if self.Child == Process:
            self.reporter = MReporter(self, reporter_class)
        try:
            while True:
                job = job_q.get()

                if job is None:
                    self.teardown()
                    return # no more tasks to execute finish this process

                # job is an incomplete Task obj when pickled, attrbiutes
                # that might contain unpickleble data were removed.
                # so we need to get task from this process and update it
                # to get dynamic task attributes.
                if job.type is JobTaskPickle.type:
                    task = self.tasks[job.name]
                    if self.Child == Process: # pragma: no cover ...
                        # ... actually covered but subprocess doesnt get it.
                        task.update_from_pickle(job.task_dict)

                elif job.type is JobTask.type:
                    task = pickle.loads(job.task_pickle)

                # do nothing. this is used to start the subprocess even
                # if no task is available when process is created.
                else:
                    assert job.type is JobHold.type
                    continue # pragma: no cover

                result = {'name': task.name}
                task_failure = self.execute_task(task)
                if task_failure:
                    result['failure'] = task_failure
                result['task'] = task.pickle_safe_dict()
                result['out'] = [action.out for action in task.actions]
                result['err'] = [action.err for action in task.actions]
                result_q.put(result)
        except (SystemExit, KeyboardInterrupt, Exception) as exception:
            # error, blow-up everything. send exception info to master process
            result_q.put({
                'exit': exception.__class__,
                'exception': str(exception)})
class MThreadRunner(MRunner):
    """Parallel runner using threads"""
    # thread-based stand-ins for the multiprocessing primitives of MRunner
    Queue = staticmethod(queue.Queue)

    class DaemonThread(Thread):
        """daemon thread to make sure process is terminated if there is
        an uncaught exception and threads are not correctly joined.
        """
        def __init__(self, *args, **kwargs):
            Thread.__init__(self, *args, **kwargs)
            self.daemon = True

    Child = staticmethod(DaemonThread)

    @staticmethod
    def available():
        # threads are always available (unlike multiprocessing on some BSDs)
        return True
| mit | 7,347,571,069,337,955,000 | 34.797891 | 80 | 0.56316 | false |
BozoDev/CoCerBot | contrib/HangoutsBot/hangoutsbot/hangupsbot/plugins/CoCerBot/__init__.py | 1 | 5738 | import asyncio, io, logging, os, subprocess, re, time
import plugins
logger = logging.getLogger(__name__)
_cocext = { "running": False }
def _initialise(bot):
    """Plugin entry point: register user and admin commands with the bot."""
    plugins.register_user_command(["screen", "coc"])
    plugins.register_admin_command(["setlog", "clearlog"])
@asyncio.coroutine
def _open_file(name):
    """Open *name* for binary reading and return the file object.

    NOTE(review): the returned handle must be closed by the caller.
    """
    logger.debug("opening file: {}".format(name))
    return open(name, 'rb')
@asyncio.coroutine
def _screen(filename):
    """Read *filename* into an in-memory BytesIO buffer and delete the file.

    Blocking file operations run on the event loop's default executor so
    the loop itself is not blocked.  Returns io.BytesIO with the file data.
    """
    logger.info("screen as {}".format(filename))
    loop = asyncio.get_event_loop()

    # read the file into a byte array
    file_resource = yield from _open_file(filename)
    try:
        file_data = yield from loop.run_in_executor(None, file_resource.read)
    finally:
        # FIX: the original never closed the handle, leaking a file
        # descriptor per screenshot request
        file_resource.close()

    image_data = yield from loop.run_in_executor(None, io.BytesIO, file_data)
    yield from loop.run_in_executor(None, os.remove, filename)
    return image_data
def setlog(bot, event, *args):
    """set log from CoCerBot-pipe for current conversation
    use /bot clearlog to clear it
    """
    # NOTE(review): written as a generator (yield from) without the
    # @asyncio.coroutine decorator — presumably the hangoutsbot framework
    # wraps registered commands itself; confirm.
    logpipe = bot.conversation_memory_get(event.conv_id, 'logpipe')
    if logpipe is None:
        # no pipe set yet: store the joined args as the new value
        bot.conversation_memory_set(event.conv_id, 'logpipe', ''.join(args))
        html = "<i><b>{}</b> updated logpipe URL".format(event.user.full_name)
        yield from bot.coro_send_message(event.conv, html)
    else:
        # refuse to overwrite an existing value; user must clear it first
        html = "<i><b>{}</b> URL already exists for this conversation!<br /><br />".format(event.user.full_name)
        html += "<i>Clear it first with /bot clearlog before setting a new one."
        yield from bot.coro_send_message(event.conv, html)
def clearlog(bot, event, *args):
    """clear log-pipe for current conversation
    """
    logpipe = bot.conversation_memory_get(event.conv_id, 'logpipe')
    if logpipe is None:
        # nothing stored for this conversation
        html = "<i><b>{}</b> nothing to clear for this conversation".format(event.user.full_name)
        yield from bot.coro_send_message(event.conv, html)
    else:
        # setting the memory entry to None acts as deletion
        bot.conversation_memory_set(event.conv_id, 'logpipe', None)
        html = "<i><b>{}</b> Log cleared for this conversation!<br />".format(event.user.full_name)
        yield from bot.coro_send_message(event.conv, html)
def screen(bot, event, *args):
    """get a screenshot of current CoCerBot

    Converts the source image to grayscale with ImageMagick, uploads the
    result to the conversation and removes the temporary file.  Only one
    request runs at a time (guarded by the _cocext["running"] flag).
    """
    if _cocext["running"]:
        yield from bot.coro_send_message(event.conv_id, "<i>processing another request, try again shortly</i>")
        return
    if args:
        img = args[0]
    else:
        img = bot.conversation_memory_get(event.conv_id, 'img')
        if img is None:
            img = '/tmp/CoCNow.png'
        else:
            # NOTE(review): the busy flag is only set on this branch in the
            # original code — presumably intentional, but verify.
            _cocext["running"] = True
    if not re.match(r'^/tmp/', img):
        img = '/tmp/' + img

    filename = event.conv_id + "." + str(time.time()) + ".png"
    filepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
    logger.debug("temporary screenshot file: {}".format(filepath))

    # convert to grayscale via ImageMagick
    params = ['/usr/bin/convert', '-colorspace', 'gray', img, filename]
    try:
        subprocess.check_call(params)
    except subprocess.CalledProcessError as e:
        yield from bot.coro_send_message(event.conv, "<i>Imagick convert failed</i>".format(e))
        _cocext["running"] = False
        return

    try:
        loop = asyncio.get_event_loop()
        image_data = yield from _screen(filename)
    except Exception as e:
        yield from bot.coro_send_message(event.conv_id, "<i>error getting screenshot</i>")
        # FIX: original logged "screencap failed".format(url) — `url` is
        # undefined here and would raise NameError inside the handler
        logger.exception("screencap failed: %s", filename)
        _cocext["running"] = False
        return

    try:
        image_id = yield from bot._client.upload_image(image_data, filename=filename)
        yield from bot._client.sendchatmessage(event.conv.id_, None, image_id=image_id)
    except Exception as e:
        yield from bot.coro_send_message(event.conv_id, "<i>error uploading screenshot</i>")
        logger.exception("upload failed: %s", filename)
    finally:
        _cocext["running"] = False
def coc(bot, event, *args):
    """Various actions for the bot to perform
    """
    if _cocext["running"]:
        yield from bot.coro_send_message(event.conv_id, "<i>processing another request, try again shortly</i>")
        return
    cmd = args[0]
    # single-pass loop used as a poor-man's switch: each branch breaks out
    while True:
        if cmd == "init":
            # NOTE(review): '~' is NOT expanded by subprocess without
            # shell=True — these paths likely need os.path.expanduser; verify.
            params = ['~/CoCerBot/HangoutsBot/hangoutsbot/hangupsbot/plugins/CoCerBot/init']
            try:
                subprocess.check_call(params)
            except subprocess.CalledProcessError as e:
                yield from bot.coro_send_message(event.conv, "<i>Error running init command</i>".format(e))
            break
        if cmd == "grab":
            params = ['~/CoCerBot/HangoutsBot/hangoutsbot/hangupsbot/plugins/CoCerBot/grab']
            try:
                subprocess.check_call(params)
            except subprocess.CalledProcessError as e:
                yield from bot.coro_send_message(event.conv, "<i>Error running grab command</i>".format(e))
            break
        if cmd == "raw":
            # forwards five positional arguments to the external 'raw' script
            # (raises IndexError if fewer than six args are supplied)
            params = ['~/CoCerBot/HangoutsBot/hangoutsbot/hangupsbot/plugins/CoCerBot/raw', args[1], args[2], args[3], args[4], args[5]]
            try:
                subprocess.check_call(params)
            except subprocess.CalledProcessError as e:
                yield from bot.coro_send_message(event.conv, "<i>Error running raw command</i>".format(e))
            break
        # unknown sub-command: show usage help
        logger.debug("No command entered")
        yield from bot.coro_send_message(event.conv_id, "<i>Currently supported actions:</i><br>")
        yield from bot.coro_send_message(event.conv_id, "<b>init</b> Start up the bot - get in, collect Resis<br>")
        break
| gpl-2.0 | 775,744,570,202,703,900 | 33.566265 | 134 | 0.615371 | false |
rohanpm/qingfanyi | tests/test_navigator.py | 1 | 5716 | # coding=utf-8
# qingfanyi - Chinese to English translation tool
# Copyright (C) 2016 Rohan McGovern <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from qingfanyi.match import Match
from qingfanyi.navigator import Navigator
# geometry tuple handed to Navigator in the tests
# (presumably x, y, width, height — confirm against Navigator's signature)
SAMPLE_GEOM = (100, 160, 200, 320)
class SignalSpy(object):
    """Records every 'current_match_changed' emission for later inspection."""

    def __init__(self, navigator):
        self.emits = []
        navigator.connect('current_match_changed', self.on_signal)

    def on_signal(self, sender, *args):
        # store each emission as a flat tuple: (sender, arg1, arg2, ...)
        self.emits.append((sender,) + args)
def test_empty_navigator():
    """Navigating an empty Navigator is a no-op: no current match either way."""
    navigator = Navigator(SAMPLE_GEOM)
    assert navigator.current_match is None

    navigator.navigate_offset(-1)
    assert navigator.current_match is None

    navigator.navigate_offset(1)
    assert navigator.current_match is None
def test_navigate_offset_single():
    """With a single match, any offset lands on it and the signal fires once."""
    navigator = Navigator(SAMPLE_GEOM)
    spy = SignalSpy(navigator)

    match = Match('sample', [], [(0, 0, 10, 10)])
    navigator.add_matches([match])
    # adding matches alone must not select anything or emit
    assert navigator.current_match is None
    assert spy.emits == []

    navigator.navigate_offset(-1)
    assert navigator.current_match is match
    assert spy.emits == [
        (navigator, None, match)
    ]

    # further navigation wraps back onto the same single match: no new emits
    navigator.navigate_offset(1)
    assert navigator.current_match is match
    assert len(spy.emits) == 1

    navigator.navigate_offset(57)
    assert navigator.current_match is match
    assert len(spy.emits) == 1
def test_add_match_retains_current():
    """Adding new matches keeps the current selection and re-sorts the list."""
    navigator = Navigator(SAMPLE_GEOM)
    spy = SignalSpy(navigator)

    # These matches are in the expected sorted order (by geometry),
    # but we will add them in a different order.
    matches = [
        Match('ab', [], [(0, 0, 20, 10)]),
        Match('a', [], [(0, 0, 10, 10)]),
        Match('ab', [], [(10, 0, 20, 10)]),
        Match('a', [], [(10, 0, 10, 10)]),
        Match('ab', [], [(40, 0, 10, 10)]),
        Match('ab', [], [(0, 30, 10, 10)]),
        Match('ab', [], [(0, 40, 10, 10)]),
    ]
    first_batch = [
        matches[0],
        matches[3],
        matches[5]
    ]
    second_batch = [
        matches[1],
        matches[2],
    ]
    third_batch = [
        matches[4],
        matches[6],
    ]

    navigator.add_matches(first_batch)
    assert navigator.current_match is None
    assert spy.emits == []

    # should navigate through in the expected order
    navigator.navigate_offset(1)
    assert navigator.current_match is first_batch[0]
    navigator.navigate_offset(1)
    assert navigator.current_match is first_batch[1]
    navigator.navigate_offset(1)
    assert navigator.current_match is first_batch[2]
    # navigation wraps around to the beginning
    navigator.navigate_offset(1)
    assert navigator.current_match is first_batch[0]
    navigator.navigate_offset(1)
    assert navigator.current_match is first_batch[1]

    assert spy.emits == [
        (navigator, None, first_batch[0]),
        (navigator, first_batch[0], first_batch[1]),
        (navigator, first_batch[1], first_batch[2]),
        (navigator, first_batch[2], first_batch[0]),
        (navigator, first_batch[0], first_batch[1]),
    ]
    spy.emits = []

    # now add some more
    navigator.add_matches(second_batch)

    # That should not have emitted anything or changed the current match
    assert spy.emits == []
    assert navigator.current_match is first_batch[1]

    # stepping back lands on the newly inserted neighbour
    navigator.navigate_offset(-1)
    assert navigator.current_match is matches[2]
    assert spy.emits == [
        (navigator, matches[3], matches[2])
    ]
    spy.emits = []

    # Add the last batch
    navigator.add_matches(third_batch)
    assert spy.emits == []
    assert navigator.current_match is matches[2]

    # It should have sorted all of these in the expected order
    assert navigator.matches == matches
def test_set_current_match_by_point():
    """Point lookup picks the longest overlapping match and emits on change."""
    navigator = Navigator((100, 200, 500, 700))
    spy = SignalSpy(navigator)

    matches = [
        Match('ab', [], [(100, 200, 20, 10)]),
        Match('a', [], [(100, 200, 10, 10)]),
        Match('ab', [], [(140, 200, 20, 10)]),
    ]
    navigator.add_matches(matches)

    # Overlapping matches - pick the longer one (by text)
    matched = navigator.set_current_match_by_point(5, 5)
    assert matched is matches[0]
    assert navigator.current_match is matched
    assert spy.emits == [
        (navigator, None, matched)
    ]
    spy.emits = []

    # Simple match
    matched = navigator.set_current_match_by_point(45, 5)
    assert matched is matches[2]
    assert navigator.current_match is matched
    assert spy.emits == [
        (navigator, matches[0], matched)
    ]
    spy.emits = []

    # Click the same match again, it should return it but not emit anything
    matched = navigator.set_current_match_by_point(46, 6)
    assert matched is matches[2]
    assert navigator.current_match is matched
    assert spy.emits == []

    # Click somewhere with no match, it should return None and not emit anything nor
    # change the current match.
    matched = navigator.set_current_match_by_point(200, -30)
    assert matched is None
    assert navigator.current_match is matches[2]
    assert spy.emits == []
| gpl-3.0 | -6,003,047,400,805,221,000 | 29.731183 | 84 | 0.644682 | false |
stormi/tsunami | src/primaires/objet/commandes/donner/__init__.py | 1 | 5296 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'donner'."""
from fractions import Fraction
from primaires.interpreteur.commande.commande import Commande
from primaires.objet.conteneur import SurPoids
class CmdDonner(Commande):
    """The 'donner' (give) command.

    Lets a character give one or more objects to another character
    present in the same room.
    """

    def __init__(self):
        """Command constructor: set up names, schema and help texts."""
        Commande.__init__(self, "donner", "give")
        self.nom_categorie = "objets"
        self.schema = "(<nombre>) <nom_objet> " \
                "a/to <cible:personnage_present|nom_pnj>"
        self.aide_courte = "donne un objet"
        self.aide_longue = \
            "Cette commande permet de donner un ou plusieurs " \
            "objets à un autre personnage présent dans la salle. " \
            "La forme simple de cette commande est |cmd|donner " \
            "nom de l'objet à nom du personnage|ff| (ou |cmd|give " \
            "nom de l'objet to nom du personnage|ff|, si vous " \
            "êtes en anglais). Vous pouvez également préciser, " \
            "avant le nom de l'objet, un nombre représentant " \
            "le nombre d'objets à donner au personnage cible. " \
            "Exemple : |cmd|donner 2 botte à tavernier|ff| (ou " \
            "|cmd|give 2 botte to tavernier|ff| en anglais)."

    def ajouter(self):
        """Called when the command is added to the interpreter."""
        nom_objet = self.noeud.get_masque("nom_objet")
        # restrict object lookup to the giver's own simple inventory
        nom_objet.proprietes["conteneurs"] = \
                "(personnage.equipement.inventaire_simple.iter_objets_qtt(" \
                "True), )"
        nom_objet.proprietes["quantite"] = "True"
        nom_objet.proprietes["conteneur"] = "True"

    def interpreter(self, personnage, dic_masques):
        """Interpret the command: transfer objects from giver to target."""
        personnage.agir("poser")
        nombre = 1
        if dic_masques["nombre"]:
            nombre = dic_masques["nombre"].nombre
        objets = list(dic_masques["nom_objet"].objets_qtt_conteneurs)[:nombre]
        # target may be a player character or an NPC (pnj)
        if hasattr(dic_masques["cible"], "personnage"):
            cible = dic_masques["cible"].personnage
        else:
            cible = dic_masques["cible"].pnj

        donne = 0
        for objet, qtt, conteneur in objets:
            if not objet.peut_prendre:
                personnage << "Vous ne pouvez pas prendre {} avec vos " \
                        "mains...".format(objet.nom_singulier)
                return
            if qtt > nombre:
                qtt = nombre
            try:
                # may raise SurPoids if the target cannot carry the weight
                dans = cible.ramasser(objet, qtt=qtt)
            except SurPoids:
                personnage << "{} ne peut rien porter de plus.".format(
                        cible.get_nom_pour(personnage))
                return

            if dans is None:
                break

            conteneur.retirer(objet, qtt)
            donne += 1

        if donne == 0:
            personnage << "{} ne peut pas prendre cela.".format(
                    cible.get_nom_pour(personnage))
            return

        if donne < qtt:
            donne = qtt

        personnage << "Vous donnez {} à {}.".format(objet.get_nom(donne),
                cible.get_nom_pour(personnage))
        # only send a message to the target if it is a real player
        if not hasattr(cible, "prototype"):
            cible << "{} vous donne {}.".format(personnage.get_nom_pour(cible),
                    objet.get_nom(donne))
        personnage.salle.envoyer("{{}} donne {} à {{}}.".format(
                objet.get_nom(donne)), personnage, cible)

        # Trigger the NPC's 'donne' script event
        if hasattr(cible, "prototype"):
            cible.script["donne"].executer(objet=objet,
                    quantite=Fraction(donne), personnage=personnage, pnj=cible)
| bsd-3-clause | -7,827,209,799,237,892,000 | 42.254098 | 79 | 0.617017 | false |
peckhams/topoflow | topoflow/components/diversions_fraction_method.py | 1 | 34322 |
# (2/3/13) Get "dt" from source_file or sink_file vs.
# channels comp, but what about canals ?
########################################################
#
# Copyright (c) 2010-2017, Scott D. Peckham
#
# Feb. 2017. Changes to internal variable names.
# Cleanup & testing with Test_Plane_Canal data.
# Sept 2014. Big changes so Channels component now requests
# what is needed from Diversions component.
#
# January 2013 (Revised handling of input/output names).
# October 2012 (CSDMS Standard Names with BMI)
# Jan-Feb 2010 (started from diversions_base.py)
# May 2010 (changes to unit_test())
#---------------------------------------------------------------------
# Notes: This component is written so that only a small amount
# of data is retrieved from, altered and then passed
# back to the Channels component. This is accomplished
# by using new interface functions in CSDMS_base.py,
# namely:
# get_values_in_grid_double()
# set_values_in_grid_double()
# get_values_in_grid_int()
# set_values_in_grid_int()
# Note that these also had to be added to the IRFPort
# for the TopoFlow CCA project, as defined in the file
# topoflow3.IRFPort.sidl.
#
# The old method required the Diversion component to
# retrieve the entire Q and vol grid from Channel component
# at each timestep, alter it, and then pass it back.
# (02/18/10)
#
# Part of the philosophy of this version is that only
# tiny changes to the code of the Channels component should
# be necessary. An intermediate approach (DIV_METHOD1)
# (before the above functions were added), required a new
# function "update_diversions()" to be added to the
# Channels component. However, that version seems to be
# somewhat faster. For the "test_plane_canal" test, the
# run time was 0.38 secs vs. 0.44 secs (on beach) for this
# new method. This extra cost should only be due to the
# extra interface function calls, and should therefore be
# a fixed cost that doesn't increase with grid size. This
# remains to be tested, however. To test, simply swap
# channels_base_DIV_METHOD1.py and
# diversions_fraction_method_DIV_METHOD1.py for
# channels_base.py and this file.
#
# cp.update_discharge() now calls dp.update(). (2/1/10)
#---------------------------------------------------------------------
#
# class diversions_component: (inherits from diversions_base.py)
#
# get_component_name()
# get_attribute() # (10/26/11)
# get_input_var_names() # (5/16/12, Bolton)
# get_output_var_names() # (5/16/12, Bolton)
# get_var_name() # (5/16/12, Bolton)
# get_var_units() # (5/16/12, Bolton)
# update()
#----------------------------
# read_input_files()
# read_source_data()
# read_sink_data()
# read_canal_data()
#----------------------------
# update_sources()
# update_sinks()
# update_canals()
#
#---------------------------------------------------------------------
import numpy as np
import glob
import os
from topoflow.components import diversions_base
from topoflow.utils import cfg_files as cfg
from topoflow.utils import tf_utils
#---------------------------------------------------------------------
class diversions_component( diversions_base.diversions_component ):
#-----------------------------------------------------------------
#-----------------------------------------------------------------
# Component metadata: model identity, numerical scheme and paths
# used by the CSDMS/CMT framework (see get_attribute()).
#-----------------------------------------------------------------
_att_map = {
    'model_name':         'Diversions_Fraction_Method',
    'version':            '3.1',
    'author_name':        'Scott D. Peckham',
    'grid_type':          'uniform',   ## (or "none" ?)
    'time_step_type':     'fixed',
    'step_method':        'explicit',  ## (or "none" ?)
    #------------------------------------------------------
    'comp_name':          'Diversions',   # CHANGE LATER ?
    'model_family':       'TopoFlow',
    'cfg_template_file':  'Diversions_Fraction_Method.cfg.in',
    'cfg_extension':      '_diversions_fraction_method.cfg',
    'cmt_var_prefix':     '/DiversionsFraction/Input/Var/',
    'gui_xml_file':       '/home/csdms/cca/topoflow/3.1/src/share/cmt/gui/Diversions_Fraction_Method.xml',
    'dialog_title':       'Diversions: Fraction Method Parameters',
    'time_units':         'seconds' }

# CSDMS Standard Names of variables obtained from other components
_input_var_names = [
    'canals_entrance_water__volume_flow_rate' ]   # canals_in_Q (from Channels)

# CSDMS Standard Names of variables this component can provide
_output_var_names = [
    'canals__count',                           # n_canals
    'canals_entrance_water__volume_fraction',  # canals_in_Q_fraction
    'canals_entrance__x_coordinate',           # canals_in_x
    'canals_entrance__y_coordinate',           # canals_in_y
    'canals_exit_water__volume_flow_rate',     # canals_out_Q
    'canals_exit__x_coordinate',               # canals_out_x
    'canals_exit__y_coordinate',               # canals_out_y
    'model__time_step',                        # dt
    #-------------------------------------------
    'sinks__count',                            # n_sinks
    'sinks_water__volume_flow_rate',           # sinks_Q
    'sinks__x_coordinate',                     # sinks_x
    'sinks__y_coordinate',                     # sinks_y
    #-------------------------------------------
    'sources__count',                          # n_sources
    'sources_water__volume_flow_rate',         # sources_Q
    'sources__x_coordinate',                   # sources_x
    'sources__y_coordinate' ]                  # sources_y

# map: CSDMS Standard Name -> internal attribute name
_var_name_map = {
    'model__time_step': 'dt',
    #-----------------------------------------------------------
    'canals__count': 'n_canals',
    'canals_entrance__x_coordinate': 'canals_in_x',
    'canals_entrance__y_coordinate': 'canals_in_y',
    'canals_entrance_water__volume_flow_rate': 'canals_in_Q',   ##############
    'canals_entrance_water__volume_fraction': 'canals_in_Q_fraction',
    'canals_exit__x_coordinate': 'canals_out_x',
    'canals_exit__y_coordinate': 'canals_out_y',
    'canals_exit_water__volume_flow_rate': 'canals_out_Q',
    #-----------------------------------------------------------
    'sinks__count': 'n_sinks',
    'sinks__x_coordinate': 'sinks_x',
    'sinks__y_coordinate': 'sinks_y',
    'sinks_water__volume_flow_rate': 'sinks_Q',
    #-----------------------------------------------------------
    'sources__count': 'n_sources',
    'sources__x_coordinate': 'sources_x',
    'sources__y_coordinate': 'sources_y',
    'sources_water__volume_flow_rate': 'sources_Q' }

# map: CSDMS Standard Name -> units string (UDUNITS style)
_var_units_map = {
    'model__time_step': 's',
    'canals_entrance_water__volume_flow_rate': 'm3 s-1',
    #------------------------------------------------------
    'canals__count': '1',
    'canals_entrance__x_coordinate': 'm',
    'canals_entrance__y_coordinate': 'm',
    'canals_entrance_water__volume_fraction': '1',
    'canals_exit__x_coordinate': 'm',
    'canals_exit__y_coordinate': 'm',
    'canals_exit_water__volume_flow_rate': 'm3 s-1',
    #------------------------------------------------------
    'sinks__count': '1',
    'sinks__x_coordinate': 'm',
    'sinks__y_coordinate': 'm',
    'sinks_water__volume_flow_rate': 'm3 s-1',
    #------------------------------------------------------
    'sources__count': '1',
    'sources__x_coordinate': 'm',
    'sources__y_coordinate': 'm',
    'sources_water__volume_flow_rate': 'm3 s-1' }

#------------------------------------------------
# Return NumPy string arrays vs. Python lists ?
#------------------------------------------------
## _input_var_names  = np.array( _input_var_names )
## _output_var_names = np.array( _output_var_names )
#-------------------------------------------------------------------
def get_component_name(self):
    """Return the unique TopoFlow component name string."""
    component_name = 'TopoFlow_Diversions_Fraction_Method'
    return component_name
#-------------------------------------------------------------------
def get_attribute(self, att_name):
try:
return self._att_map[ att_name.lower() ]
except:
print '###################################################'
print ' ERROR: Could not find attribute: ' + att_name
print '###################################################'
print ' '
# get_attribute()
#-------------------------------------------------------------------
def get_input_var_names(self):
    """Return CSDMS Standard Names of the variables this component
    needs from other components (not those read from files or GUI)."""
    names = self._input_var_names
    return names
#-------------------------------------------------------------------
def get_output_var_names(self):
    """Return CSDMS Standard Names of the variables this component provides."""
    names = self._output_var_names
    return names
#-------------------------------------------------------------------
def get_var_name(self, long_var_name):
    """Map a CSDMS Standard Name to this component's internal variable name."""
    short_name = self._var_name_map[ long_var_name ]
    return short_name
#-------------------------------------------------------------------
def get_var_units(self, long_var_name):
    """Return the units string for a CSDMS Standard Name."""
    units = self._var_units_map[ long_var_name ]
    return units
#-------------------------------------------------------------------
def update(self, time_seconds=None):
if (self.comp_status == 'Disabled'):
return
#-----------------------------------------------
# Update self.vol with inputs/outputs from all
# sources, sinks and diversions
#-----------------------------------------------
self.status = 'updating' # (OpenMI 2.0 convention)
# print '#### Calling update_sources()...'
self.update_sources()
# print '#### Calling update_sinks()...'
self.update_sinks()
# print '#### Calling update_canals()...'
self.update_canals()
#------------------------
# Update internal clock
#------------------------
# print '#### Calling update_time()...'
self.update_time()
self.status = 'updated' # (OpenMI 2.0 convention)
# update()
#--------------------------------------------------------------------------
def read_source_data(self):
#------------------------------------------------------------
# Notes: Assume that source_file contains key-value pairs,
# starting with "n_sources:", "nt_max" and "dt:",
# followed by "n_sources" blocks of the form:
#
# source_ID: (source pixel ID as long integer)
# nt: (number of discharge (Q) values)
# Q: (vector of discharges in m^3/s)
#------------------------------------------------------------
if (self.comp_status == 'Disabled'): return
if not(self.use_sources):
self.sources_x = self.initialize_scalar( 0, dtype='float64')
self.sources_y = self.initialize_scalar( 0, dtype='float64')
self.sources_Q = self.initialize_scalar( 0, dtype='float64')
return
#-----------------------------
# Can source_file be found ?
#-----------------------------
FOUND = tf_utils.file_exists( self.source_file )
if not(FOUND):
self.use_sources = False
return
#-------------------------
# Open the "source_file"
#-------------------------
file_unit = open(self.source_file, 'r')
#----------------------------------------------------
# Read number of sources, max number of timesteps
# for any source and the common timestep, source_dt
#----------------------------------------------------
n_sources = cfg.read_value(file_unit, dtype='Int32')
nt_max = cfg.read_value(file_unit, dtype='Int32')
source_dt = cfg.read_value(file_unit, dtype='Float64')
self.source_dt =source_dt
#--------------------
# Initialize arrays
#--------------------
self.source_cols = np.zeros([n_sources], dtype='Int32')
self.source_rows = np.zeros([n_sources], dtype='Int32')
self.nt_sources = np.zeros([n_sources], dtype='Int32')
self.sources_Q_all = np.zeros([n_sources, nt_max], dtype='Float64')
self.n_sources = n_sources
self.nt_max_sources = nt_max
#-----------------------------------
# Read information for each source
#-----------------------------------
for k in xrange(n_sources):
source_col = cfg.read_value(file_unit, dtype='Int32')
source_row = cfg.read_value(file_unit, dtype='Int32')
nt = cfg.read_value(file_unit, dtype='Int32')
Q_values = cfg.read_list_after_key(file_unit, dtype='Float64')
#---------------------------------------------------------------
nQ = np.size(Q_values)
print 'Diversions component: Read', nQ, 'Q_values for source.'
#---------------------------------------------------------------
self.source_cols[k] = source_col
self.source_rows[k] = source_row
self.nt_sources[k] = nt
self.sources_Q_all[k,0:nt] = Q_values
#-----------------------
# Close the input file
#-----------------------
file_unit.close()
#-------------------------------------
# Compute xy coordinates for sources
#-------------------------------------
self.sources_x = (source_cols * self.dx)
self.sources_y = (source_rows * self.dy)
# read_source_data()
#--------------------------------------------------------------------------
def read_sink_data(self):
#------------------------------------------------------------
# Notes: Assume that source_file contains key-value pairs,
# starting with "n_sinks:", "nt_max" and "dt:",
# followed by "n_sinks" blocks of the form:
#
# sink_ID: (sink pixel ID as long integer)
# nt: (number of discharge (Q) values)
# Q: (vector of discharges in m^3/s)
#------------------------------------------------------------
if (self.comp_status == 'Disabled'): return
if not(self.use_sinks):
self.sinks_x = self.initialize_scalar( 0, dtype='float64')
self.sinks_y = self.initialize_scalar( 0, dtype='float64')
self.sinks_Q = self.initialize_scalar( 0, dtype='float64')
return
#---------------------------
# Can sink_file be found ?
#---------------------------
FOUND = tf_utils.file_exists( self.sink_file )
if not(FOUND):
self.use_sinks = False
return
#-----------------------
# Open the "sink_file"
#-----------------------
file_unit = open(self.sink_file, 'r')
#------------------------------------------------
# Read number of sinks, max number of timesteps
# for any sink and the common timestep, dt
#------------------------------------------------
n_sinks = cfg.read_value(file_unit, dtype='Int32')
nt_max = cfg.read_value(file_unit, dtype='Int32')
sink_dt = cfg.read_value(file_unit, dtype='Float64')
self.sink_dt = sink_dt
#--------------------
# Initialize arrays
#--------------------
self.sink_cols = np.zeros([n_sinks], dtype='Int32')
self.sink_rows = np.zeros([n_sinks], dtype='Int32')
self.nt_sinks = np.zeros([n_sinks], dtype='Int32')
self.sinks_Q_all = np.zeros([n_sinks, nt_max], dtype='Float64')
self.n_sinks = n_sinks
self.nt_max_sinks = nt_max
#---------------------------------
# Read information for each sink
#---------------------------------
for k in xrange(n_sinks):
sink_col = cfg.read_value(file_unit, dtype='Int32')
sink_row = cfg.read_value(file_unit, dtype='Int32')
nt = cfg.read_value(file_unit, dtype='Int32')
Q_values = cfg.read_list_after_key(file_unit, dtype='Float64')
#---------------------------------------------------------------
nQ = size(Q_values)
print 'Diversions component: Read', nQ, 'Q_values for sink.'
#---------------------------------------------------------------
self.sink_cols[k] = sink_col
self.sink_rows[k] = sink_row
self.nt_sinks[k] = nt
self.sinks_Q_all[k,0:nt] = Q_values
#-----------------------
# Close the input file
#-----------------------
file_unit.close()
#-----------------------------------
# Compute xy coordinates for sinks
#-----------------------------------
sink_rows = (self.sink_IDs / self.nx)
sink_cols = (self.sink_IDs % self.nx)
self.sinks_x = (sink_cols * self.dx)
self.sinks_y = (sink_rows * self.dy)
# read_sink_data()
#--------------------------------------------------------------------------
def read_canal_data(self):
#-------------------------------------------------------------------
# Notes: Assume that canal_file contains key-value pairs,
# starting with "n_canals:" and followed by "n_canals"
# blocks of the form:
# canal_in_ID: (pixel ID as long integer)
# canal_out_ID: (pixel ID as long integer)
# Q_fraction: (fraction to take from in_ID in [0,1])
# travel_time: (canal travel time, in minutes)
#
# nt_canals is computed as ceil(travel_time / cp.dt)
#-------------------------------------------------------------------
# Note: Q_canals is same at upstream and downstream ends, but the
# downstream end lags the upstream end by the travel time
# from in_ID to out_ID. As a result, the duration and Q
# vector for the downstream end are computed from those of
# the upstream end, and the travel time, td, as:
# Q_out = [0, Q_in]
# dur_out = [td, dur_in]
# dur_sum_out = [0, dur_sum_in] + td
#
# Rather than create the dur_sum_canals_out and
# canals_out_Q vectors, can construct them in Update_Canals.
#-------------------------------------------------------------------
if (self.comp_status == 'Disabled'): return
if not(self.use_canals):
self.canals_in_x = self.initialize_scalar( 0, dtype='float64')
self.canals_in_y = self.initialize_scalar( 0, dtype='float64')
self.canals_in_Q_fraction = self.initialize_scalar( 0, dtype='float64')
self.canals_out_Q = self.initialize_scalar( 0, dtype='float64')
self.canals_out_x = self.initialize_scalar( 0, dtype='float64')
self.canals_out_y = self.initialize_scalar( 0, dtype='float64')
return
#---------------------------
# Can canal_file be found ?
#---------------------------
FOUND = tf_utils.file_exists( self.canal_file )
if not(FOUND):
self.use_canals = False
return
#------------------------
# Open the "canal_file"
#------------------------
file_unit = open(self.canal_file, 'r')
#------------------------
# Read number of canals
#------------------------
n_canals = cfg.read_value(file_unit, dtype='Int32')
self.n_canals = n_canals
#--------------------
# Initialize arrays
#--------------------
self.canals_in_col = np.zeros([n_canals], dtype='Int32')
self.canals_in_row = np.zeros([n_canals], dtype='Int32')
self.canals_out_col = np.zeros([n_canals], dtype='Int32')
self.canals_out_row = np.zeros([n_canals], dtype='Int32')
self.canals_in_Q_fraction = np.zeros([n_canals], dtype='Float64')
self.canal_times = np.zeros([n_canals], dtype='Float64')
#----------------------------------
# Read information for each canal
#----------------------------------
for k in xrange(n_canals):
canal_in_col = cfg.read_value(file_unit, dtype='Int32')
canal_in_row = cfg.read_value(file_unit, dtype='Int32')
canal_out_col = cfg.read_value(file_unit, dtype='Int32')
canal_out_row = cfg.read_value(file_unit, dtype='Int32')
Q_fraction = cfg.read_value(file_unit, dtype='Float64')
travel_time = cfg.read_value(file_unit, dtype='Float64')
#----------------------------------------------------------
self.canals_in_col[k] = canal_in_col
self.canals_in_row[k] = canal_in_row
self.canals_out_col[k] = canal_out_col
self.canals_out_row[k] = canal_out_row
self.canals_in_Q_fraction[k] = Q_fraction
self.canal_times[k] = travel_time
#----------------------------------------------------------
# print '### canal_in_col = ' + str(canal_in_col)
# print '### canal_in_row = ' + str(canal_in_row)
# print '### canal_out_col = ' + str(canal_out_col)
# print '### canal_out_row = ' + str(canal_out_row)
# print '### Q_fraction = ' + str(Q_fraction)
# print '### travel_time = ' + str(travel_time)
#--------------------------------------------------------
# Compute "nt_canals", which is the number of timesteps
# it takes for flow to travel from end to end.
#--------------------------------------------------------
# This depends on "self.dt", which is now read from the
# Diversion component CFG file. ## (9/22/14)
#--------------------------------------------------------
self.nt_canals = np.ceil(self.canal_times / self.dt)
#-----------------------
# Close the input file
#-----------------------
file_unit.close()
#-----------------------------------------------------
# Compute xy coordinates for canal entrance and exit
#-----------------------------------------------------
self.canals_in_x = (self.canals_in_col * self.dx)
self.canals_in_y = (self.canals_in_row * self.dy)
self.canals_out_x = (self.canals_out_col * self.dx)
self.canals_out_y = (self.canals_out_row * self.dy)
#-----------------------------------------------------
# Create a 2D array to store the discharge values as
# they are moving toward downstream end of canal.
#-----------------------------------------------------
# update_canals() will "roll" this array downstream
# by one array element each time step
#-----------------------------------------------------
nt_max = np.int(self.nt_canals.max())
nt_min = np.int(self.nt_canals.min())
self.canal_Q = np.zeros([n_canals, nt_max], dtype='Float64') ###################
self.nt_max = nt_max
print 'Diversions component: Min steps per canal =', nt_min
print 'Diversions component: Max steps per canal =', nt_max
#--------------------------------------------------
# Note that canals_in_Q comes from Channels comp.
#--------------------------------------------------
self.canals_out_Q = np.zeros([n_canals], dtype='Float64')
# read_canal_data()
#--------------------------------------------------------------------------
def update_sources(self):
#---------------------------------------------------------
# Notes: This function avoids loops in favor of array
# operations to increase speed.
#---------------------------------------------------------
# The number of Q-values for each source ID are
# stored as "self.nt_sources". However, for any
# given source ID, the Q-values beyond that index
# are set to zero.
#---------------------------------------------------------
if not(self.use_sources): return
#-------------------------------------------
# Update discharge, Q, for every source ID
#-------------------------------------------
if (self.time_index < self.nt_max_sources):
self.sources_Q[:] = self.sources_Q_all[ :, self.time_index ]
else:
self.sources_Q[:] = np.zeros(self.n_sources)
#------------------------------------------------------------
# Update flow volumes, vol, in CHANNELS component (2/17/10)
#------------------------------------------------------------
#--------------
# For testing
#--------------
## print 'Finished with update_sources() in Diversions.'
## print 'sources_Q ='
## print sources_Q
# update_sources()
#--------------------------------------------------------------------------
def update_sinks(self):
#-------------------------------------------------------
# Notes: This function avoids loops in favor of array
# operations to increase speed.
#-------------------------------------------------------
# The number of Q-values for each sink ID are
# stored as "self.nt_sinks". However, for any
# given sink ID, the Q-values beyond that index
# are set to zero.
#-------------------------------------------------------
# NB! This changes Q grid, so must be called before
# cp.update_flow_volume() uses the Q grid.
#-------------------------------------------------------
if not(self.use_sinks): return
#-----------------------------------------
# Update discharge, Q, for every sink ID
#-----------------------------------------
# Make sure sink cannot produce negative
# discharge values.
#-----------------------------------------
if (self.time_index < self.nt_max_sinks):
sinks_Q[:] = self.sinks_Q_all[ :, self.time_index ]
else:
sinks_Q[:] = np.zeros(self.n_sinks)
#--------------------------------------------------------
# Update discharges, Q, in CHANNELS component (2/17/10)
#--------------------------------------------------------
#------------------------------------------------------------
# Update flow volumes, vol, in CHANNELS component (2/17/10)
# NB! We MUST update "vol" also and not just "Q".
#------------------------------------------------------------
# update_sinks()
#--------------------------------------------------------------------------
def update_canals(self):
#----------------------------------------------------------
# Notes: Before 2/1/10, TopoFlow would update the channel
# component before the diversion component. Now
# cp.update_discharge() calls dp.update() itself.
#
# (2/16/10) Tested for single canal and seems to
# work as intended.
#
# NB! Flow volumes are incremented (by the function
# update_flow_volume(), but discharges are
# always recomputed from d, v and channel geom.
# So changes to cp.Q by calling dp.update()
# after cp.update() would be lost.
#
# cp.update() computes variables in this order:
# update_R()
# update_discharge() (using d and v)
# update_flow_volume() (using Q and R)
# update_flow_depth() (using vol)
# update_velocity()
#----------------------------------------------------------
# Notes: This function avoids loops in favor of array
# operations to increase speed.
#----------------------------------------------------------
# Notes: In this version, the Channels component uses
# canals_in_Q_fraction, canals_in_ID and its own
# Q grid to compute canals_in_Q. It then sets
# this into the Diversion component.
#----------------------------------------------------------
# print '#### Starting update_canals()...'
if not(self.use_canals): return
#---------------------------------------------------
# Update discharges, Q, at upstream ends of canals
# in CHANNELS component (2/17/10)
#---------------------------------------------------
#-------------------------------------------------------
# Update flow volumes, vol, at upstream ends of canals
# in CHANNELS component (2/17/10)
# NB! We MUST update "vol" also and not just "Q".
#-------------------------------------------------------
#---------------------------------------------
# Add specified discharge (Q_in) to upstream
# end of each canal (at array index 0)
#-----------------------------------------------------
# Diversions component now gets canals_in_Q from the
# Channels component as a requested input_var.
#-------------------------------------------------------
# Note that canal_Q is defined as:
# canal_Q = zeros([n_canals, nt_max], dtype='Float64')
#-------------------------------------------------------
# print '#### update_canals(), canal_Q block...'
self.canal_Q[:, 0] = self.canals_in_Q # (from Channels)
#------------------------------------------------
# Get "Q" at downstream end of each canal.
# It will be zero until flow has had time to
# travel the distance.
#------------------------------------------------
# NB! Each canal can have a different travel
# time and therefore its own "nt_canal" value.
#------------------------------------------------
# NB! canals_out_Q will be retrieved by the
# Channels component.
#-------------------------------------------------------
# Note that canal_Q is defined as:
# canal_Q = zeros([n_canals, nt_max], dtype='Float64')
#-------------------------------------------------------
# print '#### update_canals(), for loop...'
nc = self.n_canals
#-------------------------------------------------
# Zero out to be safe, since each canal can have
# a different nt_k.
#-------------------------------------------------
# self.canals_out_Q[:] = np.empty(nc, dtype='Float32')
self.canals_out_Q[:] = np.zeros(nc, dtype='Float32')
for k in xrange(nc):
nt_k = self.nt_canals[k]
self.canals_out_Q[k] = self.canal_Q[k, nt_k - 1]
## self.canal_Q[:, nt_k:] = 0.0 # (not necessary)
#---------------------------------------------------------
# Update flow volumes, vol, at downstream ends of canals
# in CHANNELS component (2/17/10)
# NB! We MUST update "vol" also and not just "Q".
#---------------------------------------------------------
#--------------
# For testing
#--------------
## print 'self.canal_Q ='
## print self.canal_Q
## print 'self.canals_out_Q ='
## print self.canals_out_Q
## print ' '
#----------------------------------------------------
# "Roll" the canal_Q array downstream (along its
# 2nd index, with index = 1) by one array element
# in each time step. "canal_Q" starts with zeros;
# i.e. canal_Q = zeros([n_canals, n_steps])
#----------------------------------------------------
# In next call to update_canals(), we'll replace
# the first Q-value in each canal.
#----------------------------------------------------
# print '#### update_canals(), roll...'
self.canal_Q = np.roll( self.canal_Q, 1, axis=1 )
# print '#### Exiting update_canals()...'
# update_canals()
#--------------------------------------------------------------------------
| mit | 4,706,929,756,352,890,000 | 45.318489 | 110 | 0.403619 | false |
psyfako/psyfako_member_management | timetable/migrations/0001_initial.py | 1 | 1210 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-22 22:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the "timetable" app (auto-generated by
    # Django 1.10.2 makemigrations).  Creates two tables:
    #   * Slot      -- a named time interval (start/end datetimes)
    #   * Workgroup -- moderation/protocol strings tied to one Slot
    #                  via a cascading foreign key.
    # Do not edit applied migrations; create a new migration instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Slot',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slot_name', models.CharField(max_length=200)),
                ('start_time', models.DateTimeField(verbose_name='Start Time')),
                ('end_time', models.DateTimeField(verbose_name='End Time')),
            ],
        ),
        migrations.CreateModel(
            name='Workgroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('moderation', models.CharField(max_length=200)),
                ('protocol', models.CharField(max_length=200)),
                # Deleting a Slot deletes its Workgroups (CASCADE).
                ('slot', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='timetable.Slot')),
            ],
        ),
    ]
| gpl-3.0 | 4,596,746,112,430,652,400 | 33.571429 | 114 | 0.575207 | false |
plantbiogeography/BaitsFinder | combined-ini.py | 1 | 34661 | #-*- encoding: utf-8 -*-
import commands
import platform
import sys
import os,os.path
import ctypes
import csv
import glob
import math
import numpy as np
import scipy as sp
import shutil
import traceback
#import globalvar
from functools import partial
from multiprocessing import cpu_count
# Basic environment probing: CPU count (as a string, for building
# external command lines later), platform string, and the expected
# location of config.ini next to the current working directory.
CPU_n1=cpu_count()
CPU_n2=str(max(1,CPU_n1))
plat=platform.platform()
path=os.path.abspath(os.curdir)
config=path.replace('\\','/')+'/config.ini'
# NOTE(review): CRC0 is reassigned to 0 before the config parse below,
# so this boolean value is never actually used.
CRC0=os.path.exists(config)
# Abort unless running on Linux or Windows; remember which one in
# Sys_ver ('L' or 'W') so later code can pick ANSI escapes vs. the
# Win32 console API for colored output.
if 'Windows' not in plat and 'Linux' not in plat:
    #warning notice, if this script was not running in linux or windows!
    print('\033[1;31;40m')
    print('*' * 49)
    print('***Please USE this script in Linux or windows!!!***')
    print('*' * 49)
    print('\033[0m')
    exit(0)
elif "Linux" in plat:
    Sys_ver="L"
elif 'Windows' in plat:
    Sys_ver='W'
# Win32 console constants: standard-handle identifiers for
# GetStdHandle() and text-attribute bit flags for
# SetConsoleTextAttribute() (used by class Color below).
STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE= -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0
FOREGROUND_BLUE = 0x01
FOREGROUND_GREEN= 0x02
FOREGROUND_RED = 0x04
FOREGROUND_INTENSITY = 0x08
BACKGROUND_BLUE = 0x10
BACKGROUND_GREEN= 0x20
BACKGROUND_RED = 0x40
BACKGROUND_INTENSITY = 0x80
class Color:
    ''' Colored console output for Windows, via the Win32 console API
    (GetStdHandle / SetConsoleTextAttribute).  See
    http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winprog/winprog/windows_api_reference.asp
    for information on Windows APIs.  Only usable on Windows, since
    ctypes.windll is evaluated when the class body runs. '''
    # Handle of this process's standard output stream.
    std_out_handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
    def set_cmd_color(self, color, handle=std_out_handle):
        """(color) -> bit
        Example: set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
        Returns the API's nonzero-on-success flag.
        """
        # Renamed local from "bool": it shadowed the builtin.
        ok = ctypes.windll.kernel32.SetConsoleTextAttribute(handle, color)
        return ok
    def reset_color(self):
        """Restore the default white-on-black text attribute."""
        self.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE)
    def print_red_text(self, print_text):
        """Print *print_text* in bright red, then reset the color."""
        self.set_cmd_color(FOREGROUND_RED | FOREGROUND_INTENSITY)
        print(print_text)
        self.reset_color()
    def print_green_text(self, print_text):
        """Print *print_text* in bright green, then reset the color."""
        self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
        print(print_text)
        self.reset_color()
    def print_blue_text(self, print_text):
        """Print *print_text* in bright blue, then reset the color."""
        self.set_cmd_color(FOREGROUND_BLUE | FOREGROUND_INTENSITY)
        print(print_text)
        self.reset_color()
    def print_red_text_with_blue_bg(self, print_text):
        """Print *print_text* in bright red on a bright blue background."""
        self.set_cmd_color(FOREGROUND_RED | FOREGROUND_INTENSITY| BACKGROUND_BLUE | BACKGROUND_INTENSITY)
        print(print_text)
        self.reset_color()
# Abort with a red banner if config.ini is missing: ANSI escapes on
# Linux, the Win32 console API (class Color) on Windows.
if os.path.exists(config)==0:
    if Sys_ver=='L':
        print('\033[1;31;40m')
        print('*' * 73)
        print('***Please make sure you have the config.ini file in current folder!!!***')
        print('*' * 73)
        print('\033[0m')
    else:
        if __name__ == "__main__":
            clr = Color()
            clr.print_red_text('*'*73)
            clr.print_red_text('***Please make sure you have the config.ini file in current folder!!!***')
            clr.print_red_text('*'*73)
    exit(0)
# Parse config.ini into module-level settings.  Lines containing '##'
# are treated as comments.  CRC0 counts recognized key lines, CRC1
# counts focal-species FASTA entries (sFNAnames), CRC2 counts
# host_name entries; CRC1 must equal CRC2 or the script aborts below.
# NOTE(review): eval() is used on numeric config values; a malicious
# config.ini could therefore execute arbitrary code.
con_file=open(config).readlines()
CRC0=0
CRC1=0
CRC2=0
rFAAnames = []
rGFFnames=[]
sFNAnames=[]
host1={}    # per focal-species file: '1' if a host genome is given, else '0'
host2={}    # per focal-species file: host name string ('' / 'NAN' = none)
sufficient=[]
sorted_length=0
sorted_host_by_identity=0
baits_length=0
for line in con_file:
    if '##' not in line:
        if 'RAW_path' in line:
            CRC0+=1
            # Normalize Windows backslashes and collapse '//'.
            RAW_path=line.replace('\\','/').split('=')[1].strip()+'/'
            RAW_path=RAW_path.replace('//','/')
        elif 'Tem_path' in line:
            CRC0+=1
            Tem_path=line.replace('\\','/').split('=')[1].strip()+'/'
            Tem_path=Tem_path.replace('//','/')
        elif 'Out_path' in line:
            CRC0+=1
            Out_path=line.replace('\\','/').split('=')[1].strip()+'/'
            Out_path=Out_path.replace('//','/')
        elif 'Mafft_path' in line:
            CRC0+=1
            Mafft_path=line.replace('\\','/').split('=')[1].strip()+'/'
            Mafft_path=Mafft_path.replace('//','/')
        elif 'Blast_path' in line:
            CRC0+=1
            Blast_path=line.split('=')[1].strip()+'/'
            Blast_path=Blast_path.replace('//','/')
        elif 'Blast_ver' in line:
            CRC0+=1
            Blast_ver=line.split('=')[1].strip()
        elif 'Blast_gap' in line:
            # '1' = ungapped BLAST results, '2' = gapped (used for
            # output-file naming throughout the pipeline).
            CRC0+=1
            Blast_gap=line.split('=')[1].strip()
        elif 'rFAAnames' in line:
            CRC0+=1
            rFAAnames.append(line.split('=')[1].strip())
        elif 'rFNAname' in line:
            CRC0+=1
            rFNAname=line.split('=')[1].strip()
        elif 'rGFFnames' in line:
            CRC0+=1
            rGFFnames.append(line.split('=')[1].strip())
        elif 'sFNAnames' in line:
            CRC0+=1
            CRC1+=1
            sFNAnames.append(line.split('=')[1].strip())
            host1[line.split('=')[1].strip()]='0'
            host2[line.split('=')[1].strip()]=''
        elif '=host_name=' in line:
            # Format: <species_file>=host_name=<host>; 'NAN' = no host.
            CRC0+=1
            CRC2+=1
            a0=line.split('=')[0].strip()
            a1=line.split('=')[2].strip()
            host2[a0]=a1
            if a1 !='NAN':
                host1[a0]='1'
        elif 'sorted_length' in line:
            a0=line.split('=')[1].strip()
            sorted_length=eval(a0)
        elif 'sorted_host_by_identity' in line:
            a0=line.split('=')[1].strip()
            sorted_host_by_identity=float(a0)
        elif 'sufficient_data' in line:
            a0=line.split('=')[1].strip()
            if a0!='NAN':
                sufficient.append(a0)
        elif 'baits_length' in line:
            a0=line.split('=')[1].strip()
            baits_length=eval(a0)
# Abort if no config keys were recognized or if the number of
# host_name entries does not match the number of focal-species files.
if CRC0==0 or CRC2!=CRC1 :
    if Sys_ver=='L':
        print('\033[1;31;40m')
        print('*' * 73)
        print('***Please make sure you have the config.ini file in current folder!!!***')
        print('*' * 73)
        print('\033[0m')
    else:
        if __name__ == "__main__":
            clr = Color()
            clr.print_red_text('*'*73)
            clr.print_red_text('***Please make sure you have the config.ini file in current folder!!!***')
            clr.print_red_text('*'*73)
    exit(0)
if os.path.exists(Tem_path)==0:
    os.makedirs(Tem_path)
if os.path.exists(Out_path)==0:
    os.makedirs(Out_path)
# Rebuild a clean working copy of all raw inputs under Tem_path.
# NOTE: "files" stays bound to this RAW_path listing and is reused
# further below.
files=os.listdir(RAW_path)
shutil.rmtree(Tem_path,True)
if os.path.exists(Tem_path)==0:
    os.makedirs(Tem_path)
for fname in files:
    shutil.copyfile(RAW_path+fname,Tem_path+fname)
## 2.3. Combine the all_hits files of each query taxon into a single file
# For every "*out.all_hits" BLAST result: drop the "no_hits_found"
# placeholder rows, write a cleaned copy under Tem_path/cleaned/, and
# accumulate all kept rows in TemL.  The combined file's name records
# the input count (i0) and the configured BLAST gap mode.
i0=0
tcl_files=os.listdir(RAW_path)
Tem_path2=Tem_path+'cleaned/'
Tem_path3=Tem_path+'combined/'
TemL=[]
if os.path.exists(Tem_path2)==0:
    os.makedirs(Tem_path2)
if os.path.exists(Tem_path3)==0:
    os.makedirs(Tem_path3)
for fname in tcl_files:
    if 'out.all_hits' in fname:
        i0=i0+1
        lines=open(Tem_path+fname).readlines()
        myfile2=open(Tem_path2+fname,'w')
        for line in lines:
            a1=line.split()[1]
            if 'no_hits_found' in a1:
                continue
            else:
                myfile2.write(line)
                TemL.append(line)
        myfile2.close()
if Blast_gap=='1':
    all_hits=str(i0)+'_all_hits_nogap'
elif Blast_gap=='2':
    all_hits=str(i0)+'_all_hits_gap'
myfile=open(Tem_path3+all_hits,'w')
for line in TemL:
    myfile.write(line)
myfile.close()
## 2.4. Remove putative contamination.
## 2.4.1. Construct a joint list of putative contaminant sequences
## 2.4.2. Merge contaminant sequences into a single file
## 2.4.3. Remove contaminant sequences
# CRC3 counts focal species that have a host genome configured.
CRC3=0
for i1 in host1.values():
    i2=eval(i1)
    CRC3=CRC3+i2
if CRC3!=0:
    if os.path.exists(Tem_path+'all_host'):
        os.remove(Tem_path+'all_host')
    all_host=open(Tem_path+'all_host','a')
    for fname in sFNAnames:
        pver=host1[fname]
        if pver=='1':
            # Collect query IDs whose identity to the host genome is at
            # least sorted_host_by_identity -- putative contaminants.
            gene=[]
            hname=host2[fname]
            fname2=fname.split('.')[0]+'.sort_'+hname[:2]+'.out'
            lines=open(Tem_path+fname2).readlines()
            myfile=open(Tem_path+fname2+'.sort',"a")
            for line in lines:
                line=line.split("\t")
                if(float(line[2])>=sorted_host_by_identity):
                    gene.append(line[0])
            gene=list(set(gene))
            for i in gene:
                myfile.write(i+"\n")
                all_host.write(i+"\n")
            myfile.close()
    all_host.close()
    files=os.listdir(Tem_path)
    lines=open(Tem_path+'all_host').readlines()
    host_gid=[]
    for line in lines:
        a0=line.split()[0]
        host_gid.append(a0)
    # Filter the identity matrices, keeping only non-host sequence IDs.
    for fname in files:
        if 'ungap.out.matrix.identity' in fname:
            output_file21=str.lower(fname[:2])+'_ungap_host_free'
            lines=open(Tem_path+fname).readlines()
            myfile=open(Tem_path+output_file21,'w')
            for line in lines:
                a1=line.split()[0]
                if a1 not in host_gid:
                    myfile.write(line)
            myfile.close()
        elif 'gap.out.matrix.identity' in fname:
            output_file21=str.lower(fname[:2])+'_gap_host_free'
            lines=open(Tem_path+fname).readlines()
            myfile=open(Tem_path+output_file21,'w')
            for line in lines:
                a1=line.split()[0]
                if a1 not in host_gid:
                    myfile.write(line)
            myfile.close()
    print "Host-removed has been finished sucessfully!!"
else:
    # No host genomes configured: copy the matrices unchanged.
    # NOTE(review): here "files" is still the RAW_path listing from
    # above; this works only because the raw files were copied to
    # Tem_path with the same names -- confirm this is intended.
    for fname in files:
        if 'ungap.out.matrix.identity' in fname:
            output_file21=str.lower(fname[:2])+'_ungap_host_free'
            lines=open(Tem_path+fname).readlines()
            myfile=open(Tem_path+output_file21,'w')
            for line in lines:
                myfile.write(line)
            myfile.close()
        elif 'gap.out.matrix.identity' in fname:
            output_file21=str.lower(fname[:2])+'_gap_host_free'
            lines=open(Tem_path+fname).readlines()
            myfile=open(Tem_path+output_file21,'w')
            for line in lines:
                myfile.write(line)
            myfile.close()
## 2.5. Identify and remove putative paralogues.
## 2.5.1. Add position information to matrix files
# For each host-free matrix file, rebuild each row with positional
# columns taken from the combined all_hits file (reference gene,
# codon-scaled start/end, strand, identity fraction).
files=os.listdir(Tem_path)
lines2=open(Tem_path+'combined/'+all_hits).readlines()
for fname in files:
    if ('_ungap_host_free' in fname or '_gap_host_free' in fname) and '.add' not in fname:
        d=[]
        output_file22=fname+'.add'
        lines1=open(Tem_path+fname).readlines()
        myfile=open(Tem_path+output_file22,'w')
        for line in lines1:
            a0=line.split()[0]+'\t'+line.split()[1]+'\t'+line.split()[2]+'\t'+line.split()[3]+'\t'+line.split()[4]
            d.append(a0)
        for line in lines2:
            # a1 is the lookup key (identity rescaled to a fraction);
            # a2 is the enriched output row with protein coordinates
            # converted to nucleotide (codon) coordinates.
            a1=line.split()[0]+'\t'+line.split()[1]+'\t'+str(float(line.split()[3])/100)+'\t'+line.split()[2]+'\t'+line.split()[5]
            a2=line.split()[0]+'\t'+line.split()[10]+'\t'+line.split()[11]+'\t'+line.split()[1]+'\t'+str(3*eval(line.split()[12])-2)+'\t'+str(3*eval(line.split()[13]))+'\t'+line.split()[-1]+'\t'+str(float(line.split()[3])/100)+'\n'
            if a1 in d:
                myfile.write(a2)
        myfile.close()
## 2.5.2. Identify overlapping sequences from focal species that blast against the same reference gene
# NOTE(review): this pass runs once, after the loop above, so it only
# processes the last *_host_free ".add" file produced -- confirm
# whether every file should be processed here.
lines=open(Tem_path+output_file22).readlines()
output_file23=output_file22+'.sorted'
output_file24=output_file23+'.paralogInfo'
output_file25=output_file24+'.out'
myfile=open(Tem_path+output_file23,'w')
d=[]
f={}
x=0
for line in lines:
    d.append(line.split())
    y=str(x)
    f[y]=''
    x=x+1
# Sort rows by reference gene (column 3) then start coordinate.
d2=sorted(d, key=lambda result: (result[3],eval(result[4])),reverse=False)
for i in d2:
    # Re-serialize the list-of-fields back to a tab-separated line.
    i1=str(i)
    j=''
    for k in i1.split():
        j=j+k+'\t'
    j=j[:-1]+'\n'
    j=j.replace('[','')
    j=j.replace(']','')
    j=j.replace('\'','')
    j=j.replace(',','')
    myfile.write(j)
myfile.close()
lines=open(Tem_path+output_file23).readlines()
myfile=open(Tem_path+output_file24,'w')
ls=len(lines)
# Two consecutive rows hitting the same reference gene whose regions
# overlap by more than sorted_length bp are both flagged as paralogs.
for i in range(1,ls):
    l1=lines[i-1]
    l2=lines[i]
    z1=l1.split()[3]
    z2=l2.split()[3]
    z3=l1.split()[5]
    z4=l2.split()[4]
    if z1==z2 and (eval(z3)-eval(z4))>sorted_length:
        f[str(i-1)]='PARALOG!'
        f[str(i)]='PARALOG!'
for i in range(ls):
    l1=lines[i]
    if f[str(i)]=='PARALOG!':
        l2=l1[:-1]+'\tPARALOG!\n'
    else:
        l2=l1
    myfile.write(l2)
myfile.close()
## 2.5.3. Remove putative paralogues using a custom python script
# Drop every row whose reference gene (column 3) was flagged PARALOG.
lines=open(Tem_path+output_file24).readlines()
myfile=open(Tem_path+output_file25,'w')
over_bp=str(sorted_length) ##length of overlap
f={}
d=[]
for line in lines:
    if len(line.split())>3:
        a1=line.split()[3]
        if 'PARALOG' in line:
            d.append(a1)
for line in lines:
    if len(line.split())>3:
        a1=line.split()[3]
        if a1 not in d:
            myfile.write(line)
myfile.close()
## 2.5.4. Combine the paralogue-free data
all_host_free=[]
files=os.listdir(Tem_path)
i2=0
for fname in files:
    if '.sorted.paralogInfo.out' in fname:
        i2=i2+1
        lines=open(Tem_path+fname).readlines()
        for line in lines:
            all_host_free.append(line)
if Blast_gap=='1':
    fname3=str(i2)+'_nogap_hostfree_'+over_bp+'sorted'
elif Blast_gap=='2':
    fname3=str(i2)+'_gap_hostfree_'+over_bp+'sorted'
myfile=open(Tem_path+fname3,'w')
for line in all_host_free:
    myfile.write(line)
myfile.close()
print "Part.2 has been finished successfully!!"
## 3.1. Extract sequences of the non-paralogues genes from each of the focal species
# For each focal-species FASTA (sFNAnames): load its sequences into dict f
# (header token 0 -> concatenated sequence), then walk the combined
# paralog-free BLAST table (fname3) and cut out each hit region, writing a
# two-line FASTA record per hit.  Hits with start > end are marked 'reverse'
# (the actual reverse-complementing happens in section 3.3).
lines=open(Tem_path+fname3).readlines()
for fname in sFNAnames :
    iD=fname.split('.')[0]
    if Blast_gap=='1':
        output_file31=fname.split('.')[0]+'.nogap_edit.codons.query_all.seq'
    elif Blast_gap=='2':
        output_file31=fname.split('.')[0]+'.gap_edit.codons.query_all.seq'
    lines1=open(Tem_path+fname).readlines()
    myfile=open(Tem_path+output_file31,'w')
    x=0
    f={}
    y=''
    # Parse the (possibly multi-line) FASTA: flush the previous record each
    # time a new header appears, and once more after the loop for the last one.
    for line in lines1:
        if '>' in line:
            if x==1:
                f[a0]=y
                y=''
            a0=line.split()[0][1:]
            x=1
        else:
            y=y+line.split()[0]
    f[a0]=y
    for line in lines:
        if iD in line:
            # Columns: 0=query id, 1=start, 2=end, 3-7 = subject id / coords.
            # SECURITY NOTE(review): eval() is used to parse numeric fields
            # from the file; int() would be safer against malformed input.
            a0=line.split()[0]
            a1=line.split()[1]
            a2=line.split()[2]
            a3=line.split()[3]
            a4=line.split()[4]
            a5=line.split()[5]
            a6=line.split()[6]
            a7=line.split()[7]
            a8=eval(a1)
            a9=eval(a2)
            b=f[a0]
            if eval(a1)>eval(a2):
                # Reverse-strand hit: slice using the smaller coordinate first.
                b1=b[a9-1:a8]
                c0=' mod: 0.0 reverse '
            else:
                c0=' mod: t0.0 forward '
                b1=b[a8-1:a9]
            # Bug fix: the region length is abs(start-end)+1, not
            # abs(start-end+1) -- the original gave length-2 for all
            # forward-strand hits (e.g. 1..10 yielded 8).  Matches the
            # max-min+1 formula used everywhere else in this script.
            e1='>'+a0+' length: '+str(abs(eval(a1)-eval(a2))+1)+c0+a1+'-'+a2+' ['+a3+' '+a4+'-'+a5+']\n'
            e2=b1+'\n'
            myfile.write(e1)
            myfile.write(e2)
    myfile.close()
## 3.2. Combine, per gene, sequences of the reference species and of all focal species into a single file
if os.path.exists(Tem_path+'group1')==0:
    os.mkdir(Tem_path+'group1')
Tem_path3=Tem_path+'/group1/'
# Pass 1: build a per-reference-gene map from the combined BLAST table.
#   d  - ordered list of reference gene IDs (column 4)
#   f  - gene -> tab-joined list of 'query*start-end' fragment keys
#   f3 - gene -> minimum subject start seen, f4 - gene -> maximum subject end
# NOTE(review): f1 and f2 are created but never used in this section.
lines1=open(Tem_path+fname3).readlines()
output_file32=fname3+'.map'
myfile=open(Tem_path+output_file32,'w')
d=[]
f={}
f1={}
f2={}
f3={}
f4={}
for line in lines1:
    a0=line.split()[0]
    a1=line.split()[1]
    a2=line.split()[2]
    a3=line.split()[3]
    a4=eval(line.split()[4])  # SECURITY NOTE(review): eval of file fields
    a5=eval(line.split()[5])
    a6=a0+'*'+a1+'-'+a2
    if a3 not in d:
        d.append(a3)
        f3[a3]=a4
        f4[a3]=a5
        f[a3]=a6
    else:
        f3[a3]=min(f3[a3],a4)
        f4[a3]=max(f4[a3],a5)
        f[a3]=f[a3]+'\t'+a6
for i in d:
    myfile.write(i+'\t||\t'+f[i]+'\n')
myfile.close()
# Pass 2: load the reference genome FASTA into g (id -> sequence + '\n'),
# flushing the previous record on each new header and once after the loop.
lines2=open(RAW_path+rFNAname).readlines()
g={}
b0=''
b1=''
b2=0
for line in lines2:
    if '>' in line:
        if b2 !=0:
            g[b0]=b1+'\n'
            b1=''
        b0=line.split()[0][1:]
        b2=1
    else:
        b1=b1+line.split()[0]
g[b0]=b1+'\n'
# Pass 3: index every extracted focal-species sequence (from section 3.1) by
# 'query*start-end' (x: key -> sequence line, x1: key -> extended header).
files=os.listdir(Tem_path)
x={}
x1={}
for fname in files:
    if '.nogap_edit.codons.query_all.seq' in fname:
        lines=open(Tem_path+fname).readlines()
        ls=len(lines)/2  # Python 2 integer division: files are header/seq pairs
        for i in range(ls):
            l1=lines[2*i]
            l2=lines[2*i+1]
            y0=l1.split()[0][1:]+'*'+l1.split()[6]
            y1=l1.split()[0][1:]+'*'+l1.split()[6]+'*'+l1.split()[-4]+'*'+l1.split()[-2]+'*'+l1.split()[-1]
            x[y0]=l2
            x1[y0]=y1
    if '.gap_edit.codons.query_all.seq' in fname:
        lines=open(Tem_path+fname).readlines()
        ls=len(lines)/2
        for i in range(ls):
            l1=lines[2*i]
            l2=lines[2*i+1]
            y0=l1.split()[0][1:]+'*'+l1.split()[6]
            y1=l1.split()[0][1:]+'*'+l1.split()[6]+'*'+l1.split()[-4]+'*'+l1.split()[-2]+'*'+l1.split()[-1]
            x[y0]=l2
            x1[y0]=y1
# Pass 4: write one FASTA file per reference gene containing the reference
# region (min start .. max end) followed by every focal-species fragment.
for i in d:
    myfile=open(Tem_path3+i+'.fa','w')
    myfile.write('>'+i+'\t'+str(f3[i])+'-'+str(f4[i])+'\n')
    myfile.write(g[i][(f3[i]-1):f4[i]]+'\n')
    j=f[i]
    for k in j.split():
        myfile.write('>'+x1[k].replace('*','\t')+'\n')
        myfile.write(x[k])
    myfile.close()
## 3.3 Bring sequences to the same orientation relative to the reference species
# Records whose header contains 'reverse' (set in section 3.1) are replaced by
# their reverse complement; everything else is copied through unchanged.
if os.path.exists(Tem_path+'group2')==0:
    os.mkdir(Tem_path+'group2')
rootdir1=Tem_path+'/group1/'
rootdir2=Tem_path+'/group2/'
files=os.listdir(Tem_path+'/group1/')
# Complement table.  NOTE(review): only uppercase A/T/U/C/G/N are handled --
# any other character (lowercase, gaps, ambiguity codes) raises KeyError.
f={}
f['A']='T'
f['T']='A'
f['U']='A'
f['C']='G'
f['G']='C'
f['N']='N'
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file1,'w')
    ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
    for i in range(ls):
        l1=lines[2*i]
        l2=lines[2*i+1]
        if 'reverse' not in l1:
            myfile.write(l1)
            myfile.write(l2)
        else:
            myfile.write(l1)
            # Walk the sequence (minus trailing newline) backwards and
            # complement each base; string += is quadratic but sequences are
            # gene-sized here.
            l3=''
            for j in l2[:-1][::-1]:
                l3=l3+f[j]
            l3=l3+'\n'
            myfile.write(l3)
    myfile.close()
## 3.4. Retain only genes with data from the best-quality focal species
# If a 'sufficient' (best-quality) species list was given, keep only gene
# files that mention at least one of those species IDs; otherwise copy every
# gene file through unchanged.
if len(sufficient)>0:
    if os.path.exists(Tem_path+'group3')==0:
        os.mkdir(Tem_path+'group3')
    rootdir1=Tem_path+'/group2/'
    rootdir2=Tem_path+'/group3/'
    files=os.listdir(Tem_path+'/group2/')
    for file1 in files:
        x=0
        lines=open(rootdir1+file1).readlines()
        for line in lines:
            for i in sufficient:
                # Species ID is the filename stem before the first '.'
                j=i.split('.')[0]
                if j in line:
                    x=1
        if x==1:
            myfile=open(rootdir2+file1,'w')
            for line in lines:
                myfile.write(line)
            myfile.close()
else:
    if os.path.exists(Tem_path+'group3')==0:
        os.mkdir(Tem_path+'group3')
    rootdir1=Tem_path+'/group2/'
    rootdir2=Tem_path+'/group3/'
    files=os.listdir(rootdir1)
    for fname in files:
        shutil.copyfile(rootdir1+fname,rootdir2+fname)
print "Part.3 has been finished successfully!!"
# Part 4: produce per-gene alignments in group5.  Two modes:
#   Blast_gap == '1' (ungapped): pad each focal sequence with '-' using the
#     BLAST start/end coordinates -- no external aligner needed.
#   Blast_gap == '2' (gapped): run MAFFT, then reformat its output.
# Either branch ends with group5 files whose headers are '*'-joined tokens
# plus a 'length=' field.
if Blast_gap=='1':
    ## 4.u1. Align sequences of the focal species and the reference species using information on start and end points of the BLAST alignment
    if os.path.exists(Tem_path+'group5')==0:
        os.mkdir(Tem_path+'group5')
    rootdir1=Tem_path+'/group3/'
    rootdir2=Tem_path+'/group5/'
    files=os.listdir(Tem_path+'/group3/')
    for file1 in files:
        lines=open(rootdir1+file1).readlines()
        myfile=open(rootdir2+file1,'w')
        # Reference record (first pair): its start-end range 'a1-a2' defines
        # the alignment frame all focal sequences are padded into.
        a0=lines[0].split()[-1]
        a1=eval(a0.split('-')[0])  # SECURITY NOTE(review): eval of file fields
        a2=eval(a0.split('-')[1])
        l3='*'.join(lines[0].split())+'*length='+str(a2-a1+1)+'\n'
        myfile.write(l3)
        myfile.write(lines[1])
        ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
        for i in range(1,ls):
            l1=lines[2*i]
            l2=lines[2*i+1][:-1]
            # Subject range sits in the last header token, e.g. '100-250]'.
            b0=l1.split()[-1][:-1]
            b1=eval(b0.split('-')[0])
            b2=eval(b0.split('-')[1])
            c1=min(b1,b2)
            c2=max(b1,b2)
            l1='*'.join(l1.split())+'*length='+str(c2-c1+1)+'\n'
            myfile.write(l1)
            # Left/right pad with '-' to the reference frame.  These inner
            # loops reuse the name i; in Python that does not disturb the
            # outer 'for i in range(1,ls)' iteration, but it is confusing.
            for i in range(a1,c1):
                l2='-'+l2
            for i in range(c2,a2):
                l2=l2+'-'
            myfile.write(l2+'\n')
        myfile.close()
elif Blast_gap=='2':
    ## 4.g1. Align sequences of the focal species and the reference species using MAFFT
    if os.path.exists(Tem_path+'mafft_out')==0:
        os.mkdir(Tem_path+'mafft_out')
    rootdir1=Tem_path+'/group3/'
    rootdir2=Tem_path+'/mafft_out/'
    myfile0=open('Mafft_error.log','w')
    files=os.listdir(Tem_path+'group3')
    for file1 in files:
        # Sys_ver selects how MAFFT is invoked: 'L' = Linux (on PATH),
        # 'W' = Windows (mafft.bat under Mafft_path).
        # NOTE(review): for any other Sys_ver value, val is never assigned
        # and the check below raises NameError -- presumably validated upstream.
        if Sys_ver=='L':
            val= os.system('mafft --ep 0 --genafpair --maxiterate 1000 '+rootdir1+file1+ '>' +rootdir2+file1)
        elif Sys_ver=='W':
            val= os.system(Mafft_path+'mafft.bat --ep 0 --genafpair --maxiterate 1000 '+rootdir1+file1+ '>' +rootdir2+file1)
        # Non-zero exit status: log the failing file and continue.
        if val!=0:
            myfile0.write(file1+'\n')
    myfile0.close()
    ## 4.g2. Change the MAFFT output from multi-line to single-line fasta
    if os.path.exists(Tem_path+'group4')==0:
        os.mkdir(Tem_path+'group4')
    rootdir1=Tem_path+'/mafft_out/'
    rootdir2=Tem_path+'/group4/'
    files=os.listdir(Tem_path+'/mafft_out/')
    for file1 in files:
        x=0
        lines=open(rootdir1+file1).readlines()
        myfile=open(rootdir2+file1,'w')
        for line in lines:
            if '>' in line:
                # Terminate the previous sequence line before a new header
                # (x==0 only for the very first header).
                if x!=0:
                    myfile.write('\n')
                else:
                    x=1
                myfile.write(line)
            else:
                myfile.write(line.split()[0])
        myfile.write('\n')
        myfile.close()
    ## 4.g3. Change white spaces in the sequence titles to asterisks
    if os.path.exists(Tem_path+'group5')==0:
        os.mkdir(Tem_path+'group5')
    rootdir1=Tem_path+'/group4/'
    rootdir2=Tem_path+'/group5/'
    files=os.listdir(Tem_path+'/group4/')
    for file1 in files:
        lines=open(rootdir1+file1).readlines()
        myfile=open(rootdir2+file1,'w')
        a0=lines[0].split()[-1]
        a1=eval(a0.split('-')[0])
        a2=eval(a0.split('-')[1])
        l3='*'.join(lines[0].split())+'*length='+str(a2-a1+1)+'\n'
        myfile.write(l3)
        myfile.write(lines[1])
        ls=len(lines)/2
        for i in range(1,ls):
            l1=lines[2*i]
            l2=lines[2*i+1]
            b0=l1.split()[-1][:-1]
            b1=eval(b0.split('-')[0])
            b2=eval(b0.split('-')[1])
            c1=min(b1,b2)
            c2=max(b1,b2)
            l1='*'.join(l1.split())+'*length='+str(c2-c1+1)+'\n'
            myfile.write(l1)
            myfile.write(l2)
        myfile.close()
print "Part.4 has been finished successfully!!"
## 5.1 Extend sequences to start with the first position of the cds
# If the reference region does not start at position 1 of the CDS (second
# '*'-field of the header), prefix every sequence line with '+' characters so
# that all alignments share the same coordinate origin.
if os.path.exists(Tem_path+'filling-in')==0:
    os.mkdir(Tem_path+'filling-in')
rootdir1=Tem_path+'/group5/'
rootdir2=Tem_path+'/filling-in/'
files=os.listdir(Tem_path+'/group5/')
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file1,'w')
    l1=lines[0]
    # Turn '-' and '_' into tabs so the start coordinate becomes its own field.
    l1=l1.replace('-','\t')
    l1=l1.replace('_','\t')
    a0=l1.split()[0]
    a0=a0.split('*')[1]
    if eval(a0) !=1:
        for line in lines:
            if '>' in line:
                myfile.write(line)
            else:
                # Pad with (start-1) '+' placeholders before the sequence.
                for i in range(eval(a0)-1):
                    myfile.write('+')
                myfile.write(line)
    else:
        for line in lines:
            myfile.write(line)
    myfile.close()
## 5.2. Transpose the aligned sequences
# Writes one header row ('~'-joined titles, tab-separated columns), then one
# row per alignment position with one character per sequence.
if os.path.exists(Tem_path+'group6')==0:
    os.mkdir(Tem_path+'group6')
rootdir1=Tem_path+'/filling-in/'
rootdir2=Tem_path+'/group6/'
files=os.listdir(Tem_path+'/filling-in/')
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file1,'w')
    ls=len(lines)/2            # number of sequences (Python 2 int division)
    ls2=len(lines[1])          # row count incl. trailing newline of line 1
    for i in range(ls):
        l1=lines[2*i][:-1]
        l1='~'.join(l1.split())
        myfile.write(l1+'\t')
    myfile.write('\n')
    # NOTE(review): O(positions x sequences) with a split() per cell -- slow
    # for long alignments.  The last row (j == ls2-1) emits the newline
    # characters appended below; section 5.3 filters such rows out again.
    # Assumes all sequences have the same length as lines[1] -- confirm.
    for j in range(ls2):
        for l in range(ls):
            l2=lines[2*l+1]
            l2=l2.split()[0]
            l2=l2+'\n'
            myfile.write(l2[j]+'\t')
        myfile.write('\n')
    myfile.close()
## 5.3. Clean the files with transposed sequences
# Keeps only rows with at least two whitespace-separated fields, which drops
# the whitespace-only rows produced at the end of the transpose in 5.2.
if os.path.exists(Tem_path+'group7')==0:
    os.mkdir(Tem_path+'group7')
rootdir1=Tem_path+'/group6/'
rootdir2=Tem_path+'/group7/'
files=os.listdir(Tem_path+'/group6/')
for file1 in files:
    file2=file1.replace('.txt','.fa')
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file2,'w')
    for line in lines:
        if len(line.split())>=2:
            myfile.write(line)
    myfile.close()
## 5.4. Compile start and end points of all exons per gene in a single file
# Parses each reference GFF file (rGFFnames): every 'mRNA' record opens a new
# gene entry (ID parsed from column 9 as 'ID=...;', with any '.TAIR' suffix
# stripped); every following 'CDS' record appends its start/end coordinates.
# One line per gene is written to Tem_path+'map' in the form:
#   <gene>\t<strand>\t||\t<start>\t<end>\t|\t<start>\t<end>\t|\t...
# NOTE(review): assumes a TAIR-style 'ID=...' first attribute in column 9 --
# confirm for GFFs from other sources.
myfile=open(Tem_path+'map','w')
for fname in rGFFnames:
    lines=open(RAW_path+fname).readlines()
    d=[]
    f={}
    i=0
    j=''
    for line in lines:
        if len(line.split())>3:
            if line.split()[2]=='mRNA':
                # A new mRNA begins: flush the previous gene's entry first.
                if i !=0:
                    myfile.write(j+'\t'+ax+'\t||\t'+f[j]+'\n')
                i=i+1
                ax=line.split()[6]
                a0=line.split()[8]
                a1=a0.split(';')[0]
                a2=a1.split('=')[1]
                a3=a2.split('.TAIR')[0]
                j=a3
            elif line.split()[2]=='CDS':
                b0=line.split()[3]
                b1=line.split()[4]
                c=b0+'\t'+b1+'\t|\t'
                if j in d:
                    f[j]=f[j]+c
                else:
                    d.append(j)
                    f[j]=c
    # Bug fix: the original only wrote a gene's entry when the *next* mRNA
    # line appeared, so the last gene of every GFF file was silently dropped
    # from the map (the same parse-then-flush pattern is completed after the
    # loop elsewhere in this script, e.g. the FASTA readers).  Guard on
    # 'j in f' so a final mRNA with no CDS records does not raise KeyError.
    if i !=0 and j in f:
        myfile.write(j+'\t'+ax+'\t||\t'+f[j]+'\n')
myfile.close()
## 5.5. Add flags for split site positions in the transposed alignment
# Uses the 'map' file from 5.4 to compute per-gene exon lengths, then inserts
# a flag row of 'S' characters after each exon boundary in the transposed
# alignment files (group7 -> group8).
if os.path.exists(Tem_path+'group8')==0:
    os.mkdir(Tem_path+'group8')
lines=open(Tem_path+'/map').readlines()
rootdir1=Tem_path+'/group7/'
rootdir2=Tem_path+'/group8/'
files=os.listdir(Tem_path+'/group7/')
f={}   # gene id -> list of exon lengths (as strings)
d=[]
for line in lines:
    g=[]
    a0=line.split()[0]
    a1=line.split()[3]
    a2=line.split()[4]
    b1=eval(a1)  # SECURITY NOTE(review): eval of file fields; int() is safer
    b2=eval(a2)
    d.append(a0)
    # The '|'-separated groups between '||' and the line end are exon
    # start/end pairs; GFF guarantees start <= end, so abs() is a no-op here.
    for j in line.split('|')[2:-1]:
        c0=j.split()[0]
        c1=j.split()[1]
        c2=eval(c0)
        c3=eval(c1)
        c4=abs(c3-c2+1)
        c5=str(c4)
        g.append(c5)
    f[a0]=g
for file1 in files:
    # Recover the gene id ('X.Y') from the group7 filename.
    k0=file1.split('_')[0]
    k1=k0.split('.')[0]+'.'+k0.split('.')[1]
    k2=f[k1]
    lines1=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+k1,'w')
    ls=len(lines1[1].split())  # number of columns (sequences)
    i=0
    k3=[]
    j=0
    # k3 holds cumulative exon end offsets within the gene.
    for x in k2:
        j=j+eval(x)
        k3.append(j)
    for line in lines1:
        # When the running count of non-gap reference positions hits an exon
        # boundary, emit an extra all-'S' row before the current row.
        if i in k3:
            myfile.write(line)
            for k in range(ls):
                myfile.write('S\t')
            myfile.write('\n')
        else:
            myfile.write(line)
        # Only rows whose first (reference) column is not a gap advance the
        # reference coordinate.
        if len(line.split())>=1 and line.split()[0] !='-':
            i=i+1
    myfile.close()
## 5.6. Extract exons into separate files
# Splits each flagged alignment at the 'S' rows inserted in 5.5, writing exon
# number i+1 to '<file>.<i+1>'.  The header row (l1) is repeated at the top of
# each exon file.  NOTE(review): files are opened in append mode ('a'), so
# re-running this step without clearing group9 duplicates the content.
if os.path.exists(Tem_path+'group9')==0:
    os.mkdir(Tem_path+'group9')
rootdir1=Tem_path+'/group8/'
rootdir2=Tem_path+'/group9/'
files=os.listdir(Tem_path+'/group8/')
for file1 in files:
    i=0  # current exon index
    j=0  # rows written in the current exon (0 means header not yet written)
    k=0  # NOTE(review): k is never used
    lines=open(rootdir1+file1).readlines()
    l1=lines[0]
    for line in lines[1:]:
        if 'S' not in line:
            myfile=open(rootdir2+file1+'.'+str(i+1),'a')
            if j==0:
                myfile.write(l1)
            j=j+1
            myfile.write(line)
            myfile.close()
        else:
            j=0
            i=i+1
## 5.7. Remove exons of insufficient length
# Counts the non-padding rows of the first (reference) column: rows whose
# first field is neither '+' (CDS-start padding from 5.1) nor '-' (gap).
# Only exon files with at least baits_length such positions are kept.
if os.path.exists(Tem_path+'group10')==0:
    os.mkdir(Tem_path+'group10')
rootdir1=Tem_path+'/group9/'
rootdir2=Tem_path+'/group10/'
files=os.listdir(Tem_path+'/group9/')
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    d=[]
    for line in lines[1:]:
        if len(line.split())>=1:
            a0=line.split()[0]
            if a0 !='+' and a0 !='-':
                d.append(a0)
    if len(d)>=baits_length:
        myfile=open(rootdir2+file1,'w')
        for line in lines:
            myfile.write(line)
        myfile.close()
print "Part.5 has been finished successfully!!"
## 6.1. Re-transpose files containing the exon sequences to fasta files
# Converts each column-per-sequence exon file back into FASTA: for each
# column k, write its header (restoring '~' to spaces) followed by the column
# characters, dropping the '+' padding introduced in 5.1.
if os.path.exists(Tem_path+'group11')==0:
    os.mkdir(Tem_path+'group11')
rootdir1=Tem_path+'/group10/'
rootdir2=Tem_path+'/group11/'
files=os.listdir(Tem_path+'/group10/')
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file1,'w')
    d=[]
    e=[]  # NOTE(review): e is never used
    l1=lines[0]
    a1=len(l1.split())  # number of sequence columns
    for i in l1.split():
        j=i+'\n'
        j=j.replace('~',' ')
        d.append(j)
    # NOTE(review): the data rows are re-read and re-split once per column
    # (O(rows x cols) splits) -- correct but slow for wide alignments.
    for k in range(a1):
        myfile.write(d[k])
        for line in lines[1:]:
            if len(line.split())==a1:
                l=line.split()[k]
                l=l.replace('+','')
                myfile.write(l)
        myfile.write('\n')
    myfile.close()
## 6.2. Remove gaps in exons
# Strips '-' from every sequence.  Non-empty records go to group12 ('.fas');
# records at least baits_length+1 characters long (sequence plus newline)
# additionally go to group13 ('.120bp.fas').
# NOTE(review): the '.120bp' suffix is hard-coded even though the filter uses
# baits_length, and sections 6.3/6.4 slice filenames assuming exactly this
# suffix length ([:-4], [:-9]) -- keep them in sync if either changes.
if os.path.exists(Tem_path+'group12')==0:
    os.mkdir(Tem_path+'group12')
if os.path.exists(Tem_path+'group13')==0:
    os.mkdir(Tem_path+'group13')
rootdir1=Tem_path+'/group11/'
rootdir2=Tem_path+'/group12/'
rootdir3=Tem_path+'/group13/'
files=os.listdir(Tem_path+'/group11/')
for file1 in files:
    file2=file1+'.fas'
    file3=file1+'.120bp.fas'
    lines=open(rootdir1+file1).readlines()
    myfile=open(rootdir2+file2,'w')
    myfile2=open(rootdir3+file3,'w')
    ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
    for i in range(ls):
        l1=lines[2*i]
        l2=lines[2*i+1]
        l1=l1.replace('*','\t')
        l3=l2.replace('-','')
        ls2=len(l3)  # includes the trailing newline, hence the >1 / +1 tests
        if ls2>1:
            myfile.write(l1)
            myfile.write(l3)
        if ls2>=(baits_length+1):
            myfile2.write(l1)
            myfile2.write(l3)
    myfile.close()
    myfile2.close()
## 6.3. Extract baits
# For each focal-species record (index 0 = reference is skipped), cut the
# sequence into overlapping 120 bp windows (60 bp step), one file per record
# in group14, then concatenate everything into a single combined bait FASTA.
if os.path.exists(Tem_path+'group14')==0:
    os.mkdir(Tem_path+'group14')
rootdir1=Tem_path+'/group13/'
rootdir2=Tem_path+'/group14/'
files=os.listdir(Tem_path+'/group13/')
for file1 in files:
    lines=open(rootdir1+file1).readlines()
    ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
    a0=file1[:-4]+'_'  # strip the '.fas' suffix added in 6.2
    for i in range(1,ls):
        x1=0
        l1=lines[2*i]
        l2=lines[2*i+1]
        a1=l1.split()[0][1:]
        myfile=open(rootdir2+a0+a1+'.fasta','w')
        # If the leftover tail after the last full 60 bp step exceeds 40 bp,
        # emit one extra window anchored at the sequence end ('reversed').
        y=(len(l2)-1)%60
        if y>40:
            x1=1
        x3=(len(l2)-1)/60-1  # number of full 120 bp windows (Py2 int division)
        for j in range(x3):
            b0=60*(j)
            b1=60*(j+2)
            myfile.write(l1[:-1]+'_'+str(j)+'\n')
            myfile.write(l2[b0:b1]+'\n')
        # NOTE(review): if x3 <= 0 this reuses j from a previous iteration
        # (or raises NameError on the first record).  Unreachable as long as
        # group13 only holds sequences >= baits_length (120) bp -- confirm.
        if x1==1:
            myfile.write(l1[:-1]+'_'+str(j+1)+'_reversed\n')
            myfile.write(l2[-baits_length-1:])
        myfile.close()
# Concatenate all per-record bait files, prefixing each header with the gene
# id taken from the filename.
files2=os.listdir(Tem_path+'/group14/')
if Blast_gap=='1':
    myfile=open(Out_path+'/nogap_baits.combined.fasta','w')
elif Blast_gap=='2':
    myfile=open(Out_path+'/gap_baits.combined.fasta','w')
for file1 in files2:
    a0=file1.split('_')[0]
    lines=open(rootdir2+file1).readlines()
    for line in lines:
        if '>' in line:
            line='>'+a0+'_'+line[1:]
        myfile.write(line)
myfile.close()
## 6.4. Combine exon sequences into single files per species
# For every focal species, collect all of its records from the group13 exon
# files into one '<species 4-char prefix>.fasta', matching records by the
# first four characters of the header id.
rootdir=Tem_path+'group13/'
for fname in sFNAnames:
    fname1=fname[:4]+'.fasta'
    myfile=open(Tem_path+fname1,'w')
    files=os.listdir(Tem_path+'group13/')
    for file1 in files:
        a=file1[:-9]  # strip the fixed-length '120bp.fas' suffix tail
        lines=open(rootdir+file1).readlines()
        ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
        for i in range(ls):
            l1=lines[2*i]
            l3='>'+a+'_'+l1[1:]
            l2=lines[2*i+1]
            a0=l1[1:5]
            if a0 in fname1:
                myfile.write(l3)
                myfile.write(l2)
    myfile.close()
print "Part.6 has been finished successfully!!"
## 7.1. Remove genes with too few baits in the best-quality focal species
# Only runs when a 'sufficient' species list was given.  Pass 1 counts, per
# gene (header field 4), how many bait headers mention a sufficient species;
# pass 2 writes only records of genes with at least 4 such baits.
if len(sufficient)>0:
    if Blast_gap=='1':
        lines=open(Out_path+'nogap_baits.combined.fasta').readlines()
        myfile=open(Out_path+'nogap_sorted.result.fa','w')
    elif Blast_gap=='2':
        lines=open(Out_path+'gap_baits.combined.fasta').readlines()
        myfile=open(Out_path+'gap_sorted.result.fa','w')
    f={}  # gene id -> bait count from sufficient species
    d=[]
    e=[]  # gene ids passing the >=4 threshold
    for line in lines:
        x0=0
        for x1 in sufficient:
            x1=x1.split('.')[0]
            if x1 in line:
                x0=1
        if x0==1:
            # NOTE(review): assumes matching lines are headers with at least
            # 4 whitespace fields (sequence lines never contain species ids).
            a1=line.split()[3]
            if a1 not in d:
                d.append(a1)
                f[a1]=1
            else:
                f[a1]=f[a1]+1
    for i in d:
        j=f[i]
        if j>=4:
            e.append(i)
    ls=len(lines)/2  # Python 2 integer division: header/sequence pairs
    for k in range(ls):
        l1=lines[2*k]
        l2=lines[2*k+1]
        c=l1.split()[3]
        if c in e:
            myfile.write(l1)
            myfile.write(l2)
    myfile.close()
## Wait for CD-HIT-EST
| gpl-2.0 | 7,990,784,050,546,604,000 | 29.424297 | 231 | 0.510603 | false |
drvinceknight/Axelrod | axelrod/strategies/meta.py | 1 | 4202 | import random
from axelrod import Player
class MetaPlayer(Player):
"""A generic player that has its own team of players."""
team = []
def __init__(self):
Player.__init__(self)
# Make sure we don't use any meta players to avoid infinite recursion.
self.team = [t for t in self.team if not issubclass(t, MetaPlayer)]
self.nteam = len(self.team)
# Initiate all the player in out team.
self.team = [t() for t in self.team]
# If the team will have stochastic players, this meta is also stochastic.
self.stochastic = any([t.stochastic for t in self.team])
def strategy(self, opponent):
# Make sure the history of all hunters is current.
for ih in range(len(self.team)):
self.team[ih].history = self.history
# Get the results of all our players.
results = [player.strategy(opponent) for player in self.team]
# A subclass should just define a way to choose the result based on team results.
return self.meta_strategy(results)
def meta_strategy(self, results):
"""Determine the meta result based on results of all players."""
pass
class MetaMajority(MetaPlayer):
"""A player who goes by the majority vote of all other non-meta players."""
name = "Meta Majority"
def __init__(self):
# We need to import the list of strategies at runtime, since
# _strategies import also _this_ module before defining the list.
from _strategies import ordinary_strategies
self.team = ordinary_strategies
MetaPlayer.__init__(self)
def meta_strategy(self, results):
if results.count('D') > results.count('C'):
return 'D'
return 'C'
class MetaMinority(MetaPlayer):
"""A player who goes by the minority vote of all other non-meta players."""
name = "Meta Minority"
def __init__(self):
# We need to import the list of strategies at runtime, since
# _strategies import also _this_ module before defining the list.
from _strategies import ordinary_strategies
self.team = ordinary_strategies
MetaPlayer.__init__(self)
def meta_strategy(self, results):
if results.count('D') < results.count('C'):
return 'D'
return 'C'
class MetaWinner(MetaPlayer):
"""A player who goes by the strategy of the current winner."""
name = "Meta Winner"
def __init__(self, team=None):
# The default is to used all strategies available, but we need to import the list
# at runtime, since _strategies import also _this_ module before defining the list.
if team:
self.team = team
else:
from _strategies import ordinary_strategies
self.team = ordinary_strategies
MetaPlayer.__init__(self)
# For each player, we will keep the history of proposed moves and
# a running score since the beginning of the game.
for t in self.team:
t.proposed_history = []
t.score = 0
def strategy(self, opponent):
# Update the running score for each player, before determining the next move.
if len(self.history):
for player in self.team:
pl_C = player.proposed_history[-1] == "C"
opp_C = opponent.history[-1] == "C"
s = 2 * (pl_C and opp_C) or 5 * (pl_C and not opp_C) or 4 * (not pl_C and not opp_C) or 0
player.score += s
return MetaPlayer.strategy(self, opponent)
def meta_strategy(self, results):
scores = [pl.score for pl in self.team]
bestscore = min(scores)
beststrategies = [i for i, pl in enumerate(self.team) if pl.score == bestscore]
bestproposals = [results[i] for i in beststrategies]
bestresult = "C" if "C" in bestproposals else "D"
# Update each player's proposed history with his proposed result, but always after
# the new result has been settled based on scores accumulated until now.
for r, t in zip(results, self.team):
t.proposed_history.append(r)
return bestresult | mit | -4,386,408,245,011,585,500 | 31.330769 | 105 | 0.618039 | false |
hpleva/pyemto | pyemto/examples/alloy_discovery/collect_alloy_final.py | 1 | 6838 | import pyemto
import pyemto.utilities as utils
import numpy as np
import os
latpath = "../../../" # Path do bmdl, kstr and shape directories
# each system need to have same number of alloy elements
#systems = [['Fe','Al'],['Fe','Cr']]
systems = [['Fe'],['Al']]
systems = [['Al']]
#concentrations = [[0.5,0.5]]
concentrations = [[1.0]]
magn = "NM" # Possible values DLM, FM and NM
# Sanity checks
for s in systems:
if not len(s) == len(systems[0]):
print("Each system need to have same number of alloy elements!")
exit()
for c in concentrations:
if not len(c) == len(systems[0]):
print("Each given concetrations must have same number number as elements in system!")
exit()
# Next check magnetic states of system and initialize splits
splits = []
if magn == "FM":
for s in systems:
splt = []
for atom in s:
if atom == "Fe":
splt.append(2.0)
else:
splt.append(0.5)
splits.append(splt)
elif magn == "DLM":
# First duplicate each atoms and concetration
newsystems = []
newconcs = []
for i in range(len(systems)):
news = []
newc = []
splt = []
for j in range(len(systems[i])):
print(i,j)
news.append(systems[i][j])
news.append(systems[i][j])
if systems[i][j] == "Fe":
splt.append( 2.0)
splt.append(-2.0)
else:
splt.append( 0.5)
splt.append(-0.5)
splits.append(splt)
newsystems.append(news)
systems = newsystems
for c in concentrations:
newc = []
for conc in c:
newc.append(conc)
newc.append(conc)
newconcs.append(newc)
concentrations = newconcs
else:
for s in systems:
splt = []
for atom in s:
splt.append(0.0)
splits.append(splt)
results = []
#We are ready to make inputs
for si in range(len(systems)):
s = systems[si]
# Create main directory
sname = ""
if magn == "DLM":
nlist = [s[i] for i in range(0,len(s),2)]
else:
nlist = s
for atom in nlist:
sname = sname + atom
#
# Make directories
if not os.path.lexists(sname):
os.makedirs(sname)
for c in concentrations:
sc_res = []
# Make subdirectory for concentration
cname = ""
count = 0
if magn == "DLM":
clist = [c[i] for i in range(0,len(c),2)]
else:
clist = c
for conc in clist:
count += 1
cname = cname +str(int(conc*1000)).zfill(4)
if not count == len(clist):
cname = cname+"-"
apath = os.path.join(sname,cname)
if not os.path.lexists(apath):
os.makedirs(apath)
# Make subdirectory for magnetic state
apath = os.path.join(apath,magn)
if not os.path.lexists(apath):
os.makedirs(apath)
# Construct base jobname
jobname = ""
for i in range(len(nlist)):
if jobname == "":
pass
else:
jobname = jobname + "_"
jobname = jobname + nlist[i].lower() + "%4.2f" % (clist[i])
finalname = jobname + "_final"
# BCC first
alloy = pyemto.System(folder=apath)
initialsws = 3.0 # We need some clever way to get this
alloy.bulk(lat='bcc', jobname=jobname+"_bcc",atoms=s,concs=c,
latpath=latpath,sws=initialsws, xc='PBE')
swsrange = np.linspace(initialsws-0.1,initialsws+0.1,7) # A list of 7 different volumes
#alloy.lattice_constants_batch_generate(sws=swsrange)
sws0, B0, e0 = alloy.lattice_constants_analyze(sws=swsrange,prn=False)
alloy.bulk(lat='bcc',jobname=finalname+"_bcc",latpath=latpath,
sws=sws0,atoms = s,concs = c)
# get energy of final
e_dft = alloy.get_energy()
sc_res.append([e_dft,sws0,B0,e0])
# FCC second
alloy = pyemto.System(folder=apath)
initialsws = 3.0 # We need some clever way to get this
alloy.bulk(lat='fcc', jobname=jobname+"_fcc",atoms=s,concs=c,
latpath=latpath,sws=initialsws, xc='PBE')
swsrange = np.linspace(initialsws-0.1,initialsws+0.1,7) # A list of 7 different volumes
sws0, B0, e0 = alloy.lattice_constants_analyze(sws=swsrange,prn=False)
alloy.bulk(lat='fcc', jobname=finalname+"_fcc", latpath=latpath,sws=sws0,
atoms = s, concs = c)
# get energy of final
e_dft = alloy.get_energy()
sc_res.append([e_dft,sws0,B0,e0])
# HCP last
alloy = pyemto.System(folder=apath)
initialsws = 3.0 # We need some clever way to get this
alloy.bulk(lat='hcp',
#jobname=jobname+"_hcp",
jobname=jobname, # hcp add automatically hcp string to jobname
latpath=latpath, sws=initialsws, atoms = s,
concs = c, xc='PBE')
swsrange = np.linspace(initialsws-0.1,initialsws+0.1,7) # A list of 7 different volumes
#alloy.lattice_constants_batch_generate(sws=swsrange)
sws0, c_over_a0, B0, e0, R0, cs0 = alloy.lattice_constants_analyze(sws=swsrange,prn=False)
alloy.sws = sws0
ca = round(c_over_a0,3)
hcpname ="hcp_"+str(ca) # Structure name
alloy.bulk(lat='hcp', jobname=finalname+"_hcp",latpath=latpath, latname=hcpname,
sws=sws0, ca= ca, atoms = s, concs = c)
alloy.write_inputs()
# get energy of final
e_dft = alloy.get_energy()
sc_res.append([e_dft,sws0,B0,e0,ca])
results.append([[s,c],sc_res])
for r in results:
# Generate system name
sname = ""
for i in range(len(r[0][0])):
sname=sname+r[0][0][i]+str(r[0][1][i])
output = "System: "+sname+"\n"
output = output + " Magn: " +magn+"\n"
bcc = r[1][0]
bcc_lc = utils.wsrad_to_latparam(bcc[1],'bcc')
output = output+"# Strc. dft E lc sws B fit E (c/a)\n"
output = output+" bcc: %f %f %f %f %f\n" %(bcc[0],bcc_lc,bcc[1],bcc[2],bcc[3])
fcc = r[1][1]
fcc_lc = utils.wsrad_to_latparam(fcc[1],'fcc')
output = output + " fcc: %f %f %f %f %f\n" %(fcc[0],fcc_lc,fcc[1],fcc[2],fcc[3])
hcp = r[1][2]
hcp_lc = utils.wsrad_to_latparam(hcp[1],'hcp',ca=hcp[4])
output = output +" hpc: %f %f %f %f %f %f\n" %(hcp[0],hcp_lc,hcp[1],hcp[2],hcp[3],hcp[4])
if magn == "DLM" or magn == "FM":
# Print magnetic states of system if available
pass
print(output)
| mit | 5,948,215,581,552,546,000 | 31.407583 | 98 | 0.535244 | false |
ECP-CANDLE/Benchmarks | Pilot1/Uno_UQ/data_utils_/uno_combined_data_loader.py | 1 | 19034 | from __future__ import print_function
import collections
import json
import logging
import os
import pickle
import pandas as pd
import numpy as np
from sklearn.model_selection import ShuffleSplit, KFold
import cellline_data
import drug_data
import response_data
from uno import loggerUno as logger
from uno import dict_compare
SEED = 2019
def encode_sources(sources):
df = pd.get_dummies(sources, prefix='source', prefix_sep='.')
df['Source'] = sources
source_l1 = df['Source'].str.extract('^(\S+)\.', expand=False)
df1 = pd.get_dummies(source_l1, prefix='source.L1', prefix_sep='.')
df = pd.concat([df1, df], axis=1)
df = df.set_index('Source').reset_index()
return df
def read_set_from_file(path):
if path:
with open(path, 'r') as f:
text = f.read().strip()
subset = text.split()
else:
subset = None
return subset
def assign_partition_groups(df, partition_by='drug_pair'):
if partition_by == 'cell':
group = df['Sample']
elif partition_by == 'drug_pair':
df_info = drug_data.load_drug_info()
id_dict = df_info[['ID', 'PUBCHEM']].drop_duplicates(['ID']).set_index('ID').iloc[:, 0]
group = df['Drug1'].copy()
group[(df['Drug2'].notnull()) & (df['Drug1'] <= df['Drug2'])] = df['Drug1'] + ',' + df['Drug2']
group[(df['Drug2'].notnull()) & (df['Drug1'] > df['Drug2'])] = df['Drug2'] + ',' + df['Drug1']
group2 = group.map(id_dict)
mapped = group2.notnull()
group[mapped] = group2[mapped]
elif partition_by == 'index':
group = df.reset_index()['index']
logger.info('Grouped response data by %s: %d groups', partition_by, group.nunique())
return group
class CombinedDataLoader(object):
def __init__(self, seed=SEED):
self.seed = seed
self.test_indexes = [[]]
def load_from_cache(self, cache, params):
param_fname = '{}.params.json'.format(cache)
if not os.path.isfile(param_fname):
logger.warning('Cache parameter file does not exist: %s', param_fname)
return False
with open(param_fname) as param_file:
try:
cached_params = json.load(param_file)
except json.JSONDecodeError as e:
logger.warning('Could not decode parameter file %s: %s', param_fname, e)
return False
ignore_keys = ['cache', 'partition_by', 'single']
equal, diffs = dict_compare(params, cached_params, ignore_keys)
if not equal:
logger.warning('Cache parameter mismatch: %s\nSaved: %s\nAttemptd to load: %s', diffs, cached_params, params)
logger.warning('\nRemove %s to rebuild data cache.\n', param_fname)
raise ValueError('Could not load from a cache with incompatible keys:', diffs)
else:
fname = '{}.pkl'.format(cache)
if not os.path.isfile(fname):
logger.warning('Cache file does not exist: %s', fname)
return False
with open(fname, 'rb') as f:
obj = pickle.load(f)
self.__dict__.update(obj.__dict__)
logger.info('Loaded data from cache: %s', fname)
return True
return False
def save_to_cache(self, cache, params):
for k in ['self', 'cache', 'single']:
if k in params:
del params[k]
param_fname = '{}.params.json'.format(cache)
with open(param_fname, 'w') as param_file:
json.dump(params, param_file, sort_keys=True)
fname = '{}.pkl'.format(cache)
with open(fname, 'wb') as f:
pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)
logger.info('Saved data to cache: %s', fname)
def partition_data(self, partition_by=None, cv_folds=1, train_split=0.7, val_split=0.2,
cell_types=None, by_cell=None, by_drug=None,
cell_subset_path=None, drug_subset_path=None,
exclude_cells=[], exclude_drugs=[], exclude_indices=[]):
seed = self.seed
train_sep_sources = self.train_sep_sources
test_sep_sources = self.test_sep_sources
df_response = self.df_response
if not partition_by:
if by_drug and by_cell:
partition_by = 'index'
elif by_drug:
partition_by = 'cell'
else:
partition_by = 'drug_pair'
# Exclude specified cells / drugs / indices
if exclude_cells != []:
df_response = df_response[~df_response['Sample'].isin(exclude_cells)]
if exclude_drugs != []:
if np.isin('Drug', df_response.columns.values):
df_response = df_response[~df_response['Drug1'].isin(exclude_drugs)]
else:
df_response = df_response[~df_response['Drug1'].isin(exclude_drugs) & ~df_response['Drug2'].isin(exclude_drugs)]
if exclude_indices != []:
df_response = df_response.drop(exclude_indices, axis=0)
logger.info('Excluding indices specified')
if partition_by != self.partition_by:
df_response = df_response.assign(Group = assign_partition_groups(df_response, partition_by))
mask = df_response['Source'].isin(train_sep_sources)
test_mask = df_response['Source'].isin(test_sep_sources)
if by_drug:
drug_ids = drug_data.drug_name_to_ids(by_drug)
logger.info('Mapped drug IDs for %s: %s', by_drug, drug_ids)
mask &= (df_response['Drug1'].isin(drug_ids)) & (df_response['Drug2'].isnull())
test_mask &= (df_response['Drug1'].isin(drug_ids)) & (df_response['Drug2'].isnull())
if by_cell:
cell_ids = cellline_data.cell_name_to_ids(by_cell)
logger.info('Mapped sample IDs for %s: %s', by_cell, cell_ids)
mask &= (df_response['Sample'].isin(cell_ids))
test_mask &= (df_response['Sample'].isin(cell_ids))
if cell_subset_path:
cell_subset = read_set_from_file(cell_subset_path)
mask &= (df_response['Sample'].isin(cell_subset))
test_mask &= (df_response['Sample'].isin(cell_subset))
if drug_subset_path:
drug_subset = read_set_from_file(drug_subset_path)
mask &= (df_response['Drug1'].isin(drug_subset)) & ((df_response['Drug2'].isnull()) | (df_response['Drug2'].isin(drug_subset)))
test_mask &= (df_response['Drug1'].isin(drug_subset)) & ((df_response['Drug2'].isnull()) | (df_response['Drug2'].isin(drug_subset)))
if cell_types:
df_type = cellline_data.load_cell_metadata()
cell_ids = set()
for cell_type in cell_types:
cells = df_type[~df_type['TUMOR_TYPE'].isnull() & df_type['TUMOR_TYPE'].str.contains(cell_type, case=False)]
cell_ids |= set(cells['ANL_ID'].tolist())
logger.info('Mapped sample tissue types for %s: %s', cell_type, set(cells['TUMOR_TYPE'].tolist()))
mask &= (df_response['Sample'].isin(cell_ids))
test_mask &= (df_response['Sample'].isin(cell_ids))
df_group = df_response[mask]['Group'].drop_duplicates().reset_index(drop=True)
if cv_folds > 1:
selector = KFold(n_splits=cv_folds, shuffle=True, random_state=seed)
else:
selector = ShuffleSplit(n_splits=1, train_size=train_split, test_size=val_split, random_state=seed)
splits = selector.split(df_group)
train_indexes = []
val_indexes = []
test_indexes = []
for index, (train_group_index, val_group_index) in enumerate(splits):
train_groups = set(df_group.values[train_group_index])
val_groups = set(df_group.values[val_group_index])
train_index = df_response.index[df_response['Group'].isin(train_groups) & mask]
val_index = df_response.index[df_response['Group'].isin(val_groups) & mask]
test_index = df_response.index[~df_response['Group'].isin(train_groups) & ~df_response['Group'].isin(val_groups) & test_mask]
train_indexes.append(train_index)
val_indexes.append(val_index)
test_indexes.append(test_index)
if logger.isEnabledFor(logging.DEBUG):
logger.debug('CV fold %d: train data = %s, val data = %s, test data = %s', index, train_index.shape[0], val_index.shape[0], test_index.shape[0])
logger.debug(' train groups (%d): %s', df_response.loc[train_index]['Group'].nunique(), df_response.loc[train_index]['Group'].unique())
logger.debug(' val groups ({%d}): %s', df_response.loc[val_index]['Group'].nunique(), df_response.loc[val_index]['Group'].unique())
logger.debug(' test groups ({%d}): %s', df_response.loc[test_index]['Group'].nunique(), df_response.loc[test_index]['Group'].unique())
self.partition_by = partition_by
self.cv_folds = cv_folds
self.train_indexes = train_indexes
self.val_indexes = val_indexes
self.test_indexes = test_indexes
def build_feature_list(self, single=False):
input_features = collections.OrderedDict()
feature_shapes = collections.OrderedDict()
if not self.agg_dose:
doses = ['dose1', 'dose2'] if not single else ['dose1']
for dose in doses:
input_features[dose] = 'dose'
feature_shapes['dose'] = (1,)
if self.encode_response_source:
input_features['response.source'] = 'response.source'
feature_shapes['response.source'] = (self.df_source.shape[1] - 1,)
for fea in self.cell_features:
feature_type = 'cell.' + fea
feature_name = 'cell.' + fea
df_cell = getattr(self, self.cell_df_dict[fea])
input_features[feature_name] = feature_type
feature_shapes[feature_type] = (df_cell.shape[1] - 1,)
drugs = ['drug1', 'drug2'] if not single else ['drug1']
for drug in drugs:
for fea in self.drug_features:
feature_type = 'drug.' + fea
feature_name = drug + '.' + fea
df_drug = getattr(self, self.drug_df_dict[fea])
input_features[feature_name] = feature_type
feature_shapes[feature_type] = (df_drug.shape[1] - 1,)
input_dim = sum([np.prod(feature_shapes[x]) for x in input_features.values()])
self.input_features = input_features
self.feature_shapes = feature_shapes
self.input_dim = input_dim
logger.info('Input features shapes:')
for k, v in self.input_features.items():
logger.info(' {}: {}'.format(k, self.feature_shapes[v]))
logger.info('Total input dimensions: {}'.format(self.input_dim))
def load(self, cache=None, ncols=None, scaling='std', dropna=None,
         agg_dose=None, embed_feature_source=True, encode_response_source=True,
         cell_features=['rnaseq'], drug_features=['descriptors', 'fingerprints'],
         cell_feature_subset_path=None, drug_feature_subset_path=None,
         drug_lower_response=1, drug_upper_response=-1, drug_response_span=0,
         drug_median_response_min=-1, drug_median_response_max=1,
         use_landmark_genes=False, use_filtered_genes=False,
         preprocess_rnaseq=None, single=False,
         # train_sources=['GDSC', 'CTRP', 'ALMANAC', 'NCI60'],
         train_sources=['GDSC', 'CTRP', 'ALMANAC'],
         # val_sources='train',
         # test_sources=['CCLE', 'gCSI'],
         test_sources=['train'],
         partition_by='drug_pair'):
    """Load and filter dose-response data plus cell/drug feature frames.

    Responses are joined against the selected cell and drug feature sets,
    restricted to the chosen sources, and annotated with a partition Group
    column.  Results are stored on ``self`` and optionally cached.
    NOTE(review): the mutable list defaults (cell_features, drug_features,
    train_sources, test_sources) are shared across calls; they appear to be
    treated as read-only here, but confirm before mutating them.
    """
    # Snapshot the call arguments; used as the cache key/params.
    params = locals().copy()
    del params['self']
    if not cell_features or 'none' in [x.lower() for x in cell_features]:
        cell_features = []
    if not drug_features or 'none' in [x.lower() for x in drug_features]:
        drug_features = []
    # Short-circuit when a cache hit restores all loader state.
    if cache and self.load_from_cache(cache, params):
        self.build_feature_list(single=single)
        return
    logger.info('Loading data from scratch ...')
    if agg_dose:
        df_response = response_data.load_aggregated_single_response(target=agg_dose, combo_format=True)
    else:
        df_response = response_data.load_combined_dose_response()
    if logger.isEnabledFor(logging.INFO):
        logger.info('Summary of combined dose response by source:')
        logger.info(response_data.summarize_response_data(df_response, target=agg_dose))
    all_sources = df_response['Source'].unique()
    df_source = encode_sources(all_sources)
    # 'all' expands to every available source; 'train' aliases test to train.
    if 'all' in train_sources:
        train_sources = all_sources
    if 'all' in test_sources:
        test_sources = all_sources
    elif 'train' in test_sources:
        test_sources = train_sources
    # Expand source prefixes (e.g. 'GDSC') into the concrete sub-sources.
    train_sep_sources = [x for x in all_sources for y in train_sources if x.startswith(y)]
    test_sep_sources = [x for x in all_sources for y in test_sources if x.startswith(y)]
    # Collect the unique drugs appearing in either drug slot of a pair.
    ids1 = df_response[['Drug1']].drop_duplicates().rename(columns={'Drug1':'Drug'})
    ids2 = df_response[['Drug2']].drop_duplicates().rename(columns={'Drug2':'Drug'})
    df_drugs_with_response = pd.concat([ids1, ids2]).drop_duplicates().dropna().reset_index(drop=True)
    df_cells_with_response = df_response[['Sample']].drop_duplicates().reset_index(drop=True)
    logger.info('Combined raw dose response data has %d unique samples and %d unique drugs', df_cells_with_response.shape[0], df_drugs_with_response.shape[0])
    if agg_dose:
        # Drug filtering by response range only applies to per-dose data.
        df_selected_drugs = None
    else:
        logger.info('Limiting drugs to those with response min <= %g, max >= %g, span >= %g, median_min <= %g, median_max >= %g ...', drug_lower_response, drug_upper_response, drug_response_span, drug_median_response_min, drug_median_response_max)
        df_selected_drugs = response_data.select_drugs_with_response_range(df_response, span=drug_response_span, lower=drug_lower_response, upper=drug_upper_response, lower_median=drug_median_response_min, upper_median=drug_median_response_max)
        logger.info('Selected %d drugs from %d', df_selected_drugs.shape[0], df_response['Drug1'].nunique())
    cell_feature_subset = read_set_from_file(cell_feature_subset_path)
    drug_feature_subset = read_set_from_file(drug_feature_subset_path)
    for fea in cell_features:
        fea = fea.lower()
        if fea == 'rnaseq' or fea == 'expression':
            df_cell_rnaseq = cellline_data.load_cell_rnaseq(ncols=ncols, scaling=scaling, use_landmark_genes=use_landmark_genes, use_filtered_genes=use_filtered_genes, feature_subset=cell_feature_subset, preprocess_rnaseq=preprocess_rnaseq, embed_feature_source=embed_feature_source)
    for fea in drug_features:
        fea = fea.lower()
        if fea == 'descriptors':
            df_drug_desc = drug_data.load_drug_descriptors(ncols=ncols, scaling=scaling, dropna=dropna, feature_subset=drug_feature_subset)
        elif fea == 'fingerprints':
            df_drug_fp = drug_data.load_drug_fingerprints(ncols=ncols, scaling=scaling, dropna=dropna, feature_subset=drug_feature_subset)
    # df_drug_desc, df_drug_fp = drug_data.load_drug_data(ncols=ncols, scaling=scaling, dropna=dropna)
    # These dicts map feature names to the *local variable names* created
    # above; the locals() lookups below depend on those exact names.
    cell_df_dict = {'rnaseq': 'df_cell_rnaseq'}
    drug_df_dict = {'descriptors': 'df_drug_desc',
                    'fingerprints': 'df_drug_fp'}
    # df_cell_ids = df_cell_rnaseq[['Sample']].drop_duplicates()
    # df_drug_ids = pd.concat([df_drug_desc[['Drug']], df_drug_fp[['Drug']]]).drop_duplicates()
    logger.info('Filtering drug response data...')
    # Keep only samples/drugs that have every requested feature frame.
    df_cell_ids = df_cells_with_response
    for fea in cell_features:
        df_cell = locals()[cell_df_dict[fea]]
        df_cell_ids = df_cell_ids.merge(df_cell[['Sample']]).drop_duplicates()
    logger.info(' %d molecular samples with feature and response data', df_cell_ids.shape[0])
    df_drug_ids = df_drugs_with_response
    for fea in drug_features:
        df_drug = locals()[drug_df_dict[fea]]
        df_drug_ids = df_drug_ids.merge(df_drug[['Drug']]).drop_duplicates()
    if df_selected_drugs is not None:
        df_drug_ids = df_drug_ids.merge(df_selected_drugs).drop_duplicates()
    logger.info(' %d selected drugs with feature and response data', df_drug_ids.shape[0])
    # Drug2 may be null for single-drug responses, which is still accepted.
    df_response = df_response[df_response['Sample'].isin(df_cell_ids['Sample']) &
                              df_response['Drug1'].isin(df_drug_ids['Drug']) &
                              (df_response['Drug2'].isin(df_drug_ids['Drug']) | df_response['Drug2'].isnull())]
    df_response = df_response[df_response['Source'].isin(train_sep_sources + test_sep_sources)]
    df_response.reset_index(drop=True, inplace=True)
    if logger.isEnabledFor(logging.INFO):
        logger.info('Summary of filtered dose response by source:')
        logger.info(response_data.summarize_response_data(df_response, target=agg_dose))
    df_response = df_response.assign(Group = assign_partition_groups(df_response, partition_by))
    self.agg_dose = agg_dose
    self.cell_features = cell_features
    self.drug_features = drug_features
    self.cell_df_dict = cell_df_dict
    self.drug_df_dict = drug_df_dict
    self.df_source = df_source
    self.df_response = df_response
    self.embed_feature_source = embed_feature_source
    self.encode_response_source = encode_response_source
    self.all_sources = all_sources
    self.train_sources = train_sources
    self.test_sources = test_sources
    self.train_sep_sources = train_sep_sources
    self.test_sep_sources = test_sep_sources
    self.partition_by = partition_by
    # Attach whichever feature frames were actually loaded to self, again
    # relying on the local-variable naming convention above.
    for var in (list(drug_df_dict.values()) + list(cell_df_dict.values())):
        value = locals().get(var)
        if value is not None:
            setattr(self, var, value)
    self.build_feature_list(single=single)
    if cache:
        self.save_to_cache(cache, params)
def get_cells_in_val(self):
    """Return the unique 'Sample' ids covered by the first validation fold."""
    fold_rows = self.df_response.loc[self.val_indexes[0]]
    return list(set(fold_rows['Sample'].values))
def get_drugs_in_val(self):
    """Return the unique drug ids covered by the first validation fold.

    Uses the 'Drug' column when present (aggregated single-drug format),
    otherwise falls back to 'Drug1'.
    """
    column = 'Drug' if np.isin('Drug', self.df_response.columns.values) else 'Drug1'
    fold_rows = self.df_response.loc[self.val_indexes[0]]
    return list(set(fold_rows[column].values))
def get_index_in_val(self):
    """Return the deduplicated row indexes of the first validation fold."""
    return list(set(self.val_indexes[0]))
| mit | 7,643,198,976,725,848,000 | 43.576112 | 287 | 0.59504 | false |
cligs/pyzeta | scripts/correlation.py | 1 | 1579 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# file: experimental.py
# version: 0.1.0
# source: https://github.com/maslinych/linis-scripts/blob/master/rbo_calc.py
# ported to Python 3 and slightly modified by Albin Zehe
def calc_rbo(l1, l2, p=0.98):
    """
    Returns RBO indefinite rank similarity metric, as described in:
    Webber, W., Moffat, A., & Zobel, J. (2010).
    A similarity measure for indefinite rankings.
    ACM Transactions on Information Systems.
    doi:10.1145/1852102.1852106.

    l1, l2: ranked lists (most relevant item first).
    p: persistence parameter in (0, 1); larger p gives deeper ranks
       more weight.
    Returns the extrapolated RBO score in [0, 1].
    """
    # Robustness fix: the original raised ZeroDivisionError on empty input
    # (division by s and l below). Two empty rankings are identical; one
    # empty ranking shares no items with a non-empty one.
    if not l1 and not l2:
        return 1.0
    if not l1 or not l2:
        return 0.0
    # S is the shorter list (length s), L the longer (length l).
    sl, ll = sorted([(len(l1), l1), (len(l2), l2)])
    s, S = sl
    l, L = ll
    # Calculate the overlaps at ranks 1 through l
    # (the longer of the two lists)
    ss = set([])
    ls = set([])
    overs = {}
    for i in range(l):
        ls.add(L[i])
        if i < s:
            ss.add(S[i])
        # X_d = size of the intersection of the two prefixes at depth d.
        X_d = len(ss.intersection(ls))
        d = i + 1
        overs[d] = float(X_d)
    # (1) \sum_{d=1}^l (X_d / d) * p^d
    sum1 = 0
    for i in range(l):
        d = i + 1
        sum1 += overs[d] / d * pow(p, d)
    X_s = overs[s]
    X_l = overs[l]
    # (2) \sum_{d=s+1}^l [(X_s (d - s)) / (sd)] * p^d
    sum2 = 0
    for i in range(s, l):
        d = i + 1
        sum2 += (X_s * (d - s) / (s * d)) * pow(p, d)
    # (3) [(X_l - X_s) / l + X_s / s] * p^l
    sum3 = ((X_l - X_s) / l + X_s / s) * pow(p, l)
    # Equation 32.
    rbo_ext = (1 - p) / p * (sum1 + sum2) + sum3
    return rbo_ext
if __name__ == "__main__":
    # Demo: compare two short ranked lists and report their RBO score.
    list1 = ['A', 'B', 'C', 'D', 'E', 'H']
    list2 = ['D', 'B', 'F', 'A']
    # Bug fix: the original executed a bare `print` (a no-op expression in
    # Python 3) and then discarded the calc_rbo() result on the next line.
    print(calc_rbo(list1, list2, 0.98))
| gpl-3.0 | 6,396,309,194,905,206,000 | 25.762712 | 76 | 0.484484 | false |
luntos/bianalyzer | bianalyzer/main.py | 1 | 4399 | # -*- coding: utf-8 -*-
import sys
import getopt
from bianalyzer import BianalyzerText
from bianalyzer.abstracts import download_abstracts
from bianalyzer.biclustering import get_keyword_biclusters, GreedyBBox, get_keyword_text_biclusters, \
save_keyword_text_biclusters
from bianalyzer.biclustering.keywords_analysis import save_keyword_biclusters
from bianalyzer.keywords import extract_keywords_via_textrank
from bianalyzer.relevance import construct_relevance_matrix, construct_similarity_matrix
# Command-line usage string printed whenever argument validation fails.
usage = "Bianalyzer should be called as:\n" \
        "bianalyzer [-s <source>] [-S <springer_api_key>] [-m <relevance_metric>] [-q <query>] [-r] [-d] biclusters " \
        "<abtracts_number> <path_to_file>"

# Abstract providers accepted by the -s option.
supported_sources = ['IEEE', 'Springer']
# Keyword relevance metrics accepted by the -m option.
supported_relevance_metrics = ['tf-idf', 'bm25', 'ast', 'frequency', 'normalized_frequency']
def main():
    """Command-line entry point (Python 2).

    Parses options, downloads abstracts for the query, extracts keywords,
    builds a relevance matrix and writes biclusters to the given file.
    Returns 1 on any validation or runtime error.
    """
    def print_error(message):
        # Show the specific problem followed by the general usage text.
        print message + '\n'
        print usage

    args = sys.argv[1:]
    opts, args = getopt.getopt(args, "s:S:m:q:rd")
    opts = dict(opts)
    opts.setdefault('-s', 'IEEE')
    opts.setdefault('-S', None)
    opts.setdefault('-m', 'bm25')
    opts.setdefault('-q', 'cluster analysis')
    opts.setdefault('-r', False)
    opts.setdefault('-d', False)
    # getopt stores flag options as empty strings; normalize them to True.
    if opts['-r'] == '':
        opts['-r'] = True
    if opts['-d'] == '':
        opts['-d'] = True
    # print opts, args
    if opts['-d'] and opts['-r']:
        print_error('Cannot use both options -r and -d simultaneously')
        return 1
    if opts['-s'] not in supported_sources:
        print_error('Invalid source of abstracts. It should be either IEEE or Springer')
        return 1
    if opts['-m'] not in supported_relevance_metrics:
        print_error('Invalid relevance metric. It should be tf-idf, bm25, ast, frequency or normalized_frequency')
        return 1
    # NOTE(review): this branch does not `return 1`, so execution continues
    # even with an invalid Springer API key — confirm whether intentional.
    if opts['-s'] == 'Springer' and opts['-S'] == '':
        print_error('Invalid Springer API key')
    if len(args) < 3:
        print_error('Invalid command format. Please use the following format: biclusters <number> <path_to_file>')
        return 1
    command = args[0]
    abstracts_number = args[1]
    path_to_file = args[2]
    parsed = True
    try:
        abstracts_number = int(abstracts_number)
    except ValueError:
        parsed = False
    if not parsed or (abstracts_number > 5000) or (abstracts_number < 1):
        print_error('Invalid number of abstracts to download. It should be in range [1, 5000]')
        return 1
    if command == 'biclusters':
        try:
            biclusters_file = open(path_to_file, 'w')
        except Exception:
            print_error('Could not create/open the file specified')
            return 1
        try:
            articles = download_abstracts(opts['-s'], opts['-q'], abstracts_number, springer_api_key=opts['-S'])
        except Exception, e:
            print_error('Error occurred while downloading: %s' % e)
            return 1
        bianalyzer_texts = [BianalyzerText(article.abstract_text) for article in articles]
        keywords = extract_keywords_via_textrank(bianalyzer_texts)
        relevance_matrix = construct_relevance_matrix(keywords, bianalyzer_texts, opts['-m'])
        if not opts['-r']:
            # Default mode: keyword-keyword biclusters from a similarity matrix.
            similarity_matrix = construct_similarity_matrix(relevance_matrix, 0.2)
            keyword_biclusters = get_keyword_biclusters(similarity_matrix, GreedyBBox)
            save_keyword_biclusters(keyword_biclusters, biclusters_file, min_density=0.1)
            if opts['-d']:
                # Optional graph rendering; requires nodebox-opengl.
                try:
                    from bianalyzer.graphs import construct_keyword_graph, draw_keyword_biclusters
                    edges = construct_keyword_graph(keyword_biclusters.biclusters, biclusters_num=100)
                    draw_keyword_biclusters(edges)
                except Exception:
                    print '-------------------------'
                    print 'Could not draw the graph! Please, install the nodebox-opengl package'
        else:
            # -r mode: keyword-text biclusters straight from the relevance matrix.
            keyword_text_biclusters = get_keyword_text_biclusters(relevance_matrix, GreedyBBox)
            save_keyword_text_biclusters(keyword_text_biclusters, biclusters_file, min_density=0.1)
        biclusters_file.close()
    else:
        print_error('Invalid command format. Please use the following format: biclusters <number> <path_to_file>')
        return 1
if __name__ == "__main__":
    # Improvement: propagate main()'s status code (1 on error, None/0 on
    # success) to the shell instead of discarding it.
    sys.exit(main())
| mit | -5,883,963,076,646,310,000 | 38.630631 | 119 | 0.629234 | false |
uwcirg/true_nth_usa_portal | tests/test_identifier.py | 1 | 3746 | """Test identifiers"""
import json
from flask_webtest import SessionScope
import pytest
from werkzeug.exceptions import Conflict
from portal.extensions import db
from portal.models.identifier import Identifier
from portal.models.user import User
from tests import TEST_USER_ID, TestCase
class TestIdentifier(TestCase):
    """Exercises the user identifier API endpoints and model constraints."""

    def testGET(self):
        # Listing a user's identifiers returns all of them.
        expected = len(self.test_user.identifiers)
        self.login()
        response = self.client.get('/api/user/{}/identifier'
                                   .format(TEST_USER_ID))
        assert response.status_code == 200
        assert len(response.json['identifier']) == expected

    def testPOST(self):
        """Add an existing and fresh identifier - confirm it sticks"""
        expected = len(self.test_user.identifiers) + 2
        existing = Identifier(system='http://notreal.com', value='unique')
        with SessionScope(db):
            db.session.add(existing)
            db.session.commit()
        existing = db.session.merge(existing)
        fresh = Identifier(system='http://another.com', value='unique')
        data = {'identifier': [i.as_fhir() for i in (existing, fresh)]}
        self.login()
        response = self.client.post(
            '/api/user/{}/identifier'.format(TEST_USER_ID),
            content_type='application/json', data=json.dumps(data))
        assert response.status_code == 200
        assert len(response.json['identifier']) == expected
        # Verify persistence, not just the response payload.
        user = User.query.get(TEST_USER_ID)
        assert len(user.identifiers) == expected

    def test_unique(self):
        """Try adding a non-unique identifier, expect exception"""
        constrained = Identifier(
            system='http://us.truenth.org/identity-codes/external-study-id',
            value='unique-one')
        with SessionScope(db):
            db.session.add(constrained)
        second_user = self.add_user('second')
        constrained = db.session.merge(constrained)
        second_user.add_identifier(constrained)
        user = db.session.merge(self.test_user)
        # Same constrained identifier on a second user must be rejected.
        with pytest.raises(Conflict):
            user.add_identifier(constrained)

    def test_unique_api(self):
        # The /unique endpoint reports False once another user owns the id.
        constrained = Identifier(
            system='http://us.truenth.org/identity-codes/external-study-id',
            value='unique-one')
        with SessionScope(db):
            db.session.add(constrained)
        second_user = self.add_user('second')
        constrained = db.session.merge(constrained)
        second_user.add_identifier(constrained)
        self.login()
        response = self.client.get(
            '/api/user/{}/unique'.format(TEST_USER_ID),
            query_string={'identifier': '|'.join((
                constrained.system, constrained.value))})
        assert response.status_code == 200
        assert response.json['unique'] is False

    def test_unique_deleted(self):
        """Try adding a non-unique identifier from deleted user"""
        # Identifiers held only by deleted users should become reusable.
        constrained = Identifier(
            system='http://us.truenth.org/identity-codes/external-study-id',
            value='unique-one')
        with SessionScope(db):
            db.session.add(constrained)
        second_user = self.add_user('second')
        constrained = db.session.merge(constrained)
        second_user.add_identifier(constrained)
        second_user.delete_user(acting_user=self.test_user)
        user = db.session.merge(self.test_user)
        user.add_identifier(constrained)
        assert constrained in user.identifiers

    def test_unicode_value(self):
        # Identifier values round-trip non-ASCII content unchanged.
        ex = Identifier(system='http://nonsense.com', value='ascii')
        unicode_string = '__invite__justin.emcee\[email protected]'
        ex.value = unicode_string
        assert ex.value == unicode_string
| bsd-3-clause | -6,782,586,272,188,318,000 | 37.618557 | 76 | 0.630005 | false |
kyleam/pymc3 | pymc/sampling.py | 1 | 7330 | from . import backends
from .backends.base import merge_traces, BaseTrace, MultiTrace
from .backends.ndarray import NDArray
import multiprocessing as mp
from time import time
from .core import *
from . import step_methods
from .progressbar import progress_bar
from numpy.random import seed
__all__ = ['sample', 'iter_sample']
def sample(draws, step, start=None, trace=None, chain=0, njobs=1, tune=None,
           progressbar=True, model=None, random_seed=None):
    """
    Draw a number of samples using the given step method.
    Multiple step methods supported via compound step method
    returns the amount of time taken.

    Parameters
    ----------
    draws : int
        The number of samples to draw
    step : function
        A step function
    start : dict
        Starting point in parameter space (or partial point)
        Defaults to trace.point(-1)) if there is a trace provided and
        model.test_point if not (defaults to empty dict)
    trace : backend, list, or MultiTrace
        This should be a backend instance, a list of variables to track,
        or a MultiTrace object with past values. If a MultiTrace object
        is given, it must contain samples for the chain number `chain`.
        If None or a list of variables, the NDArray backend is used.
        Passing either "text" or "sqlite" is taken as a shortcut to set
        up the corresponding backend (with "mcmc" used as the base
        name).
    chain : int
        Chain number used to store sample in backend. If `njobs` is
        greater than one, chain numbers will start here.
    njobs : int
        Number of parallel jobs to start. If None, set to number of cpus
        in the system - 2.
    tune : int
        Number of iterations to tune, if applicable (defaults to None)
    progressbar : bool
        Flag for progress bar
    model : Model (optional if in `with` context)
    random_seed : int or list of ints
        A list is accepted if more if `njobs` is greater than one.

    Returns
    -------
    MultiTrace object with access to sampling values
    """
    if njobs is None:
        njobs = max(mp.cpu_count() - 2, 1)
    if njobs > 1:
        # Normalize random_seed into one seed per job; a non-sized value
        # (None or int) is replicated across jobs via the TypeError path.
        try:
            if not len(random_seed) == njobs:
                random_seeds = [random_seed] * njobs
            else:
                random_seeds = random_seed
        except TypeError:  # None, int
            random_seeds = [random_seed] * njobs
        chains = list(range(chain, chain + njobs))
        # Only the first worker shows a progress bar.
        pbars = [progressbar] + [False] * (njobs - 1)
        argset = zip([draws] * njobs,
                     [step] * njobs,
                     [start] * njobs,
                     [trace] * njobs,
                     chains,
                     [tune] * njobs,
                     pbars,
                     [model] * njobs,
                     random_seeds)
        sample_func = _mp_sample
        sample_args = [njobs, argset]
    else:
        sample_func = _sample
        sample_args = [draws, step, start, trace, chain,
                       tune, progressbar, model, random_seed]
    return sample_func(*sample_args)
def _sample(draws, step, start=None, trace=None, chain=0, tune=None,
            progressbar=True, model=None, random_seed=None):
    """Run a single-process sampling loop and return a MultiTrace.

    KeyboardInterrupt stops sampling early; the partial trace is closed
    and returned.
    """
    sampling = _iter_sample(draws, step, start, trace, chain,
                            tune, model, random_seed)
    progress = progress_bar(draws)
    try:
        for i, trace in enumerate(sampling):
            if progressbar:
                progress.update(i)
    except KeyboardInterrupt:
        trace.close()
    # `trace` holds the last backend yielded by the generator.
    return MultiTrace([trace])
def iter_sample(draws, step, start=None, trace=None, chain=0, tune=None,
                model=None, random_seed=None):
    """
    Generator that returns a trace on each iteration using the given
    step method.  Multiple step methods supported via compound step
    method returns the amount of time taken.

    Parameters
    ----------
    draws : int
        The number of samples to draw
    step : function
        A step function
    start : dict
        Starting point in parameter space (or partial point)
        Defaults to trace.point(-1)) if there is a trace provided and
        model.test_point if not (defaults to empty dict)
    trace : backend, list, or MultiTrace
        This should be a backend instance, a list of variables to track,
        or a MultiTrace object with past values. If a MultiTrace object
        is given, it must contain samples for the chain number `chain`.
        If None or a list of variables, the NDArray backend is used.
    chain : int
        Chain number used to store sample in backend. If `njobs` is
        greater than one, chain numbers will start here.
    tune : int
        Number of iterations to tune, if applicable (defaults to None)
    model : Model (optional if in `with` context)
    random_seed : int or list of ints
        A list is accepted if more if `njobs` is greater than one.

    Example
    -------

    for trace in iter_sample(500, step):
        ...
    """
    sampling = _iter_sample(draws, step, start, trace, chain, tune,
                            model, random_seed)
    for i, trace in enumerate(sampling):
        # Yield a growing view over the samples drawn so far.
        yield trace[:i + 1]
def _iter_sample(draws, step, start=None, trace=None, chain=0, tune=None,
                 model=None, random_seed=None):
    """Core sampling generator: yields the trace backend after each draw.

    Resolves the model/backend, seeds the RNG, seeds the starting point
    from the trace (or model test point), and records one point per draw.
    """
    model = modelcontext(model)
    draws = int(draws)
    seed(random_seed)
    if draws < 1:
        raise ValueError('Argument `draws` should be above 0.')
    if start is None:
        start = {}
    trace = _choose_backend(trace, chain, model=model)
    # Fill in any missing start values without overwriting caller-provided ones.
    if len(trace) > 0:
        _soft_update(start, trace.point(-1))
    else:
        _soft_update(start, model.test_point)
    # Wrap a list of step methods into a CompoundStep; a single step method
    # raises TypeError here and is used as-is.
    try:
        step = step_methods.CompoundStep(step)
    except TypeError:
        pass
    point = Point(start, model=model)
    trace.setup(draws, chain)
    for i in range(draws):
        if i == tune:
            # Tuning phase over: freeze the step method's adaptation.
            step = stop_tuning(step)
        point = step.step(point)
        trace.record(point)
        yield trace
    else:
        # for/else: runs only when the loop completes without break, i.e.
        # the trace is closed after the final draw.
        trace.close()
def _choose_backend(trace, chain, shortcuts=None, **kwds):
    """Resolve the `trace` argument into a concrete backend instance.

    Accepts an existing backend, a MultiTrace (selects the given chain),
    None (fresh NDArray), a shortcut name ("text"/"sqlite"), or a list of
    variables to track (NDArray restricted to those vars).
    """
    if isinstance(trace, BaseTrace):
        return trace
    if isinstance(trace, MultiTrace):
        return trace._traces[chain]
    if trace is None:
        return NDArray(**kwds)
    if shortcuts is None:
        shortcuts = backends._shortcuts
    try:
        # A hashable shortcut key maps to a backend class and base name.
        entry = shortcuts[trace]
        return entry['backend'](entry['name'], **kwds)
    except TypeError:
        # Unhashable (e.g. a list of variables): track those vars in memory.
        return NDArray(vars=trace, **kwds)
    except KeyError:
        raise ValueError('Argument `trace` is invalid.')
def _mp_sample(njobs, args):
    """Sample chains in parallel worker processes and merge the results.

    njobs: number of worker processes.
    args: iterable of argument tuples, one per chain, consumed by argsample.
    """
    p = mp.Pool(njobs)
    # Fix: always close the pool (even when a worker raises) and join it so
    # worker processes are reaped before returning; the original leaked the
    # pool on error and never joined.
    try:
        traces = p.map(argsample, args)
    finally:
        p.close()
        p.join()
    return merge_traces(traces)
def stop_tuning(step):
    """Disable tuning on a step method (recursing into compound steps)."""
    if hasattr(step, 'tune'):
        step.tune = False
        return step
    if hasattr(step, 'methods'):
        step.methods = [stop_tuning(sub) for sub in step.methods]
    return step
def argsample(args):
    """ defined at top level so it can be pickled"""
    # multiprocessing.Pool.map can only pickle module-level callables, so this
    # wrapper unpacks one argument tuple and forwards it to _sample.
    return _sample(*args)
def _soft_update(a, b):
"""As opposed to dict.update, don't overwrite keys if present.
"""
a.update({k: v for k, v in b.items() if k not in a})
| apache-2.0 | 3,758,578,631,098,075,600 | 30.324786 | 76 | 0.605866 | false |
muisit/freezer | model/archive.py | 1 | 4651 | #
# Copyright Muis IT 2011 - 2016
#
# This file is part of AWS Freezer
#
# AWS Freezer is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# AWS Freezer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with AWS Freezer (see the COPYING file).
# If not, see <http://www.gnu.org/licenses/>.
import awsobj
import globals
class Archive(awsobj.Object):
    """Database-backed record of one AWS Glacier archive.

    Mirrors a row of the `archive` table and its `archive_file` links; all
    persistence goes through the global DB connection in `globals.DB`.
    """

    def __init__(self):
        # Glacier archive id string (AWS 'ArchiveId').
        self.name=''
        # Local database primary key; None until saved/loaded.
        self.id=None
        self.vault_id=-1
        self.lastupload=''
        self.size=0
        self.created=''
        self.description=''
        self.local_file=''
        self.key_id=-1

    def add_files(self, lst):
        """Link the given file objects to this archive, saving each first."""
        for f in lst:
            f.save()
            if f.id != None:
                obj={'a': self.id, 'f': f.id, 'p': f.path}
                # Delete-then-insert keeps the link table free of duplicates.
                globals.DB.connection.execute("delete from archive_file where archive_id=:a and file_id=:f",obj)
                globals.DB.connection.execute("INSERT INTO archive_file (archive_id, file_id,path) VALUES(:a,:f,:p)",obj)
                globals.DB.connection.execute("UPDATE file SET is_dirty=1 WHERE id=:f",obj)

    def by_name(self, name):
        """Load this object from the row matching `name`; id=None if absent."""
        row=globals.DB.connection.execute("select * from archive where name=:n",{'n': name}).fetchone()
        if row != None:
            self.read(row)
        else:
            self.id=None

    @staticmethod
    def by_vault(vault):
        """Return the list of Archive objects stored in the given vault."""
        rows=globals.DB.connection.execute("SELECT * FROM archive WHERE vault_id=:n",{'n': vault.id}).fetchall()
        retval=[]
        for row in rows:
            archive = Archive()
            archive.read(row)
            retval.append(archive)
        return retval

    def load(self,id=None):
        """(Re)load this object by primary key; id=None when not found."""
        if id != None:
            self.id=id
        row=globals.DB.connection.execute("select * from archive where id=:id",{'id':self.id}).fetchone()
        if row != None:
            self.read(row)
        else:
            self.id=None

    def read(self,row):
        """Populate attributes from a database row mapping."""
        self.id=self.to_int(row['id'])
        self.name=self.to_str(row['name'])
        self.vault_id=self.to_int(row['vault_id'])
        self.size=self.to_int(row['size'])
        self.lastupload=self.to_str(row['lastupload'])
        self.created=self.to_str(row['created'])
        self.description=self.to_str(row['description'])
        self.local_file=self.to_str(row['local_file'])
        self.key_id=self.to_int(row['key_id'])

    def save(self):
        """Insert or update this row; fetches the new id after an insert."""
        obj = {
            "id": self.id,
            "n": self.name,
            "v": self.vault_id,
            "s": self.size,
            "d": self.description,
            "l": self.lastupload,
            "c": self.created,
            "lf": self.local_file,
            'k': self.key_id
        }
        if not self.id:
            globals.DB.connection.execute("INSERT INTO archive (name,vault_id,size,lastupload,created,description,local_file,key_id) VALUES(:n,:v,:s,:l,:c,:d,:lf,:k)",obj)
        else:
            globals.DB.connection.execute("UPDATE archive SET name=:n, vault_id=:v, size=:s, lastupload=:l, created=:c, description=:d, local_file=:lf, key_id=:k where id=:id",obj)
        globals.DB.connection.commit()
        if not self.id:
            # NOTE(review): max(id) assumes no concurrent inserts — confirm.
            row = globals.DB.connection.execute("SELECT max(id) as id FROM archive").fetchone()
            if row != None:
                self.id = row['id']

    def import_aws(self, v):
        """Populate fields from an AWS Glacier archive description dict."""
        globals.Reporter.message("importing AWS Archive " + str(v),"db")
        self.name = self.to_str(v['ArchiveId'])
        self.description = self.to_str(v['ArchiveDescription'])
        self.lastupload = self.to_str(v['CreationDate'])
        self.size = self.to_int(v['Size'])

    def copy(self, v):
        """Copy the AWS-sourced fields from another Archive object."""
        self.name=v.name
        self.description=v.description
        self.lastupload=v.lastupload
        self.size=v.size

    def delete(self):
        """Remove this archive, its file links, and dirty-flag linked files."""
        if self.id != None:
            obj = {'id': self.id}
            globals.DB.connection.execute("UPDATE file SET is_dirty=1 WHERE id in (SELECT file_id FROM archive_file WHERE archive_id=:id)",obj)
            globals.DB.connection.execute("DELETE FROM archive_file WHERE archive_id=:id",obj)
            globals.DB.connection.execute("DELETE FROM archive WHERE id=:id",obj)
            globals.DB.connection.commit()
| gpl-3.0 | 763,632,750,901,110,500 | 36.813008 | 180 | 0.592346 | false |
ElliotTheRobot/LILACS-mycroft-core | mycroft/configuration/__init__.py | 1 | 7155 | # Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import json
import inflection
import re
from genericpath import exists, isfile
from os.path import join, dirname, expanduser
from mycroft.util.log import getLogger
__author__ = 'seanfitz, jdorleans'

LOG = getLogger(__name__)

# Configuration files are merged in this order: packaged defaults, then the
# system-wide file, then the per-user override.
DEFAULT_CONFIG = join(dirname(__file__), 'mycroft.conf')
SYSTEM_CONFIG = '/etc/mycroft/mycroft.conf'
USER_CONFIG = join(expanduser('~'), '.mycroft/mycroft.conf')
class ConfigurationLoader(object):
    """
    A utility for loading Mycroft configuration files.
    """

    @staticmethod
    def init_config(config=None):
        # Normalize a falsy config into an empty dict.
        if not config:
            return {}
        return config

    @staticmethod
    def init_locations(locations=None, keep_user_config=True):
        # Default to the packaged/system/user chain; otherwise optionally
        # append the user config so it still overrides custom locations.
        if not locations:
            locations = [DEFAULT_CONFIG, SYSTEM_CONFIG, USER_CONFIG]
        elif keep_user_config:
            locations += [USER_CONFIG]
        return locations

    @staticmethod
    def validate(config=None, locations=None):
        # Fail fast on wrong argument types, logging what was received.
        if not (isinstance(config, dict) and isinstance(locations, list)):
            LOG.error("Invalid configuration data type.")
            LOG.error("Locations: %s" % locations)
            LOG.error("Configuration: %s" % config)
            raise TypeError

    @staticmethod
    def load(config=None, locations=None, keep_user_config=True):
        """
        Loads default or specified configuration files
        """
        config = ConfigurationLoader.init_config(config)
        locations = ConfigurationLoader.init_locations(locations,
                                                       keep_user_config)
        ConfigurationLoader.validate(config, locations)

        # Later locations override earlier ones via dict.update.
        for location in locations:
            config = ConfigurationLoader.__load(config, location)

        return config

    @staticmethod
    def __load(config, location):
        # Merge one JSON file into config; missing or broken files are
        # logged and skipped rather than raised.
        if exists(location) and isfile(location):
            try:
                with open(location) as f:
                    config.update(json.load(f))
                LOG.debug("Configuration '%s' loaded" % location)
            except Exception, e:
                LOG.error("Error loading configuration '%s'" % location)
                LOG.error(repr(e))
        else:
            LOG.debug("Configuration '%s' not found" % location)
        return config
class RemoteConfiguration(object):
    """
    map remote configuration properties to
    config in the [core] config section
    """
    # Server-side bookkeeping fields that are never mapped into the config.
    IGNORED_SETTINGS = ["uuid", "@type", "active", "user", "device"]

    @staticmethod
    def validate(config):
        # Require a non-empty dict before attempting any remote merge.
        if not (config and isinstance(config, dict)):
            LOG.error("Invalid configuration: %s" % config)
            raise TypeError

    @staticmethod
    def load(config=None):
        """Merge remote (server) settings into `config` when enabled.

        Controlled by config["server"]["update"]; any fetch failure is
        logged and the local config returned unchanged.
        """
        RemoteConfiguration.validate(config)
        update = config.get("server", {}).get("update")

        if update:
            try:
                from mycroft.api import DeviceApi
                api = DeviceApi()
                setting = api.find_setting()
                location = api.find_location()
                if location:
                    setting["location"] = location
                RemoteConfiguration.__load(config, setting)
            except Exception as e:
                LOG.warn("Failed to fetch remote configuration: %s" % repr(e))
        else:
            LOG.debug("Remote configuration not activated.")
        return config

    @staticmethod
    def __load(config, setting):
        # Recursively copy remote settings into config, converting keys like
        # "ttsSettings" to snake_case section names ("tts").
        for k, v in setting.iteritems():
            if k not in RemoteConfiguration.IGNORED_SETTINGS:
                key = inflection.underscore(re.sub(r"Setting(s)?", "", k))
                if isinstance(v, dict):
                    config[key] = config.get(key, {})
                    RemoteConfiguration.__load(config[key], v)
                elif isinstance(v, list):
                    # Bug fix: ensure the target section exists before
                    # descending; the original read config[key] directly and
                    # raised KeyError when the key was absent locally.
                    config[key] = config.get(key, {})
                    RemoteConfiguration.__load_list(config[key], v)
                else:
                    config[key] = v

    @staticmethod
    def __load_list(config, values):
        # Each list entry describes a pluggable module; the active one is
        # recorded under "module" and every entry keeps its own section.
        for v in values:
            module = v["@type"]
            if v.get("active"):
                config["module"] = module
            config[module] = config.get(module, {})
            RemoteConfiguration.__load(config[module], v)
class ConfigurationManager(object):
    """
    Static management utility for calling up cached configuration.
    """
    __config = None
    __listener = None

    @staticmethod
    def init(ws):
        # Subscribe to runtime configuration updates over the message bus.
        ConfigurationManager.__listener = ConfigurationListener(ws)

    @staticmethod
    def load_defaults():
        """Load the local file chain, then overlay remote settings."""
        ConfigurationManager.__config = ConfigurationLoader.load()
        return RemoteConfiguration.load(ConfigurationManager.__config)

    @staticmethod
    def load_local(locations=None, keep_user_config=True):
        """Merge additional local configuration files into the cache."""
        return ConfigurationLoader.load(ConfigurationManager.get(), locations,
                                        keep_user_config)

    @staticmethod
    def load_remote():
        """Overlay remote settings on the cached (or freshly loaded) config."""
        if not ConfigurationManager.__config:
            ConfigurationManager.__config = ConfigurationLoader.load()
        return RemoteConfiguration.load(ConfigurationManager.__config)

    @staticmethod
    def get(locations=None):
        """
        Get cached configuration.

        :return: A dictionary representing Mycroft configuration.
        """
        if not ConfigurationManager.__config:
            ConfigurationManager.load_defaults()

        if locations:
            ConfigurationManager.load_local(locations)

        return ConfigurationManager.__config

    @staticmethod
    def update(config):
        """
        Update cached configuration with the new ``config``.
        """
        if not ConfigurationManager.__config:
            ConfigurationManager.load_defaults()

        if config:
            ConfigurationManager.__config.update(config)

    @staticmethod
    def save(config, is_system=False):
        """
        Save configuration ``config``.

        Merges ``config`` into the target file (system or user) and writes
        the combined JSON back to disk.
        """
        ConfigurationManager.update(config)
        location = SYSTEM_CONFIG if is_system else USER_CONFIG
        # Bug fix: the original opened the file in mode 'rw' (read-only in
        # Python 2, invalid in Python 3) and assigned the None return of
        # dict.update() to `config` before dumping, which wrote "null".
        # Read the existing file, merge, then rewrite it.
        try:
            with open(location, 'r') as f:
                stored = json.load(f)
        except (IOError, ValueError):
            # Missing file or unparsable JSON: start from an empty config.
            stored = {}
        stored.update(config)
        with open(location, 'w') as f:
            json.dump(stored, f)
class ConfigurationListener(object):
    """Listens on the message bus and applies configuration updates."""

    def __init__(self, ws):
        super(ConfigurationListener, self).__init__()
        # React to "configuration.updated" messages from the websocket bus.
        ws.on("configuration.updated", self.updated)

    @staticmethod
    def updated(message):
        # Merge the payload of the bus message into the cached configuration.
        ConfigurationManager.update(message.data)
| gpl-3.0 | 7,668,583,611,938,717,000 | 31.522727 | 78 | 0.61202 | false |
wenhulove333/ScutServer | Sample/ClientSource/tools/bindings-generator/generator.py | 1 | 34942 | #!/usr/bin/env python
# generator.py
# simple C++ generator, originally targetted for Spidermonkey bindings
#
# Copyright (c) 2011 - Zynga Inc.
from clang import cindex
import sys
import pdb
import ConfigParser
import yaml
import re
import os
import inspect
from Cheetah.Template import Template
# Maps libclang TypeKind constants to the C/C++ type name emitted in the
# generated bindings. ENUM is intentionally absent (commented out) so enums
# take the namespaced-name path in native_name_from_type instead.
type_map = {
    cindex.TypeKind.VOID : "void",
    cindex.TypeKind.BOOL : "bool",
    cindex.TypeKind.CHAR_U : "unsigned char",
    cindex.TypeKind.UCHAR : "unsigned char",
    cindex.TypeKind.CHAR16 : "char",
    cindex.TypeKind.CHAR32 : "char",
    cindex.TypeKind.USHORT : "unsigned short",
    cindex.TypeKind.UINT : "unsigned int",
    cindex.TypeKind.ULONG : "unsigned long",
    cindex.TypeKind.ULONGLONG : "unsigned long long",
    cindex.TypeKind.CHAR_S : "char",
    cindex.TypeKind.SCHAR : "char",
    cindex.TypeKind.WCHAR : "wchar_t",
    cindex.TypeKind.SHORT : "short",
    cindex.TypeKind.INT : "int",
    cindex.TypeKind.LONG : "long",
    cindex.TypeKind.LONGLONG : "long long",
    cindex.TypeKind.FLOAT : "float",
    cindex.TypeKind.DOUBLE : "double",
    cindex.TypeKind.LONGDOUBLE : "long double",
    cindex.TypeKind.NULLPTR : "NULL",
    cindex.TypeKind.OBJCID : "id",
    cindex.TypeKind.OBJCCLASS : "class",
    cindex.TypeKind.OBJCSEL : "SEL",
    # cindex.TypeKind.ENUM : "int"
}
# Sentinel returned by native_name_from_type for types the generator cannot
# translate; functions containing such a type are marked not_supported.
INVALID_NATIVE_TYPE = "??"
# Cursor kinds that indicate a parameter carries a default-argument literal;
# used by iterate_param_node to compute a function's minimum argument count.
default_arg_type_arr = [
# An integer literal.
cindex.CursorKind.INTEGER_LITERAL,
# A floating point number literal.
cindex.CursorKind.FLOATING_LITERAL,
# An imaginary number literal.
cindex.CursorKind.IMAGINARY_LITERAL,
# A string literal.
cindex.CursorKind.STRING_LITERAL,
# A character literal.
cindex.CursorKind.CHARACTER_LITERAL,
# [C++ 2.13.5] C++ Boolean Literal.
cindex.CursorKind.CXX_BOOL_LITERAL_EXPR,
# [C++0x 2.14.7] C++ Pointer Literal.
cindex.CursorKind.CXX_NULL_PTR_LITERAL_EXPR
]
def native_name_from_type(ntype, underlying=False):
    """Return the C/C++ spelling for a libclang type.

    Enums resolve to their namespaced declaration name (unless ``underlying``
    is True), primitives go through ``type_map``, records are special-cased
    for ``std::string``, and anything else yields INVALID_NATIVE_TYPE.
    A ``const `` prefix is preserved for const-qualified types.
    """
    kind = ntype.get_canonical().kind
    const = "const " if ntype.is_const_qualified() else ""
    if not underlying and kind == cindex.TypeKind.ENUM:
        decl = ntype.get_declaration()
        return namespaced_name(decl)
    elif kind in type_map:
        return const + type_map[kind]
    elif kind == cindex.TypeKind.RECORD:
        # might be an std::string
        decl = ntype.get_declaration()
        parent = decl.semantic_parent
        if decl.spelling == "string" and parent and parent.spelling == "std":
            return "std::string"
        else:
            # print >> sys.stderr, "probably a function pointer: " + str(decl.spelling)
            return const + decl.spelling
    else:
        # name = ntype.get_declaration().spelling
        # print >> sys.stderr, "Unknown type: " + str(kind) + " " + str(name)
        return INVALID_NATIVE_TYPE
        # pdb.set_trace()
def build_namespace(cursor, namespaces=None):
    '''
    build the full namespace for a specific cursor

    Walks up the semantic parents of ``cursor``, appending the display name
    of each enclosing namespace or class (innermost first).

    @param cursor: libclang cursor to start from (may be None/falsy).
    @param namespaces: optional list to accumulate into; a fresh list is
        created when omitted. (Previously this was a mutable default
        argument ``namespaces=[]``, which is shared between calls and
        accumulates state across invocations — a latent bug.)
    @return: the list of enclosing namespace/class names.
    '''
    if namespaces is None:
        namespaces = []
    if cursor:
        parent = cursor.semantic_parent
        if parent:
            if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL:
                namespaces.append(parent.displayname)
            build_namespace(parent, namespaces)
    return namespaces
def namespaced_name(declaration_cursor):
    """Return the fully qualified ``outer::inner::name`` for a declaration.

    Collects the enclosing namespaces/classes (innermost first), reverses
    them into declaration order, and joins with ``::``; a cursor with no
    enclosing scope yields its bare display name.
    """
    parts = build_namespace(declaration_cursor, [])
    parts.reverse()
    prefix = "::".join(parts)
    if prefix:
        return prefix + "::" + declaration_cursor.displayname
    return declaration_cursor.displayname
class NativeType(object):
    """Wraps a libclang type with the naming/conversion info the generator needs.

    Attributes set by __init__:
      type            -- the original cindex type.
      is_pointer      -- True for pointer types (a ``*`` suffix is added).
      is_object       -- True when the (pointed-to) type is a RECORD.
      not_supported   -- True when no native name could be derived.
      name            -- short spelling; namespaced_name -- ``ns::name`` form.
    """
    def __init__(self, ntype):
        self.type = ntype
        self.is_pointer = False
        self.is_object = False
        self.not_supported = False
        self.namespaced_name = ""
        self.name = ""
        if ntype.kind == cindex.TypeKind.POINTER:
            pointee = ntype.get_pointee()
            self.is_pointer = True
            if pointee.kind == cindex.TypeKind.RECORD:
                decl = pointee.get_declaration()
                self.is_object = True
                self.name = decl.displayname
                self.namespaced_name = namespaced_name(decl)
            else:
                self.name = native_name_from_type(pointee)
                self.namespaced_name = self.name
            # Pointers keep a trailing '*' on both name forms.
            self.name += "*"
            self.namespaced_name += "*"
        elif ntype.kind == cindex.TypeKind.LVALUEREFERENCE:
            # References are treated like their pointee, without a '*' suffix.
            pointee = ntype.get_pointee()
            decl = pointee.get_declaration()
            self.namespaced_name = namespaced_name(decl)
            if pointee.kind == cindex.TypeKind.RECORD:
                self.name = decl.displayname
                self.is_object = True
            else:
                self.name = native_name_from_type(pointee)
        else:
            if ntype.kind == cindex.TypeKind.RECORD:
                decl = ntype.get_declaration()
                self.is_object = True
                self.name = decl.displayname
                self.namespaced_name = namespaced_name(decl)
            else:
                self.name = native_name_from_type(ntype)
                self.namespaced_name = self.name
        # mark argument as not supported
        if self.name == INVALID_NATIVE_TYPE:
            self.not_supported = True
    def from_native(self, convert_opts):
        """Render the native->script conversion template for this type.

        Object types without a specific conversion fall back to "object";
        enums convert as "int". Returns a #pragma warning line when no
        template exists in the generator's conversions config.
        """
        assert(convert_opts.has_key('generator'))
        generator = convert_opts['generator']
        name = self.name
        if self.is_object:
            if self.is_pointer and not name in generator.config['conversions']['from_native']:
                name = "object"
            elif not generator.config['conversions']['from_native'].has_key(name):
                name = "object"
        elif self.type.get_canonical().kind == cindex.TypeKind.ENUM:
            name = "int"
        if generator.config['conversions']['from_native'].has_key(name):
            tpl = generator.config['conversions']['from_native'][name]
            tpl = Template(tpl, searchList=[convert_opts])
            return str(tpl).rstrip()
        return "#pragma warning NO CONVERSION FROM NATIVE FOR " + name
    def to_native(self, convert_opts):
        """Render the script->native conversion template (mirror of from_native)."""
        assert('generator' in convert_opts)
        generator = convert_opts['generator']
        name = self.name
        if self.is_object:
            if self.is_pointer and not name in generator.config['conversions']['to_native']:
                name = "object"
            elif not name in generator.config['conversions']['to_native']:
                name = "object"
        elif self.type.get_canonical().kind == cindex.TypeKind.ENUM:
            name = "int"
        if generator.config['conversions']['to_native'].has_key(name):
            tpl = generator.config['conversions']['to_native'][name]
            tpl = Template(tpl, searchList=[convert_opts])
            return str(tpl).rstrip()
        return "#pragma warning NO CONVERSION TO NATIVE FOR " + name
    def to_string(self, generator):
        """Return the spelling to emit, honoring 'native_types' overrides."""
        conversions = generator.config['conversions']
        if conversions.has_key('native_types') and conversions['native_types'].has_key(self.namespaced_name):
            return conversions['native_types'][self.namespaced_name]
        return self.namespaced_name
    def __str__(self):
        return self.namespaced_name
class NativeField(object):
    """Wraps a libclang field cursor and derives a prettified name.

    A member named with the ``m_`` prefix convention (e.g. ``m_health``)
    exposes the stripped remainder as ``pretty_name`` (``health``); any
    other name passes through unchanged.
    """
    def __init__(self, cursor):
        canonical = cursor.canonical
        self.cursor = canonical
        self.name = canonical.displayname
        self.kind = canonical.type.kind
        self.location = canonical.location
        prefixed = re.compile(r'm_(\w+)').match(self.name)
        self.pretty_name = prefixed.group(1) if prefixed else self.name
# return True if found default argument.
def iterate_param_node(param_node):
    """Recursively scan a parameter node's subtree for a default-argument
    literal (any cursor kind listed in default_arg_type_arr)."""
    return any(
        child.kind in default_arg_type_arr or iterate_param_node(child)
        for child in param_node.get_children()
    )
class NativeFunction(object):
    """A single (non-overloaded) native function or method to be bound.

    Parses the cursor's return type, argument types and minimum argument
    count (accounting for C++ default arguments), and can render the
    header/implementation/apidoc templates for it.
    """
    def __init__(self, cursor):
        self.cursor = cursor
        self.func_name = cursor.spelling
        self.signature_name = self.func_name
        self.arguments = []
        self.static = cursor.kind == cindex.CursorKind.CXX_METHOD and cursor.is_method_static()
        self.implementations = []
        self.is_constructor = False
        self.not_supported = False
        result = cursor.result_type
        # get the result
        if result.kind == cindex.TypeKind.LVALUEREFERENCE:
            result = result.get_pointee()
        # NOTE(review): the local 'result' above is never used —
        # ret_type is built from cursor.result_type directly.
        self.ret_type = NativeType(cursor.result_type)
        # parse the arguments
        # if self.func_name == "spriteWithFile":
        #     pdb.set_trace()
        for arg in cursor.type.argument_types():
            nt = NativeType(arg)
            self.arguments.append(nt)
            # mark the function as not supported if at least one argument is not supported
            if nt.not_supported:
                self.not_supported = True
        # min_args = index of the first parameter carrying a default value,
        # or the full argument count when none has one.
        found_default_arg = False
        index = -1
        for arg_node in self.cursor.get_children():
            if arg_node.kind == cindex.CursorKind.PARM_DECL:
                index+=1
                if (iterate_param_node(arg_node)):
                    found_default_arg = True
                    break
        self.min_args = index if found_default_arg else len(self.arguments)
    def generate_code(self, current_class=None, generator=None):
        """Render this function through the target's Cheetah templates.

        Writes the declaration to the head file, the body (sfunction.c /
        ifunction.c depending on staticness) to the impl file, and the
        apidoc stub to the doc file. signature_name may be rewritten by
        the 'definitions' templates in the config.
        """
        gen = current_class.generator if current_class else generator
        config = gen.config
        tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
                       searchList=[current_class, self])
        gen.head_file.write(str(tpl))
        if self.static:
            if config['definitions'].has_key('sfunction'):
                tpl = Template(config['definitions']['sfunction'],
                               searchList=[current_class, self])
                self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "sfunction.c"),
                           searchList=[current_class, self])
        else:
            if not self.is_constructor:
                if config['definitions'].has_key('ifunction'):
                    tpl = Template(config['definitions']['ifunction'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            else:
                if config['definitions'].has_key('constructor'):
                    tpl = Template(config['definitions']['constructor'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "ifunction.c"),
                           searchList=[current_class, self])
        gen.impl_file.write(str(tpl))
        apidoc_function_js = Template(file=os.path.join(gen.target,
                                                        "templates",
                                                        "apidoc_function.js"),
                                      searchList=[current_class, self])
        gen.doc_file.write(str(apidoc_function_js))
class NativeOverloadedFunction(object):
    """Groups several NativeFunction overloads under one registration name.

    min_args tracks the smallest minimum argument count across all
    overloads (seeded at 100, i.e. effectively +infinity).
    """
    def __init__(self, func_array):
        self.implementations = func_array
        self.func_name = func_array[0].func_name
        self.signature_name = self.func_name
        self.min_args = 100
        self.is_constructor = False
        for m in func_array:
            self.min_args = min(self.min_args, m.min_args)
    def append(self, func):
        # Add another overload, keeping min_args in sync.
        self.min_args = min(self.min_args, func.min_args)
        self.implementations.append(func)
    def generate_code(self, current_class=None):
        """Render the overload set via the *_overloaded.c templates.

        Staticness is taken from the first overload; mixed static/instance
        overload sets are presumably not expected — TODO confirm.
        """
        gen = current_class.generator
        config = gen.config
        static = self.implementations[0].static
        tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
                       searchList=[current_class, self])
        gen.head_file.write(str(tpl))
        if static:
            if config['definitions'].has_key('sfunction'):
                tpl = Template(config['definitions']['sfunction'],
                               searchList=[current_class, self])
                self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "sfunction_overloaded.c"),
                           searchList=[current_class, self])
        else:
            if not self.is_constructor:
                if config['definitions'].has_key('ifunction'):
                    tpl = Template(config['definitions']['ifunction'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            else:
                if config['definitions'].has_key('constructor'):
                    tpl = Template(config['definitions']['constructor'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "ifunction_overloaded.c"),
                           searchList=[current_class, self])
        gen.impl_file.write(str(tpl))
class NativeClass(object):
    """A C++ class discovered in the headers, with everything needed to bind it.

    Parsing walks the class cursor collecting parents, fields, and
    (possibly overloaded) public methods/constructors, honoring the
    generator's skip/rename configuration.
    """
    def __init__(self, cursor, generator):
        # the cursor to the implementation
        self.cursor = cursor
        self.class_name = cursor.displayname
        self.namespaced_class_name = self.class_name
        self.parents = []
        self.fields = []
        self.methods = {}
        self.static_methods = {}
        self.generator = generator
        self.is_abstract = self.class_name in generator.abstract_classes
        # Members are private until an access specifier says otherwise.
        self._current_visibility = cindex.AccessSpecifierKind.PRIVATE
        registration_name = generator.get_class_or_rename_class(self.class_name)
        if generator.remove_prefix:
            self.target_class_name = re.sub('^'+generator.remove_prefix, '', registration_name)
        else:
            self.target_class_name = registration_name
        self.namespaced_class_name = namespaced_name(cursor)
        self.parse()
    def parse(self):
        '''
        parse the current cursor, getting all the necesary information
        '''
        self._deep_iterate(self.cursor)
    def methods_clean(self):
        '''
        clean list of methods (without the ones that should be skipped)

        The constructor entry is always excluded here; it is rendered
        separately by the templates.
        '''
        ret = []
        for name, impl in self.methods.iteritems():
            should_skip = False
            if name == 'constructor':
                should_skip = True
            else:
                if self.generator.should_skip(self.class_name, name):
                    should_skip = True
            if not should_skip:
                ret.append({"name": name, "impl": impl})
        return ret
    def static_methods_clean(self):
        '''
        clean list of static methods (without the ones that should be skipped)
        '''
        ret = []
        for name, impl in self.static_methods.iteritems():
            should_skip = self.generator.should_skip(self.class_name, name)
            if not should_skip:
                ret.append({"name": name, "impl": impl})
        return ret
    def generate_code(self):
        '''
        actually generate the code. it uses the current target templates/rules in order to
        generate the right code
        '''
        config = self.generator.config
        prelude_h = Template(file=os.path.join(self.generator.target, "templates", "prelude.h"),
                             searchList=[{"current_class": self}])
        prelude_c = Template(file=os.path.join(self.generator.target, "templates", "prelude.c"),
                             searchList=[{"current_class": self}])
        apidoc_classhead_js = Template(file=os.path.join(self.generator.target,
                                                         "templates",
                                                         "apidoc_classhead.js"),
                                       searchList=[{"current_class": self}])
        self.generator.head_file.write(str(prelude_h))
        self.generator.impl_file.write(str(prelude_c))
        self.generator.doc_file.write(str(apidoc_classhead_js))
        for m in self.methods_clean():
            m['impl'].generate_code(self)
        for m in self.static_methods_clean():
            m['impl'].generate_code(self)
        # generate register section
        register = Template(file=os.path.join(self.generator.target, "templates", "register.c"),
                            searchList=[{"current_class": self}])
        apidoc_classfoot_js = Template(file=os.path.join(self.generator.target,
                                                         "templates",
                                                         "apidoc_classfoot.js"),
                                       searchList=[{"current_class": self}])
        self.generator.impl_file.write(str(register))
        self.generator.doc_file.write(str(apidoc_classfoot_js))
    def _deep_iterate(self, cursor=None):
        # Recurse into children only when _process_node asks for it.
        for node in cursor.get_children():
            if self._process_node(node):
                self._deep_iterate(node)
    def _process_node(self, cursor):
        '''
        process the node, depending on the type. If returns true, then it will perform a deep
        iteration on its children. Otherwise it will continue with its siblings (if any)
        @param: cursor the cursor to analyze
        '''
        if cursor.kind == cindex.CursorKind.CXX_BASE_SPECIFIER and not self.class_name in self.generator.classes_have_no_parents:
            # Base class: parse it (once, cached in generated_classes) and
            # record it so inherited methods can be registered.
            parent = cursor.get_definition()
            if parent.displayname not in self.generator.base_classes_to_skip:
                #if parent and self.generator.in_listed_classes(parent.displayname):
                if not self.generator.generated_classes.has_key(parent.displayname):
                    parent = NativeClass(parent, self.generator)
                    self.generator.generated_classes[parent.class_name] = parent
                else:
                    parent = self.generator.generated_classes[parent.displayname]
                self.parents.append(parent)
        elif cursor.kind == cindex.CursorKind.FIELD_DECL:
            self.fields.append(NativeField(cursor))
        elif cursor.kind == cindex.CursorKind.CXX_ACCESS_SPEC_DECL:
            # public:/protected:/private: — remembered for following members.
            self._current_visibility = cursor.get_access_specifier()
        elif cursor.kind == cindex.CursorKind.CXX_METHOD:
            # skip if variadic
            if self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and not cursor.type.is_function_variadic():
                m = NativeFunction(cursor)
                registration_name = self.generator.should_rename_function(self.class_name, m.func_name) or m.func_name
                # bail if the function is not supported (at least one arg not supported)
                if m.not_supported:
                    return
                # Same-name methods collapse into a NativeOverloadedFunction.
                if m.static:
                    if not self.static_methods.has_key(registration_name):
                        self.static_methods[registration_name] = m
                    else:
                        previous_m = self.static_methods[registration_name]
                        if isinstance(previous_m, NativeOverloadedFunction):
                            previous_m.append(m)
                        else:
                            self.static_methods[registration_name] = NativeOverloadedFunction([m, previous_m])
                else:
                    if not self.methods.has_key(registration_name):
                        self.methods[registration_name] = m
                    else:
                        previous_m = self.methods[registration_name]
                        if isinstance(previous_m, NativeOverloadedFunction):
                            previous_m.append(m)
                        else:
                            self.methods[registration_name] = NativeOverloadedFunction([m, previous_m])
            return True
        elif self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and cursor.kind == cindex.CursorKind.CONSTRUCTOR and not self.is_abstract:
            # Public constructors are registered under the 'constructor' key,
            # overload-collapsed like regular methods.
            m = NativeFunction(cursor)
            m.is_constructor = True
            if not self.methods.has_key('constructor'):
                self.methods['constructor'] = m
            else:
                previous_m = self.methods['constructor']
                if isinstance(previous_m, NativeOverloadedFunction):
                    previous_m.append(m)
                else:
                    m = NativeOverloadedFunction([m, previous_m])
                    m.is_constructor = True
                    self.methods['constructor'] = m
            return True
        # else:
            # print >> sys.stderr, "unknown cursor: %s - %s" % (cursor.kind, cursor.displayname)
        return False
class Generator(object):
    """Drives the whole binding generation for one config section + target.

    Parses the configured headers with libclang and renders the discovered
    classes through the target's Cheetah templates into a .cpp/.hpp pair
    plus a JS apidoc file.
    """
    def __init__(self, opts):
        # opts is the flat dict assembled by main() from the .ini section.
        self.index = cindex.Index.create()
        self.outdir = opts['outdir']
        self.prefix = opts['prefix']
        self.headers = opts['headers'].split(' ')
        self.classes = opts['classes']
        self.classes_have_no_parents = opts['classes_have_no_parents'].split(' ')
        self.base_classes_to_skip = opts['base_classes_to_skip'].split(' ')
        self.abstract_classes = opts['abstract_classes'].split(' ')
        self.clang_args = opts['clang_args']
        self.target = opts['target']
        self.remove_prefix = opts['remove_prefix']
        self.target_ns = opts['target_ns']
        self.impl_file = None
        self.head_file = None
        self.skip_classes = {}
        self.generated_classes = {}
        self.rename_functions = {}
        self.rename_classes = {}
        self.out_file = opts['out_file']
        self.script_control_cpp = opts['script_control_cpp'] == "yes"
        # 'skip' syntax: "Class::[method1 method2],Other::[*]"
        if opts['skip']:
            list_of_skips = re.split(",\n?", opts['skip'])
            for skip in list_of_skips:
                class_name, methods = skip.split("::")
                self.skip_classes[class_name] = []
                match = re.match("\[([^]]+)\]", methods)
                if match:
                    self.skip_classes[class_name] = match.group(1).split(" ")
                else:
                    raise Exception("invalid list of skip methods")
        # 'rename_functions' syntax: "Class::[old=new old2=new2],..."
        if opts['rename_functions']:
            list_of_function_renames = re.split(",\n?", opts['rename_functions'])
            for rename in list_of_function_renames:
                class_name, methods = rename.split("::")
                self.rename_functions[class_name] = {}
                match = re.match("\[([^]]+)\]", methods)
                if match:
                    list_of_methods = match.group(1).split(" ")
                    for pair in list_of_methods:
                        k, v = pair.split("=")
                        self.rename_functions[class_name][k] = v
                else:
                    raise Exception("invalid list of rename methods")
        # 'rename_classes' syntax: "OldName::NewName,..."
        if opts['rename_classes']:
            list_of_class_renames = re.split(",\n?", opts['rename_classes'])
            for rename in list_of_class_renames:
                class_name, renamed_class_name = rename.split("::")
                self.rename_classes[class_name] = renamed_class_name
    def should_rename_function(self, class_name, method_name):
        """Return the configured new name for a method, or None."""
        if self.rename_functions.has_key(class_name) and self.rename_functions[class_name].has_key(method_name):
            # print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
            return self.rename_functions[class_name][method_name]
        return None
    def get_class_or_rename_class(self, class_name):
        """Return the configured new name for a class, or the name itself."""
        if self.rename_classes.has_key(class_name):
            # print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
            return self.rename_classes[class_name]
        return class_name
    def should_skip(self, class_name, method_name, verbose=False):
        """True when class/method matches the configured skip patterns.

        Both class and method entries are treated as regexes; a lone "*"
        method entry skips the whole class.
        NOTE(review): the final verbose print references 'key', which is
        unbound when skip_classes is empty or class_name == "*" — would
        raise if verbose=True on that path.
        """
        if class_name == "*" and self.skip_classes.has_key("*"):
            for func in self.skip_classes["*"]:
                if re.match(func, method_name):
                    return True
        else:
            for key in self.skip_classes.iterkeys():
                if key == "*" or re.match("^" + key + "$", class_name):
                    if verbose:
                        print "%s in skip_classes" % (class_name)
                    if len(self.skip_classes[key]) == 1 and self.skip_classes[key][0] == "*":
                        if verbose:
                            print "%s will be skipped completely" % (class_name)
                        return True
                    if method_name != None:
                        for func in self.skip_classes[key]:
                            if re.match(func, method_name):
                                if verbose:
                                    print "%s will skip method %s" % (class_name, method_name)
                                return True
        if verbose:
            print "%s will be accepted (%s, %s)" % (class_name, key, self.skip_classes[key])
        return False
    def in_listed_classes(self, class_name):
        """
        returns True if the class is in the list of required classes and it's not in the skip list
        """
        for key in self.classes:
            md = re.match("^" + key + "$", class_name)
            if md and not self.should_skip(class_name, None):
                return True
        return False
    def sorted_classes(self):
        '''
        sorted classes in order of inheritance

        Parents appear before subclasses so registration order is valid.
        '''
        sorted_list = []
        for class_name in self.generated_classes.iterkeys():
            nclass = self.generated_classes[class_name]
            sorted_list += self._sorted_parents(nclass)
        # remove dupes from the list
        no_dupes = []
        [no_dupes.append(i) for i in sorted_list if not no_dupes.count(i)]
        return no_dupes
    def _sorted_parents(self, nclass):
        '''
        returns the sorted list of parents for a native class
        '''
        sorted_parents = []
        for p in nclass.parents:
            if p.class_name in self.generated_classes.keys():
                sorted_parents += self._sorted_parents(p)
        if nclass.class_name in self.generated_classes.keys():
            sorted_parents.append(nclass.class_name)
        return sorted_parents
    def generate_code(self):
        """Open the output files, render head/body/foot, and close them."""
        # must read the yaml file first
        # NOTE(review): yaml.load without a Loader is unsafe on untrusted
        # input; conversions.yaml is part of the target so this is local data.
        stream = file(os.path.join(self.target, "conversions.yaml"), "r")
        data = yaml.load(stream)
        self.config = data
        implfilepath = os.path.join(self.outdir, self.out_file + ".cpp")
        headfilepath = os.path.join(self.outdir, self.out_file + ".hpp")
        docfilepath = os.path.join(self.outdir, self.out_file + "_api.js")
        self.impl_file = open(implfilepath, "w+")
        self.head_file = open(headfilepath, "w+")
        self.doc_file = open(docfilepath, "w+")
        layout_h = Template(file=os.path.join(self.target, "templates", "layout_head.h"),
                            searchList=[self])
        layout_c = Template(file=os.path.join(self.target, "templates", "layout_head.c"),
                            searchList=[self])
        apidoc_ns_js = Template(file=os.path.join(self.target, "templates", "apidoc_ns.js"),
                                searchList=[self])
        self.head_file.write(str(layout_h))
        self.impl_file.write(str(layout_c))
        self.doc_file.write(str(apidoc_ns_js))
        self._parse_headers()
        layout_h = Template(file=os.path.join(self.target, "templates", "layout_foot.h"),
                            searchList=[self])
        layout_c = Template(file=os.path.join(self.target, "templates", "layout_foot.c"),
                            searchList=[self])
        self.head_file.write(str(layout_h))
        self.impl_file.write(str(layout_c))
        self.impl_file.close()
        self.head_file.close()
        self.doc_file.close()
    def _pretty_print(self, diagnostics):
        # Dump clang diagnostics in a readable form.
        print("====\nErrors in parsing headers:")
        severities=['Ignored', 'Note', 'Warning', 'Error', 'Fatal']
        for idx, d in enumerate(diagnostics):
            print "%s. <severity = %s,\n location = %r,\n details = %r>" % (
                idx+1, severities[d.severity], d.location, d.spelling)
        print("====\n")
    def _parse_headers(self):
        # Parse every configured header; abort on Error-or-worse diagnostics.
        for header in self.headers:
            tu = self.index.parse(header, self.clang_args)
            if len(tu.diagnostics) > 0:
                self._pretty_print(tu.diagnostics)
                is_fatal = False
                for d in tu.diagnostics:
                    if d.severity >= cindex.Diagnostic.Error:
                        is_fatal = True
                if is_fatal:
                    print("*** Found errors - can not continue")
                    raise Exception("Fatal error in parsing headers")
            self._deep_iterate(tu.cursor)
    def _deep_iterate(self, cursor, depth=0):
        # get the canonical type
        if cursor.kind == cindex.CursorKind.CLASS_DECL:
            # Only generate for the defining declaration of a listed class,
            # and only once per class name.
            if cursor == cursor.type.get_declaration() and self.in_listed_classes(cursor.displayname):
                if not self.generated_classes.has_key(cursor.displayname):
                    nclass = NativeClass(cursor, self)
                    nclass.generate_code()
                    self.generated_classes[cursor.displayname] = nclass
                return
        for node in cursor.get_children():
            # print("%s %s - %s" % (">" * depth, node.displayname, node.kind))
            self._deep_iterate(node, depth + 1)
def main():
    """Command-line entry point.

    Reads the .ini config given as the first positional argument (with
    per-user substitutions from userconf.ini), then runs a Generator for
    every selected section against every available (or selected) target
    under the 'targets' directory next to this script.
    """
    from optparse import OptionParser
    parser = OptionParser("usage: %prog [options] {configfile}")
    parser.add_option("-s", action="store", type="string", dest="section",
                      help="sets a specific section to be converted")
    parser.add_option("-t", action="store", type="string", dest="target",
                      help="specifies the target vm. Will search for TARGET.yaml")
    parser.add_option("-o", action="store", type="string", dest="outdir",
                      help="specifies the output directory for generated C++ code")
    parser.add_option("-n", action="store", type="string", dest="out_file",
                      help="specifcies the name of the output file, defaults to the prefix in the .ini file")
    (opts, args) = parser.parse_args()
    # script directory
    workingdir = os.path.dirname(inspect.getfile(inspect.currentframe()))
    if len(args) == 0:
        parser.error('invalid number of arguments')
    # userconf.ini supplies DEFAULT-section values used as interpolation
    # variables for 'headers' and 'extra_arguments' below.
    userconfig = ConfigParser.SafeConfigParser()
    userconfig.read('userconf.ini')
    print 'Using userconfig \n ', userconfig.items('DEFAULT')
    config = ConfigParser.SafeConfigParser()
    config.read(args[0])
    if (0 == len(config.sections())):
        raise Exception("No sections defined in config file")
    sections = []
    if opts.section:
        if (opts.section in config.sections()):
            sections = []
            sections.append(opts.section)
        else:
            raise Exception("Section not found in config file")
    else:
        print("processing all sections")
        sections = config.sections()
    # find available targets
    targetdir = os.path.join(workingdir, "targets")
    targets = []
    if (os.path.isdir(targetdir)):
        targets = [entry for entry in os.listdir(targetdir)
                   if (os.path.isdir(os.path.join(targetdir, entry)))]
    if 0 == len(targets):
        raise Exception("No targets defined")
    if opts.target:
        if (opts.target in targets):
            targets = []
            targets.append(opts.target)
    if opts.outdir:
        outdir = opts.outdir
    else:
        outdir = os.path.join(workingdir, "gen")
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    for t in targets:
        # Fix for hidden '.svn', '.cvs' and '.git' etc. folders - these must be ignored or otherwise they will be interpreted as a target.
        if t == ".svn" or t == ".cvs" or t == ".git" or t == ".gitignore":
            continue
        print "\n.... Generating bindings for target", t
        for s in sections:
            print "\n.... .... Processing section", s, "\n"
            # Flatten the section into the option dict Generator expects.
            gen_opts = {
                'prefix': config.get(s, 'prefix'),
                'headers': (config.get(s, 'headers' , 0, dict(userconfig.items('DEFAULT')))),
                'classes': config.get(s, 'classes').split(' '),
                'clang_args': (config.get(s, 'extra_arguments', 0, dict(userconfig.items('DEFAULT'))) or "").split(" "),
                'target': os.path.join(workingdir, "targets", t),
                'outdir': outdir,
                'remove_prefix': config.get(s, 'remove_prefix'),
                'target_ns': config.get(s, 'target_namespace'),
                'classes_have_no_parents': config.get(s, 'classes_have_no_parents'),
                'base_classes_to_skip': config.get(s, 'base_classes_to_skip'),
                'abstract_classes': config.get(s, 'abstract_classes'),
                'skip': config.get(s, 'skip'),
                'rename_functions': config.get(s, 'rename_functions'),
                'rename_classes': config.get(s, 'rename_classes'),
                'out_file': opts.out_file or config.get(s, 'prefix'),
                'script_control_cpp': config.get(s, 'script_control_cpp') if config.has_option(s, 'script_control_cpp') else 'no'
            }
            generator = Generator(gen_opts)
            generator.generate_code()
if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        # Print the failure and exit non-zero so calling build scripts fail.
        print e
        sys.exit(1)
| mit | 4,857,824,112,327,277,000 | 41.568579 | 149 | 0.548509 | false |
Sotera/datawake-prefetch | memex-datawake-stream/src/datawakestreams/all_settings.py | 1 | 1427 | from __future__ import absolute_import, print_function, unicode_literals
import os
"""
Deployment based configuration
When deploying topology specify a deployment to match with a settings key.
-o "'topology.deployment=\"local\"'"
Spouts / Bolts in the topolgoy will then pull the settings then need from this module
"""
# Deployment-keyed settings table; topology components look up their
# settings by deployment name via get_settings().
ALL_SETTINGS = {}
# 'cluster': production deployment; Kafka connection pools are filled in
# elsewhere (empty here).
ALL_SETTINGS['cluster'] = {
    'topology':'cluster',
    'appid': 'datawake',
    'crawler-in-topic' : 'datawake-crawler-input',
    'crawler-out-topic' : 'datawake-crawler-out',
    'visited-topic': 'datawake-visited',
    'conn_pool' : "",
    'crawler_conn_pool' : "",
}
# 'local-docker': local development against linked docker containers;
# Kafka/MySQL endpoints come from the docker link environment variables
# and default to '' when the links are absent.
ALL_SETTINGS['local-docker'] = {
    'topology':'local',
    'appid': 'datawake',
    'crawler-in-topic' : 'datawake-crawler-input',
    'crawler-out-topic' : 'datawake-crawler-out',
    'visited-topic': 'datawake-visited',
    'conn_pool' : os.environ['KAFKA_PORT_9092_TCP_ADDR']+":9092" if 'KAFKA_PORT_9092_TCP_ADDR' in os.environ else '',
    'crawler_conn_pool' : os.environ['KAFKA_PORT_9092_TCP_ADDR']+":9092" if 'KAFKA_PORT_9092_TCP_ADDR' in os.environ else '',
    'user':'root',
    'database':'datawake_prefetch',
    'password':os.environ['MYSQL_ENV_MYSQL_ROOT_PASSWORD'] if 'MYSQL_ENV_MYSQL_ROOT_PASSWORD' in os.environ else '',
    'host':os.environ['MYSQL_PORT_3306_TCP_ADDR'] if 'MYSQL_PORT_3306_TCP_ADDR' in os.environ else ''
}
def get_settings(key):
    """Return the settings dict for deployment ``key``; raises KeyError if unknown."""
    return ALL_SETTINGS[key]
| apache-2.0 | -275,163,911,461,207,330 | 29.361702 | 125 | 0.666433 | false |
saily/flake8-isort | setup.py | 1 | 1504 | # -*- coding: utf-8 -*-
from setuptools import setup
short_description = 'flake8 plugin that integrates isort .'
# Long description shown on PyPI: README followed by the changelog.
long_description = '{0}\n{1}'.format(
    open('README.rst').read(),
    open('CHANGES.rst').read()
)
setup(
    name='flake8-isort',
    version='0.3.dev0',
    description=short_description,
    long_description=long_description,
    # Get more from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 3 - Alpha',
        "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
        "Operating System :: OS Independent",
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development',
    ],
    keywords='pep8 flake8 isort imports',
    author='Gil Forcada',
    author_email='[email protected]',
    url='https://github.com/gforcada/flake8-isort',
    license='GPL version 2',
    py_modules=['flake8_isort', ],
    include_package_data=True,
    test_suite = 'run_tests',
    zip_safe=False,
    install_requires=[
        'flake8',
        'isort',
    ],
    extras_require={
        'test': [
            'testfixtures',
            'tox',
        ],
    },
    # Registers the checker with flake8 under error-code prefix I00.
    entry_points={
        'flake8.extension': ['I00 = flake8_isort:Flake8Isort'],
    },
)
| gpl-2.0 | 5,845,875,406,787,083,000 | 27.923077 | 75 | 0.588431 | false |
timrchavez/capomastro | capomastro/settings.py | 1 | 2529 | """
Django settings for capomastro project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Project root: two levels up from this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'south',
    'bootstrap3',
    'jenkins',
    'projects',
    'credentials',
    'archives',
    'capomastro.site'
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# Added django.core.context_processors.request'
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.core.context_processors.request',
    'django.contrib.messages.context_processors.messages'
)
ROOT_URLCONF = 'capomastro.urls'
WSGI_APPLICATION = 'capomastro.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# SQLite file in the project root; typically overridden by local_settings.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'capomastro', 'templates'),
)
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'capomastro', 'static'),
)
# Optional per-deployment overrides; silently skipped when the module is
# absent. (Python 2 'except ImportError, e' syntax.)
try:
    from local_settings import * # noqa
except ImportError, e:
    pass
| mit | -5,883,969,919,784,102,000 | 24.039604 | 71 | 0.710953 | false |
StephenLujan/Naith | game/plugins/dirlight/dirlight.py | 1 | 2681 | # -*- coding: utf-8 -*-
# Copyright Tom SF Haines, Aaron Snoswell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pandac.PandaModules import NodePath, VBase4, BitMask32
from pandac.PandaModules import DirectionalLight as PDirectionalLight
class DirLight:
  """Creates a simple directional light.

  Configuration is read from the plugin's XML element: <color>, <pos>,
  <lookAt>, <lens> and <shadows> children are all optional.
  """
  def __init__(self,manager,xml):
    self.light = PDirectionalLight('dlight')
    self.lightNode = NodePath(self.light)
    # setCompass keeps the light's orientation fixed in world space.
    self.lightNode.setCompass()
    # hasattr-guards allow running on Panda3D builds without shadow support.
    if hasattr(self.lightNode.node(), "setCameraMask"):
      self.lightNode.node().setCameraMask(BitMask32.bit(3))
    self.reload(manager,xml)
  def reload(self,manager,xml):
    """(Re)apply the XML configuration to the light and its node."""
    color = xml.find('color')
    if color!=None:
      self.light.setColor(VBase4(float(color.get('r')), float(color.get('g')), float(color.get('b')), 1.0))
    pos = xml.find('pos')
    if pos!=None:
      self.lightNode.setPos(float(pos.get('x')), float(pos.get('y')), float(pos.get('z')))
    else:
      self.lightNode.setPos(0, 0, 0)
    lookAt = xml.find('lookAt')
    if lookAt!=None:
      self.lightNode.lookAt(float(lookAt.get('x')), float(lookAt.get('y')), float(lookAt.get('z')))
    lens = xml.find('lens')
    if lens!=None and hasattr(self.lightNode.node(), 'getLens'):
      # auto=1 attaches the lens to the camera; otherwise to the scene root.
      if bool(int(lens.get('auto'))):
        self.lightNode.reparentTo(base.camera)
      else:
        self.lightNode.reparentTo(render)
      lobj = self.lightNode.node().getLens()
      lobj.setNearFar(float(lens.get('near', 1.0)), float(lens.get('far', 100000.0)))
      lobj.setFilmSize(float(lens.get('width', 1.0)), float(lens.get('height', 1.0)))
      lobj.setFilmOffset(float(lens.get('x', 0.0)), float(lens.get('y', 0.0)))
    if hasattr(self.lightNode.node(), 'setShadowCaster'):
      shadows = xml.find('shadows')
      if shadows!=None:
        self.lightNode.node().setShadowCaster(True, int(shadows.get('width', 512)), int(shadows.get('height', 512)), int(shadows.get('sort', -10)))
        #self.lightNode.node().setPushBias(float(shadows.get('bias', 0.5)))
      else:
        self.lightNode.node().setShadowCaster(False)
  def start(self):
    # Enable this light for the whole scene.
    render.setLight(self.lightNode)
  def stop(self):
    # Disable this light.
    render.clearLight(self.lightNode)
| apache-2.0 | 6,424,593,677,426,341,000 | 37.3 | 147 | 0.675121 | false |
vshulyak/django_datatable | django_datatable/tests/tests.py | 1 | 3514 | import textwrap
from random import Random
from django.utils import simplejson
from django.test import TestCase
from django.test.client import RequestFactory
from django.contrib.auth.models import User
from django_datatable.tests.views import TestViewOne
class DatatableViewCase(TestCase):
    """Integration tests for the datatable view (JSON protocol, paging,
    ordering and filtering) driven through Django's RequestFactory."""

    # Template of a DataTables AJAX GET request; dedented, then collapsed to a
    # single line so it can be used directly as a request path.
    dt_js_request = textwrap.dedent("""
        /datatable/?json
        &sEcho=1
        &iColumns=4
        &sColumns=
        &iDisplayStart=%(display_start)d
        &iDisplayLength=%(display_length)d
        &sSearch=
        &bRegex=false
        &sSearch_0=%(search_0)s
        &bRegex_0=false
        &bSearchable_0=true
        &sSearch_1=
        &bRegex_1=false
        &bSearchable_1=true
        &sSearch_2=
        &bRegex_2=false
        &bSearchable_2=true
        &sSearch_3=
        &bRegex_3=false
        &bSearchable_3=true
        &iSortingCols=1
        &iSortCol_0=%(sort_col)d
        &sSortDir_0=asc
        &bSortable_0=true
        &bSortable_1=true
        &bSortable_2=true
        &bSortable_3=true""").replace('\n','')

    def setUp(self):
        # Bind the view under test and create five users with random names.
        # NOTE: xrange/simplejson indicate this suite targets Python 2.
        self.dt_list_view = TestViewOne.as_view()
        self.factory = RequestFactory()
        get_random_name = lambda: ''.join(Random().sample('qwertyuiopasdfghjklzxcvbnm', 10))
        for idx in xrange(0,5):
            User.objects.create(username='user_%d' % idx,
                first_name=get_random_name(), last_name=get_random_name())

    def test_non_json_template_response(self):
        # Without the 'json' query arg the view renders its HTML template.
        request = self.factory.get('/datatable/')
        r = self.dt_list_view(request)
        self.assertEquals(r.status_code, 200)
        self.assertEquals(r.template_name[0], TestViewOne.template_name)

    def test_json_response(self):
        # With the 'json' query arg the view answers the AJAX protocol.
        request = self.factory.get('/datatable/?json')
        r = self.dt_list_view(request)
        self.assertEquals(r.status_code, 200)

    def test_dt_request(self):
        # A full DataTables request returns one row per user.
        request = self.factory.get(self.__make_dt_request_line(
            display_start = 0,
            display_length = 10,
            sort_col = 0,
            search_0 = ''
        ))
        r = self.dt_list_view(request)
        self.assertEquals(r.status_code, 200)
        json = self.__parse_dt_response(r.content)
        self.assertEquals(len(json["aaData"]), User.objects.count())

    def test_pagination(self):
        #TODO: todo!
        pass

    def test_ordering(self):
        # Sorting on column 2 must match ordering by last_name in the ORM.
        request = self.factory.get(self.__make_dt_request_line(
            display_start = 0,
            display_length = 10,
            sort_col = 2,
            search_0 = ''
        ))
        r = self.dt_list_view(request)
        self.assertEquals(r.status_code, 200)
        json = self.__parse_dt_response(r.content)
        for u, qset_user in zip(json["aaData"], User.objects.order_by('last_name')):
            self.assertEquals(u[0], qset_user.username)

    def test_filter(self):
        # Searching for '_3' must yield exactly the row for 'user_3'.
        FILTER_EXP = 'user_3'
        request = self.factory.get(self.__make_dt_request_line(
            display_start = 0,
            display_length = 10,
            sort_col = 2,
            search_0 = '_3'
        ))
        r = self.dt_list_view(request)
        self.assertEquals(r.status_code, 200)
        json = self.__parse_dt_response(r.content)
        self.assertEquals(len(json["aaData"]), len(User.objects.filter(username=FILTER_EXP)))

    def __make_dt_request_line(self, **kwargs):
        # Fill the request template's %(...)s placeholders.
        return self.dt_js_request % kwargs

    def __parse_dt_response(self, response):
        return simplejson.loads(response)
davesque/django-rest-framework-simplejwt | rest_framework_simplejwt/compat.py | 1 | 1286 | import warnings
try:
from django.urls import reverse, reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse, reverse_lazy # NOQA
class RemovedInDjango20Warning(DeprecationWarning):
    """Warning category for APIs that Django 2.0 removed."""
    pass
class CallableBool:  # pragma: no cover
    """Boolean-like wrapper that may also be *called*.

    Mirrors Django's transitional ``user.is_authenticated`` behaviour: the
    object compares and hashes like a plain bool, while calling it still
    works (emitting a deprecation warning).
    """

    do_not_call_in_templates = True

    def __init__(self, value):
        self.value = value

    def __bool__(self):
        return self.value

    # Python 2 spelling of the truth protocol.
    __nonzero__ = __bool__

    def __call__(self):
        warnings.warn(
            "Using user.is_authenticated() and user.is_anonymous() as a method "
            "is deprecated. Remove the parentheses to use it as an attribute.",
            RemovedInDjango20Warning, stacklevel=2
        )
        return self.value

    def __repr__(self):
        return 'CallableBool({!r})'.format(self.value)

    def __eq__(self, other):
        return self.value == other

    def __ne__(self, other):
        return self.value != other

    def __or__(self, other):
        return bool(self.value or other)

    def __hash__(self):
        return hash(self.value)
# Module-level singletons mirroring Django's own CallableFalse/CallableTrue.
CallableFalse = CallableBool(False)

CallableTrue = CallableBool(True)
| mit | -1,465,149,060,535,745,800 | 23.264151 | 80 | 0.629082 | false |
aroth-arsoft/arsoft-web-crashupload | app/crashdump/xmlreport.py | 1 | 58814 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python;
import sys
import base64
import struct
from datetime import datetime, tzinfo, timedelta
from uuid import UUID
from lxml import etree
from crashdump.exception_info import exception_code_names_per_platform_type, exception_info_per_platform_type
from crashdump.utils import format_version_number, format_memory_usagetype
# Shared zero offset: UTC has neither a UTC offset nor DST.
ZERO = timedelta(0)

# A UTC class.
class UTC(tzinfo):
    """Concrete ``tzinfo`` for Coordinated Universal Time (offset 0, no DST)."""

    def tzname(self, dt):
        return "UTC"

    def utcoffset(self, dt):
        return ZERO

    def dst(self, dt):
        return ZERO
class HexDumpMemoryBlock(object):
    """Wraps a raw bytes buffer and renders it as a classic hex dump, lazily."""

    def __init__(self, memory):
        self._memory = memory
        self._hexdump = None  # cached HexDump instance, built on first access

    @property
    def raw(self):
        """The underlying bytes object."""
        return self._memory

    @property
    def size(self):
        """Number of bytes in the block."""
        return len(self._memory)

    def __len__(self):
        return len(self._memory)

    @property
    def hexdump(self):
        """Lazily-built HexDump view of the whole block."""
        if not self._hexdump:
            self._hexdump = self._generate_hexdump()
        return self._hexdump

    class HexDumpLine(object):
        """One 16-byte dump row: offset, hex column and printable-ASCII column."""
        def __init__(self, offset, memory_line, line_length):
            self.offset = offset
            self.raw = memory_line
            self.hex = ''
            self.ascii = ''
            idx = 0
            while idx < 16:
                if idx != 0:
                    self.hex += ' '
                if idx < line_length:
                    c = memory_line[idx]
                    c_i = int(c)
                    self.hex += '%02X' % c_i
                    # Non-printable bytes show as '.' in the ASCII column.
                    if c_i < 32 or c_i >= 127:
                        self.ascii += '.'
                    else:
                        self.ascii += chr(c)
                else:
                    # Pad a short (final) row so the columns keep their width.
                    self.hex += '  '
                    self.ascii += ' '
                idx += 1

        def __str__(self):
            return '%06x %32s %16s\n' % (self.offset, self.hex, self.ascii)

    class HexDump(object):
        """Full dump: list of HexDumpLine plus cached column-wise raw strings."""
        def __init__(self, size):
            # Offset column width grows with block size (2, 4 or 6 hex digits).
            if size > 65536:
                self.offset_width = 6
            elif size > 256:
                self.offset_width = 4
            else:
                self.offset_width = 2
            self._lines = []
            self._raw_offset = None
            self._raw_hex = None
            self._raw_ascii = None

        def __iter__(self):
            return iter(self._lines)

        def _generate_raw(self):
            # Build the three parallel columns (offset/hex/ascii) in one pass,
            # CRLF-separated so they can be placed side by side in a UI.
            self._raw_offset = ''
            self._raw_hex = ''
            self._raw_ascii = ''
            offset_fmt = '0x%%0%dX' % self.offset_width
            first = True
            for line in self._lines:
                if not first:
                    self._raw_offset += '\r\n'
                    self._raw_hex += '\r\n'
                    self._raw_ascii += '\r\n'
                self._raw_offset += offset_fmt % line.offset
                self._raw_hex += line.hex
                self._raw_ascii += line.ascii
                first = False

        @property
        def raw_offset(self):
            if self._raw_offset is None:
                self._generate_raw()
            return self._raw_offset

        @property
        def raw_hex(self):
            if self._raw_hex is None:
                self._generate_raw()
            return self._raw_hex

        @property
        def raw_ascii(self):
            if self._raw_ascii is None:
                self._generate_raw()
            return self._raw_ascii

        def __str__(self):
            ret = ''
            for l in self._lines:
                ret += str(l)
            return ret

    def _generate_hexdump(self):
        # Slice the buffer into 16-byte rows; the last row may be shorter.
        offset = 0
        total_size = len(self._memory)
        ret = HexDumpMemoryBlock.HexDump(total_size)
        while offset < total_size:
            max_row = 16
            remain = total_size - offset
            if remain < 16:
                max_row = remain
            line = HexDumpMemoryBlock.HexDumpLine(offset, self._memory[offset:offset + max_row], max_row)
            ret._lines.append(line)
            offset += 16
        return ret

    def __getitem__(self, index):
        if isinstance(index, int):
            return self._memory[index]
        elif isinstance(index, slice):
            return self._memory[index.start:index.stop: index.step]
        else:
            raise TypeError("index %s" % index)

    def __str__(self):
        return str(self.hexdump)

    def find(self, needle, start=0):
        """Locate *needle* (bytes, or str encoded as us-ascii) in the block."""
        if isinstance(needle, bytes):
            return self._memory.find(needle, start)
        elif isinstance(needle, str):
            return self._memory.find(needle.encode('us-ascii'), start)
        else:
            raise TypeError('insupported type for search in memory block: %s' % type(needle))
class MemObject(object):
    """Base class for typed views over a raw process-memory buffer.

    *owner* is the object used to resolve further addresses (the XMLReport),
    *memory* the raw bytes, *is_64_bit* selects the pointer width.
    """
    def __init__(self, owner, memory, is_64_bit):
        self._owner = owner
        self._memory = memory
        self._is_64_bit = is_64_bit

    def _read_int32(self, offset):
        """Read an unsigned 32-bit int at *offset*; IndexError if out of range."""
        # BUGFIX: guard the full 4-byte span, not only the first byte, so a
        # truncated read raises IndexError instead of struct.error.
        if offset + 4 > len(self._memory):
            raise IndexError(offset)
        return struct.unpack('I', self._memory[offset:offset+4])[0]

    def _read_ptr(self, offset):
        """Read a pointer-sized unsigned int (8 bytes on 64-bit, else 4)."""
        width = 8 if self._is_64_bit else 4
        if offset + width > len(self._memory):
            raise IndexError(offset)
        fmt = 'Q' if self._is_64_bit else 'I'
        return struct.unpack(fmt, self._memory[offset:offset+width])[0]
# https://en.wikipedia.org/wiki/Win32_Thread_Information_Block
class Win32_TIB(MemObject):
    """View over a Windows Thread Information Block (TIB/TEB) memory image.

    Field offsets follow the public TEB layout; the 64-bit offsets are used
    when the dump comes from a 64-bit process.
    """
    def __init__(self, owner, memory, is_64_bit):
        MemObject.__init__(self, owner, memory, is_64_bit)
        self._tls_slots = None          # lazy Win32_TLS_Slots view
        self._tls_array_memory = None   # lazy Win32_TLS_Array view
        self._hexdump = None            # lazy hex dump of the raw TEB bytes

    class Win32_TLS_Slots(object):
        """Indexed access to the 64 inline TLS slots stored in the TEB."""
        def __init__(self, teb):
            self._teb = teb

        def __getitem__(self, index):
            if index < 0 or index >= 64:
                raise IndexError(index)
            # TlsSlots array starts at 0x1480 (x64) / 0xE10 (x86) in the TEB.
            offset = (0x1480 + (index * 8)) if self._teb._is_64_bit else (0xE10 + (index * 4))
            #print('teb tls slot at %x' % offset)
            return self._teb._read_ptr(offset)

    class Win32_TLS_Array(MemObject):
        """Pointer array referenced by the TEB's ThreadLocalStoragePointer."""
        def __init__(self, owner, memory, is_64_bit):
            MemObject.__init__(self, owner, memory, is_64_bit)

        def __getitem__(self, index):
            offset = (8 * index) if self._is_64_bit else (4 * index)
            return self._read_ptr(offset)

    @property
    def threadid(self):
        # ClientId.UniqueThread: offset 0x48 (x64) / 0x24 (x86).
        if self._is_64_bit:
            return self._read_int32(0x48)
        else:
            return self._read_int32(0x24)

    @property
    def peb_address(self):
        # ProcessEnvironmentBlock pointer: offset 0x60 (x64) / 0x30 (x86).
        if self._is_64_bit:
            return self._read_ptr(0x60)
        else:
            return self._read_ptr(0x30)

    @property
    def thread_local_storage_array_address(self):
        # ThreadLocalStoragePointer: offset 0x58 (x64) / 0x2C (x86).
        # NOTE(review): read via _read_int32 even on 64-bit, so only the low
        # 32 bits of the pointer are used — presumably sufficient for these
        # dumps; confirm against the dump writer.
        if self._is_64_bit:
            return self._read_int32(0x58)
        else:
            return self._read_int32(0x2C)

    @property
    def thread_local_storage_array(self):
        """Lazily resolve the TLS pointer array via the owning report's memory."""
        if self._tls_array_memory is None:
            mem = self._owner._get_memory_block(self.thread_local_storage_array_address)
            if mem is not None:
                self._tls_array_memory = Win32_TIB.Win32_TLS_Array(self._owner, mem, self._is_64_bit)
        return self._tls_array_memory

    @property
    def tls_slots(self):
        if self._tls_slots is None:
            self._tls_slots = Win32_TIB.Win32_TLS_Slots(self)
        return self._tls_slots

    @property
    def hexdump(self):
        if self._hexdump is None:
            self._hexdump = HexDumpMemoryBlock(self._memory)
        return self._hexdump
# https://en.wikipedia.org/wiki/Process_Environment_Block
# https://ntopcode.wordpress.com/2018/02/26/anatomy-of-the-process-environment-block-peb-windows-internals/
class Win32_PEB(MemObject):
    """View over a Windows Process Environment Block (PEB) memory image."""
    def __init__(self, owner, memory, is_64_bit):
        MemObject.__init__(self, owner, memory, is_64_bit)

    @property
    def image_base_address(self):
        """Base address the main executable image is loaded at.

        ImageBaseAddress lives at offset 0x10 in the 64-bit PEB layout but at
        0x08 in the 32-bit one; the original read 0x10 unconditionally, which
        returns the wrong field for 32-bit dumps.
        """
        return self._read_ptr(0x10 if self._is_64_bit else 0x08)
class XMLReport(object):
_main_fields = ['crash_info', 'platform_type', 'system_info', 'file_info', 'exception',
'assertion', 'modules', 'threads', 'memory_regions',
'memory_blocks', 'handles', 'stackdumps', 'simplified_info', 'processstatuslinux', 'processstatuswin32',
'processmemoryinfowin32', 'misc_info',
'fast_protect_version_info', 'fast_protect_system_info']
_crash_dump_fields = ['uuid', 'crash_timestamp',
'report_time', 'report_fqdn', 'report_username', 'report_hostname', 'report_domain',
'application', 'command_line',
'symbol_directories', 'image_directories', 'usefulness_id', 'environment']
_system_info_fields = ['platform_type', 'platform_type_id', 'cpu_type', 'cpu_type_id', 'cpu_name', 'cpu_level', 'cpu_revision', 'cpu_vendor',
'number_of_cpus', 'os_version', 'os_version_number', 'os_build_number', 'os_version_info',
'distribution_id', 'distribution_release', 'distribution_codename', 'distribution_description' ]
_file_info_fields = ['log']
_file_info_log_message_fields = ['time', 'text']
_exception_fields = ['threadid', 'code', 'address', 'flags', 'numparams', 'param0', 'param1', 'param2', 'param3']
_assertion_fields = ['expression', 'function', 'source', 'line', 'typeid']
_module_fields = ['base', 'size', 'timestamp', 'product_version', 'product_version_number',
'file_version', 'file_version_number', 'name', 'symbol_file', 'symbol_id', 'symbol_type', 'symbol_type_number',
'image_name', 'module_name', 'module_id',
'flags' ]
_thread_fields = ['id', 'exception', ('name', '_name'), 'memory', 'start_addr', 'main_thread',
'create_time', 'exit_time', 'kernel_time', 'user_time',
'exit_status', 'cpu_affinity', 'stack_addr',
'suspend_count', 'priority_class', 'priority', 'teb', 'tls', 'tib',
'dump_flags', 'dump_error', 'rpc_thread'
]
_memory_region_fields = ['base_addr', 'size', 'alloc_base', 'alloc_prot', 'type', 'protect', 'state' ]
_memory_region_usage_fields = ['threadid', 'usagetype' ]
_memory_block_fields = ['num', 'base', 'size', 'memory']
_handle_fields = ['handle', 'type', 'name', 'count', 'pointers' ]
_stackdump_fields = ['threadid', 'simplified', 'exception']
_stack_frame_fields = ['num', 'addr', 'retaddr', 'param0', 'param1', 'param2', 'param3', 'infosrc', 'trust_level', 'module', 'module_base', 'function', 'funcoff', 'source', 'line', 'lineoff' ]
_simplified_info_fields = ['threadid', 'missing_debug_symbols', 'missing_image_files', 'first_useful_modules', 'first_useful_functions']
_processstatuslinux_fields = ['name', 'state', 'thread_group', 'pid',
'parent_pid', 'tracer_pid', 'real_uid', 'real_gid',
'effective_uid', 'effective_gid', 'saved_set_uid', 'saved_set_gid',
'filesystem_uid', 'filesystem_gid', 'num_file_descriptors', 'supplement_groups',
'vmpeak', 'vmsize', 'vmlocked', 'vmpinned', 'vmhighwatermark',
'vmresidentsetsize', 'vmdata', 'vmstack', 'vmexe', 'vmlib', 'vmpte',
'vmswap', 'num_threads', 'voluntary_context_switches', 'nonvoluntary_context_switches',
]
_processstatuswin32_fields = ['dll_path', 'image_path',
'window_title', 'desktop_name', 'shell_info',
'runtime_data', 'drive_letter_cwd',
'stdin_handle', 'stdout_handle', 'stderr_handle',
'debug_flags', 'console_handle', 'console_flags',
'session_id',
]
_processmemoryinfowin32_fields = [
'page_fault_count', 'peak_working_set_size',
'working_set_size', 'quota_peak_paged_pool_usage',
'quota_paged_pool_usage', 'quota_peak_non_paged_pool_usage',
'quota_non_paged_pool_usage', 'pagefile_usage',
'peak_pagefile_usage', 'private_usage'
]
_misc_info_fields = ['processid', 'process_create_time',
'user_time', 'kernel_time',
'processor_max_mhz', 'processor_current_mhz', 'processor_mhz_limit',
'processor_max_idle_state', 'processor_current_idle_state',
'process_integrity_level',
'process_execute_flags', 'protected_process',
'timezone_id', 'timezone_bias',
'timezone_standard_name', 'timezone_standard_bias', 'timezone_standard_date',
'timezone_daylight_name', 'timezone_daylight_bias', 'timezone_daylight_date',
'build_info', 'debug_build_info'
]
_fast_protect_version_info_fields = [
'product_name',
'product_code_name',
'product_version',
'product_target_version',
'product_build_type',
'product_build_postfix',
'root_revision',
'buildtools_revision',
'external_revision',
'third_party_revision',
'terra3d_revision',
'manual_revision',
'jenkins_job_name',
'jenkins_build_number',
'jenkins_build_id',
'jenkins_build_tag',
'jenkins_build_url',
'jenkins_git_revision',
'jenkins_git_branch',
'jenkins_master',
'jenkins_nodename',
'thread_name_tls_slot',
]
_fast_protect_system_info_fields = [
'hostname',
'domain',
'fqdn',
'username',
'timestamp',
'system_temp_path',
'user_temp_path',
'user_persistent_path',
'terminal_session_id',
'virtual_machine',
'remote_session',
'opengl_vendor',
'opengl_renderer',
'opengl_version',
'opengl_vendor_id',
'opengl_driver_id',
'opengl_chip_class',
'opengl_driver_version',
'opengl_hardware_ok',
'opengl_use_pbuffer',
'opengl_hardware_error',
'opengl_pbuffer_error',
'cpu_name',
'cpu_vendor',
'num_logical_cpus',
'num_physical_cpus',
'hyperthread',
'rawdata'
]
class XMLReportException(Exception):
def __init__(self, report, message):
super(XMLReport.XMLReportException, self).__init__(message)
self.report = report
def __str__(self):
return '%s(%s): %s' % (type(self).__name__, self.report._filename, self.message)
    class XMLReportIOError(XMLReportException):
        """Raised when the report file cannot be read (I/O failure)."""
        def __init__(self, report, message):
            super(XMLReport.XMLReportIOError, self).__init__(report, message)
    class XMLReportParserError(XMLReportException):
        """Raised when the report file is not well-formed XML."""
        def __init__(self, report, message):
            super(XMLReport.XMLReportParserError, self).__init__(report, message)
class ProxyObject(object):
def __init__(self, report, field_name):
object.__setattr__(self, '_report', report)
object.__setattr__(self, '_field_name', field_name)
object.__setattr__(self, '_real_object', None)
def __getattr__(self, key):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return None
return getattr(self._real_object, key)
def __setattr__(self, key, value):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return None
return setattr(self._real_object, key, value)
def __iter__(self):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return None
return iter(self._real_object)
def __nonzero__(self):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return False
return bool(self._real_object)
def __bool__(self):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return False
return bool(self._real_object)
def __len__(self):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return None
if hasattr(self._real_object, '__len__'):
return len(self._real_object)
else:
return 0
def __contains__(self, key):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
if self._real_object is None:
return False
return key in self._real_object
def __getitem__(self, key):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
return self._real_object[key]
def __repr__(self):
if self._real_object is None:
object.__setattr__(self, '_real_object', getattr(self._report, self._field_name))
return 'ProxyObject(%s, %s, %r)' % (
getattr(self, '_report'),
getattr(self, '_field_name'),
getattr(self, '_real_object')
)
@staticmethod
def unique(items):
found = set()
keep = []
for item in items:
if item not in found:
found.add(item)
keep.append(item)
return keep
    def __init__(self, filename=None):
        """Load the crash-report XML file at *filename*, if one is given.

        Raises XMLReportIOError on read failure and XMLReportParserError on
        malformed XML. All report sections are parsed lazily on first access.
        """
        self._filename = filename
        self._xml = None
        # Backing fields for the lazily-parsed sections; each is filled on
        # first access by the corresponding property.
        self._crash_info = None
        self._system_info = None
        self._file_info = None
        self._exception = None
        self._assertion = None
        self._threads = None
        self._modules = None
        self._memory_regions = None
        self._memory_blocks = None
        self._handles = None
        self._stackdumps = None
        self._simplified_info = None
        self._processstatuslinux = None
        self._processstatuswin32 = None
        self._processmemoryinfowin32 = None
        self._misc_info = None
        self._fast_protect_version_info = None
        self._fast_protect_system_info = None
        self._is_64_bit = None
        self._peb = None
        self._peb_address = None
        self._peb_memory_block = None

        if self._filename:
            try:
                self._xml = etree.parse(self._filename)
            except IOError as e:
                raise XMLReport.XMLReportIOError(self, str(e))
            except etree.XMLSyntaxError as e:
                raise XMLReport.XMLReportParserError(self, str(e))
@property
def is_platform_windows(self):
return self.platform_type == 'Win32' or self.platform_type == 'Windows NT'
class XMLReportEntity(object):
def __init__(self, owner):
self._owner = owner
def __str__(self):
ret = ''
for (k,v) in self.__dict__.items():
if k[0] != '_':
if ret:
ret += ', '
ret = ret + '%s=%s' % (k,v)
return ret
class CrashInfo(XMLReportEntity):
def __init__(self, owner):
super(XMLReport.CrashInfo, self).__init__(owner)
@property
def path(self):
ret = []
env = getattr(self, 'environment', None)
if env:
for (k,v) in env.iteritems():
if k.lower() == 'path':
if self._owner.is_platform_windows:
ret = v.split(';')
else:
ret = v.split(':')
break
return ret
    class SystemInfo(XMLReportEntity):
        """The <system_info> section (OS, CPU and distribution details)."""
        def __init__(self, owner):
            super(XMLReport.SystemInfo, self).__init__(owner)
    class FileInfoLogMessage(XMLReportEntity):
        """One log entry (time + text) from the <file_info> section."""
        def __init__(self, owner):
            super(XMLReport.FileInfoLogMessage, self).__init__(owner)
    class FileInfo(XMLReportEntity):
        """The <file_info> section; holds the captured log messages."""
        def __init__(self, owner):
            super(XMLReport.FileInfo, self).__init__(owner)
            self.log = []   # list of FileInfoLogMessage
class Exception(XMLReportEntity):
def __init__(self, owner):
super(XMLReport.Exception, self).__init__(owner)
@property
def thread(self):
ret = None
for thread in self._owner.threads:
if thread.id == self.threadid:
ret = thread
break
return ret
@property
def involved_modules(self):
t = self.thread
if t:
return t.stackdump.involved_modules
else:
return None
@property
def params(self):
ret = []
if self.numparams >= 1:
ret.append(self.param0)
if self.numparams >= 2:
ret.append(self.param1)
if self.numparams >= 3:
ret.append(self.param2)
if self.numparams >= 4:
ret.append(self.param3)
return ret
@property
def name(self):
if self._owner.platform_type in exception_code_names_per_platform_type:
code_to_name_map = exception_code_names_per_platform_type[self._owner.platform_type]
if self.code in code_to_name_map:
return code_to_name_map[self.code]
else:
return 'Unknown(%x)' % (self._owner.platform_type, self.code)
else:
return 'UnknownPlatform(%s, %x)' % (self.code, self.code)
@property
def info(self):
if self._owner.platform_type in exception_info_per_platform_type:
ex_info_func = exception_info_per_platform_type[self._owner.platform_type]
return ex_info_func(self)
else:
return 'UnknownPlatform(%s, %x)' % (self.code, self.code)
    class Assertion(XMLReportEntity):
        """The <assertion> section (failed-assert expression and location)."""
        def __init__(self, owner):
            super(XMLReport.Assertion, self).__init__(owner)
class Module(XMLReportEntity):
def __init__(self, owner):
super(XMLReport.Module, self).__init__(owner)
self._basename = None
@property
def basename(self):
if self._basename is None:
name = self.name
if name is None:
return None
idx = name.rfind('/')
if idx < 0:
idx = name.rfind('\\')
if idx >= 0:
self._basename = name[idx+1:]
else:
self._basename = name
return self._basename
class Thread(XMLReportEntity):
def __init__(self, owner):
super(XMLReport.Thread, self).__init__(owner)
self._teb_memory_block = None
self._teb_memory_region = None
self._teb = None
self._tls_slots = None
self._thread_name = None
@property
def stackdump(self):
ret = None
for st in self._owner.stackdumps:
if st.threadid == self.id and not st.simplified:
ret = st
break
return ret
@property
def simplified_stackdump(self):
ret = None
for st in self._owner.stackdumps:
if st.threadid == self.id and st.simplified:
ret = st
break
return ret
@property
def teb_memory_block(self):
if self._teb_memory_block is None:
self._teb_memory_block = self._owner._get_memory_block(self.teb)
return self._teb_memory_block
@property
def teb_memory_region(self):
if self._teb_memory_region is None:
self._teb_memory_region = self._owner._get_memory_region(self.teb)
return self._teb_memory_region
@property
def teb_data(self):
if self._teb is None:
m = self.teb_memory_block
if m is None:
return None
data = m.get_addr(self.teb, None)
if data:
self._teb = Win32_TIB(self._owner, data, self._owner.is_64_bit)
return self._teb
@property
def peb_address(self):
t = self.teb_data
print('t=%s' % t)
if t is not None:
return t.peb_address
else:
return None
@property
def tls_slots(self):
return self.teb_data.tls_slots
@property
def name(self):
if self._name is not None:
return self._name
elif self._thread_name is None:
tls_slot_index = self._owner.thread_name_tls_slot
if tls_slot_index is not None:
teb = self.teb_data
if teb is not None:
addr = teb.tls_slots[tls_slot_index]
self._thread_name = self._owner._read_memory(addr, max_len=16)
return self._thread_name
@property
def location(self):
s = self.stackdump
if s is not None:
return s.top
else:
return None
class MemoryRegion(XMLReportEntity):
def __init__(self, owner):
super(XMLReport.MemoryRegion, self).__init__(owner)
@property
def base(self):
return self.base_addr
@property
def end_addr(self):
return self.base_addr + self.size
def __str__(self):
if self.base_addr != self.alloc_base:
return 'base=0x%x, alloc=0x%x, size=%i, end=0x%x, type=%i, protect=%x, state=%x, usage=%s' % (self.base_addr, self.alloc_base, self.size, self.end_addr, self.type, self.protect, self.state, self.usage)
else:
return 'base=0x%x, size=%i, end=0x%x, type=%i, protect=%x, state=%x, usage=%s' % (self.base_addr, self.size, self.end_addr, self.type, self.protect, self.state, self.usage)
    class MemoryRegionUsage(XMLReportEntity):
        """Describes how a thread uses (part of) a memory region."""
        def __init__(self, owner, region):
            super(XMLReport.MemoryRegionUsage, self).__init__(owner)
            self._region = region

        @property
        def thread(self):
            """The Thread matching this record's threadid, or None."""
            ret = None
            for thread in self._owner.threads:
                if thread.id == self.threadid:
                    ret = thread
                    break
            return ret

        def __str__(self):
            return '(0x%x, %s)' % (self.threadid, format_memory_usagetype(self.usagetype))

        def __repr__(self):
            return str(self)
    class MemoryBlock(XMLReportEntity):
        """A captured block of raw process memory from the dump."""
        def __init__(self, owner):
            super(XMLReport.MemoryBlock, self).__init__(owner)
            self._thread_id = None  # cached id of the owning thread, if any

        @property
        def hexdump(self):
            return self.memory.hexdump

        @property
        def threadid(self):
            """Id of the thread whose stack memory starts at this block."""
            if self._thread_id is None:
                for thread in self._owner.threads:
                    if thread.memory == self.base:
                        self._thread_id = thread.id
                        break
            return self._thread_id

        @property
        def end_addr(self):
            return self.base + self.size

        def get_addr(self, addr, size=None):
            """Return bytes at absolute address *addr* (clamped to the block
            end), or None when *addr* lies outside this block."""
            if addr < self.base or addr > self.end_addr:
                return None
            offset = addr - self.base
            if size is None:
                actual_size = self.size - offset
            else:
                actual_size = min(self.size - offset, size)
            return self.memory[offset:offset+actual_size]

        def find(self, needle, start=0):
            return self.memory.find(needle, start)

        def __len__(self):
            return len(self.memory)

        def __getitem__(self, index):
            return self.memory[index]

        def __str__(self):
            return 'num=%i, base=0x%x, size=%i, end=0x%x' % (self.num, self.base, self.size, self.end_addr)
    class Handle(XMLReportEntity):
        """One OS handle (type, name, reference counts) from <handles>."""
        def __init__(self, owner):
            super(XMLReport.Handle, self).__init__(owner)
    class StackDumpList(XMLReportEntity):
        """List of stack dumps, addressable by thread id (full dump only) or
        by the special string keys 'simplified' and 'exception'."""
        def __init__(self, owner):
            super(XMLReport.StackDumpList, self).__init__(owner)
            self._list = []

        def append(self, dump):
            self._list.append(dump)

        def __iter__(self):
            return iter(self._list)

        def __len__(self):
            return len(self._list)

        def __contains__(self, key):
            # int key -> non-simplified dump of that thread;
            # str key -> first dump flagged 'simplified' or 'exception'.
            if isinstance(key, int):
                for d in self._list:
                    if d.threadid == key and d.simplified == False:
                        return True
            elif isinstance(key, str):
                if key == 'simplified':
                    for d in self._list:
                        if d.simplified:
                            return True
                elif key == 'exception':
                    for d in self._list:
                        if d.exception:
                            return True
            return False

        def __getitem__(self, key):
            # Same lookup rules as __contains__; KeyError when nothing matches.
            if isinstance(key, int):
                for d in self._list:
                    if d.threadid == key and d.simplified == False:
                        return d
            elif isinstance(key, str):
                if key == 'simplified':
                    for d in self._list:
                        if d.simplified:
                            return d
                elif key == 'exception':
                    for d in self._list:
                        if d.exception:
                            return d
            raise KeyError(key)
    class StackDump(XMLReportEntity):
        """A call stack for one thread (full or simplified)."""
        def __init__(self, owner):
            super(XMLReport.StackDump, self).__init__(owner)
            self._thread = None  # cached Thread lookup

        @property
        def thread(self):
            """The Thread this dump belongs to (matched by threadid)."""
            if self._thread is None:
                for thread in self._owner.threads:
                    if thread.id == self.threadid:
                        self._thread = thread
                        break
            return self._thread

        @property
        def involved_modules(self):
            """Modules referenced by the frames, in first-appearance order."""
            module_order = []
            for frm in self.callstack:
                if frm.module:
                    module_order.append(frm.module)
            return XMLReport.unique(module_order)

        @property
        def top(self):
            """The innermost stack frame, or None for an empty stack."""
            if self.callstack:
                return self.callstack[0]
            else:
                return None
    class StackFrame(XMLReportEntity):
        """A single frame of a stack dump."""
        def __init__(self, owner, dump):
            super(XMLReport.StackFrame, self).__init__(owner)
            self._dump = dump

        @property
        def source_url(self):
            """file:// URL of the source file, or None when unknown."""
            if self.source:
                return 'file:///' + self.source
            else:
                return None

        @property
        def params(self):
            # for the moment there are always four parameters
            ret = [ self.param0, self.param1, self.param2, self.param3]
            return ret
    class SimplifiedInfo(XMLReportEntity):
        """The <simplified_info> section (debug-symbol availability hints)."""
        def __init__(self, owner):
            super(XMLReport.SimplifiedInfo, self).__init__(owner)
    class MiscInfo(XMLReportEntity):
        """The <misc_info> section (process times, CPU speeds, timezone)."""
        def __init__(self, owner):
            super(XMLReport.MiscInfo, self).__init__(owner)
    class FastProtectVersionInfo(XMLReportEntity):
        """The <fast_protect_version_info> section (product/build metadata)."""
        def __init__(self, owner):
            super(XMLReport.FastProtectVersionInfo, self).__init__(owner)
    class FastProtectSystemInfo(XMLReportEntity):
        """The <fast_protect_system_info> section; wraps the embedded
        system-info report and parses it lazily on first use."""
        def __init__(self, owner):
            super(XMLReport.FastProtectSystemInfo, self).__init__(owner)
            # NOTE: the (misspelled) attribute names are kept for
            # compatibility with existing callers.
            self._sytem_info_report = None
            self._sytem_info_report_loaded = False

        @property
        def sytem_info_report(self):
            """Lazily-built SystemInfoReport backed by the owning XML report."""
            if not self._sytem_info_report_loaded:
                # Imported here to avoid a circular import at module load.
                from crashdump.systeminforeport import SystemInfoReport
                self._sytem_info_report = SystemInfoReport(xmlreport=self._owner)
                self._sytem_info_report_loaded = True
            return self._sytem_info_report

        @property
        def machine_type(self):
            rep = self.sytem_info_report
            return rep['System/MachineType']

        @property
        def text(self):
            rep = self.sytem_info_report
            return rep.text
    class ProcessStatusLinux(XMLReportEntity):
        """The <processstatuslinux> section (/proc/<pid>/status snapshot)."""
        def __init__(self, owner):
            super(XMLReport.ProcessStatusLinux, self).__init__(owner)
    class ProcessStatusWin32(XMLReportEntity):
        """The <processstatuswin32> section (process parameters on Windows)."""
        def __init__(self, owner):
            super(XMLReport.ProcessStatusWin32, self).__init__(owner)
    class ProcessMemoryInfoWin32(XMLReportEntity):
        """The <processmemoryinfowin32> section (working set, pagefile use)."""
        def __init__(self, owner):
            super(XMLReport.ProcessMemoryInfoWin32, self).__init__(owner)
@staticmethod
def _value_convert(value_str, data_type):
if data_type == 'uuid':
try:
return UUID(value_str)
except ValueError:
return None
elif data_type == 'QString':
return value_str
elif data_type == 'QDateTime':
try:
dt = datetime.strptime(value_str, '%Y-%m-%d %H:%M:%S')
return dt.replace(tzinfo=UTC())
except ValueError:
return None
elif data_type == 'bool':
if value_str == 'true':
return True
elif value_str == 'false':
return False
else:
return None
elif data_type == 'int' or data_type == 'qlonglong':
try:
return int(value_str, 10)
except ValueError:
return None
elif data_type == 'uint' or data_type == 'qulonglong':
try:
return int(value_str, 16)
except ValueError:
return None
else:
return str(value_str)
    @staticmethod
    def _get_node_value(node, child, default_value=None):
        """Read *child* of *node*, converting per its 'type' attribute.

        Aggregate Qt types (QStringList, QVariantMap, QByteArray) are
        handled specially; everything else goes through _value_convert.
        Returns *default_value* when the node or the value is absent.
        """
        if node is None:
            return default_value
        r = node.xpath(child + '/@type')
        data_type = r[0] if r else None
        if data_type == 'QStringList':
            all_subitems = node.xpath(child + '/item/text()')
            ret = []
            for c in all_subitems:
                ret.append(str(c))
        elif data_type == 'QVariantMap':
            # Each <item> carries its own key and type attributes.
            all_subitems = node.xpath(child + '/item')
            ret = {}
            for item in all_subitems:
                r = item.xpath('@key')
                item_key = str(r[0]) if r else None
                r = item.xpath('@type')
                item_data_type = str(r[0]) if r else None
                r = item.xpath('text()')
                item_value = r[0] if r else None
                ret[item_key] = XMLReport._value_convert(item_value, item_data_type)
        elif data_type == 'QByteArray':
            r = node.xpath(child + '/@encoding-type')
            encoding_type = r[0] if r else None
            r = node.xpath(child + '/text()')
            # NOTE(review): 'value' is assigned but r[0] is used below.
            value = r[0] if r else None
            if r:
                if encoding_type == 'base64':
                    ret = HexDumpMemoryBlock(base64.b64decode(r[0]))
                else:
                    ret = HexDumpMemoryBlock(str(r[0]))
            else:
                ret = default_value
        else:
            r = node.xpath(child + '/text()')
            if r:
                ret = XMLReport._value_convert(r[0], data_type)
            else:
                ret = default_value
        return ret
@staticmethod
def _get_attribute(node, attr_name, default_value=None):
if node is None:
return default_value
r = node.xpath('@' + attr_name)
attr_value = r[0] if r else None
ok = False
ret = None
if attr_value:
attr_value_low = attr_value.lower()
if attr_value_low == 'true' or attr_value_low == 'on':
ret = True
ok = True
elif attr_value_low == 'false' or attr_value_low == 'off':
ret = False
ok = True
if not ok:
if attr_value.startswith('0x'):
try:
ret = int(attr_value[2:], 16)
ok = True
except ValueError:
pass
if not ok:
try:
ret = int(attr_value)
ok = True
except ValueError:
pass
if not ok:
ret = str(attr_value)
return ret
    @staticmethod
    def _get_first_node(node, child):
        """Return the first node matching *child*, or None.

        Note: the XPath is prefixed with '/', so it is evaluated as an
        absolute path from the document root rather than relative to
        *node* -- callers pass full paths such as 'crash_dump/system_info'.
        """
        if node is None:
            return None
        r = node.xpath('/' + str(child))
        return r[0] if r else None
    @property
    def filename(self):
        """Path of the XML crash-report file this object was loaded from."""
        return self._filename
def _get_memory_block(self, addr):
for m in self.memory_blocks:
if addr >= m.base and addr < m.end_addr:
#print('got %x >= %x < %x' % (m.base, addr, m.end_addr))
return m
return None
def _get_memory_region(self, addr):
for m in self.memory_regions:
if addr >= m.base and addr < m.end_addr:
#print('got %x >= %x < %x' % (m.base, addr, m.end_addr))
return m
return None
def _read_memory(self, addr, max_len=None):
m = self._get_memory_block(addr)
if m is not None:
return m.get_addr(addr, max_len)
else:
return None
def find_in_memory_blocks(self, needle, start=0):
ret = []
for m in self.memory_blocks:
index = m.find(needle, start=start)
if index >= 0:
#print('got %x >= %x < %x' % (m.base, addr, m.end_addr))
ret.append( (m, index) )
return ret
    @property
    def crash_info(self):
        """Lazily parsed top-level <crash_dump> fields (cached).

        Returns None when the report has no <crash_dump> root element.
        """
        if self._crash_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump')
            self._crash_info = XMLReport.CrashInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._crash_dump_fields:
                    setattr(self._crash_info, f, XMLReport._get_node_value(i, f))
        return self._crash_info
@property
def platform_type(self):
s = self.system_info
if s is None:
return None
return s.platform_type
@property
def is_64_bit(self):
if self._is_64_bit is None:
s = self.system_info
if s is None:
return None
# CPUTypeUnknown=-1
# CPUTypeX86=0
# CPUTypeMIPS=1
# CPUTypeAlpha=2
# CPUTypePowerPC=3
# CPUTypeSHX=4
# CPUTypeARM=5
# CPUTypeIA64=6
# CPUTypeAlpha64=7
# CPUTypeMSIL=8
# CPUTypeAMD64=9
# CPUTypeX64_Win64=10
# CPUTypeSparc=11
# CPUTypePowerPC64=12
# CPUTypeARM64=13
if s.cpu_type_id == 0 or \
s.cpu_type_id == 1 or \
s.cpu_type_id == 2 or \
s.cpu_type_id == 3 or \
s.cpu_type_id == 4 or \
s.cpu_type_id == 5 or \
s.cpu_type_id == -1:
self._is_64_bit = False
elif s.cpu_type_id == 6 or \
s.cpu_type_id == 7 or \
s.cpu_type_id == 8 or \
s.cpu_type_id == 9 or \
s.cpu_type_id == 10 or \
s.cpu_type_id == 11 or \
s.cpu_type_id == 12 or \
s.cpu_type_id == 13:
self._is_64_bit = True
else:
self._is_64_bit = False
return self._is_64_bit
    @property
    def system_info(self):
        """Lazily parsed <crash_dump/system_info> section (cached).

        Also normalizes ``os_version_number``: when the top 16 bits are
        zero the value is treated as the legacy two-32-bit-integer layout
        and repacked into four 16-bit fields (major.minor.patch.build),
        taking the build from the low 16 bits of ``os_build_number``.
        """
        if self._system_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/system_info')
            self._system_info = XMLReport.SystemInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._system_info_fields:
                    setattr(self._system_info, f, XMLReport._get_node_value(i, f))
                if self._system_info.os_version_number is not None and ((self._system_info.os_version_number >> 48) & 0xffff) == 0:
                    # convert old OS version number with two 32-bit integers
                    # to the new format using four 16-bit integers
                    major = (self._system_info.os_version_number >> 32) & 0xffffffff
                    minor = self._system_info.os_version_number & 0xffffffff
                    patch = 0
                    build = (self._system_info.os_build_number & 0xffff)
                    self._system_info.os_version_number = major << 48 | minor << 32 | patch << 16 | build
        return self._system_info
    @property
    def file_info(self):
        """Lazily parsed <crash_dump/file_info> section (cached).

        The 'log' field is filled separately: each <log>/<message> element
        becomes a FileInfoLogMessage appended to ``file_info.log``.
        """
        if self._file_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/file_info')
            self._file_info = XMLReport.FileInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._file_info_fields:
                    if f != 'log':
                        setattr(self._file_info, f, XMLReport._get_node_value(i, f))
                i = XMLReport._get_first_node(self._xml, 'crash_dump/file_info/log')
                all_subitems = i.xpath('message') if i is not None else None
                if all_subitems is not None:
                    for item in all_subitems:
                        m = XMLReport.FileInfoLogMessage(self)
                        for f in XMLReport._file_info_log_message_fields:
                            setattr(m, f, XMLReport._get_node_value(item, f))
                        self._file_info.log.append(m)
        return self._file_info
    @property
    def exception(self):
        """Lazily parsed <crash_dump/exception> section (cached), or None."""
        if self._exception is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/exception')
            self._exception = XMLReport.Exception(self) if i is not None else None
            if i is not None:
                for f in XMLReport._exception_fields:
                    setattr(self._exception, f, XMLReport._get_node_value(i, f))
        return self._exception
    @property
    def assertion(self):
        """Lazily parsed <crash_dump/assertion> section (cached), or None."""
        if self._assertion is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/assertion')
            self._assertion = XMLReport.Assertion(self) if i is not None else None
            if i is not None:
                for f in XMLReport._assertion_fields:
                    setattr(self._assertion, f, XMLReport._get_node_value(i, f))
        return self._assertion
    @property
    def modules(self):
        """Lazily parsed loaded-module list (cached).

        When a module lacks a textual file/product version, one is derived
        from the corresponding numeric version via format_version_number().
        """
        if self._modules is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/modules')
            self._modules = []
            all_subitems = i.xpath('module') if i is not None else None
            if all_subitems is not None:
                for item in all_subitems:
                    m = XMLReport.Module(self)
                    for f in XMLReport._module_fields:
                        setattr(m, f, XMLReport._get_node_value(item, f))
                    if m.file_version is None:
                        m.file_version = format_version_number(m.file_version_number)
                    if m.product_version is None:
                        m.product_version = format_version_number(m.product_version_number)
                    self._modules.append(m)
        return self._modules
    @property
    def threads(self):
        """Lazily parsed <crash_dump/threads> entries (cached).

        Entries in ``_thread_fields`` may be (xml_name, property_name)
        tuples when the XML tag differs from the attribute name.  The
        first thread parsed is flagged as the main thread.
        """
        if self._threads is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/threads')
            self._threads = []
            all_subitems = i.xpath('thread') if i is not None else None
            if all_subitems is not None:
                for item in all_subitems:
                    m = XMLReport.Thread(self)
                    for f in XMLReport._thread_fields:
                        if isinstance(f, tuple):
                            f_xml, f_prop = f
                            setattr(m, f_prop, XMLReport._get_node_value(item, f_xml))
                        else:
                            setattr(m, f, XMLReport._get_node_value(item, f))
                    if not self._threads:
                        m.main_thread = True
                    self._threads.append(m)
        return self._threads
@property
def peb_address(self):
if self._peb_address is None:
for t in self.threads:
self._peb_address = t.peb_address
break
return self._peb_address
    @property
    def peb_memory_block(self):
        """Captured memory block containing the PEB (cached), or None."""
        if self._peb_memory_block is None:
            self._peb_memory_block = self._get_memory_block(self.peb_address)
        return self._peb_memory_block
    @property
    def peb(self):
        """Decoded Win32 PEB structure (cached), or None when the PEB
        memory was not captured.

        Reads 64 bytes at ``peb_address`` and wraps them in Win32_PEB.
        """
        if self._peb is None:
            m = self.peb_memory_block
            if m is None:
                return None
            data = m.get_addr(self.peb_address, 64)
            if data:
                self._peb = Win32_PEB(self, data, self.is_64_bit)
        return self._peb
@property
def memory_regions(self):
if self._memory_regions is None:
i = XMLReport._get_first_node(self._xml, 'crash_dump/memory_info')
self._memory_regions = []
all_subitems = i.xpath('memory') if i is not None else None
if all_subitems is not None:
for item in all_subitems:
m = XMLReport.MemoryRegion(self)
for f in XMLReport._memory_region_fields:
setattr(m, f, XMLReport._get_node_value(item, f))
m.usage = []
all_subitems = item.xpath('usage')
if all_subitems is not None:
for item in all_subitems:
usage = XMLReport.MemoryRegionUsage(self, m)
for f in XMLReport._memory_region_usage_fields:
setattr(usage, f, XMLReport._get_node_value(item, f))
m.usage.append(usage)
self._memory_regions.append(m)
self._memory_regions = sorted(self._memory_regions, key=lambda region: region.base_addr)
return self._memory_regions
    @property
    def memory_blocks(self):
        """Lazily parsed captured memory blocks (cached), sorted by base
        address."""
        if self._memory_blocks is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/memory_blocks')
            self._memory_blocks = []
            all_subitems = i.xpath('memory_block') if i is not None else None
            if all_subitems is not None:
                for item in all_subitems:
                    m = XMLReport.MemoryBlock(self)
                    for f in XMLReport._memory_block_fields:
                        setattr(m, f, XMLReport._get_node_value(item, f))
                    self._memory_blocks.append(m)
            self._memory_blocks = sorted(self._memory_blocks, key=lambda block: block.base)
        return self._memory_blocks
    @property
    def handles(self):
        """Lazily parsed OS handle list (cached).

        NOTE(review): the container lookup uses 'crash_dump/handle' and
        then searches 'handle' children inside it, while sibling sections
        use plural containers ('threads', 'modules', ...).  Verify against
        the dump schema whether this should be 'crash_dump/handles'.
        """
        if self._handles is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/handle')
            self._handles = []
            all_subitems = i.xpath('handle') if i is not None else None
            if all_subitems is not None:
                for item in all_subitems:
                    m = XMLReport.Handle(self)
                    for f in XMLReport._handle_fields:
                        setattr(m, f, XMLReport._get_node_value(item, f))
                    self._handles.append(m)
        return self._handles
@property
def stackdumps(self):
if self._stackdumps is None:
i = XMLReport._get_first_node(self._xml, 'crash_dump/stackdumps')
all_subitems = i.xpath('stackdump') if i is not None else None
if all_subitems is not None:
self._stackdumps = XMLReport.StackDumpList(self)
for item in all_subitems:
dump = XMLReport.StackDump(self)
for f in XMLReport._stackdump_fields:
setattr(dump, f, XMLReport._get_attribute(item, f))
dump.callstack = []
all_subitems = item.xpath('frame')
if all_subitems is not None:
for item in all_subitems:
frame = XMLReport.StackFrame(self, dump)
for f in XMLReport._stack_frame_fields:
setattr(frame, f, XMLReport._get_node_value(item, f))
dump.callstack.append(frame)
self._stackdumps.append(dump)
return self._stackdumps
    @property
    def simplified_info(self):
        """Lazily parsed <crash_dump/simplified_info> section (cached)."""
        if self._simplified_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/simplified_info')
            self._simplified_info = XMLReport.SimplifiedInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._simplified_info_fields:
                    setattr(self._simplified_info, f, XMLReport._get_node_value(i, f))
        return self._simplified_info
    @property
    def processstatuslinux(self):
        """Lazily parsed <crash_dump/processstatuslinux> section (cached)."""
        if self._processstatuslinux is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/processstatuslinux')
            self._processstatuslinux = XMLReport.ProcessStatusLinux(self) if i is not None else None
            if i is not None:
                for f in XMLReport._processstatuslinux_fields:
                    setattr(self._processstatuslinux, f, XMLReport._get_node_value(i, f))
        return self._processstatuslinux
    @property
    def processstatuswin32(self):
        """Lazily parsed <crash_dump/processstatuswin32> section (cached)."""
        if self._processstatuswin32 is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/processstatuswin32')
            self._processstatuswin32 = XMLReport.ProcessStatusWin32(self) if i is not None else None
            if i is not None:
                for f in XMLReport._processstatuswin32_fields:
                    setattr(self._processstatuswin32, f, XMLReport._get_node_value(i, f))
        return self._processstatuswin32
    @property
    def processmemoryinfowin32(self):
        """Lazily parsed <crash_dump/processmemoryinfowin32> section (cached)."""
        if self._processmemoryinfowin32 is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/processmemoryinfowin32')
            self._processmemoryinfowin32 = XMLReport.ProcessMemoryInfoWin32(self) if i is not None else None
            if i is not None:
                for f in XMLReport._processmemoryinfowin32_fields:
                    setattr(self._processmemoryinfowin32, f, XMLReport._get_node_value(i, f))
        return self._processmemoryinfowin32
@property
def misc_info(self):
if self._misc_info is None:
i = XMLReport._get_first_node(self._xml, 'crash_dump/misc_info')
self._misc_info = XMLReport.MiscInfo(self) if i is not None else None
if i is not None:
for f in XMLReport._processstatuslinux_fields:
setattr(self._misc_info, f, XMLReport._get_node_value(i, f))
return self._misc_info
    @property
    def fast_protect_version_info(self):
        """Lazily parsed <crash_dump/fast_protect_version_info> (cached)."""
        if self._fast_protect_version_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/fast_protect_version_info')
            self._fast_protect_version_info = XMLReport.FastProtectVersionInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._fast_protect_version_info_fields:
                    setattr(self._fast_protect_version_info, f, XMLReport._get_node_value(i, f))
        return self._fast_protect_version_info
@property
def thread_name_tls_slot(self):
s = self.fast_protect_version_info
if s is None:
return None
return s.thread_name_tls_slot
    @property
    def fast_protect_system_info(self):
        """Lazily parsed <crash_dump/fast_protect_system_info> (cached)."""
        if self._fast_protect_system_info is None:
            i = XMLReport._get_first_node(self._xml, 'crash_dump/fast_protect_system_info')
            self._fast_protect_system_info = XMLReport.FastProtectSystemInfo(self) if i is not None else None
            if i is not None:
                for f in XMLReport._fast_protect_system_info_fields:
                    setattr(self._fast_protect_system_info, f, XMLReport._get_node_value(i, f))
        return self._fast_protect_system_info
    @property
    def fields(self):
        """Names of the top-level report sections exposed by this object."""
        return self._main_fields
    # Ad-hoc smoke test: load the report named on the command line and dump
    # its stack traces.  NOTE(review): this guard appears at class scope, so
    # it executes while the class body is being evaluated -- confirm this is
    # intentional (it looks like a module-level __main__ block that lost a
    # dedent).
    if __name__ == '__main__':
        if len(sys.argv) < 2:
            print('No crash report XML file(s) specified')
            sys.exit(1)
        xmlreport = XMLReport(sys.argv[1])
        #print(xmlreport.crash_info)
        #print(xmlreport.system_info)
        #print(xmlreport.file_info)
        #for m in xmlreport.modules:
            #print(m)
        #for m in xmlreport.threads:
            #print(m)
        #for m in xmlreport.handles:
            #print(m)
        #for m in xmlreport.memory_regions:
            #print(m)
        # Print every stack dump with a one-line header per thread.
        for m in xmlreport.stackdumps:
            print('thread %u %s exception (simple %s)' % (m.threadid, 'with' if m.exception else 'without', 'yes' if m.simplified else 'no'))
            for f in m.callstack:
                print(f)
        #dump = xmlreport.stackdumps['exception']
        #for f in dump.callstack:
            #print(f)
        #dump = xmlreport.stackdumps['simplified']
        #for f in dump.callstack:
            #print(f)
        #for m in xmlreport.memory_blocks:
            #fmt = '0x%%0%ix: %%s - %%s' % m.hexdump.offset_width
            #for l in m.hexdump:
                #print(fmt % (l.offset, l.hex, l.ascii))
        #for m in xmlreport.memory_blocks:
            #print(m.threadid)
        #for m in xmlreport.threads:
            #print(type(m.id))
def dump_report_entity(entity, indent=0):
for (k,v) in entity.__dict__.items():
if k[0] != '_':
if isinstance(v, list):
dump_report_list(v, indent+2)
else:
print((' ' * indent) + '%s=%s' % (k,v))
def dump_report_list(list, indent=0):
for num, data_elem in enumerate(list):
print((' ' * indent) + '%i:' % num)
dump_report_entity(data_elem, indent + 2)
def dump_report(rep, field, indent=0):
print((' ' * indent) + field + ':')
data = getattr(rep, field)
if data is None:
print(' None')
elif isinstance(data, list):
dump_report_list(data, indent+2)
else:
dump_report_entity(data, indent + 2)
#dump_report(xmlreport, 'crash_info')
#dump_report(xmlreport, 'system_info')
#dump_report(xmlreport, 'file_info')
#dump_report(xmlreport, 'fast_protect_version_info')
#dump_report(xmlreport, 'fast_protect_system_info')
#print('machine_type=%s' % xmlreport.fast_protect_system_info.machine_type)
#dump_report(xmlreport, 'simplified_info')
#dump_report(xmlreport, 'modules')
#if xmlreport.exception is not None:
#print(xmlreport.exception.involved_modules)
#print(xmlreport.exception.params)
#dump_report(xmlreport, 'threads')
#print(xmlreport.peb.image_base_address)
#slot = xmlreport.fast_protect_version_info.thread_name_tls_slot
#print('slot index=%i'% slot)
#r = xmlreport.find_in_memory_blocks('Clt')
#for (m, index) in r:
# print(m)
# print(str(m.hexdump))
#for t in [ xmlreport.threads[0] ]:
#for t in xmlreport.threads:
# teb = t.teb_data
# if teb:
# #print('%05x - %05x, PEB %x, TLS array %x' % (t.id, teb.threadid, teb.peb_address, teb.thread_local_storage_array_address))
# #if teb.thread_local_storage_array:
# #print('%05x - %x' % (t.id, teb.thread_local_storage_array[slot]))
# #print('%05x - %x' % (t.id, teb.tls_slots[1]))
# #print(' %05x - %s' % (t.id, t.teb_memory_region))
# print(' %05x - %s thread name at %x' % (t.id, t.name, teb.tls_slots[slot]))
# #print(teb.hexdump)
# #print('%i - %x, %x, %x' % (t.id, teb.threadid, teb.peb_address, teb.thread_local_storage_array))
#
# #for i in range(slot + 1):
# #print(' slot[%i]=%x' %(i, t.tls_slots[i]))
#dump_report(xmlreport, 'memory_blocks')
#dump_report(xmlreport, 'memory_regions')
#dump_report(xmlreport, 'exception')
#dump_report(xmlreport, 'processmemoryinfowin32')
#pp = XMLReport.ProxyObject(xmlreport, 'memory_regions')
#print(len(pp))
#pp = XMLReport.ProxyObject(xmlreport, 'memory_blocks')
#print(len(pp))
#for m in pp:
#print(m)
#dump_report(xmlreport, 'memory_regions')
#print(xmlreport.crash_info.path)
#sys.stdout.write(xmlreport.fast_protect_system_info.rawdata.raw)
#if xmlreport.exception.thread.stackdump:
#for (no, f) in enumerate(xmlreport.exception.thread.stackdump.callstack):
#print('%i: %s' % (no, f))
#else:
#print(' no stackdump available')
#print('Simplified Stackdump')
#if xmlreport.exception.thread.simplified_stackdump:
#for (no, f) in enumerate(xmlreport.exception.thread.simplified_stackdump.callstack):
#print('%i: %s' % (no, f))
#print('%i: %s' % (no, f.params))
#else:
#print(' no simplified stackdump available')
| gpl-3.0 | -559,323,098,969,346,050 | 35.850877 | 217 | 0.524909 | false |
flags/Reactor-3 | tools/ReactorWatch.py | 1 | 1492 | #This tool was rushed together over the course of an hour or so. Be gentle.
from flask import Flask, render_template, request
import threading
import socket
import json
app = Flask(__name__)
def request(what, value=None):
	"""Query the locally running Reactor-3 debug server.

	Sends a JSON ``get`` request for *what* (optionally scoped to *value*)
	to 127.0.0.1:3335 and returns the decoded JSON reply.

	Fixes: the parameter was named ``request``, shadowing both this
	function and the name imported from flask (all in-file callers pass it
	positionally); and the socket leaked when connect/send/recv raised --
	it is now always closed.  NOTE(review): a single recv(9048) assumes
	the whole reply fits in one read; larger replies would be truncated.
	"""
	sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	sock.settimeout(5)
	try:
		sock.connect(('127.0.0.1', 3335))
		sock.sendall(json.dumps({'type': 'get', 'what': what, 'value': value}))
		return json.loads(sock.recv(9048))
	finally:
		sock.close()
@app.route('/memory/<life_id>')
def memory(life_id):
	"""Render the memory listing for a single life entity."""
	entries = request('memory', value=int(life_id))
	return render_template('memory.html', life_id=life_id, memories=entries)
@app.route('/life/<life_id>')
def life(life_id):
	"""Render the detail page for one life entity, including what it knows."""
	entity = request('life', value=int(life_id))
	knows = list(entity['know'].values())
	return render_template('life.html', life=entity, knows=knows)
@app.route('/camp/<camp_id>')
def camp(camp_id):
	"""Render the detail page for one camp."""
	return render_template('camp.html', camp=request('camp', value=int(camp_id)))
@app.route('/group/<group_id>')
def group(group_id):
	"""Render the detail page for one group (looked up by id in the full
	groups mapping returned by the server)."""
	all_groups = request('groups')
	return render_template('group.html', group_id=group_id, group=all_groups[group_id])
@app.route('/')
def index():
	"""Render the overview page: server stats plus the sorted life list
	and the groups mapping."""
	groups = request('groups')
	life = sorted(request('life_list'))
	stats = request('stats')
	return render_template('index.html', stats=stats, life=life, groups=groups)
if __name__ == '__main__':
app.run(debug=True, port=3336) | mit | -1,046,577,889,139,109,800 | 23.080645 | 76 | 0.683646 | false |
tuskar/tuskar-ui | openstack_dashboard/dashboards/admin/users/tests.py | 1 | 18692 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from socket import timeout as socket_timeout
from django.core.urlresolvers import reverse
from django import http
from mox import IgnoreArg
from mox import IsA
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
# Route targets exercised throughout these tests.
USERS_INDEX_URL = reverse('horizon:admin:users:index')
USER_CREATE_URL = reverse('horizon:admin:users:create')
USER_UPDATE_URL = reverse('horizon:admin:users:update', args=[1])
class UsersViewTests(test.BaseAdminViewTests):
    """Admin 'Users' panel tests: index, create, update, enable/disable,
    and delete, with and without a domain context."""
    def _get_domain_id(self):
        """Return the domain id stored in the test session, if any."""
        return self.request.session.get('domain_context', None)
    def _get_users(self, domain_id):
        """Return the fixture users, filtered by *domain_id* when given."""
        if not domain_id:
            users = self.users.list()
        else:
            users = [user for user in self.users.list()
                     if user.domain_id == domain_id]
        return users
    @test.create_stubs({api.keystone: ('user_list',)})
    def test_index(self):
        """Index view lists the users of the current domain."""
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        api.keystone.user_list(IgnoreArg(), domain=domain_id) \
            .AndReturn(users)
        self.mox.ReplayAll()
        res = self.client.get(USERS_INDEX_URL)
        self.assertTemplateUsed(res, 'admin/users/index.html')
        self.assertItemsEqual(res.context['table'].data, users)
        if domain_id:
            for user in res.context['table'].data:
                self.assertItemsEqual(user.domain_id, domain_id)
    def test_index_with_domain(self):
        """Re-run test_index with a domain context set in the session."""
        domain = self.domains.get(id="1")
        self.setSessionValues(domain_context=domain.id,
                              domain_context_name=domain.name)
        self.test_index()
    @test.create_stubs({api.keystone: ('user_create',
                                       'tenant_list',
                                       'add_tenant_user_role',
                                       'get_default_role',
                                       'role_list')})
    def test_create(self):
        """Creating a user also assigns the chosen role on the project."""
        user = self.users.get(id="1")
        domain_id = self._get_domain_id()
        role = self.roles.first()
        api.keystone.tenant_list(IgnoreArg(), user=None) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.user_create(IgnoreArg(),
                                 name=user.name,
                                 email=user.email,
                                 password=user.password,
                                 project=self.tenant.id,
                                 enabled=True,
                                 domain=domain_id).AndReturn(user)
        api.keystone.role_list(IgnoreArg()).AndReturn(self.roles.list())
        api.keystone.get_default_role(IgnoreArg()).AndReturn(role)
        api.keystone.add_tenant_user_role(IgnoreArg(), self.tenant.id,
                                          user.id, role.id)
        self.mox.ReplayAll()
        formData = {'method': 'CreateUserForm',
                    'name': user.name,
                    'email': user.email,
                    'password': user.password,
                    'project': self.tenant.id,
                    'role_id': self.roles.first().id,
                    'confirm_password': user.password}
        res = self.client.post(USER_CREATE_URL, formData)
        self.assertNoFormErrors(res)
        self.assertMessageCount(success=1)
    def test_create_with_domain(self):
        """Re-run test_create with a domain context set in the session."""
        domain = self.domains.get(id="1")
        self.setSessionValues(domain_context=domain.id,
                              domain_context_name=domain.name)
        self.test_create()
    @test.create_stubs({api.keystone: ('tenant_list',
                                       'role_list',
                                       'get_default_role')})
    def test_create_with_password_mismatch(self):
        """Mismatched password/confirmation yields a form error."""
        user = self.users.get(id="1")
        api.keystone.tenant_list(IgnoreArg(), user=None) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.role_list(IgnoreArg()).AndReturn(self.roles.list())
        api.keystone.get_default_role(IgnoreArg()) \
            .AndReturn(self.roles.first())
        self.mox.ReplayAll()
        formData = {'method': 'CreateUserForm',
                    'name': user.name,
                    'email': user.email,
                    'password': user.password,
                    'project': self.tenant.id,
                    'role_id': self.roles.first().id,
                    'confirm_password': "doesntmatch"}
        res = self.client.post(USER_CREATE_URL, formData)
        self.assertFormError(res, "form", None, ['Passwords do not match.'])
    @test.create_stubs({api.keystone: ('tenant_list',
                                       'role_list',
                                       'get_default_role')})
    def test_create_validation_for_password_too_short(self):
        """Passwords shorter than 8 characters are rejected."""
        user = self.users.get(id="1")
        api.keystone.tenant_list(IgnoreArg(), user=None) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.role_list(IgnoreArg()).AndReturn(self.roles.list())
        api.keystone.get_default_role(IgnoreArg()) \
            .AndReturn(self.roles.first())
        self.mox.ReplayAll()
        # check password min-len verification
        formData = {'method': 'CreateUserForm',
                    'name': user.name,
                    'email': user.email,
                    'password': 'four',
                    'project': self.tenant.id,
                    'role_id': self.roles.first().id,
                    'confirm_password': 'four'}
        res = self.client.post(USER_CREATE_URL, formData)
        self.assertFormError(
            res, "form", 'password',
            ['Password must be between 8 and 18 characters.'])
    @test.create_stubs({api.keystone: ('tenant_list',
                                       'role_list',
                                       'get_default_role')})
    def test_create_validation_for_password_too_long(self):
        """Passwords longer than 18 characters are rejected."""
        user = self.users.get(id="1")
        api.keystone.tenant_list(IgnoreArg(), user=None) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.role_list(IgnoreArg()).AndReturn(self.roles.list())
        api.keystone.get_default_role(IgnoreArg()) \
            .AndReturn(self.roles.first())
        self.mox.ReplayAll()
        # check password min-len verification
        formData = {'method': 'CreateUserForm',
                    'name': user.name,
                    'email': user.email,
                    'password': 'MoreThanEighteenChars',
                    'project': self.tenant.id,
                    'role_id': self.roles.first().id,
                    'confirm_password': 'MoreThanEighteenChars'}
        res = self.client.post(USER_CREATE_URL, formData)
        self.assertFormError(
            res, "form", 'password',
            ['Password must be between 8 and 18 characters.'])
    @test.create_stubs({api.keystone: ('user_get',
                                       'tenant_list',
                                       'user_update_tenant',
                                       'user_update_password',
                                       'user_update',
                                       'roles_for_user', )})
    def test_update(self):
        """Updating a user forwards the new values to keystone."""
        user = self.users.get(id="1")
        test_password = 'normalpwd'
        api.keystone.user_get(IsA(http.HttpRequest), '1',
                              admin=True).AndReturn(user)
        api.keystone.tenant_list(IgnoreArg(), user=user.id) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.user_update(IsA(http.HttpRequest),
                                 user.id,
                                 email=u'[email protected]',
                                 name=u'test_user',
                                 password=test_password,
                                 project=self.tenant.id).AndReturn(None)
        self.mox.ReplayAll()
        formData = {'method': 'UpdateUserForm',
                    'id': user.id,
                    'name': user.name,
                    'email': user.email,
                    'password': test_password,
                    'project': self.tenant.id,
                    'confirm_password': test_password}
        res = self.client.post(USER_UPDATE_URL, formData)
        self.assertNoFormErrors(res)
    @test.create_stubs({api.keystone: ('user_get',
                                       'tenant_list',
                                       'user_update_tenant',
                                       'keystone_can_edit_user',
                                       'roles_for_user', )})
    def test_update_with_keystone_can_edit_user_false(self):
        """When keystone forbids editing, the update surfaces an error."""
        user = self.users.get(id="1")
        api.keystone.user_get(IsA(http.HttpRequest),
                              '1',
                              admin=True).AndReturn(user)
        api.keystone.tenant_list(IgnoreArg(), user=user.id) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.keystone_can_edit_user().AndReturn(False)
        api.keystone.keystone_can_edit_user().AndReturn(False)
        self.mox.ReplayAll()
        formData = {'method': 'UpdateUserForm',
                    'id': user.id,
                    'name': user.name,
                    'project': self.tenant.id, }
        res = self.client.post(USER_UPDATE_URL, formData)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=1)
    @test.create_stubs({api.keystone: ('user_get', 'tenant_list')})
    def test_update_validation_for_password_too_short(self):
        """Update form rejects passwords shorter than 8 characters."""
        user = self.users.get(id="1")
        api.keystone.user_get(IsA(http.HttpRequest), '1',
                              admin=True).AndReturn(user)
        api.keystone.tenant_list(IgnoreArg(), user=user.id) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()
        formData = {'method': 'UpdateUserForm',
                    'id': user.id,
                    'name': user.name,
                    'email': user.email,
                    'password': 't',
                    'project': self.tenant.id,
                    'confirm_password': 't'}
        res = self.client.post(USER_UPDATE_URL, formData)
        self.assertFormError(
            res, "form", 'password',
            ['Password must be between 8 and 18 characters.'])
    @test.create_stubs({api.keystone: ('user_get', 'tenant_list')})
    def test_update_validation_for_password_too_long(self):
        """Update form rejects passwords longer than 18 characters."""
        user = self.users.get(id="1")
        api.keystone.user_get(IsA(http.HttpRequest), '1',
                              admin=True).AndReturn(user)
        api.keystone.tenant_list(IgnoreArg(), user=user.id) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()
        formData = {'method': 'UpdateUserForm',
                    'id': user.id,
                    'name': user.name,
                    'email': user.email,
                    'password': 'ThisIsASuperLongPassword',
                    'project': self.tenant.id,
                    'confirm_password': 'ThisIsASuperLongPassword'}
        res = self.client.post(USER_UPDATE_URL, formData)
        self.assertFormError(
            res, "form", 'password',
            ['Password must be between 8 and 18 characters.'])
    @test.create_stubs({api.keystone: ('user_update_enabled', 'user_list')})
    def test_enable_user(self):
        """The toggle action enables a disabled user."""
        user = self.users.get(id="2")
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        user.enabled = False
        api.keystone.user_list(IgnoreArg(), domain=domain_id).AndReturn(users)
        api.keystone.user_update_enabled(IgnoreArg(),
                                         user.id,
                                         True).AndReturn(user)
        self.mox.ReplayAll()
        formData = {'action': 'users__toggle__%s' % user.id}
        res = self.client.post(USERS_INDEX_URL, formData)
        self.assertRedirectsNoFollow(res, USERS_INDEX_URL)
    @test.create_stubs({api.keystone: ('user_update_enabled', 'user_list')})
    def test_disable_user(self):
        """The toggle action disables an enabled user."""
        user = self.users.get(id="2")
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        self.assertTrue(user.enabled)
        api.keystone.user_list(IgnoreArg(), domain=domain_id) \
            .AndReturn(users)
        api.keystone.user_update_enabled(IgnoreArg(),
                                         user.id,
                                         False).AndReturn(user)
        self.mox.ReplayAll()
        formData = {'action': 'users__toggle__%s' % user.id}
        res = self.client.post(USERS_INDEX_URL, formData)
        self.assertRedirectsNoFollow(res, USERS_INDEX_URL)
    @test.create_stubs({api.keystone: ('user_update_enabled', 'user_list')})
    def test_enable_disable_user_exception(self):
        """A keystone error during toggle still redirects to the index."""
        user = self.users.get(id="2")
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        user.enabled = False
        api.keystone.user_list(IgnoreArg(), domain=domain_id) \
            .AndReturn(users)
        api.keystone.user_update_enabled(IgnoreArg(), user.id, True) \
            .AndRaise(self.exceptions.keystone)
        self.mox.ReplayAll()
        formData = {'action': 'users__toggle__%s' % user.id}
        res = self.client.post(USERS_INDEX_URL, formData)
        self.assertRedirectsNoFollow(res, USERS_INDEX_URL)
    @test.create_stubs({api.keystone: ('user_list',)})
    def test_disabling_current_user(self):
        """Disabling the logged-in user is refused with a message."""
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        for i in range(0, 2):
            api.keystone.user_list(IgnoreArg(), domain=domain_id) \
                .AndReturn(users)
        self.mox.ReplayAll()
        formData = {'action': 'users__toggle__%s' % self.request.user.id}
        res = self.client.post(USERS_INDEX_URL, formData, follow=True)
        self.assertEqual(list(res.context['messages'])[0].message,
                         u'You cannot disable the user you are currently '
                         u'logged in as.')
    @test.create_stubs({api.keystone: ('user_list',)})
    def test_delete_user_with_improper_permissions(self):
        """Deleting the logged-in user is refused with a message."""
        domain_id = self._get_domain_id()
        users = self._get_users(domain_id)
        for i in range(0, 2):
            api.keystone.user_list(IgnoreArg(), domain=domain_id) \
                .AndReturn(users)
        self.mox.ReplayAll()
        formData = {'action': 'users__delete__%s' % self.request.user.id}
        res = self.client.post(USERS_INDEX_URL, formData, follow=True)
        self.assertEqual(list(res.context['messages'])[0].message,
                         u'You do not have permission to delete user: %s'
                         % self.request.user.username)
class SeleniumTests(test.SeleniumAdminTestCase):
    """Browser-level tests for the client-side password-match validation
    on the user create/update forms."""
    @test.create_stubs({api.keystone: ('tenant_list',
                                       'get_default_role',
                                       'role_list',
                                       'user_list')})
    def test_modal_create_user_with_passwords_not_matching(self):
        """Typing mismatched passwords in the create modal shows the
        'Passwords do not match' message without submitting."""
        api.keystone.tenant_list(IgnoreArg(), user=None) \
            .AndReturn([self.tenants.list(), False])
        api.keystone.role_list(IgnoreArg()).AndReturn(self.roles.list())
        api.keystone.user_list(IgnoreArg(), domain=None) \
            .AndReturn(self.users.list())
        api.keystone.get_default_role(IgnoreArg()) \
            .AndReturn(self.roles.first())
        self.mox.ReplayAll()
        self.selenium.get("%s%s" % (self.live_server_url, USERS_INDEX_URL))
        # Open the modal menu
        self.selenium.find_element_by_id("users__action_create") \
            .send_keys("\n")
        wait = self.ui.WebDriverWait(self.selenium, 10,
                                     ignored_exceptions=[socket_timeout])
        wait.until(lambda x: self.selenium.find_element_by_id("id_name"))
        body = self.selenium.find_element_by_tag_name("body")
        self.assertFalse("Passwords do not match" in body.text,
                         "Error message should not be visible at loading time")
        self.selenium.find_element_by_id("id_name").send_keys("Test User")
        self.selenium.find_element_by_id("id_password").send_keys("test")
        self.selenium.find_element_by_id("id_confirm_password").send_keys("te")
        self.selenium.find_element_by_id("id_email").send_keys("[email protected]")
        body = self.selenium.find_element_by_tag_name("body")
        self.assertTrue("Passwords do not match" in body.text,
                        "Error message not found in body")
    @test.create_stubs({api.keystone: ('tenant_list', 'user_get')})
    def test_update_user_with_passwords_not_matching(self):
        """Typing mismatched passwords on the update form shows the
        'Passwords do not match' message."""
        api.keystone.user_get(IsA(http.HttpRequest), '1',
                              admin=True).AndReturn(self.user)
        api.keystone.tenant_list(IgnoreArg(), user=self.user.id) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()
        self.selenium.get("%s%s" % (self.live_server_url, USER_UPDATE_URL))
        body = self.selenium.find_element_by_tag_name("body")
        self.assertFalse("Passwords do not match" in body.text,
                         "Error message should not be visible at loading time")
        self.selenium.find_element_by_id("id_password").send_keys("test")
        self.selenium.find_element_by_id("id_confirm_password").send_keys("te")
        self.selenium.find_element_by_id("id_email").clear()
        body = self.selenium.find_element_by_tag_name("body")
        self.assertTrue("Passwords do not match" in body.text,
                        "Error message not found in body")
| apache-2.0 | -2,188,680,730,515,127,000 | 39.458874 | 79 | 0.546223 | false |
f2nd/yandex-tank | yandextank/core/tankcore.py | 1 | 25112 | """ The central part of the tool: Core """
import datetime
import fnmatch
import glob
import importlib as il
import json
import logging
import os
import shutil
import socket
import tempfile
import time
import traceback
import threading
import pkg_resources
import sys
import platform
import yaml
from builtins import str
from yandextank.common.interfaces import GeneratorPlugin, MonitoringPlugin
from yandextank.validator.validator import TankConfig, ValidationError, load_multiple
from yandextank.aggregator import TankAggregator
from ..common.util import update_status, pid_exists, monitor_cpu_time
from netort.resource import manager as resource
from netort.process import execute
import configparser
logger = logging.getLogger(__name__)
LOCAL_CONFIG = 'local_conf.yaml'
USER_CONFIG = 'user_conf.yaml'
VALIDATED_CONF = 'validated_conf.yaml'
ERROR_OUTPUT = 'validation_error.yaml'
class Job(object):
def __init__(
self,
monitoring_plugins,
aggregator,
tank,
generator_plugin=None):
"""
:type aggregator: TankAggregator
:type monitoring_plugins: list of
"""
self.monitoring_plugins = monitoring_plugins
self.aggregator = aggregator
self.tank = tank
self.generator_plugin = generator_plugin
def subscribe_plugin(self, plugin):
self.aggregator.add_result_listener(plugin)
for monitoring_plugin in self.monitoring_plugins:
monitoring_plugin.add_listener(plugin)
def parse_plugin(s):
try:
plugin, config_section = s.split()
except ValueError:
plugin, config_section = s, None
return plugin, config_section
class LockError(Exception):
pass
class TankCore(object):
"""
JMeter + dstat inspired :)
"""
SECTION = 'core'
SECTION_META = 'meta'
PLUGIN_PREFIX = 'plugin_'
PID_OPTION = 'pid'
UUID_OPTION = 'uuid'
API_JOBNO = 'api_jobno'
def __init__(
self,
configs,
interrupted_event,
local_configs=None,
user_configs=None,
artifacts_base_dir=None,
artifacts_dir_name=None
):
"""
:param configs: list of dict
:param interrupted_event: threading.Event
"""
self.output = {}
self.raw_configs = configs
self.status = {}
self._plugins = None
self._artifacts_dir = artifacts_dir_name
self.artifact_files = {}
self.artifacts_to_send = []
self._artifacts_base_dir = artifacts_base_dir
self.manual_start = False
self.scheduled_start = None
self.taskset_path = None
self.taskset_affinity = None
self._job = None
self._cfg_snapshot = None
self.local_configs = load_multiple(local_configs)
self.user_configs = load_multiple(user_configs)
self.configinitial = self.user_configs
self.interrupted = interrupted_event
self.error_log = None
self.threads_stats = {}
self.config, self.errors = TankConfig(
self.raw_configs,
with_dynamic_options=True,
core_section=self.SECTION,
error_output=ERROR_OUTPUT
).validate()
if not self.config:
raise ValidationError(self.errors)
self.test_id = self.get_option(self.SECTION, 'artifacts_dir',
datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
with open(os.path.join(self.artifacts_dir, LOCAL_CONFIG), 'w') as f:
yaml.dump(self.local_configs, f)
with open(os.path.join(self.artifacts_dir, USER_CONFIG), 'w') as f:
yaml.dump(self.user_configs, f)
configinfo = self.config.validated.copy()
configinfo.setdefault(self.SECTION, {})
configinfo[self.SECTION][self.API_JOBNO] = self.test_id
with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
yaml.dump(configinfo, f)
self.add_artifact_file(os.path.join(self.artifacts_dir, USER_CONFIG))
self.add_artifact_file(os.path.join(self.artifacts_dir, LOCAL_CONFIG))
self.add_artifact_file(os.path.join(self.artifacts_dir, VALIDATED_CONF))
if self.errors:
self.add_artifact_file(os.path.join(self.artifacts_dir, ERROR_OUTPUT))
@property
def cfg_snapshot(self):
if not self._cfg_snapshot:
self._cfg_snapshot = str(self.config)
return self._cfg_snapshot
@staticmethod
def get_available_options():
# todo: should take this from schema
return [
"artifacts_base_dir", "artifacts_dir",
"taskset_path", "affinity"
]
@property
def plugins(self):
"""
:returns: {plugin_name: plugin_class, ...}
:rtype: dict
"""
if self._plugins is None:
self.load_plugins()
if self._plugins is None:
self._plugins = {}
return self._plugins
@property
def artifacts_base_dir(self):
if not self._artifacts_base_dir:
try:
artifacts_base_dir = os.path.abspath(self.get_option(self.SECTION, "artifacts_base_dir"))
except ValidationError:
artifacts_base_dir = os.path.abspath('logs')
if not os.path.exists(artifacts_base_dir):
os.makedirs(artifacts_base_dir)
os.chmod(self.artifacts_base_dir, 0o755)
self._artifacts_base_dir = artifacts_base_dir
return self._artifacts_base_dir
def load_plugins(self):
"""
Tells core to take plugin options and instantiate plugin classes
"""
logger.info(self.get_user_agent())
logger.info('New test id %s' % self.test_id)
logger.info("Loading plugins...")
for (plugin_name, plugin_path, plugin_cfg) in self.config.plugins:
logger.debug("Loading plugin %s from %s", plugin_name, plugin_path)
if plugin_path == "yandextank.plugins.Overload":
logger.warning(
"Deprecated plugin name: 'yandextank.plugins.Overload'\n"
"There is a new generic plugin now.\n"
"Correcting to 'yandextank.plugins.DataUploader overload'")
plugin_path = "yandextank.plugins.DataUploader overload"
try:
plugin = il.import_module(plugin_path)
except ImportError:
logger.warning('Plugin name %s path %s import error', plugin_name, plugin_path)
logger.debug('Plugin name %s path %s import error', plugin_name, plugin_path, exc_info=True)
raise
try:
instance = getattr(plugin, 'Plugin')(self, cfg=plugin_cfg, name=plugin_name)
except AttributeError:
logger.warning('Plugin %s classname should be `Plugin`', plugin_name)
raise
else:
self.register_plugin(self.PLUGIN_PREFIX + plugin_name, instance)
logger.debug("Plugin instances: %s", self._plugins)
@property
def job(self):
if not self._job:
# monitoring plugin
monitorings = [plugin for plugin in list(self.plugins.values()) if isinstance(plugin, MonitoringPlugin)]
# generator plugin
try:
gen = self.get_plugin_of_type(GeneratorPlugin)
except KeyError:
logger.warning("Load generator not found")
gen = GeneratorPlugin(self, {}, 'generator dummy')
# aggregator
aggregator = TankAggregator(gen)
self._job = Job(monitoring_plugins=monitorings,
generator_plugin=gen,
aggregator=aggregator,
tank=socket.getfqdn())
return self._job
def plugins_configure(self):
""" Call configure() on all plugins """
self.publish("core", "stage", "configure")
logger.info("Configuring plugins...")
self.taskset_affinity = self.get_option(self.SECTION, 'affinity')
if self.taskset_affinity:
self.__setup_taskset(self.taskset_affinity, pid=os.getpid())
for plugin in list(self.plugins.values()):
if not self.interrupted.is_set():
logger.debug("Configuring %s", plugin)
plugin.configure()
def plugins_prepare_test(self):
""" Call prepare_test() on all plugins """
logger.info("Preparing test...")
self.publish("core", "stage", "prepare")
threading.Thread(target=monitor_cpu_time, args=(os.getpid(), self.threads_stats), name='CPUMonitor', daemon=True).start()
for plugin in list(self.plugins.values()):
if not self.interrupted.is_set():
logger.debug("Preparing %s", plugin)
plugin.prepare_test()
def plugins_start_test(self):
""" Call start_test() on all plugins """
if not self.interrupted.is_set():
logger.info("Starting test...")
self.publish("core", "stage", "start")
self.job.aggregator.start_test()
for plugin in list(self.plugins.values()):
logger.debug("Starting %s", plugin)
start_time = time.time()
plugin.start_test()
logger.debug("Plugin {0:s} required {1:f} seconds to start".format(
repr(plugin), time.time() - start_time)
)
def wait_for_finish(self):
"""
Call is_test_finished() on all plugins 'till one of them initiates exit
"""
if not self.interrupted.is_set():
logger.info("Waiting for test to finish...")
logger.info('Artifacts dir: {dir}'.format(dir=self.artifacts_dir))
self.publish("core", "stage", "shoot")
if not self.plugins:
raise RuntimeError("It's strange: we have no plugins loaded...")
while not self.interrupted.is_set():
begin_time = time.time()
aggr_retcode = self.job.aggregator.is_test_finished()
if aggr_retcode >= 0:
logger.debug("Stopped by aggregator with code %i", aggr_retcode)
return aggr_retcode
for plugin in list(self.plugins.values()):
plugin_polling_start = time.time()
retcode = plugin.is_test_finished()
logger.debug("Polling %s took %.2fms", plugin, (time.time() - plugin_polling_start) * 1000)
if retcode >= 0:
logger.info("Call for finish by %s, code %s", plugin, retcode)
return retcode
end_time = time.time()
diff = end_time - begin_time
logger.debug("Polling took %.2fms", diff * 1000)
logger.debug("Tank status: %s", json.dumps(self.status))
# screen refresh every 0.5 s
if diff < 0.5:
time.sleep(0.5 - diff)
return 1
def plugins_end_test(self, retcode):
""" Call end_test() on all plugins """
logger.info("Finishing test...")
self.publish("core", "stage", "end")
logger.info("Stopping load generator and aggregator")
retcode = self.job.aggregator.end_test(retcode)
logger.debug("RC after: %s", retcode)
logger.info('Stopping monitoring')
for plugin in self.job.monitoring_plugins:
logger.info('Stopping %s', plugin)
retcode = plugin.end_test(retcode) or retcode
logger.info('RC after: %s', retcode)
for plugin in [p for p in list(self.plugins.values()) if
p is not self.job.generator_plugin and p not in self.job.monitoring_plugins]:
logger.debug("Finalize %s", plugin)
try:
logger.debug("RC before: %s", retcode)
retcode = plugin.end_test(retcode)
logger.debug("RC after: %s", retcode)
except Exception: # FIXME too broad exception clause
logger.error("Failed finishing plugin %s", plugin, exc_info=True)
if not retcode:
retcode = 1
return retcode
def plugins_post_process(self, retcode):
"""
Call post_process() on all plugins
"""
logger.info("Post-processing test...")
self.publish("core", "stage", "post_process")
for plugin in list(self.plugins.values()):
logger.debug("Post-process %s", plugin)
try:
logger.debug("RC before: %s", retcode)
retcode = plugin.post_process(retcode)
logger.debug("RC after: %s", retcode)
except Exception: # FIXME too broad exception clause
logger.error("Failed post-processing plugin %s", plugin, exc_info=True)
if not retcode:
retcode = 1
return retcode
def interrupt(self):
logger.warning('Interrupting')
def __setup_taskset(self, affinity, pid=None, args=None):
""" if pid specified: set process w/ pid `pid` CPU affinity to specified `affinity` core(s)
if args specified: modify list of args for Popen to start w/ taskset w/ affinity `affinity`
"""
self.taskset_path = self.get_option(self.SECTION, 'taskset_path')
if args:
return [self.taskset_path, '-c', affinity] + args
if pid:
args = "%s -pc %s %s" % (self.taskset_path, affinity, pid)
retcode, stdout, stderr = execute(args, shell=True, poll_period=0.1, catch_out=True)
logger.debug('taskset for pid %s stdout: %s', pid, stdout)
if retcode == 0:
logger.info("Enabled taskset for pid %s with affinity %s", str(pid), affinity)
else:
logger.debug('Taskset setup failed w/ retcode :%s', retcode)
raise KeyError(stderr)
def _collect_artifacts(self, validation_failed=False):
logger.debug("Collecting artifacts")
logger.info("Artifacts dir: %s", self.artifacts_dir)
for filename, keep in list(self.artifact_files.items()):
try:
self.__collect_file(filename, keep)
except Exception as ex:
logger.warn("Failed to collect file %s: %s", filename, ex)
def get_option(self, section, option, default=None):
return self.config.get_option(section, option, default)
def set_option(self, section, option, value):
"""
Set an option in storage
"""
raise NotImplementedError
def set_exitcode(self, code):
self.output['core']['exitcode'] = code
def get_plugin_of_type(self, plugin_class):
"""
Retrieve a plugin of desired class, KeyError raised otherwise
"""
logger.debug("Searching for plugin: %s", plugin_class)
matches = [plugin for plugin in list(self.plugins.values()) if isinstance(plugin, plugin_class)]
if matches:
if len(matches) > 1:
logger.debug(
"More then one plugin of type %s found. Using first one.",
plugin_class)
return matches[-1]
else:
raise KeyError("Requested plugin type not found: %s" % plugin_class)
def get_plugins_of_type(self, plugin_class):
"""
Retrieve a list of plugins of desired class, KeyError raised otherwise
"""
logger.debug("Searching for plugins: %s", plugin_class)
matches = [plugin for plugin in list(self.plugins.values()) if isinstance(plugin, plugin_class)]
if matches:
return matches
else:
raise KeyError("Requested plugin type not found: %s" % plugin_class)
def get_jobno(self, plugin_name='plugin_lunapark'):
uploader_plugin = self.plugins[plugin_name]
return uploader_plugin.lp_job.number
def __collect_file(self, filename, keep_original=False):
"""
Move or copy single file to artifacts dir
"""
dest = self.artifacts_dir + '/' + os.path.basename(filename)
logger.debug("Collecting file: %s to %s", filename, dest)
if not filename or not os.path.exists(filename):
logger.warning("File not found to collect: %s", filename)
return
if os.path.exists(dest):
# FIXME: 3 find a way to store artifacts anyway
logger.warning("File already exists: %s", dest)
return
if keep_original:
shutil.copy(filename, self.artifacts_dir)
else:
shutil.move(filename, self.artifacts_dir)
os.chmod(dest, 0o644)
def add_artifact_file(self, filename, keep_original=False):
"""
Add file to be stored as result artifact on post-process phase
"""
if filename:
logger.debug(
"Adding artifact file to collect (keep=%s): %s", keep_original,
filename)
self.artifact_files[filename] = keep_original
def apply_shorthand_options(self, options, default_section='DEFAULT'):
for option_str in options:
key, value = option_str.split('=')
try:
section, option = key.split('.')
except ValueError:
section = default_section
option = key
logger.debug(
"Override option: %s => [%s] %s=%s", option_str, section,
option, value)
self.set_option(section, option, value)
def mkstemp(self, suffix, prefix, directory=None):
"""
Generate temp file name in artifacts base dir
and close temp file handle
"""
if not directory:
directory = self.artifacts_dir
fd, fname = tempfile.mkstemp(suffix, prefix, directory)
os.close(fd)
os.chmod(fname, 0o644) # FIXME: chmod to parent dir's mode?
return fname
def publish(self, publisher, key, value):
update_status(self.status, [publisher] + key.split('.'), value)
def close(self):
"""
Call close() for all plugins
"""
logger.info("Close allocated resources...")
for plugin in list(self.plugins.values()):
logger.debug("Close %s", plugin)
try:
plugin.close()
except Exception as ex:
logger.error("Failed closing plugin %s: %s", plugin, ex)
logger.debug(
"Failed closing plugin: %s", traceback.format_exc(ex))
@property
def artifacts_dir(self):
if not self._artifacts_dir:
new_path = os.path.join(self.artifacts_base_dir, self.test_id)
if not os.path.isdir(new_path):
os.makedirs(new_path)
os.chmod(new_path, 0o755)
self._artifacts_dir = os.path.abspath(new_path)
return self._artifacts_dir
@staticmethod
def get_user_agent():
tank_agent = 'YandexTank/{}'.format(
pkg_resources.require('yandextank')[0].version)
py_info = sys.version_info
python_agent = 'Python/{}.{}.{}'.format(
py_info[0], py_info[1], py_info[2])
os_agent = 'OS/{}'.format(platform.platform())
return ' '.join((tank_agent, python_agent, os_agent))
def register_plugin(self, plugin_name, instance):
if self._plugins is None:
self._plugins = {}
if self._plugins.get(plugin_name, None) is not None:
logger.exception('Plugins\' names should diverse')
self._plugins[plugin_name] = instance
def save_cfg(self, path):
self.config.dump(path)
def plugins_cleanup(self):
for plugin_name, plugin in list(self.plugins.items()):
logger.info('Cleaning up plugin {}'.format(plugin_name))
plugin.cleanup()
class Lock(object):
PID = 'pid'
TEST_ID = 'test_id'
TEST_DIR = 'test_dir'
LOCK_FILE_WILDCARD = 'lunapark_*.lock'
def __init__(self, test_id, test_dir, pid=None):
self.test_id = test_id
self.test_dir = test_dir
self.pid = pid if pid is not None else os.getpid()
self.info = {
self.PID: self.pid,
self.TEST_ID: self.test_id,
self.TEST_DIR: self.test_dir
}
self.lock_file = None
def acquire(self, lock_dir, ignore=False):
is_locked = self.is_locked(lock_dir)
if not ignore and is_locked:
raise LockError("Lock file(s) found\n{}".format(is_locked))
prefix, suffix = self.LOCK_FILE_WILDCARD.split('*')
fh, self.lock_file = tempfile.mkstemp(suffix, prefix, lock_dir)
os.close(fh)
with open(self.lock_file, 'w') as f:
yaml.dump(self.info, f)
os.chmod(self.lock_file, 0o644)
return self
def release(self):
if self.lock_file is not None and os.path.exists(self.lock_file):
logger.info("Releasing lock: %s", self.lock_file)
os.remove(self.lock_file)
else:
logger.warning('Lock file not found')
@classmethod
def load(cls, path):
with open(path) as f:
info = yaml.load(f)
pid = info.get(cls.PID)
test_id = info.get(cls.TEST_ID)
test_dir = info.get(cls.TEST_DIR)
lock = Lock(test_id, test_dir, pid)
lock.lock_file = path
return lock
@classmethod
def is_locked(cls, lock_dir='/var/lock'):
for filename in os.listdir(lock_dir):
if fnmatch.fnmatch(filename, cls.LOCK_FILE_WILDCARD):
full_name = os.path.join(lock_dir, filename)
logger.info("Lock file is found: %s", full_name)
try:
running_lock = cls.load(full_name)
if not running_lock.pid:
msg = 'Failed to get {} from lock file {}.'.format(cls.PID,
full_name)
logger.warning(msg)
return msg
else:
if not pid_exists(int(running_lock.pid)):
logger.info("Lock PID %s not exists, ignoring and trying to remove", running_lock.pid)
try:
os.remove(full_name)
except Exception as exc:
logger.warning("Failed to delete lock %s: %s", full_name, exc)
return False
else:
return "Another test is running with pid {}".format(running_lock.pid)
except Exception:
msg = "Failed to load info from lock %s" % full_name
logger.warn(msg, exc_info=True)
return msg
return False
@classmethod
def running_ids(cls, lock_dir='/var/lock'):
return [Lock.load(fname).test_id for fname in glob.glob(os.path.join(lock_dir, cls.LOCK_FILE_WILDCARD))]
class ConfigManager(object):
""" Option storage class """
def __init__(self):
self.file = None
self.config = configparser.ConfigParser()
def load_files(self, configs):
""" Read configs set into storage """
logger.debug("Reading configs: %s", configs)
config_filenames = [resource.resource_filename(config) for config in configs]
try:
self.config.read(config_filenames)
except Exception as ex:
logger.error("Can't load configs: %s", ex)
raise ex
def flush(self, filename=None):
""" Flush current stat to file """
if not filename:
filename = self.file
if filename:
with open(filename, 'w') as handle:
self.config.write(handle)
def get_options(self, section, prefix=''):
""" Get options list with requested prefix """
res = []
try:
for option in self.config.options(section):
if not prefix or option.find(prefix) == 0:
res += [(
option[len(prefix):], self.config.get(section, option))]
except configparser.NoSectionError as ex:
logger.warning("No section: %s", ex)
logger.debug(
"Section: [%s] prefix: '%s' options:\n%s", section, prefix, res)
return res
def find_sections(self, prefix):
""" return sections with specified prefix """
res = []
for section in self.config.sections():
if section.startswith(prefix):
res.append(section)
return res
| lgpl-2.1 | 6,004,892,421,668,816,000 | 36.819277 | 129 | 0.565546 | false |
laserson/rock-health-python | luigi/luigi-ngrams.py | 1 | 1715 | import os
import re
import luigi
import luigi.hadoop
import luigi.hdfs
class InputText(luigi.ExternalTask):
path = luigi.Parameter()
def output(self):
return luigi.hdfs.HdfsTarget(self.path)
class Ngrams(luigi.hadoop.JobTask):
source = luigi.Parameter()
destination = luigi.Parameter()
# overrides superclass; gets set as jobconf:
n_reduce_tasks = luigi.IntParameter(default=10)
def requires(self):
tasks = []
paths = luigi.hdfs.HdfsClient().listdir(self.source, ignore_directories=True, recursive=True)
for path in paths:
tasks.append(InputText(path))
return tasks
def output(self):
return luigi.hdfs.HdfsTarget(self.destination)
def init_mapper(self):
try:
input_file = os.environ['map_input_file']
except KeyError:
input_file = os.environ['mapreduce_map_input_file']
self.expected_tokens = int(re.findall(r'([\d]+)gram', os.path.basename(input_file))[0])
def mapper(self, line):
data = line.split('\t')
if len(data) < 3:
return
# unpack data
ngram = data[0].split()
year = data[1]
count = int(data[2])
if len(ngram) != self.expected_tokens:
return
# generate key
pair = sorted([ngram[0], ngram[self.expected_tokens - 1]])
k = pair + [year]
yield (k, count)
def combiner(self, key, values):
yield (key, sum(values))
def reducer(self, key, values):
yield "%s\t%s\t%s" % tuple(key), str(sum(values))
if __name__ == '__main__':
luigi.run()
| apache-2.0 | 8,702,826,489,544,018,000 | 25.796875 | 101 | 0.566764 | false |
phac-nml/dynamic-tool-destination | tests/mockGalaxy.py | 1 | 3562 | """
# =============================================================================
Copyright Government of Canada 2015
Written by: Eric Enns, Public Health Agency of Canada,
National Microbiology Laboratory
Daniel Bouchard, Public Health Agency of Canada,
National Microbiology Laboratory
Funded by the National Micriobiology Laboratory
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this work except in compliance with the License. You may obtain a copy of the
License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
# =============================================================================
"""
'''
Created on June 23rd, 2015
@author: Daniel Bouchard
'''
from collections import namedtuple
#Job mock and helpers=======================================
class Job(object):
def __init__(self):
self.input_datasets = []
self.input_library_datasets = []
self.param_values = dict()
def get_param_values(self, app, ignore_errors=False):
return self.param_values
def set_arg_value(self, key, value):
self.param_values[key] = value
def add_input_dataset(self, dataset):
self.input_datasets.append(dataset)
class InputDataset(object):
def __init__(self, name, dataset):
self.name = name
self.dataset = dataset
class NotAFile(object):
pass
class Dataset(object):
def __init__(self, file_name, file_ext, value):
self.file_name = file_name
self.datatype = Datatype(file_ext)
self.ext = file_ext
self.metadata = dict()
self.metadata['sequences'] = value
def get_metadata(self):
return self.metadata
class Datatype(object):
def __init__(self, file_ext):
self.file_ext = file_ext
#Tool mock and helpers=========================================
class Tool(object):
def __init__(self, id):
self.old_id = id
self.installed_tool_dependencies = []
def add_tool_dependency(self, dependency):
self.installed_tool_dependencies.append(dependency)
class ToolDependency(object):
def __init__(self, name, dir_name):
self.name = name
self.dir_name = dir_name
def installation_directory(self, app):
return self.dir_name
#App mock=======================================================
class App(object):
def __init__(self, tool_id, params):
self.job_config = JobConfig( tool_id, params )
class JobConfig(object):
def __init__(self, tool_id, params):
self.info = namedtuple('info', ['id', 'nativeSpec', 'runner'])
self.tool_id = tool_id
self.nativeSpec = params
self.default_id = "waffles_default"
self.defNativeSpec = "-q test.q"
self.defRunner = "drmaa"
self.keys = { tool_id: self.info( self.tool_id, self.nativeSpec, self.defRunner ),
"waffles_default": self.info( self.default_id, self.defNativeSpec, self.defRunner ), }
def get_destination(self, tool_id):
return self.keys[tool_id]
#JobMappingException mock=======================================
class JobMappingException(Exception):
pass
class JobDestination(object):
def __init__(self, **kwd):
self.id = kwd.get('id')
self.nativeSpec = kwd.get('params')['nativeSpecification']
self.runner = kwd.get('runner')
| apache-2.0 | 8,361,889,673,550,643,000 | 29.444444 | 107 | 0.624088 | false |
benschmaus/catapult | telemetry/telemetry/internal/backends/chrome_inspector/inspector_backend.py | 1 | 18595 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import logging
import socket
import sys
from py_trace_event import trace_event
from telemetry.core import exceptions
from telemetry import decorators
from telemetry.internal.backends.chrome_inspector import devtools_http
from telemetry.internal.backends.chrome_inspector import inspector_console
from telemetry.internal.backends.chrome_inspector import inspector_memory
from telemetry.internal.backends.chrome_inspector import inspector_page
from telemetry.internal.backends.chrome_inspector import inspector_runtime
from telemetry.internal.backends.chrome_inspector import inspector_websocket
from telemetry.internal.backends.chrome_inspector import websocket
from telemetry.util import js_template
import py_utils
def _HandleInspectorWebSocketExceptions(func):
"""Decorator for converting inspector_websocket exceptions.
When an inspector_websocket exception is thrown in the original function,
this decorator converts it into a telemetry exception and adds debugging
information.
"""
@functools.wraps(func)
def inner(inspector_backend, *args, **kwargs):
try:
return func(inspector_backend, *args, **kwargs)
except (socket.error, websocket.WebSocketException,
inspector_websocket.WebSocketDisconnected) as e:
inspector_backend._ConvertExceptionFromInspectorWebsocket(e)
return inner
class InspectorBackend(object):
"""Class for communicating with a devtools client.
The owner of an instance of this class is responsible for calling
Disconnect() before disposing of the instance.
"""
__metaclass__ = trace_event.TracedMetaClass
  def __init__(self, app, devtools_client, context, timeout=120):
    """Opens an inspector websocket connection to the given devtools context.

    Args:
      app: The app (e.g. browser) instance that owns this backend.
      devtools_client: Client used to talk to the devtools endpoint.
      context: Dict describing the inspectable context; must provide the
          'id' and 'webSocketDebuggerUrl' keys (see the id and debugger_url
          properties).
      timeout: Seconds allowed for connecting the websocket and for the
          page inspector's setup.
    """
    self._websocket = inspector_websocket.InspectorWebsocket()
    # Register the handler for Inspector.* notifications before connecting.
    self._websocket.RegisterDomain(
        'Inspector', self._HandleInspectorDomainNotification)

    self._app = app
    self._devtools_client = devtools_client
    # Be careful when using the context object, since the data may be
    # outdated since this is never updated once InspectorBackend is
    # created. Consider an updating strategy for this. (For an example
    # of the subtlety, see the logic for self.url property.)
    self._context = context

    logging.debug('InspectorBackend._Connect() to %s', self.debugger_url)
    try:
      self._websocket.Connect(self.debugger_url, timeout)
      self._console = inspector_console.InspectorConsole(self._websocket)
      self._memory = inspector_memory.InspectorMemory(self._websocket)
      self._page = inspector_page.InspectorPage(
          self._websocket, timeout=timeout)
      self._runtime = inspector_runtime.InspectorRuntime(self._websocket)
    except (websocket.WebSocketException, exceptions.TimeoutException,
            py_utils.TimeoutException) as e:
      # Connection/setup failures are routed through the converter, which
      # re-raises them with extra debugging information.
      self._ConvertExceptionFromInspectorWebsocket(e)
def Disconnect(self):
"""Disconnects the inspector websocket.
This method intentionally leaves the self._websocket object around, so that
future calls it to it will fail with a relevant error.
"""
if self._websocket:
self._websocket.Disconnect()
  def __del__(self):
    # Best-effort cleanup: close the websocket when this backend is
    # garbage collected (Disconnect is a no-op once already disconnected).
    self.Disconnect()
  @property
  def app(self):
    """The app instance passed at construction time."""
    return self._app
  @property
  def url(self):
    """Returns the URL of the tab, as reported by devtools.

    Unlike the cached self._context data, this queries devtools on every
    access, so the value reflects the tab's current location.

    Raises:
      devtools_http.DevToolsClientConnectionError
    """
    return self._devtools_client.GetUrl(self.id)
  @property
  def id(self):
    """The devtools context id ('id' field of the inspectable context)."""
    return self._context['id']
  @property
  def debugger_url(self):
    """The websocket URL used to talk to this context's devtools agent."""
    return self._context['webSocketDebuggerUrl']
def GetWebviewInspectorBackends(self):
"""Returns a list of InspectorBackend instances associated with webviews.
Raises:
devtools_http.DevToolsClientConnectionError
"""
inspector_backends = []
devtools_context_map = self._devtools_client.GetUpdatedInspectableContexts()
for context in devtools_context_map.contexts:
if context['type'] == 'webview':
inspector_backends.append(
devtools_context_map.GetInspectorBackend(context['id']))
return inspector_backends
def IsInspectable(self):
"""Whether the tab is inspectable, as reported by devtools."""
try:
return self._devtools_client.IsInspectable(self.id)
except devtools_http.DevToolsClientConnectionError:
return False
# Public methods implemented in JavaScript.
  @property
  @decorators.Cache
  def screenshot_supported(self):
    # Always true for this backend; the result is memoized by
    # decorators.Cache.
    return True
  @_HandleInspectorWebSocketExceptions
  def Screenshot(self, timeout):
    """Captures a screenshot of the page via the page inspector.

    Args:
      timeout: Seconds to wait for the capture to complete.
    """
    assert self.screenshot_supported, 'Browser does not support screenshotting'
    return self._page.CaptureScreenshot(timeout)
# Memory public methods.
@_HandleInspectorWebSocketExceptions
def GetDOMStats(self, timeout):
"""Gets memory stats from the DOM.
Raises:
inspector_memory.InspectorMemoryException
exceptions.TimeoutException
exceptions.DevtoolsTargetCrashException
"""
dom_counters = self._memory.GetDOMCounters(timeout)
return {
'document_count': dom_counters['documents'],
'node_count': dom_counters['nodes'],
'event_listener_count': dom_counters['jsEventListeners']
}
# Page public methods.
  @_HandleInspectorWebSocketExceptions
  def WaitForNavigate(self, timeout):
    # Delegates to the page inspector; waits up to |timeout| seconds for
    # navigation to complete.
    self._page.WaitForNavigate(timeout)
  @_HandleInspectorWebSocketExceptions
  def Navigate(self, url, script_to_evaluate_on_commit, timeout):
    """Navigates the tab to |url|, optionally injecting a script on commit."""
    self._page.Navigate(url, script_to_evaluate_on_commit, timeout)
  @_HandleInspectorWebSocketExceptions
  def GetCookieByName(self, name, timeout):
    """Returns the cookie named |name|, as reported by the page inspector."""
    return self._page.GetCookieByName(name, timeout)
# Console public methods.
  @_HandleInspectorWebSocketExceptions
  def GetCurrentConsoleOutputBuffer(self, timeout=10):
    """Returns the current contents of the devtools console output buffer."""
    return self._console.GetCurrentConsoleOutputBuffer(timeout)
# Runtime public methods.
@_HandleInspectorWebSocketExceptions
def ExecuteJavaScript(self, statement, **kwargs):
"""Executes a given JavaScript statement. Does not return the result.
Example: runner.ExecuteJavaScript('var foo = {{ value }};', value='hi');
Args:
statement: The statement to execute (provided as a string).
Optional keyword args:
timeout: The number of seconds to wait for the statement to execute.
context_id: The id of an iframe where to execute the code; the main page
has context_id=1, the first iframe context_id=2, etc.
Additional keyword arguments provide values to be interpolated within
the statement. See telemetry.util.js_template for details.
Raises:
py_utils.TimeoutException
exceptions.EvaluationException
exceptions.WebSocketException
exceptions.DevtoolsTargetCrashException
"""
# Use the default both when timeout=None or the option is ommited.
timeout = kwargs.pop('timeout', None) or 60
context_id = kwargs.pop('context_id', None)
statement = js_template.Render(statement, **kwargs)
self._runtime.Execute(statement, context_id, timeout)
def EvaluateJavaScript(self, expression, **kwargs):
"""Returns the result of evaluating a given JavaScript expression.
Example: runner.ExecuteJavaScript('document.location.href');
Args:
expression: The expression to execute (provided as a string).
Optional keyword args:
timeout: The number of seconds to wait for the expression to evaluate.
context_id: The id of an iframe where to execute the code; the main page
has context_id=1, the first iframe context_id=2, etc.
Additional keyword arguments provide values to be interpolated within
the expression. See telemetry.util.js_template for details.
Raises:
py_utils.TimeoutException
exceptions.EvaluationException
exceptions.WebSocketException
exceptions.DevtoolsTargetCrashException
"""
# Use the default both when timeout=None or the option is ommited.
timeout = kwargs.pop('timeout', None) or 60
context_id = kwargs.pop('context_id', None)
expression = js_template.Render(expression, **kwargs)
return self._EvaluateJavaScript(expression, context_id, timeout)
def WaitForJavaScriptCondition(self, condition, **kwargs):
"""Wait for a JavaScript condition to become truthy.
Example: runner.WaitForJavaScriptCondition('window.foo == 10');
Args:
condition: The JavaScript condition (provided as string).
Optional keyword args:
timeout: The number in seconds to wait for the condition to become
True (default to 60).
context_id: The id of an iframe where to execute the code; the main page
has context_id=1, the first iframe context_id=2, etc.
Additional keyword arguments provide values to be interpolated within
the expression. See telemetry.util.js_template for details.
Returns:
The value returned by the JavaScript condition that got interpreted as
true.
Raises:
py_utils.TimeoutException
exceptions.EvaluationException
exceptions.WebSocketException
exceptions.DevtoolsTargetCrashException
"""
# Use the default both when timeout=None or the option is ommited.
timeout = kwargs.pop('timeout', None) or 60
context_id = kwargs.pop('context_id', None)
condition = js_template.Render(condition, **kwargs)
def IsJavaScriptExpressionTrue():
return self._EvaluateJavaScript(condition, context_id, timeout)
try:
return py_utils.WaitFor(IsJavaScriptExpressionTrue, timeout)
except py_utils.TimeoutException as e:
# Try to make timeouts a little more actionable by dumping console output.
debug_message = None
try:
debug_message = (
'Console output:\n%s' %
self.GetCurrentConsoleOutputBuffer())
except Exception as e:
debug_message = (
'Exception thrown when trying to capture console output: %s' %
repr(e))
raise py_utils.TimeoutException(
e.message + '\n' + debug_message)
@_HandleInspectorWebSocketExceptions
def EnableAllContexts(self):
"""Allows access to iframes.
Raises:
exceptions.WebSocketDisconnected
exceptions.TimeoutException
exceptions.DevtoolsTargetCrashException
"""
return self._runtime.EnableAllContexts()
@_HandleInspectorWebSocketExceptions
def SynthesizeScrollGesture(self, x=100, y=800, xDistance=0, yDistance=-500,
xOverscroll=None, yOverscroll=None,
preventFling=None, speed=None,
gestureSourceType=None, repeatCount=None,
repeatDelayMs=None, interactionMarkerName=None,
timeout=60):
"""Runs an inspector command that causes a repeatable browser driven scroll.
Args:
x: X coordinate of the start of the gesture in CSS pixels.
y: Y coordinate of the start of the gesture in CSS pixels.
xDistance: Distance to scroll along the X axis (positive to scroll left).
yDistance: Distance to scroll along the Y axis (positive to scroll up).
xOverscroll: Number of additional pixels to scroll back along the X axis.
xOverscroll: Number of additional pixels to scroll back along the Y axis.
preventFling: Prevents a fling gesture.
speed: Swipe speed in pixels per second.
gestureSourceType: Which type of input events to be generated.
repeatCount: Number of additional repeats beyond the first scroll.
repeatDelayMs: Number of milliseconds delay between each repeat.
interactionMarkerName: The name of the interaction markers to generate.
Raises:
exceptions.TimeoutException
exceptions.DevtoolsTargetCrashException
"""
params = {
'x': x,
'y': y,
'xDistance': xDistance,
'yDistance': yDistance
}
if preventFling is not None:
params['preventFling'] = preventFling
if xOverscroll is not None:
params['xOverscroll'] = xOverscroll
if yOverscroll is not None:
params['yOverscroll'] = yOverscroll
if speed is not None:
params['speed'] = speed
if repeatCount is not None:
params['repeatCount'] = repeatCount
if gestureSourceType is not None:
params['gestureSourceType'] = gestureSourceType
if repeatDelayMs is not None:
params['repeatDelayMs'] = repeatDelayMs
if interactionMarkerName is not None:
params['interactionMarkerName'] = interactionMarkerName
scroll_command = {
'method': 'Input.synthesizeScrollGesture',
'params': params
}
return self._runtime.RunInspectorCommand(scroll_command, timeout)
@_HandleInspectorWebSocketExceptions
def DispatchKeyEvent(self, keyEventType='char', modifiers=None,
timestamp=None, text=None, unmodifiedText=None,
keyIdentifier=None, domCode=None, domKey=None,
windowsVirtualKeyCode=None, nativeVirtualKeyCode=None,
autoRepeat=None, isKeypad=None, isSystemKey=None,
timeout=60):
"""Dispatches a key event to the page.
Args:
type: Type of the key event. Allowed values: 'keyDown', 'keyUp',
'rawKeyDown', 'char'.
modifiers: Bit field representing pressed modifier keys. Alt=1, Ctrl=2,
Meta/Command=4, Shift=8 (default: 0).
timestamp: Time at which the event occurred. Measured in UTC time in
seconds since January 1, 1970 (default: current time).
text: Text as generated by processing a virtual key code with a keyboard
layout. Not needed for for keyUp and rawKeyDown events (default: '').
unmodifiedText: Text that would have been generated by the keyboard if no
modifiers were pressed (except for shift). Useful for shortcut
(accelerator) key handling (default: "").
keyIdentifier: Unique key identifier (e.g., 'U+0041') (default: '').
windowsVirtualKeyCode: Windows virtual key code (default: 0).
nativeVirtualKeyCode: Native virtual key code (default: 0).
autoRepeat: Whether the event was generated from auto repeat (default:
False).
isKeypad: Whether the event was generated from the keypad (default:
False).
isSystemKey: Whether the event was a system key event (default: False).
Raises:
exceptions.TimeoutException
exceptions.DevtoolsTargetCrashException
"""
params = {
'type': keyEventType,
}
if modifiers is not None:
params['modifiers'] = modifiers
if timestamp is not None:
params['timestamp'] = timestamp
if text is not None:
params['text'] = text
if unmodifiedText is not None:
params['unmodifiedText'] = unmodifiedText
if keyIdentifier is not None:
params['keyIdentifier'] = keyIdentifier
if domCode is not None:
params['code'] = domCode
if domKey is not None:
params['key'] = domKey
if windowsVirtualKeyCode is not None:
params['windowsVirtualKeyCode'] = windowsVirtualKeyCode
if nativeVirtualKeyCode is not None:
params['nativeVirtualKeyCode'] = nativeVirtualKeyCode
if autoRepeat is not None:
params['autoRepeat'] = autoRepeat
if isKeypad is not None:
params['isKeypad'] = isKeypad
if isSystemKey is not None:
params['isSystemKey'] = isSystemKey
key_command = {
'method': 'Input.dispatchKeyEvent',
'params': params
}
return self._runtime.RunInspectorCommand(key_command, timeout)
# Methods used internally by other backends.
def _HandleInspectorDomainNotification(self, res):
if (res['method'] == 'Inspector.detached' and
res.get('params', {}).get('reason', '') == 'replaced_with_devtools'):
self._WaitForInspectorToGoAway()
return
if res['method'] == 'Inspector.targetCrashed':
exception = exceptions.DevtoolsTargetCrashException(self.app)
self._AddDebuggingInformation(exception)
raise exception
def _WaitForInspectorToGoAway(self):
self._websocket.Disconnect()
raw_input('The connection to Chrome was lost to the inspector ui.\n'
'Please close the inspector and press enter to resume '
'Telemetry run...')
raise exceptions.DevtoolsTargetCrashException(
self.app, 'Devtool connection with the browser was interrupted due to '
'the opening of an inspector.')
def _ConvertExceptionFromInspectorWebsocket(self, error):
"""Converts an Exception from inspector_websocket.
This method always raises a Telemetry exception. It appends debugging
information. The exact exception raised depends on |error|.
Args:
error: An instance of socket.error or websocket.WebSocketException.
Raises:
exceptions.TimeoutException: A timeout occurred.
exceptions.DevtoolsTargetCrashException: On any other error, the most
likely explanation is that the devtool's target crashed.
"""
if isinstance(error, websocket.WebSocketTimeoutException):
new_error = exceptions.TimeoutException()
new_error.AddDebuggingMessage(exceptions.AppCrashException(
self.app, 'The app is probably crashed:\n'))
else:
new_error = exceptions.DevtoolsTargetCrashException(self.app)
original_error_msg = 'Original exception:\n' + str(error)
new_error.AddDebuggingMessage(original_error_msg)
self._AddDebuggingInformation(new_error)
raise new_error, None, sys.exc_info()[2]
def _AddDebuggingInformation(self, error):
"""Adds debugging information to error.
Args:
error: An instance of exceptions.Error.
"""
if self.IsInspectable():
msg = (
'Received a socket error in the browser connection and the tab '
'still exists. The operation probably timed out.'
)
else:
msg = (
'Received a socket error in the browser connection and the tab no '
'longer exists. The tab probably crashed.'
)
error.AddDebuggingMessage(msg)
error.AddDebuggingMessage('Debugger url: %s' % self.debugger_url)
@_HandleInspectorWebSocketExceptions
def _EvaluateJavaScript(self, expression, context_id, timeout):
return self._runtime.Evaluate(expression, context_id, timeout)
@_HandleInspectorWebSocketExceptions
def CollectGarbage(self):
self._page.CollectGarbage()
| bsd-3-clause | -3,750,426,105,838,133,000 | 35.821782 | 80 | 0.703469 | false |
dyzajash/scanlation_cms | scanlation_cms/app.py | 1 | 3430 | # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
from flask import Flask, render_template, request, send_from_directory
from flask.ext.principal import RoleNeed, UserNeed, identity_loaded
from flask_login import current_user
from scanlation_cms import api, panel, public, user
from scanlation_cms.assets import assets
from scanlation_cms.extensions import (babel, bcrypt, cache, db, debug_toolbar,
flask_log, img_resize, login_manager,
migrate, principal)
from scanlation_cms.settings import LANGUAGES, Config, ProdConfig
from scanlation_cms.utils import RegexConverter
def create_app(config_object=ProdConfig):
"""An application factory.
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
identity_loaded.connect(on_identity_loaded, app, False)
register_special_routes(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
principal.init_app(app)
debug_toolbar.init_app(app)
flask_log.init_app(app)
migrate.init_app(app, db)
img_resize.init_app(app)
babel.init_app(app)
babel.localeselector(get_locale)
return None
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(user.views.blueprint)
app.register_blueprint(panel.views.blueprint)
app.register_blueprint(api.views.blueprint)
return None
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('errors/{0}.html'.format(error_code)), error_code
for errcode in [401, 403, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_special_routes(app):
"""Register special app routes."""
app.url_map.converters['regex'] = RegexConverter
if app.config['ENV'] != 'prod':
app.add_url_rule(
'/uploads/<regex("([\w\d_/-]+)?.(?:jpe?g|gif|png)"):filename>',
'uploaded_file',
uploaded_file
)
return None
def on_identity_loaded(sender, identity):
"""Flask-Principal signal handler."""
# Set the identity user object
identity.user = current_user
# Add the UserNeed to the identity
if hasattr(current_user, 'get_id'):
identity.provides.add(UserNeed(current_user.get_id))
# Assuming the User model has a list of roles, update the
# identity with the roles that the user provides
if hasattr(current_user, 'roles'):
if current_user.roles:
for role in current_user.roles:
identity.provides.add(RoleNeed(role.name))
def uploaded_file(filename):
"""Sample upload file handler for development."""
return send_from_directory(Config.UPLOADS_DIR, filename)
def get_locale():
"""Get request locale."""
return request.accept_languages.best_match(LANGUAGES.keys())
| bsd-3-clause | 9,077,521,675,694,444,000 | 31.666667 | 80 | 0.669971 | false |
Panagiotis-Kon/empower-runtime | empower/vbsp/vbspconnection.py | 1 | 15120 | #!/usr/bin/env python3
#
# Copyright (c) 2016 Supreeth Herle
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""VBSP Connection."""
import time
import tornado.ioloop
import socket
import sys
from protobuf_to_dict import protobuf_to_dict
from empower.vbsp import EMAGE_VERSION
from empower.vbsp import PRT_UE_JOIN
from empower.vbsp import PRT_UE_LEAVE
from empower.vbsp import PRT_VBSP_HELLO
from empower.vbsp import PRT_VBSP_BYE
from empower.vbsp import PRT_VBSP_REGISTER
from empower.vbsp import PRT_VBSP_TRIGGER_EVENT
from empower.vbsp import PRT_VBSP_AGENT_SCHEDULED_EVENT
from empower.vbsp import PRT_VBSP_SINGLE_EVENT
from empower.vbsp.messages import main_pb2
from empower.vbsp.messages import configs_pb2
from empower.core.utils import hex_to_ether
from empower.core.utils import ether_to_hex
from empower.core.ue import UE
from empower.main import RUNTIME
import empower.logger
LOG = empower.logger.get_logger()
def create_header(t_id, b_id, header):
"""Create message header."""
if not header:
LOG.error("header parameter is None")
header.vers = EMAGE_VERSION
# Set the transaction identifier (module id).
header.t_id = t_id
# Set the Base station identifier.
header.b_id = b_id
# Start the sequence number for messages from zero.
header.seq = 0
def serialize_message(message):
"""Serialize message."""
if not message:
LOG.error("message parameter is None")
return None
return message.SerializeToString()
def deserialize_message(serialized_data):
"""De-Serialize message."""
if not serialized_data:
LOG.error("Received serialized data is None")
return None
msg = main_pb2.emage_msg()
msg.ParseFromString(serialized_data)
return msg
class VBSPConnection(object):
"""VBSP Connection.
Represents a connection to a ENB (EUTRAN Base Station) using
the VBSP Protocol. One VBSPConnection object is created for every
ENB in the network. The object implements the logic for handling
incoming messages. The currently supported messages are:
Attributes:
stream: The stream object used to talk with the ENB.
address: The connection source address, i.e. the ENB IP address.
server: Pointer to the server object.
vbs: Pointer to a VBS object.
"""
def __init__(self, stream, addr, server):
self.stream = stream
self.stream.set_nodelay(True)
self.addr = addr
self.server = server
self.vbs = None
self.seq = 0
self.stream.set_close_callback(self._on_disconnect)
self.__buffer = b''
self._hb_interval_ms = 500
self._hb_worker = tornado.ioloop.PeriodicCallback(self._heartbeat_cb,
self._hb_interval_ms)
self.endian = sys.byteorder
self._hb_worker.start()
self._wait()
def to_dict(self):
"""Return dict representation of object."""
return self.addr
def _heartbeat_cb(self):
"""Check if connection is still active."""
if self.vbs and not self.stream.closed():
timeout = (self.vbs.period / 1000) * 3
if (self.vbs.last_seen_ts + timeout) < time.time():
LOG.info('Client inactive %s at %r', self.vbs.addr, self.addr)
self.stream.close()
def stream_send(self, message):
"""Send message."""
# Update the sequence number of the messages
message.head.seq = self.seq + 1
size = message.ByteSize()
print(message.__str__())
size_bytes = (socket.htonl(size)).to_bytes(4, byteorder=self.endian)
send_buff = serialize_message(message)
buff = size_bytes + send_buff
if buff is None:
LOG.error("errno %u occured")
self.stream.write(buff)
def _on_read(self, line):
""" Appends bytes read from socket to a buffer. Once the full packet
has been read the parser is invoked and the buffers is cleared. The
parsed packet is then passed to the suitable method or dropped if the
packet type in unknown. """
self.__buffer = b''
if line is not None:
self.__buffer = self.__buffer + line
if len(line) == 4:
temp_size = int.from_bytes(line, byteorder=self.endian)
size = socket.ntohl(int(temp_size))
self.stream.read_bytes(size, self._on_read)
return
deserialized_msg = deserialize_message(line)
# Update the sequency number from received message
self.seq = deserialized_msg.head.seq
print(deserialized_msg.__str__())
self._trigger_message(deserialized_msg)
self._wait()
def _trigger_message(self, deserialized_msg):
event_type = deserialized_msg.WhichOneof("event_types")
if event_type == PRT_VBSP_SINGLE_EVENT:
msg_type = deserialized_msg.se.WhichOneof("events")
elif event_type == PRT_VBSP_AGENT_SCHEDULED_EVENT:
msg_type = deserialized_msg.sche.WhichOneof("events")
elif event_type == PRT_VBSP_TRIGGER_EVENT:
msg_type = deserialized_msg.te.WhichOneof("events")
else:
LOG.error("Unknown message event type %s", event_type)
if not msg_type or msg_type not in self.server.pt_types:
LOG.error("Unknown message type %s", msg_type)
return
if msg_type != PRT_VBSP_HELLO and not self.vbs:
return
handler_name = "_handle_%s" % self.server.pt_types[msg_type]
if hasattr(self, handler_name):
handler = getattr(self, handler_name)
handler(deserialized_msg)
if msg_type in self.server.pt_types_handlers:
for handler in self.server.pt_types_handlers[msg_type]:
handler(deserialized_msg)
def _handle_hello(self, main_msg):
"""Handle an incoming HELLO message.
Args:
main_msg, a emage_msg containing HELLO message
Returns:
None
"""
enb_id = main_msg.head.b_id
vbs_id = hex_to_ether(enb_id)
try:
vbs = RUNTIME.vbses[vbs_id]
except KeyError:
LOG.error("Hello from unknown VBS (%s)", (vbs_id))
return
LOG.info("Hello from %s VBS %s seq %u", self.addr[0], vbs.addr,
main_msg.head.seq)
# New connection
if not vbs.connection:
# set pointer to pnfdev object
self.vbs = vbs
# set connection
vbs.connection = self
# request registered UEs
self.send_UEs_id_req()
# generate register message
self.send_register_message_to_self()
# Update VBSP params
vbs.period = main_msg.se.mHello.repl.period
vbs.last_seen = main_msg.head.seq
vbs.last_seen_ts = time.time()
def _handle_UEs_id_repl(self, main_msg):
"""Handle an incoming UEs ID reply.
Args:
message, a emage_msg containing UE IDs (RNTIs)
Returns:
None
"""
active_ues = {}
inactive_ues = {}
event_type = main_msg.WhichOneof("event_types")
msg = protobuf_to_dict(main_msg)
ues_id_msg_repl = msg[event_type]["mUEs_id"]["repl"]
if ues_id_msg_repl["status"] != configs_pb2.CREQS_SUCCESS:
return
# List of active UEs
if "active_ue_id" in ues_id_msg_repl:
for ue in ues_id_msg_repl["active_ue_id"]:
active_ues[(self.vbs.addr, ue["rnti"])] = {}
if "imsi" in ue:
active_ues[(self.vbs.addr, ue["rnti"])]["imsi"] = ue["imsi"]
else:
active_ues[(self.vbs.addr, ue["rnti"])]["imsi"] = None
if "plmn_id" in ue:
active_ues[(self.vbs.addr, ue["rnti"])]["plmn_id"] = \
ue["plmn_id"]
else:
active_ues[(self.vbs.addr, ue["rnti"])]["plmn_id"] = None
# List of inactive UEs
if "inactive_ue_id" in ues_id_msg_repl:
for ue in ues_id_msg_repl["inactive_ue_id"]:
inactive_ues[(self.vbs.addr, ue["rnti"])] = {}
if "imsi" in ue:
inactive_ues[(self.vbs.addr, ue["rnti"])]["imsi"] = \
ue["imsi"]
else:
inactive_ues[(self.vbs.addr, ue["rnti"])]["imsi"] = None
if "plmn_id" in ue:
inactive_ues[(self.vbs.addr, ue["rnti"])]["plmn_id"] = \
ue["plmn_id"]
else:
inactive_ues[(self.vbs.addr, ue["rnti"])]["plmn_id"] = None
for vbs_id, rnti in active_ues.keys():
ue_id = (self.vbs.addr, rnti)
if ue_id not in RUNTIME.ues:
new_ue = UE(ue_id, ue_id[1], self.vbs)
RUNTIME.ues[ue_id] = new_ue
ue = RUNTIME.ues[ue_id]
imsi = active_ues[ue_id]["imsi"]
plmn_id = int(active_ues[ue_id]["plmn_id"])
# Setting IMSI of UE
ue.imsi = imsi
if not ue.plmn_id and plmn_id:
# Setting tenant
ue.tenant = RUNTIME.load_tenant_by_plmn_id(plmn_id)
if ue.tenant:
# Adding UE to tenant
LOG.info("Adding %s to tenant %s", ue.addr,
ue.tenant.plmn_id)
ue.tenant.ues[ue.addr] = ue
# Raise UE join
self.server.send_ue_join_message_to_self(ue)
# Create a trigger for reporting RRC measurements config.
from empower.ue_confs.ue_rrc_meas_confs import ue_rrc_meas_confs
conf_req = {
"event_type": "trigger"
}
ue_rrc_meas_confs(tenant_id=ue.tenant.tenant_id,
vbs=ue.vbs.addr,
ue=ue.rnti,
conf_req=conf_req)
if ue.plmn_id and not plmn_id:
# Raise UE leave
self.server.send_ue_leave_message_to_self(ue)
# Removing UE from tenant
LOG.info("Removing %s from tenant %s", ue.addr,
ue.tenant.plmn_id)
del ue.tenant.ues[ue.addr]
# Resetting tenant
ue.tenant = None
existing_ues = []
existing_ues.extend(RUNTIME.ues.keys())
for ue_addr in existing_ues:
if ue_addr not in active_ues:
RUNTIME.remove_ue(ue_addr)
def _handle_rrc_meas_conf_repl(self, main_msg):
"""Handle an incoming UE's RRC Measurements configuration reply.
Args:
message, a message containing RRC Measurements configuration in UE
Returns:
None
"""
event_type = main_msg.WhichOneof("event_types")
msg = protobuf_to_dict(main_msg)
rrc_m_conf_repl = msg[event_type]["mUE_rrc_meas_conf"]["repl"]
rnti = rrc_m_conf_repl["rnti"]
ue_id = (self.vbs.addr, rnti)
if ue_id not in RUNTIME.ues:
return
ue = RUNTIME.ues[ue_id]
if rrc_m_conf_repl["status"] != configs_pb2.CREQS_SUCCESS:
return
del rrc_m_conf_repl["rnti"]
del rrc_m_conf_repl["status"]
if "ue_rrc_state" in rrc_m_conf_repl:
ue.rrc_state = rrc_m_conf_repl["ue_rrc_state"]
del rrc_m_conf_repl["ue_rrc_state"]
if "capabilities" in rrc_m_conf_repl:
ue.capabilities = rrc_m_conf_repl["capabilities"]
del rrc_m_conf_repl["capabilities"]
ue.rrc_meas_config = rrc_m_conf_repl
def send_UEs_id_req(self):
""" Send request for UEs ID registered in VBS """
ues_id_req = main_pb2.emage_msg()
enb_id = ether_to_hex(self.vbs.addr)
# Transaction identifier is zero by default.
create_header(0, enb_id, ues_id_req.head)
# Creating a trigger message to fetch UE RNTIs
trigger_msg = ues_id_req.te
trigger_msg.action = main_pb2.EA_ADD
UEs_id_msg = trigger_msg.mUEs_id
UEs_id_req_msg = UEs_id_msg.req
UEs_id_req_msg.dummy = 1
LOG.info("Sending UEs request to VBS %s (%u)",
self.vbs.addr, enb_id)
self.stream_send(ues_id_req)
def send_rrc_meas_conf_req(self, ue):
""" Sends a request for RRC measurements configuration of UE """
rrc_m_conf_req = main_pb2.emage_msg()
enb_id = ether_to_hex(self.vbs.addr)
# Transaction identifier is zero by default.
create_header(0, enb_id, rrc_m_conf_req.head)
# Creating a trigger message to fetch UE RNTIs
trigger_msg = rrc_m_conf_req.te
trigger_msg.action = main_pb2.EA_ADD
rrc_m_conf_msg = trigger_msg.mUE_rrc_meas_conf
rrc_m_conf_req_msg = rrc_m_conf_msg.req
rrc_m_conf_req_msg.rnti = ue.rnti
LOG.info("Sending UEs RRC measurement config request to VBS %s (%u)",
self.vbs.addr, enb_id)
self.stream_send(rrc_m_conf_req)
def _wait(self):
""" Wait for incoming packets on signalling channel """
self.stream.read_bytes(4, self._on_read)
def _on_disconnect(self):
"""Handle VBSP disconnection."""
if not self.vbs:
return
LOG.info("VBS disconnected: %s", self.vbs.addr)
# remove hosted ues
for addr in list(RUNTIME.ues.keys()):
ue = RUNTIME.ues[addr]
if ue.vbs == self.vbs:
RUNTIME.remove_ue(ue.addr)
# reset state
self.vbs.last_seen = 0
self.vbs.connection = None
self.vbs.ues = {}
self.vbs.period = 0
self.vbs = None
def send_bye_message_to_self(self):
"""Send a unsollicited BYE message to self."""
for handler in self.server.pt_types_handlers[PRT_VBSP_BYE]:
handler(self.vbs)
def send_register_message_to_self(self):
"""Send a REGISTER message to self."""
for handler in self.server.pt_types_handlers[PRT_VBSP_REGISTER]:
handler(self.vbs)
| apache-2.0 | -7,264,704,974,746,397,000 | 30.565762 | 84 | 0.56164 | false |
OmkarPathak/pygorithm | pygorithm/dynamic_programming/lis.py | 1 | 1439 | """
Author: Omkar Pathak
Created At: 25th August 2017
"""
import inspect
def longest_increasing_subsequence(_list):
"""
The Longest Increasing Subsequence (LIS) problem is to find the length of the longest subsequence of a
given sequence such that all elements of the subsequence are sorted in increasing order. For example,
the length of LIS for [10, 22, 9, 33, 21, 50, 41, 60, 80] is 6 and LIS is [10, 22, 33, 50, 60, 80].
:param _list: an array of elements
:return: returns a tuple of maximum length of lis and an array of the elements of lis
"""
# Initialize list with some value
lis = [1] * len(_list)
# list for storing the elements in an lis
elements = [0] * len(_list)
# Compute optimized LIS values in bottom up manner
for i in range(1, len(_list)):
for j in range(0, i):
if _list[i] > _list[j] and lis[i] < lis[j] + 1:
lis[i] = lis[j]+1
elements[i] = j
# find the maximum of the whole list and get its index in idx
maximum = max(lis)
idx = lis.index(maximum)
# for printing the elements later
seq = [_list[idx]]
while idx != elements[idx]:
idx = elements[idx]
seq.append(_list[idx])
return (maximum, seq[::-1])
def get_code():
"""
returns the code for the longest_increasing_subsequence function
"""
return inspect.getsource(longest_increasing_subsequence)
| mit | 7,613,543,510,586,332,000 | 30.977778 | 106 | 0.628909 | false |
UK992/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/wptcommandline.py | 1 | 36307 | from __future__ import absolute_import, print_function
import argparse
import os
import sys
from collections import OrderedDict
from distutils.spawn import find_executable
from datetime import timedelta
from six import iterkeys, itervalues, iteritems
from . import config
from . import wpttest
from .formatters import chromium, wptreport, wptscreenshot
def abs_path(path):
    """Return *path* with a leading ``~`` expanded and made absolute."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def url_or_path(path):
    """Return *path* unchanged if it looks like a URL, otherwise as an absolute path.

    A parsed scheme longer than two characters (``http``, ``https``,
    ``file``, ...) marks the string as a URL; anything shorter is treated
    as a filesystem path so Windows drive letters (e.g. ``c:``) are not
    mistaken for URL schemes.
    """
    from six.moves.urllib.parse import urlparse

    scheme = urlparse(path).scheme
    return path if len(scheme) > 2 else abs_path(path)
def require_arg(kwargs, name, value_func=None):
    """Exit the process unless *name* is present in *kwargs* with an acceptable value.

    :param kwargs: Mapping of argument names to values.
    :param name: Key that must be present in *kwargs*.
    :param value_func: Optional predicate applied to ``kwargs[name]``;
                       defaults to "value is not None".
    """
    if value_func is None:
        def value_func(value):
            return value is not None

    if name in kwargs and value_func(kwargs[name]):
        return

    print("Missing required argument %s" % name, file=sys.stderr)
    sys.exit(1)
def create_parser(product_choices=None):
    """Build the argparse parser for the wptrunner "run" command.

    :param product_choices: iterable of browser product names accepted by
        ``--product``; when None the products enabled in the loaded wptrunner
        config are used.
    :return: a fully configured ``argparse.ArgumentParser``.
    """
    from mozlog import commandline

    from . import products

    if product_choices is None:
        config_data = config.load()
        product_choices = products.products_enabled(config_data)

    parser = argparse.ArgumentParser(description="""Runner for web-platform-tests tests.""",
                                     usage="""%(prog)s [OPTION]... [TEST]...

TEST is either the full path to a test file to run, or the URL of a test excluding
scheme host and port.""")
    # Top-level (ungrouped) options.
    parser.add_argument("--manifest-update", action="store_true", default=None,
                        help="Regenerate the test manifest.")
    parser.add_argument("--no-manifest-update", action="store_false", dest="manifest_update",
                        help="Prevent regeneration of the test manifest.")
    parser.add_argument("--manifest-download", action="store_true", default=None,
                        help="Attempt to download a preexisting manifest when updating.")
    parser.add_argument("--no-manifest-download", action="store_false", dest="manifest_download",
                        help="Prevent download of the test manifest.")
    parser.add_argument("--timeout-multiplier", action="store", type=float, default=None,
                        help="Multiplier relative to standard test timeout to use")
    parser.add_argument("--run-by-dir", type=int, nargs="?", default=False,
                        help="Split run into groups by directories. With a parameter,"
                        "limit the depth of splits e.g. --run-by-dir=1 to split by top-level"
                        "directory")
    parser.add_argument("--processes", action="store", type=int, default=None,
                        help="Number of simultaneous processes to use")
    parser.add_argument("--no-capture-stdio", action="store_true", default=False,
                        help="Don't capture stdio and write to logging")
    parser.add_argument("--no-fail-on-unexpected", action="store_false",
                        default=True,
                        dest="fail_on_unexpected",
                        help="Exit with status code 0 when test expectations are violated")

    # Options selecting what mode the runner operates in (list/verify/run).
    mode_group = parser.add_argument_group("Mode")
    mode_group.add_argument("--list-test-groups", action="store_true",
                            default=False,
                            help="List the top level directories containing tests that will run.")
    mode_group.add_argument("--list-disabled", action="store_true",
                            default=False,
                            help="List the tests that are disabled on the current platform")
    mode_group.add_argument("--list-tests", action="store_true",
                            default=False,
                            help="List all tests that will run")
    stability_group = mode_group.add_mutually_exclusive_group()
    stability_group.add_argument("--verify", action="store_true",
                                 default=False,
                                 help="Run a stability check on the selected tests")
    # --stability is a hidden legacy alias for --verify (help is suppressed).
    stability_group.add_argument("--stability", action="store_true",
                                 default=False,
                                 help=argparse.SUPPRESS)
    mode_group.add_argument("--verify-log-full", action="store_true",
                            default=False,
                            help="Output per-iteration test results when running verify")
    mode_group.add_argument("--verify-repeat-loop", action="store",
                            default=10,
                            help="Number of iterations for a run that reloads each test without restart.",
                            type=int)
    mode_group.add_argument("--verify-repeat-restart", action="store",
                            default=5,
                            help="Number of iterations, for a run that restarts the runner between each iteration",
                            type=int)
    chaos_mode_group = mode_group.add_mutually_exclusive_group()
    chaos_mode_group.add_argument("--verify-no-chaos-mode", action="store_false",
                                  default=True,
                                  dest="verify_chaos_mode",
                                  help="Disable chaos mode when running on Firefox")
    chaos_mode_group.add_argument("--verify-chaos-mode", action="store_true",
                                  default=True,
                                  dest="verify_chaos_mode",
                                  help="Enable chaos mode when running on Firefox")
    mode_group.add_argument("--verify-max-time", action="store",
                            default=None,
                            help="The maximum number of minutes for the job to run",
                            type=lambda x: timedelta(minutes=float(x)))
    output_results_group = mode_group.add_mutually_exclusive_group()
    output_results_group.add_argument("--verify-no-output-results", action="store_false",
                                      dest="verify_output_results",
                                      default=True,
                                      help="Prints individuals test results and messages")
    output_results_group.add_argument("--verify-output-results", action="store_true",
                                      dest="verify_output_results",
                                      default=True,
                                      help="Disable printing individuals test results and messages")

    # Options controlling which tests are selected for the run.
    test_selection_group = parser.add_argument_group("Test Selection")
    test_selection_group.add_argument("--test-types", action="store",
                                      nargs="*", default=wpttest.enabled_tests,
                                      choices=wpttest.enabled_tests,
                                      help="Test types to run")
    test_selection_group.add_argument("--include", action="append",
                                      help="URL prefix to include")
    test_selection_group.add_argument("--exclude", action="append",
                                      help="URL prefix to exclude")
    test_selection_group.add_argument("--include-manifest", type=abs_path,
                                      help="Path to manifest listing tests to include")
    test_selection_group.add_argument("--skip-timeout", action="store_true",
                                      help="Skip tests that are expected to time out")
    test_selection_group.add_argument("--skip-implementation-status",
                                      action="append",
                                      choices=["not-implementing", "backlog", "implementing"],
                                      help="Skip tests that have the given implementation status")
    test_selection_group.add_argument("--tag", action="append", dest="tags",
                                      help="Labels applied to tests to include in the run. "
                                      "Labels starting dir: are equivalent to top-level directories.")
    test_selection_group.add_argument("--default-exclude", action="store_true",
                                      default=False,
                                      help="Only run the tests explicitly given in arguments. "
                                      "No tests will run if the list is empty, and the "
                                      "program will exit with status code 0.")

    debugging_group = parser.add_argument_group("Debugging")
    debugging_group.add_argument('--debugger', const="__default__", nargs="?",
                                 help="run under a debugger, e.g. gdb or valgrind")
    debugging_group.add_argument('--debugger-args', help="arguments to the debugger")
    debugging_group.add_argument("--rerun", action="store", type=int, default=1,
                                 help="Number of times to re run each test without restarts")
    debugging_group.add_argument("--repeat", action="store", type=int, default=1,
                                 help="Number of times to run the tests, restarting between each run")
    debugging_group.add_argument("--repeat-until-unexpected", action="store_true", default=None,
                                 help="Run tests in a loop until one returns an unexpected result")
    debugging_group.add_argument('--pause-after-test', action="store_true", default=None,
                                 help="Halt the test runner after each test (this happens by default if only a single test is run)")
    debugging_group.add_argument('--no-pause-after-test', dest="pause_after_test", action="store_false",
                                 help="Don't halt the test runner irrespective of the number of tests run")
    debugging_group.add_argument('--pause-on-unexpected', action="store_true",
                                 help="Halt the test runner when an unexpected result is encountered")
    debugging_group.add_argument('--no-restart-on-unexpected', dest="restart_on_unexpected",
                                 default=True, action="store_false",
                                 help="Don't restart on an unexpected result")
    debugging_group.add_argument("--symbols-path", action="store", type=url_or_path,
                                 help="Path or url to symbols file used to analyse crash minidumps.")
    debugging_group.add_argument("--stackwalk-binary", action="store", type=abs_path,
                                 help="Path to stackwalker program used to analyse minidumps.")
    debugging_group.add_argument("--pdb", action="store_true",
                                 help="Drop into pdb on python exception")

    config_group = parser.add_argument_group("Configuration")
    config_group.add_argument("--binary", action="store",
                              type=abs_path, help="Desktop binary to run tests against")
    config_group.add_argument('--binary-arg',
                              default=[], action="append", dest="binary_args",
                              help="Extra argument for the binary")
    config_group.add_argument("--webdriver-binary", action="store", metavar="BINARY",
                              type=abs_path, help="WebDriver server binary to use")
    config_group.add_argument('--webdriver-arg',
                              default=[], action="append", dest="webdriver_args",
                              help="Extra argument for the WebDriver binary")
    config_group.add_argument("--package-name", action="store",
                              help="Android package name to run tests against")
    config_group.add_argument("--device-serial", action="store",
                              help="Running Android instance to connect to, if not emulator-5554")
    config_group.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
                              help="Path to root directory containing test metadata"),
    config_group.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
                              help="Path to root directory containing test files"),
    config_group.add_argument("--manifest", action="store", type=abs_path, dest="manifest_path",
                              help="Path to test manifest (default is ${metadata_root}/MANIFEST.json)")
    config_group.add_argument("--run-info", action="store", type=abs_path,
                              help="Path to directory containing extra json files to add to run info")
    config_group.add_argument("--product", action="store", choices=product_choices,
                              default=None, help="Browser against which to run tests")
    config_group.add_argument("--browser-version", action="store",
                              default=None, help="Informative string detailing the browser "
                              "release version. This is included in the run_info data.")
    config_group.add_argument("--browser-channel", action="store",
                              default=None, help="Informative string detailing the browser "
                              "release channel. This is included in the run_info data.")
    config_group.add_argument("--config", action="store", type=abs_path, dest="config",
                              help="Path to config file")
    config_group.add_argument("--install-fonts", action="store_true",
                              default=None,
                              help="Install additional system fonts on your system")
    config_group.add_argument("--no-install-fonts", dest="install_fonts", action="store_false",
                              help="Do not install additional system fonts on your system")
    config_group.add_argument("--font-dir", action="store", type=abs_path, dest="font_dir",
                              help="Path to local font installation directory", default=None)
    config_group.add_argument("--headless", action="store_true",
                              help="Run browser in headless mode", default=None)
    config_group.add_argument("--no-headless", action="store_false", dest="headless",
                              help="Don't run browser in headless mode")
    config_group.add_argument("--instrument-to-file", action="store",
                              help="Path to write instrumentation logs to")

    build_type = parser.add_mutually_exclusive_group()
    build_type.add_argument("--debug-build", dest="debug", action="store_true",
                            default=None,
                            help="Build is a debug build (overrides any mozinfo file)")
    build_type.add_argument("--release-build", dest="debug", action="store_false",
                            default=None,
                            help="Build is a release (overrides any mozinfo file)")

    chunking_group = parser.add_argument_group("Test Chunking")
    chunking_group.add_argument("--total-chunks", action="store", type=int, default=1,
                                help="Total number of chunks to use")
    chunking_group.add_argument("--this-chunk", action="store", type=int, default=1,
                                help="Chunk number to run")
    chunking_group.add_argument("--chunk-type", action="store", choices=["none", "hash", "dir_hash"],
                                default=None, help="Chunking type to use")

    ssl_group = parser.add_argument_group("SSL/TLS")
    ssl_group.add_argument("--ssl-type", action="store", default=None,
                           choices=["openssl", "pregenerated", "none"],
                           help="Type of ssl support to enable (running without ssl may lead to spurious errors)")
    ssl_group.add_argument("--openssl-binary", action="store",
                           help="Path to openssl binary", default="openssl")
    ssl_group.add_argument("--certutil-binary", action="store",
                           help="Path to certutil binary for use with Firefox + ssl")
    ssl_group.add_argument("--ca-cert-path", action="store", type=abs_path,
                           help="Path to ca certificate when using pregenerated ssl certificates")
    ssl_group.add_argument("--host-key-path", action="store", type=abs_path,
                           help="Path to host private key when using pregenerated ssl certificates")
    ssl_group.add_argument("--host-cert-path", action="store", type=abs_path,
                           help="Path to host certificate when using pregenerated ssl certificates")

    gecko_group = parser.add_argument_group("Gecko-specific")
    gecko_group.add_argument("--prefs-root", dest="prefs_root", action="store", type=abs_path,
                             help="Path to the folder containing browser prefs")
    gecko_group.add_argument("--preload-browser", dest="preload_browser", action="store_true",
                             default=None, help="Preload a gecko instance for faster restarts")
    gecko_group.add_argument("--no-preload-browser", dest="preload_browser", action="store_false",
                             default=None, help="Don't preload a gecko instance for faster restarts")
    gecko_group.add_argument("--disable-e10s", dest="gecko_e10s", action="store_false", default=True,
                             help="Run tests without electrolysis preferences")
    gecko_group.add_argument("--enable-webrender", dest="enable_webrender", action="store_true", default=None,
                             help="Enable the WebRender compositor in Gecko (defaults to disabled).")
    gecko_group.add_argument("--no-enable-webrender", dest="enable_webrender", action="store_false",
                             help="Disable the WebRender compositor in Gecko.")
    gecko_group.add_argument("--stackfix-dir", dest="stackfix_dir", action="store",
                             help="Path to directory containing assertion stack fixing scripts")
    gecko_group.add_argument("--setpref", dest="extra_prefs", action='append',
                             default=[], metavar="PREF=VALUE",
                             help="Defines an extra user preference (overrides those in prefs_root)")
    gecko_group.add_argument("--leak-check", dest="leak_check", action="store_true", default=None,
                             help="Enable leak checking (enabled by default for debug builds, "
                             "silently ignored for opt, mobile)")
    gecko_group.add_argument("--no-leak-check", dest="leak_check", action="store_false", default=None,
                             help="Disable leak checking")
    gecko_group.add_argument("--stylo-threads", action="store", type=int, default=1,
                             help="Number of parallel threads to use for stylo")
    gecko_group.add_argument("--reftest-internal", dest="reftest_internal", action="store_true",
                             default=None, help="Enable reftest runner implemented inside Marionette")
    gecko_group.add_argument("--reftest-external", dest="reftest_internal", action="store_false",
                             help="Disable reftest runner implemented inside Marionette")
    gecko_group.add_argument("--reftest-screenshot", dest="reftest_screenshot", action="store",
                             choices=["always", "fail", "unexpected"], default=None,
                             help="With --reftest-internal, when to take a screenshot")
    gecko_group.add_argument("--chaos", dest="chaos_mode_flags", action="store",
                             nargs="?", const=0xFFFFFFFF, type=int,
                             help="Enable chaos mode with the specified feature flag "
                             "(see http://searchfox.org/mozilla-central/source/mfbt/ChaosMode.h for "
                             "details). If no value is supplied, all features are activated")

    servo_group = parser.add_argument_group("Servo-specific")
    servo_group.add_argument("--user-stylesheet",
                             default=[], action="append", dest="user_stylesheets",
                             help="Inject a user CSS stylesheet into every test.")

    sauce_group = parser.add_argument_group("Sauce Labs-specific")
    sauce_group.add_argument("--sauce-browser", dest="sauce_browser",
                             help="Sauce Labs browser name")
    sauce_group.add_argument("--sauce-platform", dest="sauce_platform",
                             help="Sauce Labs OS platform")
    sauce_group.add_argument("--sauce-version", dest="sauce_version",
                             help="Sauce Labs browser version")
    sauce_group.add_argument("--sauce-build", dest="sauce_build",
                             help="Sauce Labs build identifier")
    sauce_group.add_argument("--sauce-tags", dest="sauce_tags", nargs="*",
                             help="Sauce Labs identifying tag", default=[])
    sauce_group.add_argument("--sauce-tunnel-id", dest="sauce_tunnel_id",
                             help="Sauce Connect tunnel identifier")
    sauce_group.add_argument("--sauce-user", dest="sauce_user",
                             help="Sauce Labs user name")
    sauce_group.add_argument("--sauce-key", dest="sauce_key",
                             default=os.environ.get("SAUCE_ACCESS_KEY"),
                             help="Sauce Labs access key")
    sauce_group.add_argument("--sauce-connect-binary",
                             dest="sauce_connect_binary",
                             help="Path to Sauce Connect binary")
    sauce_group.add_argument("--sauce-init-timeout", action="store",
                             type=int, default=30,
                             help="Number of seconds to wait for Sauce "
                             "Connect tunnel to be available before "
                             "aborting")
    sauce_group.add_argument("--sauce-connect-arg", action="append",
                             default=[], dest="sauce_connect_args",
                             help="Command-line argument to forward to the "
                             "Sauce Connect binary (repeatable)")

    webkit_group = parser.add_argument_group("WebKit-specific")
    webkit_group.add_argument("--webkit-port", dest="webkit_port",
                              help="WebKit port")

    parser.add_argument("test_list", nargs="*",
                        help="List of URLs for tests to run, or paths including tests to run. "
                        "(equivalent to --include)")

    def screenshot_api_wrapper(formatter, api):
        formatter.api = api
        return formatter

    # Register extra output formatters with mozlog's shared command line
    # machinery before adding the logging options to this parser.
    commandline.fmt_options["api"] = (screenshot_api_wrapper,
                                      "Cache API (default: %s)" % wptscreenshot.DEFAULT_API,
                                      {"wptscreenshot"}, "store")

    commandline.log_formatters["chromium"] = (chromium.ChromiumFormatter, "Chromium Layout Tests format")
    commandline.log_formatters["wptreport"] = (wptreport.WptreportFormatter, "wptreport format")
    commandline.log_formatters["wptscreenshot"] = (wptscreenshot.WptscreenshotFormatter, "wpt.fyi screenshots")

    commandline.add_logging_group(parser)
    return parser
def set_from_config(kwargs):
    """Fill in still-unset entries of *kwargs* from the wptrunner config file.

    Mutates *kwargs* in place: loads the config file named by
    ``kwargs["config"]`` (or the default), copies selected config values into
    kwargs entries that are still None, builds ``kwargs["test_paths"]`` and
    finally validates the resulting paths via ``check_paths``.
    """
    if kwargs["config"] is None:
        config_path = config.path()
    else:
        config_path = kwargs["config"]

    kwargs["config_path"] = config_path

    kwargs["config"] = config.read(kwargs["config_path"])

    # Mapping: config section -> [(config key, kwargs key, treat value as a
    # filesystem path)].  Path values go through ConfigDict.get_path so they
    # are resolved relative to the config file.
    keys = {"paths": [("prefs", "prefs_root", True),
                      ("run_info", "run_info", True)],
            "web-platform-tests": [("remote_url", "remote_url", False),
                                   ("branch", "branch", False),
                                   ("sync_path", "sync_path", True)],
            "SSL": [("openssl_binary", "openssl_binary", True),
                    ("certutil_binary", "certutil_binary", True),
                    ("ca_cert_path", "ca_cert_path", True),
                    ("host_cert_path", "host_cert_path", True),
                    ("host_key_path", "host_key_path", True)]}

    for section, values in iteritems(keys):
        for config_value, kw_value, is_path in values:
            # Only fill entries the user did not supply on the command line.
            if kw_value in kwargs and kwargs[kw_value] is None:
                if not is_path:
                    new_value = kwargs["config"].get(section, config.ConfigDict({})).get(config_value)
                else:
                    new_value = kwargs["config"].get(section, config.ConfigDict({})).get_path(config_value)

                kwargs[kw_value] = new_value

    kwargs["test_paths"] = get_test_paths(kwargs["config"])

    # Explicit --tests/--metadata/--manifest arguments override the
    # config-derived values for the root ("/") manifest entry.
    if kwargs["tests_root"]:
        if "/" not in kwargs["test_paths"]:
            kwargs["test_paths"]["/"] = {}
        kwargs["test_paths"]["/"]["tests_path"] = kwargs["tests_root"]

    if kwargs["metadata_root"]:
        if "/" not in kwargs["test_paths"]:
            kwargs["test_paths"]["/"] = {}
        kwargs["test_paths"]["/"]["metadata_path"] = kwargs["metadata_root"]

    if kwargs.get("manifest_path"):
        if "/" not in kwargs["test_paths"]:
            kwargs["test_paths"]["/"] = {}
        kwargs["test_paths"]["/"]["manifest_path"] = kwargs["manifest_path"]

    kwargs["suite_name"] = kwargs["config"].get("web-platform-tests", {}).get("name", "web-platform-tests")

    check_paths(kwargs)
def get_test_paths(config):
    """Collect per-manifest test/metadata paths from the wptrunner config.

    Every ``manifest:`` section of *config* contributes one entry, keyed by
    its ``url_base`` (defaulting to "/").  Insertion order is preserved.
    """
    test_paths = OrderedDict()

    for section in config:
        if not section.startswith("manifest:"):
            continue
        manifest_opts = config.get(section)
        url_base = manifest_opts.get("url_base", "/")
        paths = {
            "tests_path": manifest_opts.get_path("tests"),
            "metadata_path": manifest_opts.get_path("metadata"),
        }
        if "manifest" in manifest_opts:
            paths["manifest_path"] = manifest_opts.get_path("manifest")
        test_paths[url_base] = paths

    return test_paths
def exe_path(name):
    """Return the filesystem path of executable *name*, or None.

    None is returned when *name* is None, when no matching binary is found
    on PATH, or when the located file is not executable by the current user.
    """
    if name is None:
        return None

    # shutil.which replaces distutils.spawn.find_executable; distutils was
    # deprecated and removed in Python 3.12 (PEP 632).
    import shutil
    path = shutil.which(name)
    if path and os.access(path, os.X_OK):
        return path
    return None
def check_paths(kwargs):
    """Validate the entries of ``kwargs["test_paths"]``.

    Ensures every manifest entry has both a tests path and a metadata path,
    defaults the manifest path to ``<metadata>/MANIFEST.json``, and exits the
    process with status 1 if any required path is missing or not a directory.
    """
    for test_paths in itervalues(kwargs["test_paths"]):
        if not ("tests_path" in test_paths and
                "metadata_path" in test_paths):
            print("Fatal: must specify both a test path and metadata path")
            sys.exit(1)

        if "manifest_path" not in test_paths:
            test_paths["manifest_path"] = os.path.join(test_paths["metadata_path"],
                                                       "MANIFEST.json")

        for key, path in iteritems(test_paths):
            # Keys look like "tests_path"/"metadata_path"/"manifest_path";
            # the prefix is used in the error messages below.
            name = key.split("_", 1)[0]

            if name == "manifest":
                # For the manifest we can create it later, so just check the path
                # actually exists
                path = os.path.dirname(path)

            if not os.path.exists(path):
                print("Fatal: %s path %s does not exist" % (name, path))
                sys.exit(1)

            if not os.path.isdir(path):
                print("Fatal: %s path %s is not a directory" % (name, path))
                sys.exit(1)
def check_args(kwargs):
    """Validate, normalise and default the parsed "run" command arguments.

    Mutates *kwargs* in place (after first merging values from the config
    file via ``set_from_config``) and returns it.  Exits the process with
    status 1 on invalid binary/certificate/preference arguments.
    """
    set_from_config(kwargs)

    if kwargs["product"] is None:
        kwargs["product"] = "firefox"

    if kwargs["manifest_update"] is None:
        kwargs["manifest_update"] = True

    if "sauce" in kwargs["product"]:
        kwargs["pause_after_test"] = False

    # Positional TEST arguments are treated as extra --include prefixes.
    if kwargs["test_list"]:
        if kwargs["include"] is not None:
            kwargs["include"].extend(kwargs["test_list"])
        else:
            kwargs["include"] = kwargs["test_list"]

    if kwargs["run_info"] is None:
        kwargs["run_info"] = kwargs["config_path"]

    if kwargs["this_chunk"] > 1:
        require_arg(kwargs, "total_chunks", lambda x: x >= kwargs["this_chunk"])

    if kwargs["chunk_type"] is None:
        if kwargs["total_chunks"] > 1:
            kwargs["chunk_type"] = "dir_hash"
        else:
            kwargs["chunk_type"] = "none"

    if kwargs["processes"] is None:
        kwargs["processes"] = 1

    if kwargs["debugger"] is not None:
        import mozdebug
        if kwargs["debugger"] == "__default__":
            kwargs["debugger"] = mozdebug.get_default_debugger_name()
        debug_info = mozdebug.get_debugger_info(kwargs["debugger"],
                                                kwargs["debugger_args"])
        if debug_info and debug_info.interactive:
            # Interactive debuggers need a single process and raw stdio.
            if kwargs["processes"] != 1:
                kwargs["processes"] = 1
            kwargs["no_capture_stdio"] = True
        kwargs["debug_info"] = debug_info
    else:
        kwargs["debug_info"] = None

    if kwargs["binary"] is not None:
        if not os.path.exists(kwargs["binary"]):
            print("Binary path %s does not exist" % kwargs["binary"], file=sys.stderr)
            sys.exit(1)

    # Pick an SSL backend: pregenerated certs win, then openssl if
    # available, otherwise run without SSL support.
    if kwargs["ssl_type"] is None:
        if None not in (kwargs["ca_cert_path"], kwargs["host_cert_path"], kwargs["host_key_path"]):
            kwargs["ssl_type"] = "pregenerated"
        elif exe_path(kwargs["openssl_binary"]) is not None:
            kwargs["ssl_type"] = "openssl"
        else:
            kwargs["ssl_type"] = "none"

    if kwargs["ssl_type"] == "pregenerated":
        require_arg(kwargs, "ca_cert_path", lambda x:os.path.exists(x))
        require_arg(kwargs, "host_cert_path", lambda x:os.path.exists(x))
        require_arg(kwargs, "host_key_path", lambda x:os.path.exists(x))

    elif kwargs["ssl_type"] == "openssl":
        path = exe_path(kwargs["openssl_binary"])
        if path is None:
            print("openssl-binary argument missing or not a valid executable", file=sys.stderr)
            sys.exit(1)
        kwargs["openssl_binary"] = path

    if kwargs["ssl_type"] != "none" and kwargs["product"] == "firefox" and kwargs["certutil_binary"]:
        path = exe_path(kwargs["certutil_binary"])
        if path is None:
            print("certutil-binary argument missing or not a valid executable", file=sys.stderr)
            sys.exit(1)
        kwargs["certutil_binary"] = path

    # --setpref values must be key=value; split them into tuples.
    if kwargs['extra_prefs']:
        missing = any('=' not in prefarg for prefarg in kwargs['extra_prefs'])
        if missing:
            print("Preferences via --setpref must be in key=value format", file=sys.stderr)
            sys.exit(1)
        kwargs['extra_prefs'] = [tuple(prefarg.split('=', 1)) for prefarg in
                                 kwargs['extra_prefs']]

    if kwargs["reftest_internal"] is None:
        kwargs["reftest_internal"] = True

    if kwargs["reftest_screenshot"] is None:
        kwargs["reftest_screenshot"] = "unexpected"

    if kwargs["enable_webrender"] is None:
        kwargs["enable_webrender"] = False

    if kwargs["preload_browser"] is None:
        # Default to preloading a gecko instance if we're only running a single process
        kwargs["preload_browser"] = kwargs["processes"] == 1

    return kwargs
def check_args_update(kwargs):
    """Validate and default the parsed arguments for the update command.

    Mutates *kwargs* in place and returns it; exits with status 1 if any
    supplied run log is a directory rather than a file.
    """
    set_from_config(kwargs)

    if kwargs["product"] is None:
        kwargs["product"] = "firefox"
    # Syncing implies creating a patch unless the user chose otherwise.
    if kwargs["patch"] is None:
        kwargs["patch"] = kwargs["sync"]

    for log_path in kwargs["run_log"]:
        if not os.path.isdir(log_path):
            continue
        print("Log file %s is a directory" % log_path, file=sys.stderr)
        sys.exit(1)

    return kwargs
def create_parser_update(product_choices=None):
    """Build the argparse parser for the metadata "update" command.

    :param product_choices: iterable of browser product names accepted by
        ``--product``; when None the products enabled in the loaded config
        are used.
    :return: a configured ``argparse.ArgumentParser``.
    """
    from mozlog.structured import commandline

    from . import products

    if product_choices is None:
        config_data = config.load()
        product_choices = products.products_enabled(config_data)

    parser = argparse.ArgumentParser("web-platform-tests-update",
                                     description="Update script for web-platform-tests tests.")
    parser.add_argument("--product", action="store", choices=product_choices,
                        default=None, help="Browser for which metadata is being updated")
    parser.add_argument("--config", action="store", type=abs_path, help="Path to config file")
    parser.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
                        help="Path to the folder containing test metadata"),
    parser.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
                        help="Path to web-platform-tests"),
    parser.add_argument("--manifest", action="store", type=abs_path, dest="manifest_path",
                        help="Path to test manifest (default is ${metadata_root}/MANIFEST.json)")
    parser.add_argument("--sync-path", action="store", type=abs_path,
                        help="Path to store git checkout of web-platform-tests during update"),
    parser.add_argument("--remote_url", action="store",
                        help="URL of web-platfrom-tests repository to sync against"),
    parser.add_argument("--branch", action="store", type=abs_path,
                        help="Remote branch to sync against")
    parser.add_argument("--rev", action="store", help="Revision to sync to")
    parser.add_argument("--patch", action="store_true", dest="patch", default=None,
                        help="Create a VCS commit containing the changes.")
    parser.add_argument("--no-patch", action="store_false", dest="patch",
                        help="Don't create a VCS commit containing the changes.")
    parser.add_argument("--sync", dest="sync", action="store_true", default=False,
                        help="Sync the tests with the latest from upstream (implies --patch)")
    parser.add_argument("--full", action="store_true", default=False,
                        help=("For all tests that are updated, remove any existing conditions and missing subtests"))
    parser.add_argument("--disable-intermittent", nargs="?", action="store", const="unstable", default=None,
                        help=("Reason for disabling tests. When updating test results, disable tests that have "
                              "inconsistent results across many runs with the given reason."))
    parser.add_argument("--update-intermittent", action="store_true", default=False,
                        help=("Update test metadata with expected intermittent statuses."))
    parser.add_argument("--remove-intermittent", action="store_true", default=False,
                        help=("Remove obsolete intermittent statuses from expected statuses."))
    parser.add_argument("--no-remove-obsolete", action="store_false", dest="remove_obsolete", default=True,
                        help=("Don't remove metadata files that no longer correspond to a test file"))
    parser.add_argument("--no-store-state", action="store_false", dest="store_state",
                        help="Store state so that steps can be resumed after failure")
    parser.add_argument("--continue", action="store_true",
                        help="Continue a previously started run of the update script")
    parser.add_argument("--abort", action="store_true",
                        help="Clear state from a previous incomplete run of the update script")
    parser.add_argument("--exclude", action="store", nargs="*",
                        help="List of glob-style paths to exclude when syncing tests")
    parser.add_argument("--include", action="store", nargs="*",
                        help="List of glob-style paths to include which would otherwise be excluded when syncing tests")
    parser.add_argument("--extra-property", action="append", default=[],
                        help="Extra property from run_info.json to use in metadata update")
    # Should make this required iff run=logfile
    parser.add_argument("run_log", nargs="*", type=abs_path,
                        help="Log file from run of tests")
    commandline.add_logging_group(parser)
    return parser
def create_parser_reduce(product_choices=None):
    """Return the "run" parser extended with the unstable-test id positional."""
    reduce_parser = create_parser(product_choices)
    reduce_parser.add_argument("target", action="store",
                               help="Test id that is unstable")
    return reduce_parser
def parse_args():
    """Parse and validate command-line arguments for the run command."""
    rv = vars(create_parser().parse_args())
    check_args(rv)
    return rv
def parse_args_update():
    """Parse and validate command-line arguments for the update command."""
    rv = vars(create_parser_update().parse_args())
    check_args_update(rv)
    return rv
def parse_args_reduce():
    """Parse and validate command-line arguments for the reduce command."""
    rv = vars(create_parser_reduce().parse_args())
    check_args(rv)
    return rv
| mpl-2.0 | 869,402,087,617,088,500 | 52.788148 | 132 | 0.577547 | false |
CCI-MOC/nova | nova/image/glance.py | 1 | 26640 | # Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an image service that uses Glance as the backend."""
from __future__ import absolute_import
import copy
import itertools
import random
import sys
import time
import glanceclient
import glanceclient.exc
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import sslutils
from oslo_utils import excutils
from oslo_utils import netutils
from oslo_utils import timeutils
import six
from six.moves import range
import six.moves.urllib.parse as urlparse
from nova import exception
from nova.i18n import _LE, _LI, _LW
import nova.image.download as image_xfers
# Configuration options controlling how nova talks to the glance image
# service; registered under the [glance] group of nova.conf below.
glance_opts = [
    cfg.StrOpt('host',
               default='$my_ip',
               help='Default glance hostname or IP address'),
    cfg.IntOpt('port',
               default=9292,
               min=1,
               max=65535,
               help='Default glance port'),
    cfg.StrOpt('protocol',
               default='http',
               choices=('http', 'https'),
               help='Default protocol to use when connecting to glance. '
                    'Set to https for SSL.'),
    cfg.ListOpt('api_servers',
                help='A list of the glance api servers available to nova. '
                     'Prefix with https:// for ssl-based glance api servers. '
                     '([hostname|ip]:port)'),
    cfg.BoolOpt('api_insecure',
                default=False,
                help='Allow to perform insecure SSL (https) requests to '
                     'glance'),
    cfg.IntOpt('num_retries',
               default=0,
               help='Number of retries when uploading / downloading an image '
                    'to / from glance.'),
    cfg.ListOpt('allowed_direct_url_schemes',
                default=[],
                help='A list of url scheme that can be downloaded directly '
                     'via the direct_url. Currently supported schemes: '
                     '[file].'),
]

# Module-level logger and the global config object; registering the options
# here makes CONF.glance.* available to the rest of this module.
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(glance_opts, 'glance')
CONF.import_opt('auth_strategy', 'nova.api.auth')
CONF.import_opt('my_ip', 'nova.netconf')
def generate_glance_url():
    """Generate the URL to glance from the configured host/port/protocol."""
    host = CONF.glance.host
    if netutils.is_valid_ipv6(host):
        # Raw IPv6 addresses must be bracketed before a port is appended.
        host = '[%s]' % host
    return "%s://%s:%d" % (CONF.glance.protocol, host, CONF.glance.port)
def generate_image_url(image_ref):
    """Generate an image URL for *image_ref* on the configured glance server."""
    base_url = generate_glance_url()
    return "%s/images/%s" % (base_url, image_ref)
def _parse_image_ref(image_href):
"""Parse an image href into composite parts.
:param image_href: href of an image
:returns: a tuple of the form (image_id, host, port)
:raises ValueError
"""
o = urlparse.urlparse(image_href)
port = o.port or 80
host = o.netloc.rsplit(':', 1)[0]
image_id = o.path.split('/')[-1]
use_ssl = (o.scheme == 'https')
return (image_id, host, port, use_ssl)
def generate_identity_headers(context, status='Confirmed'):
    """Build the keystone identity headers forwarded to glance for *context*."""
    joined_roles = ','.join(context.roles)
    headers = {
        'X-Auth-Token': getattr(context, 'auth_token', None),
        'X-User-Id': getattr(context, 'user', None),
        'X-Tenant-Id': getattr(context, 'tenant', None),
        'X-Roles': joined_roles,
        'X-Identity-Status': status,
    }
    return headers
def _create_glance_client(context, host, port, use_ssl, version=1):
    """Instantiate a new glanceclient.Client object.

    :param context: request context supplying the auth token and identity.
    :param host: glance server hostname or IP (bare IPv6 is bracketed).
    :param port: glance server port.
    :param use_ssl: when True, connect over https using the [ssl] config
        options for client certificate/key/CA settings.
    :param version: glance API version for the client (default 1).
    """
    params = {}
    if use_ssl:
        scheme = 'https'
        # https specific params
        params['insecure'] = CONF.glance.api_insecure
        params['ssl_compression'] = False
        sslutils.is_enabled(CONF)
        if CONF.ssl.cert_file:
            params['cert_file'] = CONF.ssl.cert_file
        if CONF.ssl.key_file:
            params['key_file'] = CONF.ssl.key_file
        if CONF.ssl.ca_file:
            params['cacert'] = CONF.ssl.ca_file
    else:
        scheme = 'http'

    if CONF.auth_strategy == 'keystone':
        # NOTE(isethi): Glanceclient <= 0.9.0.49 accepts only
        # keyword 'token', but later versions accept both the
        # header 'X-Auth-Token' and 'token'
        params['token'] = context.auth_token
        params['identity_headers'] = generate_identity_headers(context)
    if netutils.is_valid_ipv6(host):
        # if so, it is ipv6 address, need to wrap it with '[]'
        host = '[%s]' % host
    endpoint = '%s://%s:%s' % (scheme, host, port)
    # NOTE(review): the block below is dead, experimental
    # keystone-to-keystone federation code left commented out; it is never
    # executed.
    #
    # from keystoneauth1 import identity
    # from keystoneauth1 import session as ks
    # from keystoneauth1.identity.v3.k2k import Keystone2Keystone
    #
    # if hasattr(context, 'blabla_service_provider'):
    #     idp_auth = identity.Token(auth_url='http://localhost:35357',
    #                               token=context.auth_token,
    #                               project_id=context.tenant)
    #
    #     auth = Keystone2Keystone(idp_auth,
    #                              context.service_provider,
    #                              project_name='admin',
    #                              project_domain_id='default')
    #
    #     session = ks.Session(auth=auth)
    #     return glanceclient.Client(str(version), session=session)
    return glanceclient.Client(str(version), endpoint, **params)
def get_api_servers():
    """Shuffle a list of CONF.glance.api_servers and return an iterator
    that will cycle through the list, looping around to the beginning
    if necessary.

    Each configured server is normalized to a (host, port, use_ssl)
    tuple; bracketed IPv6 hosts are unwrapped.
    """
    api_servers = []
    configured_servers = (['%s:%s' % (CONF.glance.host, CONF.glance.port)]
                          if CONF.glance.api_servers is None
                          else CONF.glance.api_servers)
    for api_server in configured_servers:
        if '//' not in api_server:
            api_server = 'http://' + api_server
        o = urlparse.urlparse(api_server)
        use_ssl = (o.scheme == 'https')
        # Default to the scheme's well-known port instead of always 80,
        # so an 'https://host' entry yields host:443 rather than a
        # broken https endpoint on port 80.
        port = o.port or (443 if use_ssl else 80)
        host = o.netloc.rsplit(':', 1)[0]
        if host[0] == '[' and host[-1] == ']':
            host = host[1:-1]
        api_servers.append((host, port, use_ssl))
    random.shuffle(api_servers)
    return itertools.cycle(api_servers)
class GlanceClientWrapper(object):
    """Glance client wrapper class that implements retries.

    The wrapper works in one of two modes:

    * static: when a ``host`` is supplied to ``__init__``, one client is
      created up front and reused for every call;
    * one-time: otherwise, a fresh client is created per call, cycling
      through the configured glance API servers.
    """
    def __init__(self, context=None, host=None, port=None, use_ssl=False,
                 version=1):
        # A non-None host pins this wrapper to a single glance server;
        # otherwise clients are created lazily, per call, in call().
        if host is not None:
            self.client = self._create_static_client(context,
                                                     host, port,
                                                     use_ssl, version)
        else:
            self.client = None
        self.api_servers = None
    def _create_static_client(self, context, host, port, use_ssl, version):
        """Create a client that we'll use for every call."""
        # Remember the connection parameters for error reporting in call().
        self.host = host
        self.port = port
        self.use_ssl = use_ssl
        self.version = version
        return _create_glance_client(context,
                                     self.host, self.port,
                                     self.use_ssl, self.version)
    def _create_onetime_client(self, context, version):
        """Create a client that will be used for one call."""
        # Build the shuffled, endlessly-cycling server iterator on first use.
        if self.api_servers is None:
            self.api_servers = get_api_servers()
        self.host, self.port, self.use_ssl = next(self.api_servers)
        return _create_glance_client(context,
                                     self.host, self.port,
                                     self.use_ssl, version)
    def call(self, context, version, method, *args, **kwargs):
        """Call a glance client method. If we get a connection error,
        retry the request according to CONF.glance.num_retries.

        :param context: request context passed through to the client
        :param version: glance API version used for this call
        :param method: name of the method invoked on ``client.images``
        :raises: exception.GlanceConnectionFailed when every attempt
                 fails with a connection-level error; other client
                 errors propagate unchanged
        """
        # Only connection-level failures are retried; API errors are not.
        retry_excs = (glanceclient.exc.ServiceUnavailable,
                      glanceclient.exc.InvalidEndpoint,
                      glanceclient.exc.CommunicationError)
        retries = CONF.glance.num_retries
        if retries < 0:
            LOG.warning(_LW("Treating negative config value (%(retries)s) for "
                            "'glance.num_retries' as 0."),
                        {'retries': retries})
            retries = 0
        num_attempts = retries + 1
        for attempt in range(1, num_attempts + 1):
            # Static mode reuses self.client; one-time mode picks the next
            # server from the cycle for each attempt.
            client = self.client or self._create_onetime_client(context,
                                                                version)
            try:
                return getattr(client.images, method)(*args, **kwargs)
            except retry_excs as e:
                host = self.host
                port = self.port
                if attempt < num_attempts:
                    extra = "retrying"
                else:
                    extra = 'done trying'
                LOG.exception(_LE("Error contacting glance server "
                                  "'%(host)s:%(port)s' for '%(method)s', "
                                  "%(extra)s."),
                              {'host': host, 'port': port,
                               'method': method, 'extra': extra})
                if attempt == num_attempts:
                    raise exception.GlanceConnectionFailed(
                        host=host, port=port, reason=six.text_type(e))
                # Brief pause before trying the next server / next attempt.
                time.sleep(1)
class GlanceImageService(object):
    """Provides storage and retrieval of disk image objects within Glance."""
    def __init__(self, client=None):
        # Use the supplied client, or a wrapper that retries across the
        # configured glance API servers.
        self._client = client or GlanceClientWrapper()
        # NOTE(jbresnah) build the table of download handlers at the beginning
        # so that operators can catch errors at load time rather than whenever
        # a user attempts to use a module. Note this cannot be done in glance
        # space when this python module is loaded because the download module
        # may require configuration options to be parsed.
        self._download_handlers = {}
        download_modules = image_xfers.load_transfer_modules()
        for scheme, mod in six.iteritems(download_modules):
            # Only wire up handlers for schemes the operator explicitly
            # allows via CONF.glance.allowed_direct_url_schemes.
            if scheme not in CONF.glance.allowed_direct_url_schemes:
                continue
            try:
                self._download_handlers[scheme] = mod.get_download_handler()
            except Exception as ex:
                LOG.error(_LE('When loading the module %(module_str)s the '
                              'following error occurred: %(ex)s'),
                          {'module_str': str(mod), 'ex': ex})
    def detail(self, context, **kwargs):
        """Calls out to Glance for a list of detailed image information.

        :param context: the request context to pass to the image client
        :param kwargs: query options (filters, marker, limit, page_size,
                       sort_key, sort_dir); anything else is dropped
        :returns: list of translated image dicts visible to the caller
        """
        params = _extract_query_params(kwargs)
        try:
            images = self._client.call(context, 1, 'list', **params)
        except Exception:
            _reraise_translated_exception()
        _images = []
        for image in images:
            # Filter out images the caller is not allowed to see.
            if _is_image_available(context, image):
                _images.append(_translate_from_glance(image))
        return _images
    def show(self, context, image_id, include_locations=False,
             show_deleted=True):
        """Returns a dict with image data for the given opaque image id.
        :param context: The context object to pass to image client
        :param image_id: The UUID of the image
        :param include_locations: (Optional) include locations in the returned
                                  dict of information if the image service API
                                  supports it. If the image service API does
                                  not support the locations attribute, it will
                                  still be included in the returned dict, as an
                                  empty list.
        :param show_deleted: (Optional) show the image even the status of
                             image is deleted.
        """
        # Locations are only exposed by the v2 API.
        version = 1
        if include_locations:
            version = 2
        try:
            image = self._client.call(context, version, 'get', image_id)
        except Exception:
            _reraise_translated_image_exception(image_id)
        if not show_deleted and getattr(image, 'deleted', False):
            raise exception.ImageNotFound(image_id=image_id)
        if not _is_image_available(context, image):
            raise exception.ImageNotFound(image_id=image_id)
        image = _translate_from_glance(image,
                                       include_locations=include_locations)
        if include_locations:
            # Fold 'direct_url' into the locations list so callers only
            # need to look in one place.
            locations = image.get('locations', None) or []
            du = image.get('direct_url', None)
            if du:
                locations.append({'url': du, 'metadata': {}})
            image['locations'] = locations
        return image
    def _get_transfer_module(self, scheme):
        """Return the download handler registered for *scheme*.

        Returns None when no handler is registered, or on an unexpected
        lookup failure (which is logged).
        """
        try:
            return self._download_handlers[scheme]
        except KeyError:
            return None
        except Exception:
            LOG.error(_LE("Failed to instantiate the download handler "
                          "for %(scheme)s"), {'scheme': scheme})
            return
    def download(self, context, image_id, data=None, dst_path=None):
        """Calls out to Glance for data and writes data.

        When direct-URL transfer is allowed and a destination path is
        given, the registered scheme handlers are tried first; otherwise
        (or on handler failure) the bytes are streamed through the
        glance API. The raw chunk iterator is returned only when neither
        ``data`` nor ``dst_path`` is supplied.
        """
        if CONF.glance.allowed_direct_url_schemes and dst_path is not None:
            image = self.show(context, image_id, include_locations=True)
            for entry in image.get('locations', []):
                loc_url = entry['url']
                loc_meta = entry['metadata']
                o = urlparse.urlparse(loc_url)
                xfer_mod = self._get_transfer_module(o.scheme)
                if xfer_mod:
                    try:
                        xfer_mod.download(context, o, dst_path, loc_meta)
                        LOG.info(_LI("Successfully transferred "
                                     "using %s"), o.scheme)
                        return
                    except Exception:
                        # Fall through to the API download below.
                        LOG.exception(_LE("Download image error"))
        try:
            image_chunks = self._client.call(context, 1, 'data', image_id)
        except Exception:
            _reraise_translated_image_exception(image_id)
        close_file = False
        if data is None and dst_path:
            data = open(dst_path, 'wb')
            close_file = True
        if data is None:
            return image_chunks
        else:
            try:
                for chunk in image_chunks:
                    data.write(chunk)
            except Exception as ex:
                with excutils.save_and_reraise_exception():
                    LOG.error(_LE("Error writing to %(path)s: %(exception)s"),
                              {'path': dst_path, 'exception': ex})
            finally:
                # Only close file objects this method opened itself.
                if close_file:
                    data.close()
    def create(self, context, image_meta, data=None):
        """Store the image data and return the new image object.

        :param context: the request context to pass to the image client
        :param image_meta: nova-side image metadata dict
        :param data: optional image payload to upload with the metadata
        :returns: translated metadata of the newly-created image
        """
        sent_service_image_meta = _translate_to_glance(image_meta)
        if data:
            sent_service_image_meta['data'] = data
        try:
            recv_service_image_meta = self._client.call(
                context, 1, 'create', **sent_service_image_meta)
        except glanceclient.exc.HTTPException:
            _reraise_translated_exception()
        return _translate_from_glance(recv_service_image_meta)
    def update(self, context, image_id, image_meta, data=None,
               purge_props=True):
        """Modify the given image with the new data.

        :param context: the request context to pass to the image client
        :param image_id: UUID of the image to update
        :param image_meta: nova-side metadata to apply
        :param data: optional new image payload
        :param purge_props: when True, properties absent from image_meta
                            are removed from the image
        :returns: translated metadata of the updated image
        """
        image_meta = _translate_to_glance(image_meta)
        image_meta['purge_props'] = purge_props
        # NOTE(bcwaldon): id is not an editable field, but it is likely to be
        # passed in by calling code. Let's be nice and ignore it.
        image_meta.pop('id', None)
        if data:
            image_meta['data'] = data
        try:
            image_meta = self._client.call(context, 1, 'update',
                                           image_id, **image_meta)
        except Exception:
            _reraise_translated_image_exception(image_id)
        else:
            return _translate_from_glance(image_meta)
    def delete(self, context, image_id):
        """Delete the given image.
        :raises: ImageNotFound if the image does not exist.
        :raises: NotAuthorized if the user is not an owner.
        :raises: ImageNotAuthorized if the user is not authorized.
        """
        try:
            self._client.call(context, 1, 'delete', image_id)
        except glanceclient.exc.NotFound:
            raise exception.ImageNotFound(image_id=image_id)
        except glanceclient.exc.HTTPForbidden:
            raise exception.ImageNotAuthorized(image_id=image_id)
        return True
def _extract_query_params(params):
_params = {}
accepted_params = ('filters', 'marker', 'limit',
'page_size', 'sort_key', 'sort_dir')
for param in accepted_params:
if params.get(param):
_params[param] = params.get(param)
# ensure filters is a dict
_params.setdefault('filters', {})
# NOTE(vish): don't filter out private images
_params['filters'].setdefault('is_public', 'none')
return _params
def _is_image_available(context, image):
"""Check image availability.
This check is needed in case Nova and Glance are deployed
without authentication turned on.
"""
# The presence of an auth token implies this is an authenticated
# request and we need not handle the noauth use-case.
if hasattr(context, 'auth_token') and context.auth_token:
return True
def _is_image_public(image):
# NOTE(jaypipes) V2 Glance API replaced the is_public attribute
# with a visibility attribute. We do this here to prevent the
# glanceclient for a V2 image model from throwing an
# exception from warlock when trying to access an is_public
# attribute.
if hasattr(image, 'visibility'):
return str(image.visibility).lower() == 'public'
else:
return image.is_public
if context.is_admin or _is_image_public(image):
return True
properties = image.properties
if context.project_id and ('owner_id' in properties):
return str(properties['owner_id']) == str(context.project_id)
if context.project_id and ('project_id' in properties):
return str(properties['project_id']) == str(context.project_id)
try:
user_id = properties['user_id']
except KeyError:
return False
return str(user_id) == str(context.user_id)
def _translate_to_glance(image_meta):
    """Prepare nova-side image metadata for glance: JSON-encode complex
    properties and strip glance's read-only fields."""
    return _remove_read_only(_convert_to_string(image_meta))
def _translate_from_glance(image, include_locations=False):
    """Convert a glance image object into a nova image metadata dict:
    copy the known attributes, parse timestamps into datetimes, and
    decode JSON-encoded properties back into objects."""
    meta = _extract_attributes(image,
                               include_locations=include_locations)
    meta = _convert_timestamps_to_datetimes(meta)
    return _convert_from_string(meta)
def _convert_timestamps_to_datetimes(image_meta):
    """Returns image with timestamp fields converted to datetime objects."""
    for field in ('created_at', 'updated_at', 'deleted_at'):
        raw = image_meta.get(field)
        if raw:
            image_meta[field] = timeutils.parse_isotime(raw)
    return image_meta
# NOTE(bcwaldon): used to store non-string data in glance metadata
def _json_loads(properties, attr):
    """Decode properties[attr] in place if it is a JSON string."""
    value = properties[attr]
    if isinstance(value, six.string_types):
        properties[attr] = jsonutils.loads(value)
def _json_dumps(properties, attr):
    """Encode properties[attr] in place as JSON unless it already is a string."""
    value = properties[attr]
    if not isinstance(value, six.string_types):
        properties[attr] = jsonutils.dumps(value)
_CONVERT_PROPS = ('block_device_mapping', 'mappings')
def _convert(method, metadata):
metadata = copy.deepcopy(metadata)
properties = metadata.get('properties')
if properties:
for attr in _CONVERT_PROPS:
if attr in properties:
method(properties, attr)
return metadata
def _convert_from_string(metadata):
    """Decode JSON-string properties (see _CONVERT_PROPS) back to objects."""
    return _convert(_json_loads, metadata)
def _convert_to_string(metadata):
    """Encode complex properties (see _CONVERT_PROPS) as JSON strings."""
    return _convert(_json_dumps, metadata)
def _extract_attributes(image, include_locations=False):
# NOTE(hdd): If a key is not found, base.Resource.__getattr__() may perform
# a get(), resulting in a useless request back to glance. This list is
# therefore sorted, with dependent attributes as the end
# 'deleted_at' depends on 'deleted'
# 'checksum' depends on 'status' == 'active'
IMAGE_ATTRIBUTES = ['size', 'disk_format', 'owner',
'container_format', 'status', 'id',
'name', 'created_at', 'updated_at',
'deleted', 'deleted_at', 'checksum',
'min_disk', 'min_ram', 'is_public',
'direct_url', 'locations']
queued = getattr(image, 'status') == 'queued'
queued_exclude_attrs = ['disk_format', 'container_format']
include_locations_attrs = ['direct_url', 'locations']
output = {}
for attr in IMAGE_ATTRIBUTES:
if attr == 'deleted_at' and not output['deleted']:
output[attr] = None
elif attr == 'checksum' and output['status'] != 'active':
output[attr] = None
# image may not have 'name' attr
elif attr == 'name':
output[attr] = getattr(image, attr, None)
# NOTE(liusheng): queued image may not have these attributes and 'name'
elif queued and attr in queued_exclude_attrs:
output[attr] = getattr(image, attr, None)
# NOTE(mriedem): Only get location attrs if including locations.
elif attr in include_locations_attrs:
if include_locations:
output[attr] = getattr(image, attr, None)
# NOTE(mdorman): 'size' attribute must not be 'None', so use 0 instead
elif attr == 'size':
output[attr] = getattr(image, attr) or 0
else:
# NOTE(xarses): Anything that is caught with the default value
# will result in a additional lookup to glance for said attr.
# Notable attributes that could have this issue:
# disk_format, container_format, name, deleted, checksum
output[attr] = getattr(image, attr, None)
output['properties'] = getattr(image, 'properties', {})
return output
def _remove_read_only(image_meta):
IMAGE_ATTRIBUTES = ['status', 'updated_at', 'created_at', 'deleted_at']
output = copy.deepcopy(image_meta)
for attr in IMAGE_ATTRIBUTES:
if attr in output:
del output[attr]
return output
def _reraise_translated_image_exception(image_id):
    """Re-raise the in-flight exception as the matching nova image
    exception, preserving the original traceback."""
    _exc_type, value, trace = sys.exc_info()
    six.reraise(_translate_image_exception(image_id, value), None, trace)
def _reraise_translated_exception():
    """Re-raise the in-flight exception as the matching nova exception,
    preserving the original traceback."""
    _exc_type, value, trace = sys.exc_info()
    six.reraise(_translate_plain_exception(value), None, trace)
def _translate_image_exception(image_id, exc_value):
    """Map a glanceclient error onto the equivalent nova image exception.

    Unrecognized exceptions are returned unchanged.
    """
    unauthorized = (glanceclient.exc.Forbidden,
                    glanceclient.exc.Unauthorized)
    if isinstance(exc_value, unauthorized):
        return exception.ImageNotAuthorized(image_id=image_id)
    if isinstance(exc_value, glanceclient.exc.NotFound):
        return exception.ImageNotFound(image_id=image_id)
    if isinstance(exc_value, glanceclient.exc.BadRequest):
        return exception.Invalid(six.text_type(exc_value))
    return exc_value
def _translate_plain_exception(exc_value):
    """Map a glanceclient error onto the equivalent generic nova
    exception; unrecognized exceptions are returned unchanged."""
    unauthorized = (glanceclient.exc.Forbidden,
                    glanceclient.exc.Unauthorized)
    if isinstance(exc_value, unauthorized):
        return exception.Forbidden(six.text_type(exc_value))
    if isinstance(exc_value, glanceclient.exc.NotFound):
        return exception.NotFound(six.text_type(exc_value))
    if isinstance(exc_value, glanceclient.exc.BadRequest):
        return exception.Invalid(six.text_type(exc_value))
    return exc_value
def get_remote_image_service(context, image_href):
    """Create an image_service and parse the id from the given image_href.

    The image_href param can be an href of the form
    'http://example.com:9292/v1/images/b8b2c6f7-7345-4e2f-afa2-eedaba9cbbe3',
    or just an id such as 'b8b2c6f7-7345-4e2f-afa2-eedaba9cbbe3'. If the
    image_href is a standalone id, then the default image service is returned.

    :param image_href: href that describes the location of an image
    :returns: a tuple of the form (image_service, image_id)
    """
    # NOTE(bcwaldon): anything without a '/' is treated as a standalone
    # image ID rather than a URI.
    if '/' not in str(image_href):
        return get_default_image_service(), image_href
    try:
        image_id, glance_host, glance_port, use_ssl = \
            _parse_image_ref(image_href)
        glance_client = GlanceClientWrapper(context=context,
                                            host=glance_host,
                                            port=glance_port,
                                            use_ssl=use_ssl)
    except ValueError:
        raise exception.InvalidImageRef(image_href=image_href)
    return GlanceImageService(client=glance_client), image_id
def get_default_image_service():
    """Return a GlanceImageService backed by the configured API servers."""
    return GlanceImageService()
class UpdateGlanceImage(object):
    """Task that uploads a stream to an existing glance image.

    An instance captures everything needed for the upload; calling
    ``start()`` resolves the image service for ``image_id`` and pushes
    the stream with the supplied metadata, keeping existing properties
    (``purge_props=False``).
    """

    def __init__(self, context, image_id, metadata, stream):
        self.context = context
        self.image_id = image_id
        self.metadata = metadata
        self.image_stream = stream

    def start(self):
        """Resolve the image service and perform the update."""
        image_service, image_id = get_remote_image_service(self.context,
                                                           self.image_id)
        image_service.update(self.context, image_id, self.metadata,
                             self.image_stream, purge_props=False)
| apache-2.0 | -405,195,565,436,879,100 | 37.002853 | 79 | 0.589902 | false |
steny138/SE3Borrow | home/decorator.py | 1 | 1389 | # -*- coding: utf-8 -*-
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.utils.decorators import available_attrs
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect
from django.contrib import messages
default_message = "您沒有管理者權限。"
def user_passes_test(test_func, message=default_message, redirect_url="/"):
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
decorated_view_func = login_required(request)
if not decorated_view_func.user.is_authenticated():
return decorated_view_func(request) # return redirect to signin
if not test_func(request.user):
messages.error(request, message)
return redirect(redirect_url)
return view_func(request, *args, **kwargs)
return _wrapped_view
return decorator
def super_login_required(view_func=None, message=default_message, redirect_url="/"):
super_login_func = user_passes_test(
lambda u: u.is_superuser,
message=message,
redirect_url=redirect_url
)
if view_func:
return super_login_func(view_func)
return super_login_func
| apache-2.0 | -5,133,462,619,210,425,000 | 34.153846 | 84 | 0.665937 | false |
opendatakosovo/bpo | utils/importer/import_idams.py | 1 | 6870 | # -*- coding: UTF-8 -*-
import csv
import os
import re
from datetime import datetime
from pymongo import MongoClient
import json
# Connect to defualt local instance of MongoClient
client = MongoClient()
# Get database and collection
db = client.bpo
collection = db.idams
def parse():
collection.remove({})
print "Importing Data"
dir_path = os.path.dirname(os.path.realpath(__file__))
count = 0
other_v_count = 0
for filename in os.listdir(dir_path + '/idams/'):
print filename
json_obj = None
if (filename.endswith(".json")):
with open(dir_path + '/idams/' + filename, 'rb') as jsonfile:
json_obj = json.load(jsonfile)
for elem in json_obj:
new_json = {}
if 'actors' in elem['_source']:
if len(elem['_source']['smart_tags']['smart_tags']) > 0 and \
elem['_source']['smart_tags']['smart_tags'][0] != '' and elem['_source']['smart_tags']['smart_tags'][0] =='Terrorism':
new_json['incident_date'] = datetime.strptime(elem['_source']['summary']['date'], '%Y-%m-%d')
new_json['violence_actor'] = elem['_source']['actors']['responders']
new_json['description'] = elem['_source']['summary']['description']
new_json['lat'] = elem['_source']['location_and_source']['latitude']
new_json['lon'] = elem['_source']['location_and_source']['longitude']
new_json['source'] = elem['_source']['location_and_source']['source_url']
new_json['violence_type'] = 'Violent Extremism'
if 'Islamic State of Iraq and Syria' in elem['_source']['actors']['instigators']:
new_json['responders'] = ['New-JMB']
else:
new_json['responders'] = elem['_source']['actors']['instigators']
new_json['causes'] = elem['_source']['causes_of_incident']['causes']
new_json['property_destroyed_type'] = []
new_json['injuries_count'] = 0
new_json['deaths_count'] = 0
new_json['property_destroyed_count'] = 0
if 'victims_and_perpetrators' in elem['_source']:
if len(elem['_source']['victims_and_perpetrators']) > 0:
if elem['_source']['victims_and_perpetrators'][0]['consequence'] == 'Death':
new_json['deaths_count'] = elem['_source']['victims_and_perpetrators'][0]['victims'][
'count']
elif elem['_source']['victims_and_perpetrators'][0]['consequence'] == 'Injury':
new_json['injuries_count'] = elem['_source']['victims_and_perpetrators'][0]['victims'][
'count']
elif elem['_source']['victims_and_perpetrators'][0][
'consequence'] == 'Private property damaged' or \
elem['_source']['victims_and_perpetrators'][0][
'consequence'] == 'Public property damaged':
new_json['property_destroyed_count'] = \
elem['_source']['victims_and_perpetrators'][0]['victims']['count']
new_json['division'] = elem['_source']['location_and_source']['division']
new_json['district'] = elem['_source']['location_and_source']['district']
new_json['upazila'] = elem['_source']['location_and_source']['upazila']
count = count + 1
elif elem['_source']['summary']['incident_type'] in ['Political dispute', 'Border incident', 'IED Attack', 'Arson attack', 'Mob Violence', 'Violent crime']:
new_json['incident_date'] = datetime.strptime(elem['_source']['summary']['date'], '%Y-%m-%d')
new_json['violence_actor'] = elem['_source']['actors']['responders']
new_json['description'] = elem['_source']['summary']['description']
new_json['lat'] = elem['_source']['location_and_source']['latitude']
new_json['lon'] = elem['_source']['location_and_source']['longitude']
new_json['source'] = elem['_source']['location_and_source']['source_url']
if elem['_source']['summary']['incident_type'] == 'Violent crime':
new_json['violence_type'] = 'Violent Crime - Homicides'
else:
new_json['violence_type'] = elem['_source']['summary']['incident_type']
if 'Islamic State of Iraq and Syria' in elem['_source']['actors']['instigators']:
new_json['responders'] = ['New-JMB']
else:
new_json['responders'] = elem['_source']['actors']['instigators']
new_json['causes'] = elem['_source']['causes_of_incident']['causes']
new_json['property_destroyed_type'] = []
new_json['injuries_count'] = 0
new_json['deaths_count'] = 0
new_json['property_destroyed_count'] = 0
if 'victims_and_perpetrators' in elem['_source']:
if len(elem['_source']['victims_and_perpetrators']) > 0:
if elem['_source']['victims_and_perpetrators'][0]['consequence'] == 'Death':
new_json['deaths_count'] = elem['_source']['victims_and_perpetrators'][0]['victims'][
'count']
elif elem['_source']['victims_and_perpetrators'][0]['consequence'] == 'Injury':
new_json['injuries_count'] = elem['_source']['victims_and_perpetrators'][0]['victims']['count']
elif elem['_source']['victims_and_perpetrators'][0]['consequence']=='Private property damaged' or elem['_source']['victims_and_perpetrators'][0]['consequence']=='Public property damaged':
new_json['property_destroyed_count'] = elem['_source']['victims_and_perpetrators'][0]['victims']['count']
new_json['division'] = elem['_source']['location_and_source']['division']
new_json['district'] = elem['_source']['location_and_source']['district']
new_json['upazila'] = elem['_source']['location_and_source']['upazila']
other_v_count = other_v_count + 1
else:
pass
if new_json:
collection.insert(new_json)
parse() | cc0-1.0 | -4,839,804,771,402,337,000 | 59.80531 | 215 | 0.496943 | false |
adityahase/frappe | frappe/hooks.py | 1 | 14046 | from __future__ import unicode_literals
from . import __version__ as app_version
app_name = "frappe"
app_title = "Frappe Framework"
app_publisher = "Frappe Technologies"
app_description = "Full stack web framework with Python, Javascript, MariaDB, Redis, Node"
app_icon = "octicon octicon-circuit-board"
app_color = "orange"
source_link = "https://github.com/frappe/frappe"
app_license = "MIT"
app_logo_url = '/assets/frappe/images/frappe-framework-logo.png'
develop_version = '13.x.x-develop'
app_email = "[email protected]"
docs_app = "frappe_io"
translator_url = "https://translatev2.erpnext.com"
before_install = "frappe.utils.install.before_install"
after_install = "frappe.utils.install.after_install"
page_js = {
"setup-wizard": "public/js/frappe/setup_wizard.js"
}
# website
app_include_js = [
"assets/js/libs.min.js",
"assets/js/desk.min.js",
"assets/js/list.min.js",
"assets/js/form.min.js",
"assets/js/control.min.js",
"assets/js/report.min.js",
]
app_include_css = [
"assets/css/desk.min.css",
"assets/css/list.min.css",
"assets/css/form.min.css",
"assets/css/report.min.css",
]
doctype_js = {
"Web Page": "public/js/frappe/utils/web_template.js",
"Website Settings": "public/js/frappe/utils/web_template.js"
}
web_include_js = [
"website_script.js"
]
web_include_css = []
website_route_rules = [
{"from_route": "/blog/<category>", "to_route": "Blog Post"},
{"from_route": "/kb/<category>", "to_route": "Help Article"},
{"from_route": "/newsletters", "to_route": "Newsletter"},
{"from_route": "/profile", "to_route": "me"},
]
base_template = "templates/base.html"
write_file_keys = ["file_url", "file_name"]
notification_config = "frappe.core.notifications.get_notification_config"
before_tests = "frappe.utils.install.before_tests"
email_append_to = ["Event", "ToDo", "Communication"]
get_rooms = 'frappe.chat.doctype.chat_room.chat_room.get_rooms'
calendars = ["Event"]
leaderboards = "frappe.desk.leaderboard.get_leaderboards"
# login
on_session_creation = [
"frappe.core.doctype.activity_log.feed.login_feed",
"frappe.core.doctype.user.user.notify_admin_access_to_system_manager"
]
on_logout = "frappe.core.doctype.session_default_settings.session_default_settings.clear_session_defaults"
# permissions
permission_query_conditions = {
"Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
"ToDo": "frappe.desk.doctype.todo.todo.get_permission_query_conditions",
"User": "frappe.core.doctype.user.user.get_permission_query_conditions",
"Dashboard Settings": "frappe.desk.doctype.dashboard_settings.dashboard_settings.get_permission_query_conditions",
"Notification Log": "frappe.desk.doctype.notification_log.notification_log.get_permission_query_conditions",
"Dashboard Chart": "frappe.desk.doctype.dashboard_chart.dashboard_chart.get_permission_query_conditions",
"Number Card": "frappe.desk.doctype.number_card.number_card.get_permission_query_conditions",
"Notification Settings": "frappe.desk.doctype.notification_settings.notification_settings.get_permission_query_conditions",
"Note": "frappe.desk.doctype.note.note.get_permission_query_conditions",
"Kanban Board": "frappe.desk.doctype.kanban_board.kanban_board.get_permission_query_conditions",
"Contact": "frappe.contacts.address_and_contact.get_permission_query_conditions_for_contact",
"Address": "frappe.contacts.address_and_contact.get_permission_query_conditions_for_address",
"Communication": "frappe.core.doctype.communication.communication.get_permission_query_conditions_for_communication",
"Workflow Action": "frappe.workflow.doctype.workflow_action.workflow_action.get_permission_query_conditions",
"Prepared Report": "frappe.core.doctype.prepared_report.prepared_report.get_permission_query_condition"
}
has_permission = {
"Event": "frappe.desk.doctype.event.event.has_permission",
"ToDo": "frappe.desk.doctype.todo.todo.has_permission",
"User": "frappe.core.doctype.user.user.has_permission",
"Note": "frappe.desk.doctype.note.note.has_permission",
"Dashboard Chart": "frappe.desk.doctype.dashboard_chart.dashboard_chart.has_permission",
"Number Card": "frappe.desk.doctype.number_card.number_card.has_permission",
"Kanban Board": "frappe.desk.doctype.kanban_board.kanban_board.has_permission",
"Contact": "frappe.contacts.address_and_contact.has_permission",
"Address": "frappe.contacts.address_and_contact.has_permission",
"Communication": "frappe.core.doctype.communication.communication.has_permission",
"Workflow Action": "frappe.workflow.doctype.workflow_action.workflow_action.has_permission",
"File": "frappe.core.doctype.file.file.has_permission",
"Prepared Report": "frappe.core.doctype.prepared_report.prepared_report.has_permission"
}
has_website_permission = {
"Address": "frappe.contacts.doctype.address.address.has_website_permission"
}
standard_queries = {
"User": "frappe.core.doctype.user.user.user_query"
}
doc_events = {
"*": {
"after_insert": [
"frappe.event_streaming.doctype.event_update_log.event_update_log.notify_consumers"
],
"on_update": [
"frappe.desk.notifications.clear_doctype_notifications",
"frappe.core.doctype.activity_log.feed.update_feed",
"frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions",
"frappe.automation.doctype.assignment_rule.assignment_rule.apply",
"frappe.automation.doctype.milestone_tracker.milestone_tracker.evaluate_milestone",
"frappe.core.doctype.file.file.attach_files_to_document",
"frappe.event_streaming.doctype.event_update_log.event_update_log.notify_consumers",
"frappe.automation.doctype.assignment_rule.assignment_rule.update_due_date",
],
"after_rename": "frappe.desk.notifications.clear_doctype_notifications",
"on_cancel": [
"frappe.desk.notifications.clear_doctype_notifications",
"frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions"
],
"on_trash": [
"frappe.desk.notifications.clear_doctype_notifications",
"frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions",
"frappe.event_streaming.doctype.event_update_log.event_update_log.notify_consumers"
],
"on_change": [
"frappe.social.doctype.energy_point_rule.energy_point_rule.process_energy_points"
]
},
"Event": {
"after_insert": "frappe.integrations.doctype.google_calendar.google_calendar.insert_event_in_google_calendar",
"on_update": "frappe.integrations.doctype.google_calendar.google_calendar.update_event_in_google_calendar",
"on_trash": "frappe.integrations.doctype.google_calendar.google_calendar.delete_event_from_google_calendar",
},
"Contact": {
"after_insert": "frappe.integrations.doctype.google_contacts.google_contacts.insert_contacts_to_google_contacts",
"on_update": "frappe.integrations.doctype.google_contacts.google_contacts.update_contacts_to_google_contacts",
},
"DocType": {
"after_insert": "frappe.cache_manager.build_domain_restriced_doctype_cache",
"after_save": "frappe.cache_manager.build_domain_restriced_doctype_cache",
},
"Page": {
"after_insert": "frappe.cache_manager.build_domain_restriced_page_cache",
"after_save": "frappe.cache_manager.build_domain_restriced_page_cache",
}
}
scheduler_events = {
"cron": {
"0/15 * * * *": [
"frappe.oauth.delete_oauth2_data",
"frappe.website.doctype.web_page.web_page.check_publish_status",
"frappe.twofactor.delete_all_barcodes_for_users"
]
},
"all": [
"frappe.email.queue.flush",
"frappe.email.doctype.email_account.email_account.pull",
"frappe.email.doctype.email_account.email_account.notify_unreplied",
"frappe.integrations.doctype.razorpay_settings.razorpay_settings.capture_payment",
'frappe.utils.global_search.sync_global_search',
"frappe.monitor.flush",
],
"hourly": [
"frappe.model.utils.link_count.update_link_count",
'frappe.model.utils.user_settings.sync_user_settings',
"frappe.utils.error.collect_error_snapshots",
"frappe.desk.page.backups.backups.delete_downloadable_backups",
"frappe.deferred_insert.save_to_db",
"frappe.desk.form.document_follow.send_hourly_updates",
"frappe.integrations.doctype.google_calendar.google_calendar.sync",
"frappe.email.doctype.newsletter.newsletter.send_scheduled_email",
"frappe.utils.password.delete_password_reset_cache"
],
"daily": [
"frappe.email.queue.set_expiry_for_email_queue",
"frappe.desk.notifications.clear_notifications",
"frappe.core.doctype.error_log.error_log.set_old_logs_as_seen",
"frappe.desk.doctype.event.event.send_event_digest",
"frappe.sessions.clear_expired_sessions",
"frappe.email.doctype.notification.notification.trigger_daily_alerts",
"frappe.realtime.remove_old_task_logs",
"frappe.utils.scheduler.restrict_scheduler_events_if_dormant",
"frappe.email.doctype.auto_email_report.auto_email_report.send_daily",
"frappe.website.doctype.personal_data_deletion_request.personal_data_deletion_request.remove_unverified_record",
"frappe.desk.form.document_follow.send_daily_updates",
"frappe.social.doctype.energy_point_settings.energy_point_settings.allocate_review_points",
"frappe.integrations.doctype.google_contacts.google_contacts.sync",
"frappe.automation.doctype.auto_repeat.auto_repeat.make_auto_repeat_entry",
"frappe.automation.doctype.auto_repeat.auto_repeat.set_auto_repeat_as_completed",
"frappe.email.doctype.unhandled_email.unhandled_email.remove_old_unhandled_emails",
"frappe.core.doctype.prepared_report.prepared_report.delete_expired_prepared_reports",
"frappe.core.doctype.log_settings.log_settings.run_log_clean_up"
],
"daily_long": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_daily",
"frappe.utils.change_log.check_for_update",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_daily",
"frappe.integrations.doctype.google_drive.google_drive.daily_backup"
],
"weekly_long": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_weekly",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_weekly",
"frappe.desk.doctype.route_history.route_history.flush_old_route_records",
"frappe.desk.form.document_follow.send_weekly_updates",
"frappe.social.doctype.energy_point_log.energy_point_log.send_weekly_summary",
"frappe.integrations.doctype.google_drive.google_drive.weekly_backup"
],
"monthly": [
"frappe.email.doctype.auto_email_report.auto_email_report.send_monthly",
"frappe.social.doctype.energy_point_log.energy_point_log.send_monthly_summary"
],
"monthly_long": [
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_monthly"
]
}
# Maps a (page type, name) pair to the dotted path of the function that
# supplies translated strings for that doctype/page.
get_translated_dict = {
	("doctype", "System Settings"): "frappe.geo.country_info.get_translated_dict",
	("page", "setup-wizard"): "frappe.geo.country_info.get_translated_dict"
}
# Client-side sound effects: logical name, static asset URL and playback volume.
sounds = [
	{"name": "email", "src": "/assets/frappe/sounds/email.mp3", "volume": 0.1},
	{"name": "submit", "src": "/assets/frappe/sounds/submit.mp3", "volume": 0.1},
	{"name": "cancel", "src": "/assets/frappe/sounds/cancel.mp3", "volume": 0.1},
	{"name": "delete", "src": "/assets/frappe/sounds/delete.mp3", "volume": 0.05},
	{"name": "click", "src": "/assets/frappe/sounds/click.mp3", "volume": 0.05},
	{"name": "error", "src": "/assets/frappe/sounds/error.mp3", "volume": 0.1},
	{"name": "alert", "src": "/assets/frappe/sounds/alert.mp3", "volume": 0.2},
	# {"name": "chime", "src": "/assets/frappe/sounds/chime.mp3"},
	# frappe.chat sounds
	{ "name": "chat-message", "src": "/assets/frappe/sounds/chat-message.mp3", "volume": 0.1 },
	{ "name": "chat-notification", "src": "/assets/frappe/sounds/chat-notification.mp3", "volume": 0.1 }
	# frappe.chat sounds
]
# Dotted paths of the bot command parser classes in frappe.utils.bot.
bot_parsers = [
	'frappe.utils.bot.ShowNotificationBot',
	'frappe.utils.bot.GetOpenListBot',
	'frappe.utils.bot.ListBot',
	'frappe.utils.bot.FindBot',
	'frappe.utils.bot.CountBot'
]
# Handlers invoked when the setup wizard raises an exception
# (per their names, presumably one e-mails and one logs the failure).
setup_wizard_exception = [
	"frappe.desk.page.setup_wizard.setup_wizard.email_setup_wizard_exception",
	"frappe.desk.page.setup_wizard.setup_wizard.log_setup_wizard_exception"
]
# Callables run around a site migration.
# NOTE(review): invocation order/semantics are defined by the frappe hooks
# mechanism, not here -- confirm against the frappe hooks documentation.
before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute']
after_migrate = ['frappe.website.doctype.website_theme.website_theme.after_migrate']
# Two-factor authentication delivery methods offered to users.
otp_methods = ['OTP App','Email','SMS']
# Doctypes scanned when handling a personal-data (privacy) request.
# Each entry names the doctype, the field matched against the user
# ('match_field'), optional additional fields holding personal data
# ('personal_fields'), and whether the rule also applies to website
# users ('applies_to_website_user').
# NOTE(review): the consumer of this table lives outside this file
# (personal data deletion handling) -- verify semantics there.
user_privacy_documents = [
	{
		'doctype': 'File',
		'match_field': 'attached_to_name',
		'personal_fields': ['file_name', 'file_url'],
		'applies_to_website_user': 1
	},
	{
		'doctype': 'Email Group Member',
		'match_field': 'email',
	},
	{
		'doctype': 'Email Unsubscribe',
		'match_field': 'email',
	},
	{
		'doctype': 'Email Queue',
		'match_field': 'sender',
	},
	{
		'doctype': 'Email Queue Recipient',
		'match_field': 'recipient',
	},
	{
		'doctype': 'Contact',
		'match_field': 'email_id',
		'personal_fields': ['first_name', 'last_name', 'phone', 'mobile_no'],
	},
	{
		'doctype': 'Contact Email',
		'match_field': 'email_id',
	},
	{
		'doctype': 'Address',
		'match_field': 'email_id',
		'personal_fields': ['address_title', 'address_line1', 'address_line2', 'city', 'county', 'state', 'pincode',
			'phone', 'fax'],
	},
	{
		'doctype': 'Communication',
		'match_field': 'sender',
		'personal_fields': ['sender_full_name', 'phone_no', 'content'],
	},
	# Communication appears twice: matched once on sender, once on recipients.
	{
		'doctype': 'Communication',
		'match_field': 'recipients',
	},
	{
		'doctype': 'User',
		'match_field': 'name',
		'personal_fields': ['email', 'username', 'first_name', 'middle_name', 'last_name', 'full_name', 'birth_date',
			'user_image', 'phone', 'mobile_no', 'location', 'banner_image', 'interest', 'bio', 'email_signature'],
		'applies_to_website_user': 1
	},
]
# Doctypes whose records are indexed for global search, grouped by
# application module ("Default" here).
global_search_doctypes = {
	"Default": [
		{"doctype": "Contact"},
		{"doctype": "Address"},
		{"doctype": "ToDo"},
		{"doctype": "Note"},
		{"doctype": "Event"},
		{"doctype": "Blog Post"},
		{"doctype": "Dashboard"},
		{"doctype": "Country"},
		{"doctype": "Currency"},
		{"doctype": "Newsletter"},
		{"doctype": "Letter Head"},
		{"doctype": "Workflow"},
		{"doctype": "Web Page"},
		{"doctype": "Web Form"}
	]
}
| mit | -8,366,801,492,219,079,000 | 37.694215 | 124 | 0.725829 | false |
xaowoodenfish/python-1 | bigml/tests/test_12_public_model_prediction.py | 1 | 5658 | # -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2015 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Creating public model predictions
"""
from world import world, setup_module, teardown_module
import create_source_steps as source_create
import create_dataset_steps as dataset_create
import create_model_steps as model_create
import create_prediction_steps as prediction_create
import compare_predictions_steps as compare_pred
class TestPublicModelPrediction(object):
    """
    Integration tests (Python 2): create predictions against a model that
    has been made public (scenario 1) or shared via URL/key (scenario 2).
    Each step delegates to the shared *_steps helper modules, which keep
    state on the ``world`` object and on ``self``, so the call order below
    is significant.
    """
    def setup(self):
        """
        Debug information
        """
        print "\n-------------------\nTests in: %s\n" % __name__
    def teardown(self):
        """
        Debug information
        """
        print "\nEnd of tests in: %s\n-------------------\n" % __name__
    def test_scenario1(self):
        """
        Scenario: Successfully creating a prediction using a public model:
            Given I create a data source uploading a "<data>" file
            And I wait until the source is ready less than <time_1> secs
            And I create a dataset
            And I wait until the dataset is ready less than <time_2> secs
            And I create a model
            And I wait until the model is ready less than <time_3> secs
            And I make the model public
            And I wait until the model is ready less than <time_3> secs
            And I check the model status using the model's public url
            When I create a prediction for "<data_input>"
            Then the prediction for "<objective>" is "<prediction>"
            Examples:
            | data             | time_1 | time_2 | time_3 | data_input           | objective | prediction  |
            | ../data/iris.csv | 10     | 10     | 10     | {"petal width": 0.5} | 000004    | Iris-setosa |
        """
        print self.test_scenario1.__doc__
        # Each example row: [data file, source wait secs, dataset wait secs,
        # model wait secs, input JSON, objective field id, expected prediction].
        examples = [
            ['data/iris.csv', '10', '10', '10', '{"petal width": 0.5}', '000004', 'Iris-setosa']]
        for example in examples:
            print "\nTesting with:\n", example
            source_create.i_upload_a_file(self, example[0])
            source_create.the_source_is_finished(self, example[1])
            dataset_create.i_create_a_dataset(self)
            dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
            model_create.i_create_a_model(self)
            model_create.the_model_is_finished_in_less_than(self, example[3])
            # Publish the model, then re-fetch it through its public URL
            # before predicting against it.
            model_create.make_the_model_public(self)
            model_create.the_model_is_finished_in_less_than(self, example[3])
            model_create.model_from_public_url(self)
            prediction_create.i_create_a_prediction(self, example[4])
            prediction_create.the_prediction_is(self, example[5], example[6])
    def test_scenario2(self):
        """
        Scenario: Successfully creating a prediction using a shared model:
            Given I create a data source uploading a "<data>" file
            And I wait until the source is ready less than <time_1> secs
            And I create a dataset
            And I wait until the dataset is ready less than <time_2> secs
            And I create a model
            And I wait until the model is ready less than <time_3> secs
            And I make the model shared
            And I wait until the model is ready less than <time_3> secs
            And I get the model sharing info
            And I check the model status using the model's shared url
            And I check the model status using the model's shared key
            And I create a local model
            When I create a local prediction for "<data_input>"
            Then the local prediction is "<prediction>"
            Examples:
            | data             | time_1 | time_2 | time_3 | data_input           | prediction  |
            | ../data/iris.csv | 10     | 10     | 10     | {"petal width": 0.5} | Iris-setosa |
        """
        print self.test_scenario2.__doc__
        # Each example row: [data file, source wait secs, dataset wait secs,
        # model wait secs, input JSON, expected local prediction].
        examples = [
            ['data/iris.csv', '10', '10', '10', '{"petal width": 0.5}', 'Iris-setosa']]
        for example in examples:
            print "\nTesting with:\n", example
            source_create.i_upload_a_file(self, example[0])
            source_create.the_source_is_finished(self, example[1])
            dataset_create.i_create_a_dataset(self)
            dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
            model_create.i_create_a_model(self)
            model_create.the_model_is_finished_in_less_than(self, example[3])
            # Share the model and verify it is reachable both by shared URL
            # and by shared key before building the local model.
            model_create.make_the_model_shared(self)
            model_create.the_model_is_finished_in_less_than(self, example[3])
            model_create.get_sharing_info(self)
            model_create.model_from_shared_url(self)
            model_create.model_from_shared_key(self)
            compare_pred.i_create_a_local_model(self)
            compare_pred.i_create_a_local_prediction(self, example[4])
            compare_pred.the_local_prediction_is(self, example[5])
| apache-2.0 | 3,362,934,045,737,839,600 | 46.15 | 113 | 0.591552 | false |
cariaso/zebraguide | zebraguide/zebraguide.py | 1 | 9159 | #!/usr/bin/env python
import argparse
import Bio.SeqIO
import sys
def get_args():
    """Build the command-line parser and return the parsed arguments."""
    cli = argparse.ArgumentParser(
        description="""Make the BoulderIO formatter config file for primer3 to direct guide sequences against a reference.
Typical usage:
./zebraguide.py sample2.fasta | primer3_core -format_output
""",
        epilog="[email protected] Copyright 2016")
    # Positional input file first, then the optional switches.
    cli.add_argument('fasta',
                     help='fasta file of references and guides')
    cli.add_argument('--size-limit', type=int, default=20,
                     help='cutoff length of references vs guides')
    cli.add_argument('--out',
                     help='output filename, defaults to stdout')
    # --add may be given several times; values accumulate in args.primer3tags.
    cli.add_argument('--add', action='append', dest='primer3tags',
                     default=[],
                     help='Add these tags to each record')
    return cli.parse_args()
def analyze(out, guide, ref, args=None):
    """Write one BoulderIO record for primer3, targeting `guide` inside `ref`.

    Parameters
    ----------
    out : writable file-like object the record is appended to.
    guide : guide sequence record (needs ``.name``, ``.seq.tostring()``
        and ``len()``), e.g. a Bio.SeqIO record.
    ref : reference record the guide is expected to occur in (same shape
        as `guide`).
    args : optional namespace with a ``primer3tags`` list of extra
        ``TAG=value`` lines appended to every record (as produced by
        ``get_args``'s ``--add`` option).

    If the guide is not found (case-insensitive search) a comment-only
    record is written so the primer3 output stays aligned with the input.

    Note: the ~130 lines of commented-out PRIMER_* default settings that
    used to live here were dead code and have been removed; pass extra
    tags through ``args.primer3tags`` instead.
    """
    guideseq = guide.seq.tostring().upper()
    refseq = ref.seq.tostring().upper()

    # Case-insensitive search for the guide within the reference.
    guidestartpos = refseq.find(guideseq)
    if guidestartpos == -1:
        out.write('COMMENT=%s not found in %s\n' % (guide.name, ref.name))
        out.write('=\n')
        return
    guide_end_pos = guidestartpos + len(guide)

    out.write('SEQUENCE_ID=%s-%s\n' % (guide.name, ref.name))
    # The template keeps the original case of the reference sequence.
    out.write('SEQUENCE_TEMPLATE=%s\n' % ref.seq.tostring())
    out.write('SEQUENCE_TARGET=%s,%s\n' % (guidestartpos, guide_end_pos))
    out.write('PRIMER_EXPLAIN_FLAG=1\n')
    if args:
        for tag in args.primer3tags:
            out.write('%s\n' % tag)
    # '=' terminates a BoulderIO record.
    out.write('=\n')
def main():
    """Parse arguments and emit one BoulderIO record per guide sequence.

    Sequences longer than ``--size-limit`` are treated as references;
    shorter ones are guides matched against the most recently seen
    reference. Output goes to ``--out`` or stdout.
    """
    args = get_args()

    if args.out:
        # open() instead of the Python-2-only file() builtin.
        outfh = open(args.out, 'w')
    else:
        outfh = sys.stdout

    ref = None
    try:
        # 'with' guarantees the input handle is closed (it was leaked before).
        with open(args.fasta) as infastafh:
            for seqobj in Bio.SeqIO.parse(infastafh, 'fasta'):
                if len(seqobj) > args.size_limit:
                    # Long record: remember it as the current reference.
                    ref = seqobj
                else:
                    # Short record: a guide; analyze against the latest
                    # reference.  NOTE(review): a guide appearing before any
                    # reference hits analyze() with ref=None, as in the
                    # original code -- confirm inputs always lead with a
                    # reference.
                    analyze(outfh, seqobj, ref, args)
    finally:
        if outfh is not sys.stdout:
            outfh.close()
# Script entry point: run main() only when executed directly, not on import.
if __name__ == '__main__':
    main()
| mit | -370,202,171,412,303,400 | 41.799065 | 122 | 0.632165 | false |
unreal666/outwiker | src/outwiker/gui/controls/ultimatelistctrl.py | 2 | 456589 | # --------------------------------------------------------------------------------- #
# ULTIMATELISTCTRL wxPython IMPLEMENTATION
# Inspired by and heavily based on the wxWidgets C++ generic version of wxListCtrl.
#
# Andrea Gavana, @ 08 May 2009
# Latest Revision: 27 Dec 2012, 21.00 GMT
#
#
# TODO List
#
# 1) Subitem selection;
# 2) Watermark? (almost, does not work very well :-( );
# 3) Groups? (Maybe, check ObjectListView);
# 4) Scrolling items as headers and footers;
# 5) Alpha channel for text/background of items;
# 6) Custom renderers for headers/footers (done);
# 7) Fading in and out on mouse motion (a la Windows Vista Aero);
# 8) Sub-text for headers/footers (grey text below the header/footer text);
# 9) Fixing the columns to the left or right side of the control layout;
# 10) Skins for header and scrollbars (implemented for headers/footers).
#
#
# For all kind of problems, requests of enhancements and bug reports, please
# write to me at:
#
# [email protected]
# [email protected]
#
# Or, obviously, to the wxPython mailing list!!!
#
# Tags: phoenix-port, documented, unittest, py3-port
#
# End Of Comments
# --------------------------------------------------------------------------------- #
"""
Description
===========
UltimateListCtrl is a class that mimics the behaviour of :class:`ListCtrl`, with almost
the same base functionalities plus some more enhancements. This class does
not rely on the native control, as it is a full owner-drawn list control.
In addition to the standard :class:`ListCtrl` behaviour this class supports:
Appearance
==========
* Multiple images for items/subitems;
* Images can be of any size and not limited to a single specific pair of `width`, `height`
as it is the case of :class:`wx.ImageList`. Simply use :class:`PyImageList` instead of :class:`wx.ImageList`
to add your images.
* Font, colour, background, custom renderers and formatting for items and subitems;
* Ability to add persistent data to an item using :meth:`~UltimateListCtrl.SetItemPyData` and :meth:`~UltimateListCtrl.GetItemPyData`:
the data can be any Python object and not necessarily an integer as in :class:`ListCtrl`;
* CheckBox-type items and subitems;
* RadioButton-type items and subitems;
* Overflowing items/subitems, a la :class:`grid.Grid`, i.e. an item/subitem may overwrite neighboring
items/subitems if its text would not normally fit in the space allotted to it;
* Hyperlink-type items and subitems: they look like an hyperlink, with the proper mouse
cursor on hovering;
* Multiline text items and subitems;
* Variable row heights depending on the item/subitem kind/text/window;
* User defined item/subitem renderers: these renderer classes **must** implement the methods
`DrawSubItem`, `GetLineHeight` and `GetSubItemWidth` (see the demo);
* Enabling/disabling items (together with their plain or grayed out icons);
* Whatever non-toplevel widget can be attached next to an item/subitem;
* Column headers are fully customizable in terms of icons, colour, font, alignment etc...;
* Column headers can have their own checkbox/radiobutton;
* Column footers are fully customizable in terms of icons, colour, font, alignment etc...;
* Column footers can have their own checkbox/radiobutton;
* Ability to hide/show columns;
* Default selection style, gradient (horizontal/vertical) selection style and Windows
Vista selection style.
And a lot more. Check the demo for an almost complete review of the functionalities.
Usage
=====
Usage example::
import sys
import wx
import wx.lib.agw.ultimatelistctrl as ULC
class MyFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, parent, -1, "UltimateListCtrl Demo")
list = ULC.UltimateListCtrl(self, wx.ID_ANY, agwStyle=wx.LC_REPORT | wx.LC_VRULES | wx.LC_HRULES | wx.LC_SINGLE_SEL)
list.InsertColumn(0, "Column 1")
list.InsertColumn(1, "Column 2")
index = list.InsertStringItem(sys.maxint, "Item 1")
list.SetStringItem(index, 1, "Sub-item 1")
index = list.InsertStringItem(sys.maxint, "Item 2")
list.SetStringItem(index, 1, "Sub-item 2")
choice = wx.Choice(list, -1, choices=["one", "two"])
index = list.InsertStringItem(sys.maxint, "A widget")
list.SetItemWindow(index, 1, choice, expand=True)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(list, 1, wx.EXPAND)
self.SetSizer(sizer)
# our normal wxApp-derived class, as usual
app = wx.App(0)
frame = MyFrame(None)
app.SetTopWindow(frame)
frame.Show()
app.MainLoop()
Window Styles
=============
This class supports the following window styles:
=============================== =========== ====================================================================================================
Window Styles Hex Value Description
=============================== =========== ====================================================================================================
``ULC_VRULES`` 0x1 Draws light vertical rules between rows in report mode.
``ULC_HRULES`` 0x2 Draws light horizontal rules between rows in report mode.
``ULC_ICON`` 0x4 Large icon view, with optional labels.
``ULC_SMALL_ICON`` 0x8 Small icon view, with optional labels.
``ULC_LIST`` 0x10 Multicolumn list view, with optional small icons. Columns are computed automatically, i.e. you don't set columns as in ``ULC_REPORT``. In other words, the list wraps, unlike a :class:`ListBox`.
``ULC_REPORT`` 0x20 Single or multicolumn report view, with optional header.
``ULC_ALIGN_TOP`` 0x40 Icons align to the top. Win32 default, Win32 only.
``ULC_ALIGN_LEFT`` 0x80 Icons align to the left.
``ULC_AUTOARRANGE`` 0x100 Icons arrange themselves. Win32 only.
``ULC_VIRTUAL`` 0x200 The application provides items text on demand. May only be used with ``ULC_REPORT``.
``ULC_EDIT_LABELS`` 0x400 Labels are editable: the application will be notified when editing starts.
``ULC_NO_HEADER`` 0x800 No header in report mode.
``ULC_NO_SORT_HEADER`` 0x1000 No Docs.
``ULC_SINGLE_SEL`` 0x2000 Single selection (default is multiple).
``ULC_SORT_ASCENDING`` 0x4000 Sort in ascending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
``ULC_SORT_DESCENDING`` 0x8000 Sort in descending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
``ULC_TILE`` 0x10000 Each item appears as a full-sized icon with a label of one or more lines beside it (partially implemented).
``ULC_NO_HIGHLIGHT`` 0x20000 No highlight when an item is selected.
``ULC_STICKY_HIGHLIGHT`` 0x40000 Items are selected by simply hovering on them, with no need to click on them.
``ULC_STICKY_NOSELEVENT`` 0x80000 Don't send a selection event when using ``ULC_STICKY_HIGHLIGHT`` style.
``ULC_SEND_LEFTCLICK`` 0x100000 Send a left click event when an item is selected.
``ULC_HAS_VARIABLE_ROW_HEIGHT`` 0x200000 The list has variable row heights.
``ULC_AUTO_CHECK_CHILD`` 0x400000 When a column header has a checkbox associated, auto-check all the subitems in that column.
``ULC_AUTO_TOGGLE_CHILD`` 0x800000 When a column header has a checkbox associated, toggle all the subitems in that column.
``ULC_AUTO_CHECK_PARENT``       0x1000000   Only meaningful for checkbox-type items: when an item is checked/unchecked its column header item is checked/unchecked as well.
``ULC_SHOW_TOOLTIPS`` 0x2000000 Show tooltips for ellipsized items/subitems (text too long to be shown in the available space) containing the full item/subitem text.
``ULC_HOT_TRACKING`` 0x4000000 Enable hot tracking of items on mouse motion.
``ULC_BORDER_SELECT``           0x8000000   Changes border colour when an item is selected, instead of highlighting the item.
``ULC_TRACK_SELECT`` 0x10000000 Enables hot-track selection in a list control. Hot track selection means that an item is automatically selected when the cursor remains over the item for a certain period of time. The delay is retrieved on Windows using the `win32api` call `win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)`, and is defaulted to 400ms on other platforms. This style applies to all views of `UltimateListCtrl`.
``ULC_HEADER_IN_ALL_VIEWS`` 0x20000000 Show column headers in all view modes.
``ULC_NO_FULL_ROW_SELECT`` 0x40000000 When an item is selected, the only the item in the first column is highlighted.
``ULC_FOOTER`` 0x80000000 Show a footer too (only when header is present).
``ULC_USER_ROW_HEIGHT`` 0x100000000 Allows to set a custom row height (one value for all the items, only in report mode).
=============================== =========== ====================================================================================================
Events Processing
=================
This class processes the following events:
======================================== ====================================================================================================
Event Name Description
======================================== ====================================================================================================
``EVT_LIST_BEGIN_DRAG`` Begin dragging with the left mouse button.
``EVT_LIST_BEGIN_RDRAG`` Begin dragging with the right mouse button.
``EVT_LIST_BEGIN_LABEL_EDIT`` Begin editing a label. This can be prevented by calling `Veto()`.
``EVT_LIST_END_LABEL_EDIT`` Finish editing a label. This can be prevented by calling `Veto()`.
``EVT_LIST_DELETE_ITEM`` An item was deleted.
``EVT_LIST_DELETE_ALL_ITEMS`` All items were deleted.
``EVT_LIST_KEY_DOWN`` A key has been pressed.
``EVT_LIST_INSERT_ITEM`` An item has been inserted.
``EVT_LIST_COL_CLICK`` A column (`m_col`) has been left-clicked.
``EVT_LIST_COL_RIGHT_CLICK`` A column (`m_col`) has been right-clicked.
``EVT_LIST_COL_BEGIN_DRAG`` The user started resizing a column - can be vetoed.
``EVT_LIST_COL_END_DRAG`` The user finished resizing a column.
``EVT_LIST_COL_DRAGGING`` The divider between columns is being dragged.
``EVT_LIST_ITEM_SELECTED`` The item has been selected.
``EVT_LIST_ITEM_DESELECTED`` The item has been deselected.
``EVT_LIST_ITEM_RIGHT_CLICK`` The right mouse button has been clicked on an item.
``EVT_LIST_ITEM_MIDDLE_CLICK`` The middle mouse button has been clicked on an item.
``EVT_LIST_ITEM_ACTIVATED`` The item has been activated (``ENTER`` or double click).
``EVT_LIST_ITEM_FOCUSED`` The currently focused item has changed.
``EVT_LIST_CACHE_HINT`` Prepare cache for a virtual list control.
``EVT_LIST_ITEM_CHECKING`` An item/subitem is being checked.
``EVT_LIST_ITEM_CHECKED`` An item/subitem has been checked.
``EVT_LIST_COL_CHECKING`` A column header is being checked.
``EVT_LIST_COL_CHECKED``                 A column header has been checked.
``EVT_LIST_FOOTER_CHECKING``             A column footer is being checked.
``EVT_LIST_FOOTER_CHECKED``              A column footer has been checked.
``EVT_LIST_ITEM_HYPERLINK`` An hyperlink item has been clicked.
``EVT_LIST_FOOTER_CLICK`` The user left-clicked on a column footer.
``EVT_LIST_FOOTER_RIGHT_CLICK`` The user right-clicked on a column footer.
``EVT_LIST_ITEM_LEFT_CLICK`` Send a left-click event after an item is selected.
``EVT_LIST_END_DRAG`` Notify an end-drag operation.
======================================== ====================================================================================================
Supported Platforms
===================
UltimateListCtrl has been tested on the following platforms:
* Windows (Windows XP);
License And Version
===================
UltimateListCtrl is distributed under the wxPython license.
Latest Revision: Andrea Gavana @ 27 Dec 2012, 21.00 GMT
Version 0.8
"""
import wx
import math
import bisect
import zlib
from functools import cmp_to_key
import six
from wx.lib.expando import ExpandoTextCtrl
# Version Info
__version__ = "0.8"
# wxPython version string
_VERSION_STRING = wx.VERSION_STRING
# ----------------------------------------------------------------------------
# UltimateListCtrl constants
# ----------------------------------------------------------------------------
# style flags
ULC_VRULES = wx.LC_VRULES
""" Draws light vertical rules between rows in report mode. """
ULC_HRULES = wx.LC_HRULES
""" Draws light horizontal rules between rows in report mode. """
ULC_ICON = wx.LC_ICON
ULC_SMALL_ICON = wx.LC_SMALL_ICON
ULC_LIST = wx.LC_LIST
ULC_REPORT = wx.LC_REPORT
ULC_TILE = 0x10000
ULC_ALIGN_TOP = wx.LC_ALIGN_TOP
ULC_ALIGN_LEFT = wx.LC_ALIGN_LEFT
ULC_AUTOARRANGE = wx.LC_AUTOARRANGE
ULC_VIRTUAL = wx.LC_VIRTUAL
ULC_EDIT_LABELS = wx.LC_EDIT_LABELS
ULC_NO_HEADER = wx.LC_NO_HEADER
ULC_NO_SORT_HEADER = wx.LC_NO_SORT_HEADER
ULC_SINGLE_SEL = wx.LC_SINGLE_SEL
ULC_SORT_ASCENDING = wx.LC_SORT_ASCENDING
ULC_SORT_DESCENDING = wx.LC_SORT_DESCENDING
ULC_NO_HIGHLIGHT = 0x20000
ULC_STICKY_HIGHLIGHT = 0x40000
ULC_STICKY_NOSELEVENT = 0x80000
ULC_SEND_LEFTCLICK = 0x100000
ULC_HAS_VARIABLE_ROW_HEIGHT = 0x200000
ULC_AUTO_CHECK_CHILD = 0x400000 # only meaningful for checkboxes
ULC_AUTO_TOGGLE_CHILD = 0x800000 # only meaningful for checkboxes
ULC_AUTO_CHECK_PARENT = 0x1000000 # only meaningful for checkboxes
ULC_SHOW_TOOLTIPS = 0x2000000 # shows tooltips on items with ellipsis (...)
ULC_HOT_TRACKING = 0x4000000 # enable hot tracking on mouse motion
ULC_BORDER_SELECT = 0x8000000 # changes border colour whan an item is selected, instead of highlighting the item
ULC_TRACK_SELECT = 0x10000000 # Enables hot-track selection in a list control. Hot track selection means that an item
# is automatically selected when the cursor remains over the item for a certain period
# of time. The delay is retrieved on Windows using the win32api call
# win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME), and is defaulted to 400ms
# on other platforms. This style applies to all styles of UltimateListCtrl.
ULC_HEADER_IN_ALL_VIEWS = 0x20000000 # Show column headers in all view modes
ULC_NO_FULL_ROW_SELECT = 0x40000000 # When an item is selected, the only the item in the first column is highlighted
ULC_FOOTER = 0x80000000 # Show a footer too (only when header is present)
ULC_USER_ROW_HEIGHT = 0x100000000 # Allows to set a custom row height (one value for all the items, only in report mode).
ULC_MASK_TYPE = ULC_ICON | ULC_SMALL_ICON | ULC_LIST | ULC_REPORT | ULC_TILE
ULC_MASK_ALIGN = ULC_ALIGN_TOP | ULC_ALIGN_LEFT
ULC_MASK_SORT = ULC_SORT_ASCENDING | ULC_SORT_DESCENDING
# for compatibility only
ULC_USER_TEXT = ULC_VIRTUAL
# Omitted because
# (a) too much detail
# (b) not enough style flags
# (c) not implemented anyhow in the generic version
#
# ULC_NO_SCROLL
# ULC_NO_LABEL_WRAP
# ULC_OWNERDRAW_FIXED
# ULC_SHOW_SEL_ALWAYS
# Mask flags to tell app/GUI what fields of UltimateListItem are valid
ULC_MASK_STATE = wx.LIST_MASK_STATE
ULC_MASK_TEXT = wx.LIST_MASK_TEXT
ULC_MASK_IMAGE = wx.LIST_MASK_IMAGE
ULC_MASK_DATA = wx.LIST_MASK_DATA
ULC_SET_ITEM = wx.LIST_SET_ITEM
ULC_MASK_WIDTH = wx.LIST_MASK_WIDTH
ULC_MASK_FORMAT = wx.LIST_MASK_FORMAT
ULC_MASK_FONTCOLOUR = 0x0080
ULC_MASK_FONT = 0x0100
ULC_MASK_BACKCOLOUR = 0x0200
ULC_MASK_KIND = 0x0400
ULC_MASK_ENABLE = 0x0800
ULC_MASK_CHECK = 0x1000
ULC_MASK_HYPERTEXT = 0x2000
ULC_MASK_WINDOW = 0x4000
ULC_MASK_PYDATA = 0x8000
ULC_MASK_SHOWN = 0x10000
ULC_MASK_RENDERER = 0x20000
ULC_MASK_OVERFLOW = 0x40000
ULC_MASK_FOOTER_TEXT = 0x80000
ULC_MASK_FOOTER_IMAGE = 0x100000
ULC_MASK_FOOTER_FORMAT = 0x200000
ULC_MASK_FOOTER_FONT = 0x400000
ULC_MASK_FOOTER_CHECK = 0x800000
ULC_MASK_FOOTER_KIND = 0x1000000
ULC_MASK_TOOLTIP = 0x2000000
# State flags for indicating the state of an item
ULC_STATE_DONTCARE = wx.LIST_STATE_DONTCARE
ULC_STATE_DROPHILITED = wx.LIST_STATE_DROPHILITED # MSW only
ULC_STATE_FOCUSED = wx.LIST_STATE_FOCUSED
ULC_STATE_SELECTED = wx.LIST_STATE_SELECTED
ULC_STATE_CUT = wx.LIST_STATE_CUT # MSW only
ULC_STATE_DISABLED = wx.LIST_STATE_DISABLED # OS2 only
ULC_STATE_FILTERED = wx.LIST_STATE_FILTERED # OS2 only
ULC_STATE_INUSE = wx.LIST_STATE_INUSE # OS2 only
ULC_STATE_PICKED = wx.LIST_STATE_PICKED # OS2 only
ULC_STATE_SOURCE = wx.LIST_STATE_SOURCE # OS2 only
# Hit test flags, used in HitTest
ULC_HITTEST_ABOVE = wx.LIST_HITTEST_ABOVE # Above the client area.
ULC_HITTEST_BELOW = wx.LIST_HITTEST_BELOW # Below the client area.
ULC_HITTEST_NOWHERE = wx.LIST_HITTEST_NOWHERE # In the client area but below the last item.
ULC_HITTEST_ONITEMICON = wx.LIST_HITTEST_ONITEMICON # On the bitmap associated with an item.
ULC_HITTEST_ONITEMLABEL = wx.LIST_HITTEST_ONITEMLABEL # On the label (string) associated with an item.
ULC_HITTEST_ONITEMRIGHT = wx.LIST_HITTEST_ONITEMRIGHT # In the area to the right of an item.
ULC_HITTEST_ONITEMSTATEICON = wx.LIST_HITTEST_ONITEMSTATEICON # On the state icon for a tree view item that is in a user-defined state.
ULC_HITTEST_TOLEFT = wx.LIST_HITTEST_TOLEFT # To the left of the client area.
ULC_HITTEST_TORIGHT = wx.LIST_HITTEST_TORIGHT # To the right of the client area.
ULC_HITTEST_ONITEMCHECK = 0x1000 # On the checkbox (if any)
ULC_HITTEST_ONITEM = ULC_HITTEST_ONITEMICON | ULC_HITTEST_ONITEMLABEL | ULC_HITTEST_ONITEMSTATEICON | ULC_HITTEST_ONITEMCHECK
# Flags for GetNextItem (MSW only except ULC_NEXT_ALL)
ULC_NEXT_ABOVE = wx.LIST_NEXT_ABOVE # Searches for an item above the specified item
ULC_NEXT_ALL = wx.LIST_NEXT_ALL # Searches for subsequent item by index
ULC_NEXT_BELOW = wx.LIST_NEXT_BELOW # Searches for an item below the specified item
ULC_NEXT_LEFT = wx.LIST_NEXT_LEFT # Searches for an item to the left of the specified item
ULC_NEXT_RIGHT = wx.LIST_NEXT_RIGHT # Searches for an item to the right of the specified item
# Alignment flags for Arrange (MSW only except ULC_ALIGN_LEFT)
ULC_ALIGN_DEFAULT = wx.LIST_ALIGN_DEFAULT
ULC_ALIGN_SNAP_TO_GRID = wx.LIST_ALIGN_SNAP_TO_GRID
# Column format (MSW only except ULC_FORMAT_LEFT)
ULC_FORMAT_LEFT = wx.LIST_FORMAT_LEFT
ULC_FORMAT_RIGHT = wx.LIST_FORMAT_RIGHT
ULC_FORMAT_CENTRE = wx.LIST_FORMAT_CENTRE
ULC_FORMAT_CENTER = ULC_FORMAT_CENTRE
# Autosize values for SetColumnWidth
ULC_AUTOSIZE = wx.LIST_AUTOSIZE
ULC_AUTOSIZE_USEHEADER = wx.LIST_AUTOSIZE_USEHEADER # partly supported by generic version
ULC_AUTOSIZE_FILL = -3
# Flag values for GetItemRect
ULC_RECT_BOUNDS = wx.LIST_RECT_BOUNDS
ULC_RECT_ICON = wx.LIST_RECT_ICON
ULC_RECT_LABEL = wx.LIST_RECT_LABEL
# Flag values for FindItem (MSW only)
ULC_FIND_UP = wx.LIST_FIND_UP
ULC_FIND_DOWN = wx.LIST_FIND_DOWN
ULC_FIND_LEFT = wx.LIST_FIND_LEFT
ULC_FIND_RIGHT = wx.LIST_FIND_RIGHT
# Items/subitems rect
ULC_GETSUBITEMRECT_WHOLEITEM = wx.LIST_GETSUBITEMRECT_WHOLEITEM
# ----------------------------------------------------------------------------
# UltimateListCtrl event macros
# ----------------------------------------------------------------------------
wxEVT_COMMAND_LIST_BEGIN_DRAG = wx.wxEVT_COMMAND_LIST_BEGIN_DRAG
wxEVT_COMMAND_LIST_BEGIN_RDRAG = wx.wxEVT_COMMAND_LIST_BEGIN_RDRAG
wxEVT_COMMAND_LIST_BEGIN_LABEL_EDIT = wx.wxEVT_COMMAND_LIST_BEGIN_LABEL_EDIT
wxEVT_COMMAND_LIST_END_LABEL_EDIT = wx.wxEVT_COMMAND_LIST_END_LABEL_EDIT
wxEVT_COMMAND_LIST_DELETE_ITEM = wx.wxEVT_COMMAND_LIST_DELETE_ITEM
wxEVT_COMMAND_LIST_DELETE_ALL_ITEMS = wx.wxEVT_COMMAND_LIST_DELETE_ALL_ITEMS
wxEVT_COMMAND_LIST_ITEM_SELECTED = wx.wxEVT_COMMAND_LIST_ITEM_SELECTED
wxEVT_COMMAND_LIST_ITEM_DESELECTED = wx.wxEVT_COMMAND_LIST_ITEM_DESELECTED
wxEVT_COMMAND_LIST_KEY_DOWN = wx.wxEVT_COMMAND_LIST_KEY_DOWN
wxEVT_COMMAND_LIST_INSERT_ITEM = wx.wxEVT_COMMAND_LIST_INSERT_ITEM
wxEVT_COMMAND_LIST_COL_CLICK = wx.wxEVT_COMMAND_LIST_COL_CLICK
wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK = wx.wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK
wxEVT_COMMAND_LIST_ITEM_MIDDLE_CLICK = wx.wxEVT_COMMAND_LIST_ITEM_MIDDLE_CLICK
wxEVT_COMMAND_LIST_ITEM_ACTIVATED = wx.wxEVT_COMMAND_LIST_ITEM_ACTIVATED
wxEVT_COMMAND_LIST_CACHE_HINT = wx.wxEVT_COMMAND_LIST_CACHE_HINT
wxEVT_COMMAND_LIST_COL_RIGHT_CLICK = wx.wxEVT_COMMAND_LIST_COL_RIGHT_CLICK
wxEVT_COMMAND_LIST_COL_BEGIN_DRAG = wx.wxEVT_COMMAND_LIST_COL_BEGIN_DRAG
wxEVT_COMMAND_LIST_COL_DRAGGING = wx.wxEVT_COMMAND_LIST_COL_DRAGGING
wxEVT_COMMAND_LIST_COL_END_DRAG = wx.wxEVT_COMMAND_LIST_COL_END_DRAG
wxEVT_COMMAND_LIST_ITEM_FOCUSED = wx.wxEVT_COMMAND_LIST_ITEM_FOCUSED
wxEVT_COMMAND_LIST_FOOTER_CLICK = wx.NewEventType()
wxEVT_COMMAND_LIST_FOOTER_RIGHT_CLICK = wx.NewEventType()
wxEVT_COMMAND_LIST_FOOTER_CHECKING = wx.NewEventType()
wxEVT_COMMAND_LIST_FOOTER_CHECKED = wx.NewEventType()
wxEVT_COMMAND_LIST_ITEM_LEFT_CLICK = wx.NewEventType()
wxEVT_COMMAND_LIST_ITEM_CHECKING = wx.NewEventType()
wxEVT_COMMAND_LIST_ITEM_CHECKED = wx.NewEventType()
wxEVT_COMMAND_LIST_ITEM_HYPERLINK = wx.NewEventType()
wxEVT_COMMAND_LIST_END_DRAG = wx.NewEventType()
wxEVT_COMMAND_LIST_COL_CHECKING = wx.NewEventType()
wxEVT_COMMAND_LIST_COL_CHECKED = wx.NewEventType()
EVT_LIST_BEGIN_DRAG = wx.EVT_LIST_BEGIN_DRAG
EVT_LIST_BEGIN_RDRAG = wx.EVT_LIST_BEGIN_RDRAG
EVT_LIST_BEGIN_LABEL_EDIT = wx.EVT_LIST_BEGIN_LABEL_EDIT
EVT_LIST_END_LABEL_EDIT = wx.EVT_LIST_END_LABEL_EDIT
EVT_LIST_DELETE_ITEM = wx.EVT_LIST_DELETE_ITEM
EVT_LIST_DELETE_ALL_ITEMS = wx.EVT_LIST_DELETE_ALL_ITEMS
EVT_LIST_KEY_DOWN = wx.EVT_LIST_KEY_DOWN
EVT_LIST_INSERT_ITEM = wx.EVT_LIST_INSERT_ITEM
EVT_LIST_COL_CLICK = wx.EVT_LIST_COL_CLICK
EVT_LIST_COL_RIGHT_CLICK = wx.EVT_LIST_COL_RIGHT_CLICK
EVT_LIST_COL_BEGIN_DRAG = wx.EVT_LIST_COL_BEGIN_DRAG
EVT_LIST_COL_END_DRAG = wx.EVT_LIST_COL_END_DRAG
EVT_LIST_COL_DRAGGING = wx.EVT_LIST_COL_DRAGGING
EVT_LIST_ITEM_SELECTED = wx.EVT_LIST_ITEM_SELECTED
EVT_LIST_ITEM_DESELECTED = wx.EVT_LIST_ITEM_DESELECTED
EVT_LIST_ITEM_RIGHT_CLICK = wx.EVT_LIST_ITEM_RIGHT_CLICK
EVT_LIST_ITEM_MIDDLE_CLICK = wx.EVT_LIST_ITEM_MIDDLE_CLICK
EVT_LIST_ITEM_ACTIVATED = wx.EVT_LIST_ITEM_ACTIVATED
EVT_LIST_ITEM_FOCUSED = wx.EVT_LIST_ITEM_FOCUSED
EVT_LIST_CACHE_HINT = wx.EVT_LIST_CACHE_HINT
EVT_LIST_ITEM_LEFT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_LIST_ITEM_LEFT_CLICK, 1)
EVT_LIST_ITEM_CHECKING = wx.PyEventBinder(wxEVT_COMMAND_LIST_ITEM_CHECKING, 1)
EVT_LIST_ITEM_CHECKED = wx.PyEventBinder(wxEVT_COMMAND_LIST_ITEM_CHECKED, 1)
EVT_LIST_ITEM_HYPERLINK = wx.PyEventBinder(wxEVT_COMMAND_LIST_ITEM_HYPERLINK, 1)
EVT_LIST_END_DRAG = wx.PyEventBinder(wxEVT_COMMAND_LIST_END_DRAG, 1)
EVT_LIST_COL_CHECKING = wx.PyEventBinder(wxEVT_COMMAND_LIST_COL_CHECKING, 1)
EVT_LIST_COL_CHECKED = wx.PyEventBinder(wxEVT_COMMAND_LIST_COL_CHECKED, 1)
EVT_LIST_FOOTER_CLICK = wx.PyEventBinder(wxEVT_COMMAND_LIST_FOOTER_CLICK, 1)
EVT_LIST_FOOTER_RIGHT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_LIST_FOOTER_RIGHT_CLICK, 1)
EVT_LIST_FOOTER_CHECKING = wx.PyEventBinder(wxEVT_COMMAND_LIST_FOOTER_CHECKING, 1)
EVT_LIST_FOOTER_CHECKED = wx.PyEventBinder(wxEVT_COMMAND_LIST_FOOTER_CHECKED, 1)
# NOTE: If using the wxExtListBox visual attributes works everywhere then this can
# be removed, as well as the #else case below.
_USE_VISATTR = 0
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
SCROLL_UNIT_X = 15
SCROLL_UNIT_Y = 15
# the spacing between the lines (in report mode)
LINE_SPACING = 0
# extra margins around the text label
EXTRA_WIDTH = 4
EXTRA_HEIGHT = 4
if wx.Platform == "__WXGTK__":
EXTRA_HEIGHT = 6
# margin between the window and the items
EXTRA_BORDER_X = 2
EXTRA_BORDER_Y = 2
# offset for the header window
HEADER_OFFSET_X = 1
HEADER_OFFSET_Y = 1
# margin between rows of icons in [small] icon view
MARGIN_BETWEEN_ROWS = 6
# when autosizing the columns, add some slack
AUTOSIZE_COL_MARGIN = 10
# default and minimal widths for the header columns
WIDTH_COL_DEFAULT = 80
WIDTH_COL_MIN = 10
# the space between the image and the text in the report mode
IMAGE_MARGIN_IN_REPORT_MODE = 5
# the space between the image and the text in the report mode in header
HEADER_IMAGE_MARGIN_IN_REPORT_MODE = 2
# and the width of the icon, if any
MARGIN_BETWEEN_TEXT_AND_ICON = 2
# Background Image Style
_StyleTile = 0
_StyleStretch = 1
# Windows Vista Colours
_rgbSelectOuter = wx.Colour(170, 200, 245)
_rgbSelectInner = wx.Colour(230, 250, 250)
_rgbSelectTop = wx.Colour(210, 240, 250)
_rgbSelectBottom = wx.Colour(185, 215, 250)
_rgbNoFocusTop = wx.Colour(250, 250, 250)
_rgbNoFocusBottom = wx.Colour(235, 235, 235)
_rgbNoFocusOuter = wx.Colour(220, 220, 220)
_rgbNoFocusInner = wx.Colour(245, 245, 245)
# Mouse hover time for track selection
HOVER_TIME = 400
if wx.Platform == "__WXMSW__":
try:
import win32gui, win32con
HOVER_TIME = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)
except ImportError:
pass
# For PyImageList
IL_FIXED_SIZE = 0
IL_VARIABLE_SIZE = 1
# Python integers, to make long types to work with CreateListItem
INTEGER_TYPES = six.integer_types
# ----------------------------------------------------------------------------
# Functions
# ----------------------------------------------------------------------------
# Utility method
def to_list(input):
    """
    Converts the input data into a Python list.

    :param `input`: can be an integer or a Python list (in which case nothing will
     be done to `input`).

    :raise: `TypeError` if `input` is neither an integer nor a list.
    """

    if isinstance(input, list):
        return input
    elif isinstance(input, INTEGER_TYPES):
        return [input]
    else:
        # TypeError is the precise exception for a wrong argument type and is
        # still backward compatible with callers catching Exception.
        raise TypeError("Invalid parameter passed to `to_list`: only integers and list are accepted.")
def CheckVariableRowHeight(listCtrl, text):
    """
    Verifies that multiline strings are only used when the `listCtrl` window
    style is compatible with them.

    :param `listCtrl`: an instance of :class:`UltimateListCtrl`;
    :param `text`: the text to analyze.
    """

    if "\n" in text and not listCtrl.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("Multiline text items are not allowed without the ULC_HAS_VARIABLE_ROW_HEIGHT style.")
def CreateListItem(itemOrId, col):
    """
    Creates a new instance of :class:`UltimateListItem`.

    :param `itemOrId`: can be an instance of :class:`UltimateListItem` or an integer;
    :param `col`: the item column.
    """

    # If an item object was handed in, pass it straight through.
    if type(itemOrId) not in INTEGER_TYPES:
        return itemOrId

    # Otherwise build a fresh item from the numeric id and column.
    item = UltimateListItem()
    item._itemId = itemOrId
    item._col = col
    return item
# ----------------------------------------------------------------------------
def MakeDisabledBitmap(original):
    """
    Creates a disabled-looking bitmap starting from the input one.

    :param `original`: an instance of :class:`wx.Bitmap` to be greyed-out.
    """

    greyed = original.ConvertToImage().ConvertToGreyscale()
    return wx.Bitmap(greyed)
# ----------------------------------------------------------------------------
#----------------------------------------------------------------------
def GetdragcursorData():
    """ Returns the drag and drop cursor image as a decompressed stream of characters. """

    # zlib-compressed image payload for the drag cursor (consumed by
    # GetdragcursorImage via a BytesIO stream).
    compressed = b"x\xda\xeb\x0c\xf0s\xe7\xe5\x92\xe2b``\xe0\xf5\xf4p\t\x02\xd2\xa2@,\xcf\xc1\
\x06$9z\xda>\x00)\xce\x02\x8f\xc8b\x06\x06na\x10fd\x985G\x02(\xd8W\xe2\x1aQ\
\xe2\x9c\x9f\x9b\x9b\x9aW\xc2\x90\xec\x11\xe4\xab\x90\x9cQ\x9a\x97\x9d\x93\
\x9a\xa7`l\xa4\x90\x99\x9e\x97_\x94\x9a\xc2\xeb\x18\xec\xec\xe9i\xa5\xa0\xa7\
W\xa5\xaa\x07\x01P:7\x1eH\xe4\xe8\xe9\xd9\x808\x11\xbc\x1e\xae\x11V\n\x06@`\
\xeehd\n\xa2-\x0c,\x8cA\xb4\x9b\t\x94o\xe2b\x08\xa2\xcd\\L\xdd@\xb4\xab\x85\
\x993\x886v\xb6p\x02\xd1\x86N\xa6\x16\x12\xf7~\xdf\x05\xbal\xa9\xa7\x8bcH\
\xc5\x9c3W9\xb9\x1a\x14\x04X/\xec\xfc\xbft\xed\x02\xa5\xf4\xc2m\xfa*<N\x17??\
\x0frqy\x9c\xd3\xb2f5\xaf\x89\x8f9Gk\xbc\x08\xa7\xbf\x06\x97\x98\x06S\xd8E\
\xbd\x9cE\xb2\x15\x9da\x89\xe2k\x0f\x9c\xb6|\x1a\xea\x14X\x1d6G\x83E\xe7\x9c\
\x1dO\xa8\xde\xb6\x84l\x15\x9eS\xcf\xc2tf\x15\xde\xf7\xb5\xb2]\xf0\x96+\xf5@\
D\x90\x1d\xef19_\xf5\xde5y\xb6+\xa7\xdeZ\xfbA\x9bu\x9f`\xffD\xafYn\xf6\x9eW\
\xeb>\xb6\x7f\x98\\U\xcb\xf5\xd5\xcb\x9a'\xe7\xf4\xd7\x0b\xba\x9e\xdb\x17E\
\xfdf\x97Z\xcb\xcc\xc0\xf0\xff?3\xc3\x92\xabN\x8arB\xc7\x8f\x03\x1d\xcc\xe0\
\xe9\xea\xe7\xb2\xce)\xa1\t\x00B7|\x00"

    return zlib.decompress(compressed)
def GetdragcursorBitmap():
    """ Returns the drag and drop cursor image as a :class:`wx.Bitmap`. """

    image = GetdragcursorImage()
    return wx.Bitmap(image)
def GetdragcursorImage():
    """ Returns the drag and drop cursor image as a :class:`wx.Image`. """

    raw = GetdragcursorData()
    return wx.Image(six.BytesIO(raw))
#-----------------------------------------------------------------------------
# PyImageList
#-----------------------------------------------------------------------------
class PyImageList(object):
    """
    A :class:`PyImageList` contains a list of images. Images can have masks for
    transparent drawing, and can be made from a variety of sources including
    bitmaps and icons.

    :class:`PyImageList` is used in conjunction with :class:`UltimateListCtrl`.

    :note: The main improvement that :class:`PyImageList` introduces is the removal
     of the limitation of same-size images inside the image list. If you use
     the style ``IL_VARIABLE_SIZE`` then each image can have any size (in terms
     of width and height).
    """

    def __init__(self, width, height, mask=True, initialCount=1, style=IL_VARIABLE_SIZE):
        """
        Default class constructor.

        :param `width`: the width of the images in the image list, in pixels (unused
         if you specify the ``IL_VARIABLE_SIZE`` style);
        :param `height`: the height of the images in the image list, in pixels (unused
         if you specify the ``IL_VARIABLE_SIZE`` style);
        :param `mask`: ``True`` if masks should be created for all images (unused in
         :class:`PyImageList`);
        :param `initialCount`: the initial size of the list (unused in :class:`PyImageList`);
        :param `style`: can be one of the following bits:

         ==================== ===== =================================
         Style Flag           Value Description
         ==================== ===== =================================
         ``IL_FIXED_SIZE``        0 All the images in :class:`PyImageList` have the same size (width, height)
         ``IL_VARIABLE_SIZE``     1 Each image can have any size (in terms of width and height)
         ==================== ===== =================================
        """

        self._width = width
        self._height = height
        self._mask = mask
        # Bug fix: the original hard-coded 1 here, silently discarding the
        # `initialCount` argument.
        self._initialCount = initialCount
        self._style = style

        self._images = []

    def GetImageCount(self):
        """ Returns the number of images in the list. """

        return len(self._images)

    def Add(self, bitmap):
        """
        Adds a new image or images using a bitmap.

        :param `bitmap`: a valid :class:`wx.Bitmap` object.

        :return: The new zero-based image index.

        :note: If the bitmap is wider than the images in the list and you are not using
         the ``IL_VARIABLE_SIZE`` style, then the bitmap will automatically be split
         into smaller images, each matching the dimensions of the image list.
        """

        index = len(self._images)

        # Mimic behavior of Windows ImageList_Add that automatically breaks up the added
        # bitmap into sub-images of the correct size.
        # Bug fix: the original tested `self._style & IL_FIXED_SIZE`, which is
        # always false because IL_FIXED_SIZE == 0, so the documented splitting
        # never happened. Test for the absence of IL_VARIABLE_SIZE instead.
        if not (self._style & IL_VARIABLE_SIZE):
            if self._width > 0 and bitmap.GetWidth() > self._width and \
               bitmap.GetHeight() >= self._height:

                # Bug fix: use floor division -- `/` yields a float in Python 3,
                # which range() rejects with a TypeError.
                numImages = bitmap.GetWidth() // self._width
                for subIndex in range(numImages):
                    rect = wx.Rect(self._width * subIndex, 0, self._width, self._height)
                    tmpBmp = bitmap.GetSubBitmap(rect)
                    self._images.append(tmpBmp)

            else:
                self._images.append(bitmap)

        else:
            self._images.append(bitmap)

        if self._width == 0 and self._height == 0:
            self._width = bitmap.GetWidth()
            self._height = bitmap.GetHeight()

        return index

    def AddIcon(self, icon):
        """
        Adds a new image using an icon.

        :param `icon`: a valid :class:`Icon` object.

        :return: The new zero-based image index.

        :note: If the icon is wider than the images in the list and you are not using
         the ``IL_VARIABLE_SIZE`` style, then the icon will automatically be split
         into smaller images, each matching the dimensions of the image list.
        """

        return self.Add(wx.Bitmap(icon))

    def AddWithColourMask(self, bitmap, maskColour):
        """
        Adds a new image or images using a bitmap and a colour mask.

        :param `bitmap`: a valid :class:`wx.Bitmap` object;
        :param `maskColour`: an instance of :class:`wx.Colour`, a colour indicating which parts
         of the image are transparent.

        :return: The new zero-based image index.

        :note: If the bitmap is wider than the images in the list and you are not using
         the ``IL_VARIABLE_SIZE`` style, then the bitmap will automatically be split
         into smaller images, each matching the dimensions of the image list.
        """

        img = bitmap.ConvertToImage()
        img.SetMaskColour(maskColour.Red(), maskColour.Green(), maskColour.Blue())

        return self.Add(wx.Bitmap(img))

    def GetBitmap(self, index):
        """
        Returns the bitmap corresponding to the given `index`, or :class:`NullBitmap`
        if the index is invalid.

        :param `index`: the bitmap index.
        """

        if index >= len(self._images):
            return wx.NullBitmap

        return self._images[index]

    def GetIcon(self, index):
        """
        Returns the icon corresponding to the given `index`, or :class:`NullIcon`
        if the index is invalid.

        :param `index`: the icon index.
        """

        if index >= len(self._images):
            return wx.NullIcon

        icon = wx.Icon()
        icon.CopyFromBitmap(self.GetBitmap(index))

        return icon

    def Replace(self, index, bitmap):
        """
        Replaces the existing image with the new bitmap.

        :param `index`: the index at which the image should be replaced;
        :param `bitmap`: the new bitmap to add to the image list, an instance of
         :class:`wx.Bitmap`.
        """

        if index >= len(self._images):
            raise Exception("Wrong index in image list")

        self._images[index] = bitmap

        return True

    def ReplaceIcon(self, index, icon):
        """
        Replaces the existing image with the new icon.

        :param `index`: the index at which the image should be replaced;
        :param `icon`: the new icon to add to the image list, an instance of
         :class:`Icon`.
        """

        return self.Replace(index, wx.Bitmap(icon))

    def Remove(self, index):
        """
        Removes the image at the given position.

        :param `index`: the zero-based index of the image to be removed.
        """

        if index >= len(self._images):
            raise Exception("Wrong index in image list")

        self._images.pop(index)

        return True

    def RemoveAll(self):
        """ Removes all the images in the list. """

        self._images = []

        return True

    def GetSize(self, index):
        """
        Retrieves the size of an image in the list.

        :param `index`: the zero-based index of the image.

        :return: a tuple of `(width, height)` properties of the chosen bitmap.
        """

        if index >= len(self._images):
            raise Exception("Wrong index in image list")

        bmp = self._images[index]
        return bmp.GetWidth(), bmp.GetHeight()

    def Draw(self, index, dc, x, y, flags, solidBackground=True):
        """
        Draws a specified image onto a device context.

        :param `index`: the image index, starting from zero;
        :param `dc`: an instance of :class:`wx.DC`;
        :param `x`: x position on the device context;
        :param `y`: y position on the device context;
        :param `flags`: how to draw the image. A bitlist of a selection of the following:

         ================================= =======================================
         Flag Parameter                    Description
         ================================= =======================================
         ``wx.IMAGELIST_DRAW_NORMAL``      Draw the image normally
         ``wx.IMAGELIST_DRAW_TRANSPARENT`` Draw the image with transparency
         ``wx.IMAGELIST_DRAW_SELECTED``    Draw the image in selected state
         ``wx.IMAGELIST_DRAW_FOCUSED``     Draw the image in a focused state
         ================================= =======================================

        :param `solidBackground`: currently unused.
        """

        if index >= len(self._images):
            raise Exception("Wrong index in image list")

        bmp = self._images[index]
        dc.DrawBitmap(bmp, x, y, (flags & wx.IMAGELIST_DRAW_TRANSPARENT) > 0)

        return True
class SelectionStore(object):
    """
    SelectionStore is used to store the selected items in the virtual
    controls, i.e. it is well suited for storing even when the control contains
    a huge (practically infinite) number of items.

    Of course, internally it still has to store the selected items somehow (as
    an array currently) but the advantage is that it can handle the selection
    of all items (common operation) efficiently and that it could be made even
    smarter in the future (e.g. store the selections as an array of ranges +
    individual items) without changing its API.
    """

    def __init__(self):
        """ Default class constructor. """

        # the array of items whose selection state is different from default
        # (kept sorted in ascending order)
        self._itemsSel = []
        # the default state: normally, False (i.e. off) but maybe set to true if
        # there are more selected items than non selected ones - this allows to
        # handle selection of all items efficiently
        self._defaultState = False
        # the total number of items we handle
        self._count = 0

    # special case of SetItemCount(0)
    def Clear(self):
        """ Clears the number of selected items. """

        self._itemsSel = []
        self._count = 0
        self._defaultState = False

    # return the total number of selected items
    def GetSelectedCount(self):
        """ Return the total number of selected items. """

        # when the default state is "selected", _itemsSel holds the exceptions
        # (unselected items), so invert the count
        return self._count - len(self._itemsSel) if self._defaultState else len(self._itemsSel)

    def IsSelected(self, item):
        """
        Returns ``True`` if the given item is selected.

        :param `item`: the item to check for selection state.
        """

        isSel = item in self._itemsSel

        # if the default state is to be selected, being in m_itemsSel means that
        # the item is not selected, so we have to inverse the logic
        return not isSel if self._defaultState else isSel

    def SelectItem(self, item, select=True):
        """
        Selects the given item.

        :param `item`: the item to select;
        :param `select`: ``True`` to select the item, ``False`` otherwise.

        :return: ``True`` if the item's selection state really changed.
        """

        # search for the item ourselves as like this we get the index where to
        # insert it later if needed, so we do only one search in the array instead
        # of two (adding item to a sorted array requires a search).
        # Bug fix: the original used bisect_right, which points one PAST an
        # existing item, so `self._itemsSel[index] == item` could never detect
        # a present item; bisect_left gives the item's own position.
        index = bisect.bisect_left(self._itemsSel, item)
        isSel = index < len(self._itemsSel) and self._itemsSel[index] == item

        if select != self._defaultState:
            if not isSel:
                # insert at the bisect position to keep the array sorted
                self._itemsSel.insert(index, item)
                return True

        else: # reset to default state
            if isSel:
                self._itemsSel.pop(index)
                return True

        return False

    def SelectRange(self, itemFrom, itemTo, select=True):
        """
        Selects a range of items.

        :param `itemFrom`: the first index of the selection range;
        :param `itemTo`: the last index of the selection range;
        :param `select`: ``True`` to select the items, ``False`` otherwise.

        :return: ``True`` and fill the `itemsChanged` array with the indices of items
         which have changed state if "few" of them did, otherwise return ``False``
         (meaning that too many items changed state to bother counting them individually).

        :note: NOTE(review): the `itemsChanged` bookkeeping below looks like a
         direct port of C++ code where `itemsChanged` was an optional output
         pointer; since the local list starts empty (falsy), several of the
         `if itemsChanged:` branches never run. Preserved as-is to avoid
         changing long-standing behaviour.
        """

        # 100 is hardcoded but it shouldn't matter much: the important thing is
        # that we don't refresh everything when really few (e.g. 1 or 2) items
        # change state
        MANY_ITEMS = 100

        # many items (> half) changed state
        itemsChanged = []

        # are we going to have more [un]selected items than the other ones?
        if itemTo - itemFrom > self._count/2:

            if select != self._defaultState:

                # the default state now becomes the same as 'select'
                self._defaultState = select

                # so all the old selections (which had state select) shouldn't be
                # selected any more, but all the other ones should
                selOld = self._itemsSel[:]
                self._itemsSel = []

                # TODO: it should be possible to optimize the searches a bit
                #       knowing the possible range
                for item in range(itemFrom):
                    if item not in selOld:
                        self._itemsSel.append(item)

                for item in range(itemTo + 1, self._count):
                    if item not in selOld:
                        self._itemsSel.append(item)

            else: # select == self._defaultState

                # get the inclusive range of items between itemFrom and itemTo
                count = len(self._itemsSel)
                start = bisect.bisect_right(self._itemsSel, itemFrom)
                end = bisect.bisect_right(self._itemsSel, itemTo)

                if itemFrom < start:
                    start = itemFrom

                if start == count or self._itemsSel[start] < itemFrom:
                    start += 1

                if end == count or self._itemsSel[end] > itemTo:
                    end -= 1

                if start <= end:

                    # delete all of them (from end to avoid changing indices)
                    for i in range(end, start-1, -1):
                        if itemsChanged:
                            if len(itemsChanged) > MANY_ITEMS:
                                # stop counting (see comment below)
                                itemsChanged = []
                            else:
                                itemsChanged.append(self._itemsSel[i])

                        self._itemsSel.pop(i)

                else:
                    self._itemsSel = []

        else: # "few" items change state

            if itemsChanged:
                itemsChanged = []

            # just add the items to the selection
            for item in range(itemFrom, itemTo+1):
                if self.SelectItem(item, select) and itemsChanged:
                    itemsChanged.append(item)

                    if len(itemsChanged) > MANY_ITEMS:
                        # stop counting them, we'll just eat gobs of memory
                        # for nothing at all - faster to refresh everything in
                        # this case
                        itemsChanged = []

        # we set it to None if there are many items changing state
        return itemsChanged

    def OnItemDelete(self, item):
        """
        Must be called when an item is deleted.

        :param `item`: the item that is being deleted.
        """

        count = len(self._itemsSel)
        # Bug fix: bisect_left (not bisect_right) so that `i` lands on `item`
        # itself when it is present in the array; with bisect_right the deleted
        # item was never removed from the selection.
        i = bisect.bisect_left(self._itemsSel, item)

        if i < count and self._itemsSel[i] == item:
            # this item itself was in m_itemsSel, remove it from there
            self._itemsSel.pop(i)

            count -= 1

        # and adjust the index of all which follow it.
        # Bug fix: the original incremented `i` BEFORE indexing, which skipped
        # the first following item and read one element past the end of the
        # array (IndexError).
        while i < count:
            self._itemsSel[i] -= 1
            i += 1

    def SetItemCount(self, count):
        """
        Sets the total number of items we handle.

        :param `count`: the total number of items we handle.
        """

        # forget about all items whose indices are now invalid if the size
        # decreased (iterate backwards so pops don't shift unseen indices)
        if count < self._count:
            for i in range(len(self._itemsSel), 0, -1):
                if self._itemsSel[i - 1] >= count:
                    self._itemsSel.pop(i - 1)

        # remember the new number of items
        self._count = count
# ----------------------------------------------------------------------------
# UltimateListItemAttr: a structure containing the visual attributes of an item
# ----------------------------------------------------------------------------
class UltimateListItemAttr(object):
    """
    Represents the attributes (colour, font, ...) of a :class:`UltimateListCtrl`
    :class:`UltimateListItem`.
    """

    def __init__(self, colText=wx.NullColour, colBack=wx.NullColour, font=wx.NullFont,
                 enabled=True, footerColText=wx.NullColour, footerColBack=wx.NullColour,
                 footerFont=wx.NullFont):
        """
        Default class constructor.

        :param `colText`: the item text colour;
        :param `colBack`: the item background colour;
        :param `font`: the item font;
        :param `enabled`: ``True`` if the item should be enabled, ``False`` if it is disabled;
        :param `footerColText`: for footer items, the item text colour;
        :param `footerColBack`: for footer items, the item background colour;
        :param `footerFont`: for footer items, the item font.
        """

        # regular item attributes
        self._colText = colText
        self._colBack = colBack
        self._font = font
        self._enabled = enabled

        # footer-specific attributes
        self._footerColText = footerColText
        self._footerColBack = footerColBack
        self._footerFont = footerFont

    # setters
    def SetTextColour(self, colText):
        """
        Assigns a new text colour to the item.

        :param `colText`: an instance of :class:`wx.Colour`.
        """

        self._colText = colText

    def SetBackgroundColour(self, colBack):
        """
        Assigns a new background colour to the item.

        :param `colBack`: an instance of :class:`wx.Colour`.
        """

        self._colBack = colBack

    def SetFont(self, font):
        """
        Assigns a new font to the item.

        :param `font`: an instance of :class:`wx.Font`.
        """

        self._font = font

    def Enable(self, enable=True):
        """
        Enables or disables the item.

        :param `enable`: ``True`` to enable the item, ``False`` to disable it.
        """

        self._enabled = enable

    def SetFooterTextColour(self, colText):
        """
        Assigns a new text colour to the footer item.

        :param `colText`: an instance of :class:`wx.Colour`.
        """

        self._footerColText = colText

    def SetFooterBackgroundColour(self, colBack):
        """
        Assigns a new background colour to the footer item.

        :param `colBack`: an instance of :class:`wx.Colour`.
        """

        self._footerColBack = colBack

    def SetFooterFont(self, font):
        """
        Assigns a new font to the footer item.

        :param `font`: an instance of :class:`wx.Font`.
        """

        self._footerFont = font

    # accessors
    def HasTextColour(self):
        """ Returns ``True`` if a valid text colour has been set. """

        return self._colText.IsOk()

    def HasBackgroundColour(self):
        """ Returns ``True`` if a valid background colour has been set. """

        return self._colBack.IsOk()

    def HasFont(self):
        """ Returns ``True`` if a valid font has been set. """

        return self._font.IsOk()

    def HasFooterTextColour(self):
        """ Returns ``True`` if a valid footer text colour has been set. """

        return self._footerColText.IsOk()

    def HasFooterBackgroundColour(self):
        """ Returns ``True`` if a valid footer background colour has been set. """

        return self._footerColBack.IsOk()

    def HasFooterFont(self):
        """ Returns ``True`` if a valid footer font has been set. """

        return self._footerFont.IsOk()

    # getters
    def GetTextColour(self):
        """ Returns the item text colour. """

        return self._colText

    def GetBackgroundColour(self):
        """ Returns the item background colour. """

        return self._colBack

    def GetFont(self):
        """ Returns the item font. """

        return self._font

    def GetFooterTextColour(self):
        """ Returns the footer item text colour. """

        return self._footerColText

    def GetFooterBackgroundColour(self):
        """ Returns the footer item background colour. """

        return self._footerColBack

    def GetFooterFont(self):
        """ Returns the footer item font. """

        return self._footerFont

    def IsEnabled(self):
        """ Returns ``True`` if the item is enabled. """

        return self._enabled
# ----------------------------------------------------------------------------
# UltimateListItem: the item or column info, used to exchange data with UltimateListCtrl
# ----------------------------------------------------------------------------
class UltimateListItem(wx.Object):
""" This class stores information about a :class:`UltimateListCtrl` item or column. """
    def __init__(self, item=None):
        """
        Default class constructor (copy constructor when *item* is given).

        :param `item`: if not ``None``, another instance of :class:`UltimateListItem`
         whose fields are copied into this one.
        """
        if not item:
            self.Init()
            self._attr = None
        else:
            # Field-by-field copy from the source item.
            self._mask = item._mask              # Indicates what fields are valid
            self._itemId = item._itemId          # The zero-based item position
            self._col = item._col                # Zero-based column, if in report mode
            self._state = item._state            # The state of the item
            self._stateMask = item._stateMask    # Which flags of self._state are valid (uses same flags)
            self._text = item._text              # The label/header text
            self._tooltip = item._tooltip        # The label/header tooltip text
            self._image = item._image[:]         # The zero-based indexes into an image list (shallow copy)
            self._data = item._data              # App-defined data
            self._pyData = item._pyData          # Python-specific data
            self._format = item._format          # left, right, centre
            self._width = item._width            # width of column
            self._colour = item._colour          # item text colour
            self._font = item._font              # item font
            self._checked = item._checked        # The checking state for the item (if kind > 0)
            self._kind = item._kind              # Whether it is a normal, checkbox-like or a radiobutton-like item
            self._enabled = item._enabled        # Whether the item is enabled or not
            self._hypertext = item._hypertext    # indicates if the item is hypertext
            self._visited = item._visited        # visited state for an hypertext item
            self._wnd = item._wnd                # NOTE: the widget itself is shared, not copied
            self._windowenabled = item._windowenabled
            self._windowsize = item._windowsize
            self._isColumnShown = item._isColumnShown
            self._customRenderer = item._customRenderer
            self._overFlow = item._overFlow
            self._footerChecked = item._footerChecked
            self._footerFormat = item._footerFormat
            self._footerImage = item._footerImage
            self._footerKind = item._footerKind
            self._footerText = item._footerText
            self._expandWin = item._expandWin
            self._attr = None
            # copy list item attributes
            # NOTE(review): GetAttributes() returns an UltimateListItemAttr,
            # which does not visibly support slicing — confirm "[:]" is valid here.
            if item.HasAttributes():
                self._attr = item.GetAttributes()[:]
# resetting
    def Clear(self):
        """ Resets the item state to the default. """
        # Order matters: Init() re-creates every field, then the label is
        # blanked and any stored attribute block is dropped.
        self.Init()
        self._text = ""
        self.ClearAttributes()
def ClearAttributes(self):
""" Deletes the item attributes if they have been stored. """
if self._attr:
del self._attr
self._attr = None
# setters
def SetMask(self, mask):
"""
Sets the mask of valid fields.
:param `mask`: any combination of the following bits:
============================ ========= ==============================
Mask Bits Hex Value Description
============================ ========= ==============================
``ULC_MASK_STATE`` 0x1 :meth:`~UltimateListItem.GetState` is valid
``ULC_MASK_TEXT`` 0x2 :meth:`~UltimateListItem.GetText` is valid
``ULC_MASK_IMAGE`` 0x4 :meth:`~UltimateListItem.GetImage` is valid
``ULC_MASK_DATA`` 0x8 :meth:`~UltimateListItem.GetData` is valid
``ULC_MASK_WIDTH`` 0x20 :meth:`~UltimateListItem.GetWidth` is valid
``ULC_MASK_FORMAT`` 0x40 :meth:`~UltimateListItem.GetFormat` is valid
``ULC_MASK_FONTCOLOUR`` 0x80 :meth:`~UltimateListItem.GetTextColour` is valid
``ULC_MASK_FONT`` 0x100 :meth:`~UltimateListItem.GetFont` is valid
``ULC_MASK_BACKCOLOUR`` 0x200 :meth:`~UltimateListItem.GetBackgroundColour` is valid
``ULC_MASK_KIND`` 0x400 :meth:`~UltimateListItem.GetKind` is valid
``ULC_MASK_ENABLE`` 0x800 :meth:`~UltimateListItem.IsEnabled` is valid
``ULC_MASK_CHECK`` 0x1000 :meth:`~UltimateListItem.IsChecked` is valid
``ULC_MASK_HYPERTEXT`` 0x2000 :meth:`~UltimateListItem.IsHyperText` is valid
``ULC_MASK_WINDOW`` 0x4000 :meth:`~UltimateListItem.GetWindow` is valid
``ULC_MASK_PYDATA`` 0x8000 :meth:`~UltimateListItem.GetPyData` is valid
``ULC_MASK_SHOWN`` 0x10000 :meth:`~UltimateListItem.IsShown` is valid
``ULC_MASK_RENDERER`` 0x20000 :meth:`~UltimateListItem.GetCustomRenderer` is valid
``ULC_MASK_OVERFLOW`` 0x40000 :meth:`~UltimateListItem.GetOverFlow` is valid
``ULC_MASK_FOOTER_TEXT`` 0x80000 :meth:`~UltimateListItem.GetFooterText` is valid
``ULC_MASK_FOOTER_IMAGE`` 0x100000 :meth:`~UltimateListItem.GetFooterImage` is valid
``ULC_MASK_FOOTER_FORMAT`` 0x200000 :meth:`~UltimateListItem.GetFooterFormat` is valid
``ULC_MASK_FOOTER_FONT`` 0x400000 :meth:`~UltimateListItem.GetFooterFont` is valid
``ULC_MASK_FOOTER_CHECK`` 0x800000 :meth:`~UltimateListItem.IsFooterChecked` is valid
``ULC_MASK_FOOTER_KIND`` 0x1000000 :meth:`~UltimateListItem.GetFooterKind` is valid
============================ ========= ==============================
"""
self._mask = mask
def SetId(self, id):
"""
Sets the zero-based item position.
:param `id`: the zero-based item position.
"""
self._itemId = id
def SetColumn(self, col):
"""
Sets the zero-based column.
:param `col`: the zero-based column.
:note: This method is neaningful only in report mode.
"""
self._col = col
def SetState(self, state):
"""
Sets the item state flags.
:param `state`: any combination of the following bits:
============================ ========= ==============================
State Bits Hex Value Description
============================ ========= ==============================
``ULC_STATE_DONTCARE`` 0x0 Don't care what the state is
``ULC_STATE_DROPHILITED`` 0x1 The item is highlighted to receive a drop event
``ULC_STATE_FOCUSED`` 0x2 The item has the focus
``ULC_STATE_SELECTED`` 0x4 The item is selected
``ULC_STATE_CUT`` 0x8 The item is in the cut state
``ULC_STATE_DISABLED`` 0x10 The item is disabled
``ULC_STATE_FILTERED`` 0x20 The item has been filtered
``ULC_STATE_INUSE`` 0x40 The item is in use
``ULC_STATE_PICKED`` 0x80 The item has been picked
``ULC_STATE_SOURCE`` 0x100 The item is a drag and drop source
============================ ========= ==============================
:note: The valid state flags are influenced by the value of the state mask.
:see: :meth:`~UltimateListItem.SetStateMask`
"""
self._mask |= ULC_MASK_STATE
self._state = state
self._stateMask |= state
def SetStateMask(self, stateMask):
"""
Sets the bitmask that is used to determine which of the state flags are
to be set.
:param `stateMask`: the state bitmask.
:see: :meth:`~UltimateListItem.SetState` for a list of valid state bits.
"""
self._stateMask = stateMask
def SetText(self, text):
"""
Sets the text label for the item.
:param `text`: the text label for the item.
"""
self._mask |= ULC_MASK_TEXT
self._text = text
def SetToolTip(self, text):
"""
Sets the tooltip text for the item.
:param `text`: the tooltip text for the item.
"""
self._mask |= ULC_MASK_TOOLTIP
self._tooltip = text
def SetImage(self, image):
"""
Sets the zero-based indexes of the images associated with the item into the
image list.
:param `image`: a Python list with the zero-based indexes of the images
associated with the item into the image list.
"""
self._mask |= ULC_MASK_IMAGE
if image is None:
image = []
self._image = to_list(image)
def SetData(self, data):
"""
Sets client data for the item.
:param `data`: the client data associated to the item.
:note: Please note that client data is associated with the item and not
with subitems.
"""
self._mask |= ULC_MASK_DATA
self._data = data
def SetPyData(self, pyData):
"""
Sets data for the item, which can be any Python object.
:param `data`: any Python object associated to the item.
:note: Please note that Python data is associated with the item and not
with subitems.
"""
self._mask |= ULC_MASK_PYDATA
self._pyData = pyData
def SetWidth(self, width):
"""
Sets the column width.
:param `width`: the column width.
:note: This method is meaningful only for column headers in report mode.
"""
self._mask |= ULC_MASK_WIDTH
self._width = width
def SetAlign(self, align):
"""
Sets the alignment for the item.
:param `align`: one of the following bits:
============================ ========= ==============================
Alignment Bits Hex Value Description
============================ ========= ==============================
``ULC_FORMAT_LEFT`` 0x0 The item is left-aligned
``ULC_FORMAT_RIGHT`` 0x1 The item is right-aligned
``ULC_FORMAT_CENTRE`` 0x2 The item is centre-aligned
``ULC_FORMAT_CENTER`` 0x2 The item is center-aligned
============================ ========= ==============================
"""
self._mask |= ULC_MASK_FORMAT
self._format = align
def SetTextColour(self, colText):
"""
Sets the text colour for the item.
:param `colText`: a valid :class:`wx.Colour` object.
"""
self.Attributes().SetTextColour(colText)
def SetBackgroundColour(self, colBack):
"""
Sets the background colour for the item.
:param `colBack`: a valid :class:`wx.Colour` object.
"""
self.Attributes().SetBackgroundColour(colBack)
def SetFont(self, font):
"""
Sets the font for the item.
:param `font`: a valid :class:`wx.Font` object.
"""
self.Attributes().SetFont(font)
def SetFooterTextColour(self, colText):
"""
Sets the text colour for the footer item.
:param `colText`: a valid :class:`wx.Colour` object.
"""
self.Attributes().SetFooterTextColour(colText)
def SetFooterBackgroundColour(self, colBack):
"""
Sets the background colour for the footer item.
:param `colBack`: a valid :class:`wx.Colour` object.
"""
self.Attributes().SetFooterBackgroundColour(colBack)
def SetFooterFont(self, font):
"""
Sets the font for the footer item.
:param `font`: a valid :class:`wx.Font` object.
"""
self.Attributes().SetFooterFont(font)
def Enable(self, enable=True):
"""
Enables or disables the item.
:param `enable`: ``True`` to enable the item, ``False`` to disable it.
"""
self.Attributes().Enable(enable)
# accessors
def GetMask(self):
"""
Returns a bit mask indicating which fields of the structure are valid.
:see: :meth:`~UltimateListItem.SetMask` for a list of valid bit masks.
"""
return self._mask
def GetId(self):
""" Returns the zero-based item position. """
return self._itemId
def GetColumn(self):
"""
Returns the zero-based column.
:note: This method is meaningful only in report mode.
"""
return self._col
def GetFormat(self):
""" Returns the header item format. """
return self._format
def GetState(self):
"""
Returns a bit field representing the state of the item.
:see: :meth:`~UltimateListItem.SetState` for a list of valid item states.
"""
return self._state & self._stateMask
def GetText(self):
""" Returns the label/header text. """
return self._text
def GetToolTip(self):
""" Returns the label/header tooltip. """
return self._tooltip
def GetImage(self):
"""
Returns a Python list with the zero-based indexes of the images associated
with the item into the image list.
"""
return self._image
def GetData(self):
"""
Returns client data associated with the control.
:note: Please note that client data is associated with the item and not
with subitems.
"""
return self._data
def GetPyData(self):
"""
Returns data for the item, which can be any Python object.
:note: Please note that Python data is associated with the item and not
with subitems.
"""
return self._pyData
def GetWidth(self):
"""
Returns the column width.
:note: This method is meaningful only for column headers in report mode.
"""
return self._width
def GetAlign(self):
"""
Returns the alignment for the item.
:see: :meth:`~UltimateListItem.SetAlign` for a list of valid alignment bits.
"""
return self._format
def GetAttributes(self):
""" Returns the associated :class:`UltimateListItemAttr` attributes. """
return self._attr
def HasAttributes(self):
""" Returns ``True`` if the item has attributes associated with it. """
return self._attr != None
def GetTextColour(self):
""" Returns the text colour. """
return (self.HasAttributes() and [self._attr.GetTextColour()] or [wx.NullColour])[0]
def GetBackgroundColour(self):
""" Returns the background colour. """
return (self.HasAttributes() and [self._attr.GetBackgroundColour()] or [wx.NullColour])[0]
def GetFont(self):
""" Returns the item font. """
return (self.HasAttributes() and [self._attr.GetFont()] or [wx.NullFont])[0]
def IsEnabled(self):
""" Returns ``True`` if the item is enabled. """
return (self.HasAttributes() and [self._attr.IsEnabled()] or [True])[0]
# creates self._attr if we don't have it yet
def Attributes(self):
"""
Returns the associated attributes if they exist, or create a new :class:`UltimateListItemAttr`
structure and associate it with this item.
"""
if not self._attr:
self._attr = UltimateListItemAttr()
return self._attr
def SetKind(self, kind):
"""
Sets the item kind.
:param `kind`: may be one of the following integers:
=============== ==========================
Item Kind Description
=============== ==========================
0 A normal item
1 A checkbox-like item
2 A radiobutton-type item
=============== ==========================
"""
self._mask |= ULC_MASK_KIND
self._kind = kind
def GetKind(self):
"""
Returns the item kind.
:see: :meth:`~UltimateListItem.SetKind` for a valid list of item's kind.
"""
return self._kind
def IsChecked(self):
""" Returns whether the item is checked or not. """
return self._checked
def Check(self, checked=True):
"""
Checks/unchecks an item.
:param `checked`: ``True`` to check an item, ``False`` to uncheck it.
:note: This method is meaningful only for check and radio items.
"""
self._mask |= ULC_MASK_CHECK
self._checked = checked
def IsShown(self):
""" Returns ``True`` if the item is shown, or ``False`` if it is hidden. """
return self._isColumnShown
def SetShown(self, shown=True):
"""
Sets an item as shown/hidden.
:param `shown`: ``True`` to show the item, ``False`` to hide it.
"""
self._mask |= ULC_MASK_SHOWN
self._isColumnShown = shown
def SetHyperText(self, hyper=True):
"""
Sets whether the item is hypertext or not.
:param `hyper`: ``True`` to set hypertext behaviour, ``False`` otherwise.
"""
self._mask |= ULC_MASK_HYPERTEXT
self._hypertext = hyper
def SetVisited(self, visited=True):
"""
Sets whether an hypertext item was visited or not.
:param `visited`: ``True`` to set a hypertext item as visited, ``False`` otherwise.
"""
self._mask |= ULC_MASK_HYPERTEXT
self._visited = visited
def GetVisited(self):
""" Returns whether an hypertext item was visited or not. """
return self._visited
def IsHyperText(self):
""" Returns whether the item is hypetext or not. """
return self._hypertext
    def SetWindow(self, wnd, expand=False):
        """
        Sets the window associated to the item.

        :param `wnd`: a non-toplevel window to be displayed next to the item;
        :param `expand`: ``True`` to expand the column where the item/subitem lives,
         so that the window will be fully visible.

        :note: side effects — the widget is reparented to the list's main
         window and registered in its ``_itemWithWindow`` list.
        """
        self._mask |= ULC_MASK_WINDOW
        self._wnd = wnd
        # Reparent the widget to the internal main window of the list control.
        listCtrl = wnd.GetParent()
        mainWin = listCtrl._mainWin
        wnd.Reparent(mainWin)
        if wnd.GetSizer():      # the window is a complex one hold by a sizer
            size = wnd.GetBestSize()
        else:                   # simple window, without sizers
            size = wnd.GetSize()
        # We have to bind the wx.EVT_SET_FOCUS for the associated window
        # No other solution to handle the focus changing from an item in
        # UltimateListCtrl and the window associated to an item
        # Do better strategies exist?
        self._wnd.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)
        self._windowsize = size
        # The window is enabled only if the item is enabled
        self._wnd.Enable(self._enabled)
        self._windowenabled = self._enabled
        self._expandWin = expand
        # Register the widget with the main window so it is laid out/painted.
        mainWin._hasWindows = True
        mainWin._itemWithWindow.append(self)
        # This is needed as otherwise widgets that should be invisible
        # are shown at the top left corner of ULC
        mainWin.HideWindows()
        mainWin.Refresh()
def GetWindow(self):
""" Returns the window associated to the item. """
return self._wnd
    def DeleteWindow(self):
        """ Deletes the window associated to the item (if any). """
        if self._wnd:
            # The widget was reparented to the list's main window in
            # SetWindow, so GetParent() yields that window here — TODO
            # confirm nothing reparents the widget in the meantime.
            listCtrl = self._wnd.GetParent()
            if self in listCtrl._itemWithWindow:
                listCtrl._itemWithWindow.remove(self)
            self._wnd.Destroy()
            self._wnd = None
def GetWindowEnabled(self):
""" Returns whether the associated window is enabled or not. """
if not self._wnd:
raise Exception("\nERROR: This Item Has No Window Associated")
return self._windowenabled
def SetWindowEnabled(self, enable=True):
"""
Sets whether the associated window is enabled or not.
:param `enable`: ``True`` to enable the associated window, ``False`` to disable it.
"""
if not self._wnd:
raise Exception("\nERROR: This Item Has No Window Associated")
self._windowenabled = enable
self._wnd.Enable(enable)
def GetWindowSize(self):
""" Returns the associated window size. """
return self._windowsize
def SetCustomRenderer(self, renderer):
"""
Associate a custom renderer to this item.
:param `renderer`: a class able to correctly render the item.
:note: the renderer class **must** implement the methods `DrawSubItem`,
`GetLineHeight` and `GetSubItemWidth`.
"""
self._mask |= ULC_MASK_RENDERER
self._customRenderer = renderer
def GetCustomRenderer(self):
""" Returns the custom renderer associated with this item (if any). """
return self._customRenderer
def SetOverFlow(self, over=True):
"""
Sets the item in the overflow/non overflow state.
An item/subitem may overwrite neighboring items/subitems if its text would
not normally fit in the space allotted to it.
:param `over`: ``True`` to set the item in a overflow state, ``False`` otherwise.
"""
self._mask |= ULC_MASK_OVERFLOW
self._overFlow = over
def GetOverFlow(self):
"""
Returns if the item is in the overflow state.
An item/subitem may overwrite neighboring items/subitems if its text would
not normally fit in the space allotted to it.
"""
return self._overFlow
    def Init(self):
        """ Initializes an empty :class:`UltimateListItem`. """
        self._mask = 0                      # which of the fields below are valid (ULC_MASK_*)
        self._itemId = 0                    # zero-based item position
        self._col = 0                       # zero-based column (report mode only)
        self._state = 0                     # item state flags (ULC_STATE_*)
        self._stateMask = 0                 # which bits of _state are valid
        self._image = []                    # zero-based indexes into the image list
        self._data = 0                      # app-defined data
        self._pyData = None                 # arbitrary Python payload
        self._text = ""                     # label/header text
        self._tooltip = ""                  # label/header tooltip text
        self._format = ULC_FORMAT_CENTRE    # item alignment
        self._width = 0                     # column width (column headers only)
        self._colour = wx.Colour(0, 0, 0)
        self._font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        self._kind = 0                      # 0: normal, 1: checkbox-like, 2: radiobutton-like
        self._checked = False
        self._enabled = True
        self._hypertext = False             # indicates if the item is hypertext
        self._visited = False               # visited state for an hypertext item
        self._wnd = None                    # optional widget shown next to the item
        self._windowenabled = False
        self._windowsize = wx.Size()
        self._isColumnShown = True
        self._customRenderer = None
        self._overFlow = False
        self._footerChecked = False
        self._footerFormat = ULC_FORMAT_CENTRE
        self._footerImage = []
        self._footerKind = 0
        self._footerText = ""
        self._expandWin = False
def SetFooterKind(self, kind):
"""
Sets the footer item kind.
:see: :meth:`~UltimateListItem.SetKind` for a list of valid items kind.
"""
self._mask |= ULC_MASK_FOOTER_KIND
self._footerKind = kind
def GetFooterKind(self):
"""
Returns the footer item kind.
:see: :meth:`~UltimateListItem.SetKind` for a list of valid items kind.
"""
return self._footerKind
def IsFooterChecked(self):
""" Returns whether the footer item is checked or not. """
return self._footerChecked
def CheckFooter(self, checked=True):
"""
Checks/unchecks a footer item.
:param `checked`: ``True`` to check an item, ``False`` to uncheck it.
:note: This method is meaningful only for check and radio footer items.
"""
self._mask |= ULC_MASK_FOOTER_CHECK
self._footerChecked = checked
def GetFooterFormat(self):
""" Returns the footer item format. """
return self._footerFormat
def SetFooterFormat(self, format):
"""
Sets the footer item format.
:param `format`: the footer item format.
"""
self._mask |= ULC_MASK_FOOTER_FORMAT
self._footerFormat = format
def GetFooterText(self):
""" Returns the footer text. """
return self._footerText
def SetFooterText(self, text):
"""
Sets the text label for the footer item.
:param `text`: the text label for the footer item.
"""
self._mask |= ULC_MASK_FOOTER_TEXT
self._footerText = text
def GetFooterImage(self):
"""
Returns the zero-based index of the image associated with the footer item into
the image list.
"""
return self._footerImage
def SetFooterImage(self, image):
"""
Sets the zero-based index of the image associated with the footer item into the
image list.
:param `image`: the zero-based index of the image associated with the footer item
into the image list.
"""
self._mask |= ULC_MASK_FOOTER_IMAGE
self._footerImage = to_list(image)
def GetFooterTextColour(self):
""" Returns the footer item text colour. """
return (self.HasAttributes() and [self._attr.GetFooterTextColour()] or [wx.NullColour])[0]
def GetFooterBackgroundColour(self):
""" Returns the footer item background colour. """
return (self.HasAttributes() and [self._attr.GetFooterBackgroundColour()] or [wx.NullColour])[0]
def GetFooterFont(self):
""" Returns the footer item font. """
return (self.HasAttributes() and [self._attr.GetFooterFont()] or [wx.NullFont])[0]
def SetFooterAlign(self, align):
"""
Sets the alignment for the footer item.
:see: :meth:`~UltimateListItem.SetAlign` for a list of valid alignment flags.
"""
self._mask |= ULC_MASK_FOOTER_FORMAT
self._footerFormat = align
def GetFooterAlign(self):
"""
Returns the alignment for the footer item.
:see: :meth:`~UltimateListItem.SetAlign` for a list of valid alignment flags.
"""
return self._footerFormat
def OnSetFocus(self, event):
"""
Handles the ``wx.EVT_SET_FOCUS`` event for the window associated to an item.
:param `event`: a :class:`FocusEvent` event to be processed.
"""
listCtrl = self._wnd.GetParent()
select = listCtrl.GetItemState(self._itemId, ULC_STATE_SELECTED)
# If the window is associated to an item that currently is selected
# (has focus) we don't kill the focus. Otherwise we do it.
if not select:
listCtrl._hasFocus = False
else:
listCtrl._hasFocus = True
listCtrl.SetFocus()
event.Skip()
# ----------------------------------------------------------------------------
# ListEvent - the event class for the UltimateListCtrl notifications
# ----------------------------------------------------------------------------
class CommandListEvent(wx.PyCommandEvent):
    """
    A list event holds information about events associated with :class:`UltimateListCtrl`
    objects.
    """

    def __init__(self, commandTypeOrEvent=None, winid=0):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        :param `commandTypeOrEvent`: the event type or another instance of
         :class:`PyCommandEvent`;
        :param `winid`: the event identifier.
        """
        # An integer argument means "build a fresh event of that type";
        # anything else is treated as an existing event to copy state from.
        if type(commandTypeOrEvent) in INTEGER_TYPES:
            wx.PyCommandEvent.__init__(self, commandTypeOrEvent, winid)
            self.m_code = 0                  # key code (keypress events)
            self.m_oldItemIndex = 0          # start of a cache-hint range
            self.m_itemIndex = 0             # index of the affected item
            self.m_col = 0                   # column (COL events)
            self.m_pointDrag = wx.Point()    # mouse position (drag events)
            self.m_item = UltimateListItem()
            self.m_editCancelled = False
        else:
            wx.PyCommandEvent.__init__(self, commandTypeOrEvent.GetEventType(), commandTypeOrEvent.GetId())
            self.m_code = commandTypeOrEvent.m_code
            self.m_oldItemIndex = commandTypeOrEvent.m_oldItemIndex
            self.m_itemIndex = commandTypeOrEvent.m_itemIndex
            self.m_col = commandTypeOrEvent.m_col
            self.m_pointDrag = commandTypeOrEvent.m_pointDrag
            self.m_item = commandTypeOrEvent.m_item
            self.m_editCancelled = commandTypeOrEvent.m_editCancelled

    def GetKeyCode(self):
        """ Returns the key code if the event is a keypress event. """
        return self.m_code

    def GetIndex(self):
        """ Returns the item index. """
        return self.m_itemIndex

    Index = property(GetIndex, doc="See `GetIndex`")

    def GetColumn(self):
        """
        Returns the column position: it is only used with ``COL`` events.
        For the column dragging events, it is the column to the left of the divider
        being dragged, for the column click events it may be -1 if the user clicked
        in the list control header outside any column.
        """
        return self.m_col

    def GetPoint(self):
        """ Returns the position of the mouse pointer if the event is a drag event. """
        return self.m_pointDrag

    def GetLabel(self):
        """ Returns the (new) item label for ``EVT_LIST_END_LABEL_EDIT`` event. """
        return self.m_item._text

    def GetText(self):
        """ Returns the item text. """
        return self.m_item._text

    def GetImage(self):
        """ Returns the item image. """
        return self.m_item._image

    def GetData(self):
        """ Returns the item data. """
        return self.m_item._data

    def GetMask(self):
        """ Returns the item mask. """
        return self.m_item._mask

    def GetItem(self):
        """ Returns the item itself. """
        return self.m_item

    # for wxEVT_COMMAND_LIST_CACHE_HINT only
    def GetCacheFrom(self):
        """
        Returns the first item which the list control advises us to cache.

        :note: This method is meaningful for ``EVT_LIST_CACHE_HINT`` event only.
        """
        return self.m_oldItemIndex

    def GetCacheTo(self):
        """
        Returns the last item (inclusive) which the list control advises us to cache.

        :note: This method is meaningful for ``EVT_LIST_CACHE_HINT`` event only.
        """
        return self.m_itemIndex

    # was label editing canceled? (for wxEVT_COMMAND_LIST_END_LABEL_EDIT only)
    def IsEditCancelled(self):
        """
        Returns ``True`` if the label editing has been cancelled by the user
        (:meth:`~CommandListEvent.GetLabel` returns an empty string in this case but it doesn't allow
        the application to distinguish between really cancelling the edit and
        the admittedly rare case when the user wants to rename it to an empty
        string).

        :note: This method only makes sense for ``EVT_LIST_END_LABEL_EDIT`` messages.
        """
        return self.m_editCancelled

    def SetEditCanceled(self, editCancelled):
        """
        Sets the item editing as cancelled/not cancelled.

        :param `editCancelled`: ``True`` to set the item editing as cancelled, ``False``
         otherwise.

        :note: This method only makes sense for ``EVT_LIST_END_LABEL_EDIT`` messages.
        """
        self.m_editCancelled = editCancelled
# ----------------------------------------------------------------------------
# UltimateListEvent is a special class for all events associated with list controls
#
# NB: note that not all accessors make sense for all events, see the event
# descriptions below
# ----------------------------------------------------------------------------
class UltimateListEvent(CommandListEvent):
    """
    A list event holds information about events associated with :class:`UltimateListCtrl`
    objects.
    """

    def __init__(self, commandTypeOrEvent=None, winid=0):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        :param `commandTypeOrEvent`: the event type or another instance of
         :class:`PyCommandEvent`;
        :param `winid`: the event identifier.
        """
        CommandListEvent.__init__(self, commandTypeOrEvent, winid)
        # A parallel wx.NotifyEvent carries the Veto/Allow state for this event.
        if type(commandTypeOrEvent) in INTEGER_TYPES:
            self.notify = wx.NotifyEvent(commandTypeOrEvent, winid)
        else:
            self.notify = wx.NotifyEvent(commandTypeOrEvent.GetEventType(), commandTypeOrEvent.GetId())

    def GetNotifyEvent(self):
        """ Returns the actual :class:`NotifyEvent`. """
        return self.notify

    def IsAllowed(self):
        """
        Returns ``True`` if the change is allowed (:meth:`~UltimateListEvent.Veto` hasn't been called) or
        ``False`` otherwise (if it was).
        """
        return self.notify.IsAllowed()

    def Veto(self):
        """
        Prevents the change announced by this event from happening.

        :note: It is in general a good idea to notify the user about the reasons
         for vetoing the change because otherwise the applications behaviour (which
         just refuses to do what the user wants) might be quite surprising.
        """
        self.notify.Veto()

    def Allow(self):
        """
        This is the opposite of :meth:`~UltimateListEvent.Veto`: it explicitly allows the event to be processed.
        For most events it is not necessary to call this method as the events are
        allowed anyhow but some are forbidden by default (this will be mentioned
        in the corresponding event description).
        """
        self.notify.Allow()
# ============================================================================
# private classes
# ============================================================================
#-----------------------------------------------------------------------------
# ColWidthInfo (internal)
#-----------------------------------------------------------------------------
class ColWidthInfo(object):
    """ A simple class which holds information about :class:`UltimateListCtrl` columns. """

    def __init__(self, w=0, needsUpdate=True):
        """
        Default class constructor.

        :param `w`: the initial maximum width of the column;
        :param `needsUpdate`: ``True`` if the column needs refreshing,
         ``False`` otherwise.
        """
        self._bNeedsUpdate = needsUpdate
        self._nMaxWidth = w
#-----------------------------------------------------------------------------
# UltimateListItemData (internal)
#-----------------------------------------------------------------------------
class UltimateListItemData(object):
"""
A simple class which holds information about :class:`UltimateListItem` visual
attributes (client rectangles, positions, etc...).
"""
    def __init__(self, owner):
        """
        Default class constructor.

        :param `owner`: an instance of :class:`UltimateListCtrl` that owns this
         item data.
        """
        # the list ctrl we are in
        self._owner = owner
        self.Init()
        # the item coordinates are not used in report mode, instead this pointer
        # is None and the owner window is used to retrieve the item position and
        # size
        if owner.InReportView():
            self._rect = None
        else:
            self._rect = wx.Rect()
def SetImage(self, image):
"""
Sets the zero-based indexes of the images associated with the item into the
image list.
:param `image`: a Python list with the zero-based indexes of the images
associated with the item into the image list.
"""
self._image = to_list(image)
def SetData(self, data):
"""
Sets client data for the item.
:param `data`: the client data associated to the item.
:note: Please note that client data is associated with the item and not
with subitems.
"""
self._data = data
def HasText(self):
""" Returns ``True`` if the item text is not the empty string. """
return self._text != ""
def GetText(self):
""" Returns the item text. """
return self._text
def GetToolTip(self):
""" Returns the item tooltip. """
return self._tooltip
def GetBackgroundColour(self):
""" Returns the currently set background colour. """
return self._backColour
def GetColour(self):
""" Returns the currently set text colour. """
return self._colour
def GetFont(self):
""" Returns the currently set font. """
return (self._hasFont and [self._font] or [wx.NullFont])[0]
def SetText(self, text):
"""
Sets the text label for the item.
:param `text`: the text label for the item.
"""
self._text = text
def SetToolTip(self, tooltip):
"""
Sets the tooltip for the item
:param `tooltip`: the tooltip text
"""
self._tooltip = tooltip
def SetColour(self, colour):
"""
Sets the text colour for the item.
:param `colour`: an instance of :class:`wx.Colour`.
"""
if colour == wx.NullColour or colour == None:
if self._hasColour:
self._hasColour = False
del self._colour
return
self._hasColour = True
self._colour = colour
def SetFont(self, font):
"""
Sets the text font for the item.
:param `font`: an instance of :class:`wx.Font`.
"""
if font == wx.NullFont:
self._hasFont = False
del self._font
return
self._hasFont = True
self._font = font
def SetBackgroundColour(self, colour):
"""
Sets the background colour for the item.
:param `colour`: an instance of :class:`wx.Colour`.
"""
if colour == wx.NullColour:
self._hasBackColour = False
del self._backColour
return
self._hasBackColour = True
self._backColour = colour
# we can't use empty string for measuring the string width/height, so
# always return something
def GetTextForMeasuring(self):
"""
Returns the item text or a simple string if the item text is the
empty string.
"""
s = self.GetText()
if not s.strip():
s = 'H'
return s
def GetImage(self):
    """
    Returns a Python list with the zero-based indexes of the images associated
    with the item into the image list (an empty list when there are none).
    """
    return self._image
def HasImage(self):
""" Returns ``True`` if the item has at least one image associated with it. """
return len(self._image) > 0
def SetKind(self, kind):
    """
    Sets the item kind.

    :param `kind`: may be one of the following integers:

     =============== ==========================
     Item Kind       Description
     =============== ==========================
            0        A normal item
            1        A checkbox-like item
            2        A radiobutton-type item
     =============== ==========================
    """
    self._kind = kind
def GetKind(self):
    """
    Returns the item kind (an integer).

    :see: :meth:`~UltimateListItemData.SetKind` for a list of valid item kinds.
    """
    return self._kind
def IsChecked(self):
    """ Returns whether the item is checked or not (meaningful for kind 1/2 items). """
    return self._checked
def Check(self, checked=True):
    """
    Checks/unchecks an item.

    :param `checked`: ``True`` to check an item, ``False`` to uncheck it.

    :note: This method is meaningful only for check and radio items.
    """
    self._checked = checked
def SetHyperText(self, hyper=True):
    """
    Sets whether the item is hypertext (link-like) or not.

    :param `hyper`: ``True`` to set hypertext behaviour, ``False`` otherwise.
    """
    self._hypertext = hyper
def SetVisited(self, visited=True):
    """
    Sets whether an hypertext item was visited or not.

    :param `visited`: ``True`` to set a hypertext item as visited, ``False`` otherwise.
    """
    self._visited = visited
def GetVisited(self):
    """ Returns whether an hypertext item was visited or not. """
    return self._visited
def IsHyperText(self):
    """ Returns whether the item is hypertext or not. """
    return self._hypertext
def SetWindow(self, wnd, expand=False):
    """
    Sets the window associated to the item.

    :param `wnd`: a non-toplevel window to be displayed next to the item;
    :param `expand`: ``True`` to expand the column where the item/subitem lives,
     so that the window will be fully visible.
    """
    self._mask |= ULC_MASK_WINDOW
    self._wnd = wnd

    # a sizer-managed window reports its natural size via GetBestSize;
    # a plain window is measured as-is
    if wnd.GetSizer():      # the window is a complex one hold by a sizer
        size = wnd.GetBestSize()
    else:                   # simple window, without sizers
        size = wnd.GetSize()

    # We have to bind the wx.EVT_SET_FOCUS for the associated window
    # No other solution to handle the focus changing from an item in
    # UltimateListCtrl and the window associated to an item
    # Do better strategies exist?
    self._windowsize = size

    # The window is enabled only if the item is enabled
    self._wnd.Enable(self._enabled)
    self._windowenabled = self._enabled
    self._expandWin = expand
def GetWindow(self):
    """ Returns the window associated to the item (``None`` if there is none). """
    return self._wnd
def DeleteWindow(self):
    """ Deletes (destroys) the window associated to the item, if any. """
    if self._wnd:
        self._wnd.Destroy()
        # drop the reference so the item no longer reports a window
        self._wnd = None
def GetWindowEnabled(self):
    """
    Returns whether the associated window is enabled or not.

    :raise: `Exception` when the item has no associated window.
    """
    if not self._wnd:
        raise Exception("\nERROR: This Item Has No Window Associated")

    return self._windowenabled
def SetWindowEnabled(self, enable=True):
    """
    Sets whether the associated window is enabled or not.

    :param `enable`: ``True`` to enable the associated window, ``False`` to disable it.

    :raise: `Exception` when the item has no associated window.
    """
    if not self._wnd:
        raise Exception("\nERROR: This Item Has No Window Associated")

    self._windowenabled = enable
    self._wnd.Enable(enable)
def GetWindowSize(self):
    """ Returns the associated window size (cached at :meth:`~SetWindow` time). """
    return self._windowsize
def SetAttr(self, attr):
    """
    Sets the item attributes.

    :param `attr`: an instance of :class:`UltimateListItemAttr`, or ``None``.
    """
    self._attr = attr
def GetAttr(self):
    """ Returns the item attributes (an :class:`UltimateListItemAttr` or ``None``). """
    return self._attr
def HasColour(self):
    """ Returns ``True`` if a text colour has been set for this item. """
    return self._hasColour
def HasFont(self):
    """ Returns ``True`` if a font has been set for this item. """
    return self._hasFont
def HasBackgroundColour(self):
    """ Returns ``True`` if a background colour has been set for this item. """
    return self._hasBackColour
def SetCustomRenderer(self, renderer):
    """
    Associate a custom renderer to this item.

    :param `renderer`: a class able to correctly render the item.

    :note: the renderer class **must** implement the methods `DrawSubItem`,
     `GetLineHeight` and `GetSubItemWidth`.
    """
    self._mask |= ULC_MASK_RENDERER
    self._customRenderer = renderer
def GetCustomRenderer(self):
    """ Returns the custom renderer associated with this item (``None`` if unset). """
    return self._customRenderer
def SetOverFlow(self, over=True):
    """
    Sets the item in the overflow/non overflow state.

    An item/subitem may overwrite neighboring items/subitems if its text would
    not normally fit in the space allotted to it.

    :param `over`: ``True`` to set the item in a overflow state, ``False`` otherwise.
    """
    self._mask |= ULC_MASK_OVERFLOW
    self._overFlow = over
def GetOverFlow(self):
    """
    Returns if the item is in the overflow state.

    An item/subitem may overwrite neighboring items/subitems if its text would
    not normally fit in the space allotted to it.
    """
    return self._overFlow
def Init(self):
    """ Initializes the item data structure to its default (empty) state. """
    # the item image or -1
    self._image = []
    # user data associated with the item
    self._data = 0
    self._pyData = None

    # text colour; meaningful only while self._hasColour is True
    self._colour = wx.Colour(0, 0, 0)
    self._hasColour = False
    self._hasFont = False
    self._hasBackColour = False

    self._text = ""
    self._tooltip = ""

    # kind = 0: normal item
    # kind = 1: checkbox-type item
    self._kind = 0
    self._checked = False
    self._enabled = True

    # custom attributes or None
    self._attr = None

    # hypertext (link-like) state
    self._hypertext = False
    self._visited = False

    # optional embedded window shown next to the item
    self._wnd = None
    self._windowenabled = True
    self._windowsize = wx.Size()
    self._isColumnShown = True

    self._customRenderer = None
    self._overFlow = False
    self._expandWin = False
def SetItem(self, info):
    """
    Sets information about the item, copying every field whose corresponding
    mask bit is set in ``info._mask``.

    :param `info`: an instance of :class:`UltimateListItemData`.
    """
    if info._mask & ULC_MASK_TEXT:
        CheckVariableRowHeight(self._owner, info._text)
        self.SetText(info._text)

    if info._mask & ULC_MASK_TOOLTIP:
        self.SetToolTip(info._tooltip)

    if info._mask & ULC_MASK_KIND:
        self._kind = info._kind

    if info._mask & ULC_MASK_CHECK:
        self._checked = info._checked

    if info._mask & ULC_MASK_ENABLE:
        self._enabled = info._enabled

    if info._mask & ULC_MASK_IMAGE:
        # copy the list so later mutation of `info` does not alias us
        self._image = info._image[:]

    if info._mask & ULC_MASK_DATA:
        self._data = info._data

    if info._mask & ULC_MASK_PYDATA:
        self._pyData = info._pyData

    if info._mask & ULC_MASK_HYPERTEXT:
        self._hypertext = info._hypertext
        self._visited = info._visited

    if info._mask & ULC_MASK_FONTCOLOUR:
        self.SetColour(info.GetTextColour())

    if info._mask & ULC_MASK_FONT:
        self.SetFont(info.GetFont())

    if info._mask & ULC_MASK_BACKCOLOUR:
        self.SetBackgroundColour(info.GetBackgroundColour())

    if info._mask & ULC_MASK_WINDOW:
        self._wnd = info._wnd
        self._windowenabled = info._windowenabled
        self._windowsize = info._windowsize
        self._expandWin = info._expandWin

    if info._mask & ULC_MASK_SHOWN:
        self._isColumnShown = info._isColumnShown

    if info._mask & ULC_MASK_RENDERER:
        self._customRenderer = info._customRenderer

    if info._mask & ULC_MASK_OVERFLOW:
        self._overFlow = info._overFlow

    if info.HasAttributes():
        # replace or build our attribute bundle from the incoming item
        if self._attr:
            self._attr = info.GetAttributes()
        else:
            self._attr = UltimateListItemAttr(info.GetTextColour(), info.GetBackgroundColour(),
                                              info.GetFont(), info.IsEnabled(), info.GetFooterTextColour(),
                                              info.GetFooterBackgroundColour(), info.GetFooterFont())

    if self._rect:
        # invalidate the cached layout; the width is taken from the new info
        self._rect.x = -1
        self._rect.y = -1
        self._rect.height = 0
        self._rect.width = info._width
def SetPosition(self, x, y):
"""
Sets the item position.
:param `x`: the item `x` position;
:param `y`: the item `y` position.
"""
self._rect.x = x
self._rect.y = y
def SetSize(self, width, height):
"""
Sets the item size.
:param `width`: the item width, in pixels;
:param `height`: the item height, in pixels.
"""
if width != -1:
self._rect.width = width
if height != -1:
self._rect.height = height
def IsHit(self, x, y):
    """
    Returns ``True`` if the input position is inside the item client rectangle.

    :param `x`: the `x` mouse position;
    :param `y`: the `y` mouse position.
    """
    # delegate the containment test to a freshly built wx.Rect
    return wx.Rect(self.GetX(), self.GetY(), self.GetWidth(), self.GetHeight()).Contains((x, y))
def GetX(self):
    """ Returns the item `x` position (of the cached client rectangle). """
    return self._rect.x
def GetY(self):
    """ Returns the item `y` position (of the cached client rectangle). """
    return self._rect.y
def GetWidth(self):
    """ Returns the item width, in pixels (of the cached client rectangle). """
    return self._rect.width
def GetHeight(self):
    """ Returns the item height, in pixels (of the cached client rectangle). """
    return self._rect.height
def GetItem(self, info):
    """
    Returns information about the item, filling the fields of `info` whose
    mask bits are set (a zero mask means "copy everything").

    :param `info`: an instance of :class:`UltimateListItemData`.
    """
    mask = info._mask
    if not mask:
        # by default, get everything for backwards compatibility
        mask = -1

    if mask & ULC_MASK_TEXT:
        info._text = self._text

    if mask & ULC_MASK_TOOLTIP:
        info._tooltip = self._tooltip

    if mask & ULC_MASK_IMAGE:
        info._image = self._image[:]

    if mask & ULC_MASK_DATA:
        info._data = self._data

    if mask & ULC_MASK_PYDATA:
        info._pyData = self._pyData

    # NOTE(review): this branch tests ``info._mask`` while every other branch
    # tests the locally-defaulted ``mask``, so the font is NOT copied in the
    # backwards-compatible "mask == 0 means everything" case. This looks like
    # an oversight, but is preserved as-is since callers may rely on it.
    if info._mask & ULC_MASK_FONT:
        info.SetFont(self.GetFont())

    if mask & ULC_MASK_KIND:
        info._kind = self._kind

    if mask & ULC_MASK_CHECK:
        info._checked = self._checked

    if mask & ULC_MASK_ENABLE:
        info._enabled = self._enabled

    if mask & ULC_MASK_HYPERTEXT:
        info._hypertext = self._hypertext
        info._visited = self._visited

    if mask & ULC_MASK_WINDOW:
        info._wnd = self._wnd
        info._windowenabled = self._windowenabled
        info._windowsize = self._windowsize
        info._expandWin = self._expandWin

    if mask & ULC_MASK_SHOWN:
        info._isColumnShown = self._isColumnShown

    if mask & ULC_MASK_RENDERER:
        info._customRenderer = self._customRenderer

    if mask & ULC_MASK_OVERFLOW:
        info._overFlow = self._overFlow

    if self._attr:
        # individual attributes are copied only when actually set
        if self._attr.HasTextColour():
            info.SetTextColour(self._attr.GetTextColour())
        if self._attr.HasBackgroundColour():
            info.SetBackgroundColour(self._attr.GetBackgroundColour())
        if self._attr.HasFont():
            info.SetFont(self._attr.GetFont())
        info.Enable(self._attr.IsEnabled())

    return info
def IsEnabled(self):
    """ Returns ``True`` if the item is enabled, ``False`` if it is disabled. """
    return self._enabled
def Enable(self, enable=True):
    """
    Enables or disables the item.

    :param `enable`: ``True`` to enable the item, ``False`` to disable it.
    """
    self._enabled = enable
#-----------------------------------------------------------------------------
# UltimateListHeaderData (internal)
#-----------------------------------------------------------------------------
class UltimateListHeaderData(object):
    """
    A simple class which holds information about :class:`UltimateListItem` visual
    attributes for the header/footer items (client rectangles, positions, etc...).
    """

    def __init__(self, item=None):
        """
        Default class constructor.

        :param `item`: another instance of :class:`UltimateListHeaderData` to
         copy the attributes from, or ``None`` for a default-initialized item.
        """
        self.Init()

        if item:
            self.SetItem(item)

    def HasText(self):
        """ Returns ``True`` if the header/footer item text is not the empty string. """
        # (docstring fixed: it previously claimed to test the text *colour*)
        return self._text != ""

    def GetText(self):
        """ Returns the header/footer item text. """
        return self._text

    def GetToolTip(self):
        """ Returns the header/footer item tooltip. """
        return self._tooltip

    def SetText(self, text):
        """
        Sets the header/footer item text.

        :param `text`: the new header/footer text.
        """
        self._text = text

    def SetToolTip(self, tip):
        """
        Sets the header/footer item tooltip.

        :param `tip`: the new header/footer tooltip.
        """
        # Bug fix: this used to assign to ``self._tip``, a name that is never
        # read anywhere, so a tooltip set through this method was silently
        # lost (both ``Init`` and ``GetToolTip`` use ``self._tooltip``).
        self._tooltip = tip

    def GetFont(self):
        """ Returns the header/footer item font. """
        return self._font

    def Init(self):
        """ Initializes the header/footer item to its default (empty) state. """
        self._mask = 0
        self._image = []
        self._format = 0
        self._width = 0
        self._xpos = 0
        self._ypos = 0
        self._height = 0
        self._text = ""
        self._tooltip = ""
        self._kind = 0
        self._checked = False
        self._font = wx.NullFont
        self._state = 0
        self._isColumnShown = True
        self._customRenderer = None

        # footer counterparts of the header attributes above
        self._footerImage = []
        self._footerFormat = 0
        self._footerText = ""
        self._footerKind = 0
        self._footerChecked = False
        self._footerFont = wx.NullFont

    def SetItem(self, item):
        """
        Sets information about the header/footer item, copying every field
        whose mask bit is set in ``item._mask``.

        :param `item`: an instance of :class:`UltimateListHeaderData`.
        """
        self._mask = item._mask

        if self._mask & ULC_MASK_TEXT:
            self._text = item._text

        if self._mask & ULC_MASK_TOOLTIP:
            self._tooltip = item._tooltip

        if self._mask & ULC_MASK_FOOTER_TEXT:
            self._footerText = item._footerText

        if self._mask & ULC_MASK_IMAGE:
            self._image = item._image[:]

        if self._mask & ULC_MASK_FOOTER_IMAGE:
            self._footerImage = item._footerImage[:]

        if self._mask & ULC_MASK_FORMAT:
            self._format = item._format

        if self._mask & ULC_MASK_FOOTER_FORMAT:
            self._footerFormat = item._footerFormat

        if self._mask & ULC_MASK_WIDTH:
            self.SetWidth(item._width)

        if self._mask & ULC_MASK_FONT:
            self._font = item._font

        if self._mask & ULC_MASK_FOOTER_FONT:
            self._footerFont = item._footerFont

        if self._mask & ULC_MASK_FOOTER_KIND:
            self._footerKind = item._footerKind
            self._footerChecked = item._footerChecked

        # NOTE: the KIND and CHECK branches copy the same attribute pair on
        # purpose -- either mask bit is enough to transfer kind + check state.
        if self._mask & ULC_MASK_KIND:
            self._kind = item._kind
            self._checked = item._checked

        if self._mask & ULC_MASK_CHECK:
            self._kind = item._kind
            self._checked = item._checked

        if self._mask & ULC_MASK_FOOTER_CHECK:
            self._footerKind = item._footerKind
            self._footerChecked = item._footerChecked

        if self._mask & ULC_MASK_STATE:
            self.SetState(item._state)

        if self._mask & ULC_MASK_SHOWN:
            self._isColumnShown = item._isColumnShown

        if self._mask & ULC_MASK_RENDERER:
            self._customRenderer = item._customRenderer

    def SetState(self, flag):
        """
        Sets the item state flags.

        :param `flag`: any combination of the following bits:

         ============================ ========= ==============================
         State Bits                   Hex Value Description
         ============================ ========= ==============================
         ``ULC_STATE_DONTCARE``             0x0 Don't care what the state is
         ``ULC_STATE_DROPHILITED``          0x1 The item is highlighted to receive a drop event
         ``ULC_STATE_FOCUSED``              0x2 The item has the focus
         ``ULC_STATE_SELECTED``             0x4 The item is selected
         ``ULC_STATE_CUT``                  0x8 The item is in the cut state
         ``ULC_STATE_DISABLED``            0x10 The item is disabled
         ``ULC_STATE_FILTERED``            0x20 The item has been filtered
         ``ULC_STATE_INUSE``               0x40 The item is in use
         ``ULC_STATE_PICKED``              0x80 The item has been picked
         ``ULC_STATE_SOURCE``             0x100 The item is a drag and drop source
         ============================ ========= ==============================
        """
        self._state = flag

    def SetPosition(self, x, y):
        """
        Sets the header/footer item position.

        :param `x`: the item `x` position;
        :param `y`: the item `y` position.
        """
        self._xpos = x
        self._ypos = y

    def SetHeight(self, h):
        """
        Sets the header/footer item height, in pixels.

        :param `h`: an integer value representing the header/footer height.
        """
        self._height = h

    def SetWidth(self, w):
        """
        Sets the header/footer item width, in pixels, clamping negative values
        to the default width and too-small values to the minimum width.

        :param `w`: an integer value representing the header/footer width.
        """
        self._width = w

        if self._width < 0:
            self._width = WIDTH_COL_DEFAULT
        elif self._width < WIDTH_COL_MIN:
            self._width = WIDTH_COL_MIN

    def SetFormat(self, format):
        """
        Sets the header item format.

        :param `format`: the header item format.
        """
        self._format = format

    def SetFooterFormat(self, format):
        """
        Sets the footer item format.

        :param `format`: the footer item format.
        """
        self._footerFormat = format

    def HasImage(self):
        """
        Returns ``True`` if the header item has at least one image associated
        with it.
        """
        return len(self._image) > 0

    def HasFooterImage(self):
        """
        Returns ``True`` if the footer item has at least one image associated
        with it.
        """
        return len(self._footerImage) > 0

    def IsHit(self, x, y):
        """
        Returns ``True`` if the input position is inside the item client rectangle.

        :param `x`: the `x` mouse position;
        :param `y`: the `y` mouse position.
        """
        return ((x >= self._xpos) and (x <= self._xpos + self._width) and
                (y >= self._ypos) and (y <= self._ypos + self._height))

    def GetItem(self, item):
        """
        Returns information about the item by copying this header/footer data
        into `item`.

        :param `item`: an instance of :class:`UltimateListHeaderData`.
        """
        item._mask = self._mask
        item._text = self._text
        item._tooltip = self._tooltip
        item._image = self._image[:]
        item._format = self._format
        item._width = self._width

        if self._font:
            item._font = self._font
            item.Attributes().SetFont(self._font)

        item._kind = self._kind
        item._checked = self._checked
        item._state = self._state
        item._isColumnShown = self._isColumnShown

        item._footerImage = self._footerImage
        item._footerFormat = self._footerFormat
        item._footerText = self._footerText
        item._footerKind = self._footerKind
        item._footerChecked = self._footerChecked
        item._footerFont = self._footerFont
        item._customRenderer = self._customRenderer

        return item

    def GetState(self):
        """
        Returns a bit field representing the state of the item.

        :see: :meth:`~UltimateListHeaderData.SetState` for a list of valid item states.
        """
        return self._state

    def GetImage(self):
        """
        Returns a Python list with the zero-based indexes of the images associated
        with the header item into the image list.
        """
        return self._image

    def GetFooterImage(self):
        """
        Returns a Python list with the zero-based indexes of the images associated
        with the footer item into the image list.
        """
        return self._footerImage

    def GetWidth(self):
        """ Returns the header/footer item width, in pixels. """
        return self._width

    def GetFormat(self):
        """ Returns the header item format. """
        return self._format

    def GetFooterFormat(self):
        """ Returns the footer item format. """
        return self._footerFormat

    def SetFont(self, font):
        """
        Sets a new font for the header item.

        :param `font`: an instance of :class:`wx.Font`.
        """
        self._font = font

    def SetFooterFont(self, font):
        """
        Sets a new font for the footer item.

        :param `font`: an instance of :class:`wx.Font`.
        """
        self._footerFont = font

    def SetKind(self, kind):
        """
        Sets the header item kind.

        :param `kind`: may be one of the following integers:

         =============== ==========================
         Item Kind       Description
         =============== ==========================
                0        A normal item
                1        A checkbox-like item
                2        A radiobutton-type item
         =============== ==========================
        """
        self._kind = kind

    def SetFooterKind(self, kind):
        """
        Sets the footer item kind.

        :param `kind`: the footer item kind.

        :see: :meth:`~UltimateListHeaderData.SetKind` for a list of valid item kinds.
        """
        self._footerKind = kind

    def GetKind(self):
        """
        Returns the header item kind.

        :see: :meth:`~UltimateListHeaderData.SetKind` for a list of valid item kinds.
        """
        return self._kind

    def GetFooterKind(self):
        """
        Returns the footer item kind.

        :see: :meth:`~UltimateListHeaderData.SetKind` for a list of valid item kinds.
        """
        return self._footerKind

    def IsChecked(self):
        """ Returns whether the header item is checked or not. """
        return self._checked

    def Check(self, checked=True):
        """
        Checks/unchecks a header item.

        :param `checked`: ``True`` to check an item, ``False`` to uncheck it.

        :note: This method is meaningful only for check and radio header items.
        """
        self._checked = checked

    def IsFooterChecked(self):
        """ Returns whether the footer item is checked or not. """
        return self._footerChecked

    def CheckFooter(self, check=True):
        """
        Checks/unchecks a footer item.

        :param `check`: ``True`` to check an item, ``False`` to uncheck it.

        :note: This method is meaningful only for check and radio footer items.
        """
        self._footerChecked = check

    def SetCustomRenderer(self, renderer):
        """
        Associate a custom renderer to this item.

        :param `renderer`: a class able to correctly render the item.

        :note: the renderer class **must** implement the methods `DrawHeaderButton`
         and `GetForegroundColor`.
        """
        self._mask |= ULC_MASK_RENDERER
        self._customRenderer = renderer

    def GetCustomRenderer(self):
        """ Returns the custom renderer associated with this item (if any). """
        return self._customRenderer
#-----------------------------------------------------------------------------
# GeometryInfo (internal)
# this is not used in report view
#-----------------------------------------------------------------------------
class GeometryInfo(object):
    """
    A simple class which holds items geometries for :class:`UltimateListCtrl` not in
    report mode.
    """

    def __init__(self):
        """ Default class constructor. Creates empty rectangles for every part. """
        # total item rect
        self._rectAll = wx.Rect()
        # label only
        self._rectLabel = wx.Rect()
        # icon only
        self._rectIcon = wx.Rect()
        # the part to be highlighted
        self._rectHighlight = wx.Rect()
        # the checkbox/radiobutton rect (if any)
        self._rectCheck = wx.Rect()

    # extend all our rects to be centered inside the one of given width
    def ExtendWidth(self, w):
        """
        Extends all our rectangles to be centered inside the one of given width.

        :param `w`: the given width, in pixels.

        :raise: `Exception` if `w` is smaller than the current total width.
        """
        if self._rectAll.width > w:
            raise Exception("width can only be increased")

        self._rectAll.width = w
        # floor division: wx.Rect coordinates are integers, and the original
        # ``/ 2`` (written for Python 2) would produce floats under Python 3
        self._rectLabel.x = self._rectAll.x + (w - self._rectLabel.width)//2
        self._rectIcon.x = self._rectAll.x + (w - self._rectIcon.width)//2
        self._rectHighlight.x = self._rectAll.x + (w - self._rectHighlight.width)//2
#-----------------------------------------------------------------------------
# UltimateListLineData (internal)
#-----------------------------------------------------------------------------
class UltimateListLineData(object):
""" A simple class which holds line geometries for :class:`UltimateListCtrl`. """
def __init__(self, owner):
    """
    Default class constructor.

    :param `owner`: an instance of :class:`UltimateListCtrl` (the main window
     managing this line).
    """
    # the list of subitems: only may have more than one item in report mode
    self._items = []

    # is this item selected? [NB: not used in virtual mode]
    self._highlighted = False

    # back pointer to the list ctrl
    self._owner = owner

    # cached geometry; -1 everywhere means "not laid out yet"
    self._height = self._width = self._x = self._y = -1

    if self.InReportView():
        # per-column geometry is computed elsewhere in report view
        self._gi = None
    else:
        self._gi = GeometryInfo()

    # report/tile views (or "header in all views") need one slot per column
    if self.GetMode() in [ULC_REPORT, ULC_TILE] or self.HasMode(ULC_HEADER_IN_ALL_VIEWS):
        self.InitItems(self._owner.GetColumnCount())
    else:
        self.InitItems(1)
def SetReportView(self, inReportView):
"""
Sets whether :class:`UltimateListLineData` is in report view or not.
:param `inReportView`: ``True`` to set :class:`UltimateListLineData` in report view, ``False``
otherwise.
"""
# we only need m_gi when we're not in report view so update as needed
if inReportView:
del self._gi
self._gi = None
else:
self._gi = GeometryInfo()
def GetHeight(self):
    """ Returns the line height, in pixels (-1 when not laid out yet). """
    return self._height
def SetHeight(self, height):
    """
    Sets the line height.

    :param `height`: the new line height, in pixels.
    """
    self._height = height
def GetWidth(self):
    """ Returns the line width, in pixels (-1 when not laid out yet). """
    return self._width
def SetWidth(self, width):
    """
    Sets the line width.

    :param `width`: the new line width, in pixels.
    """
    self._width = width
def GetX(self):
    """ Returns the line `x` position (-1 when not laid out yet). """
    return self._x
def SetX(self, x):
    """
    Sets the line `x` position.

    :param `x`: the new line `x` position.
    """
    self._x = x
def GetY(self):
    """ Returns the line `y` position (-1 when not laid out yet). """
    return self._y
def SetY(self, y):
    """
    Sets the line `y` position.

    :param `y`: the new line `y` position.
    """
    self._y = y
def ResetDimensions(self):
    """ Resets the line dimensions (client rectangle) to the "not laid out" state. """
    self._height = self._width = self._x = self._y = -1
def HasImage(self, col=0):
"""
Returns ``True`` if the first item in the line has at least one image
associated with it.
"""
return self.GetImage(col) != []
def HasText(self):
"""
Returns ``True`` if the text of first item in the line is not the empty
string.
"""
return self.GetText(0) != ""
def IsHighlighted(self):
    """
    Returns ``True`` if the line is highlighted (selected).

    :raise: `Exception` in virtual mode, where selection is tracked elsewhere.
    """
    if self.IsVirtual():
        raise Exception("unexpected call to IsHighlighted")

    return self._highlighted
def GetMode(self):
    """ Returns the current view mode (the ``ULC_MASK_TYPE`` bits of the AGW style). """
    return self._owner.GetListCtrl().GetAGWWindowStyleFlag() & ULC_MASK_TYPE
def HasMode(self, mode):
    """
    Returns ``True`` if the parent :class:`UltimateListCtrl` has the window
    style specified by `mode`.

    :param `mode`: the AGW window style bit(s) to check.
    """
    return self._owner.GetListCtrl().HasAGWFlag(mode)
def InReportView(self):
    """ Returns ``True`` if the parent :class:`UltimateListCtrl` is in report view. """
    return self._owner.HasAGWFlag(ULC_REPORT)
def IsVirtual(self):
    """ Returns ``True`` if the parent :class:`UltimateListCtrl` has the ``ULC_VIRTUAL`` style set. """
    return self._owner.IsVirtual()
def CalculateSize(self, dc, spacing):
    """
    Calculates the line size and item positions for icon and list modes,
    storing the results in the cached :class:`GeometryInfo`.

    :param `dc`: an instance of :class:`wx.DC` (used to measure text);
    :param `spacing`: the spacing between the items, in pixels.

    :raise: `Exception` in report mode (sizes are computed elsewhere) or for
     an unknown mode.
    """
    item = self._items[0]
    mode = self.GetMode()

    if mode in [ULC_ICON, ULC_SMALL_ICON]:
        # start from the nominal icon cell width; grow for a wider label
        self._gi._rectAll.width = spacing

        s = item.GetText()

        if not s:
            # no label: zero label rect, no extra height below the icon
            lh = -1
            self._gi._rectLabel.width = 0
            self._gi._rectLabel.height = 0
        else:
            lw, lh = dc.GetTextExtent(s)
            lw += EXTRA_WIDTH
            lh += EXTRA_HEIGHT

            self._gi._rectAll.height = spacing + lh
            if lw > spacing:
                self._gi._rectAll.width = lw

            self._gi._rectLabel.width = lw
            self._gi._rectLabel.height = lh

        if item.HasImage():
            w, h = self._owner.GetImageSize(item.GetImage())
            # 8px padding (4 each side) around the icon
            self._gi._rectIcon.width = w + 8
            self._gi._rectIcon.height = h + 8

            if self._gi._rectIcon.width > self._gi._rectAll.width:
                self._gi._rectAll.width = self._gi._rectIcon.width
            if self._gi._rectIcon.height + lh > self._gi._rectAll.height - 4:
                self._gi._rectAll.height = self._gi._rectIcon.height + lh + 4

        # highlight covers the label when there is text, else the icon
        if item.HasText():
            self._gi._rectHighlight.width = self._gi._rectLabel.width
            self._gi._rectHighlight.height = self._gi._rectLabel.height
        else:
            self._gi._rectHighlight.width = self._gi._rectIcon.width
            self._gi._rectHighlight.height = self._gi._rectIcon.height

    elif mode == ULC_LIST:
        s = item.GetTextForMeasuring()

        lw, lh = dc.GetTextExtent(s)
        lw += EXTRA_WIDTH
        lh += EXTRA_HEIGHT

        self._gi._rectLabel.width = lw
        self._gi._rectLabel.height = lh

        self._gi._rectAll.width = lw
        self._gi._rectAll.height = lh

        if item.HasImage():
            w, h = self._owner.GetImageSize(item.GetImage())
            h += 4
            self._gi._rectIcon.width = w
            self._gi._rectIcon.height = h

            self._gi._rectAll.width += 4 + w
            if h > self._gi._rectAll.height:
                self._gi._rectAll.height = h

        if item.GetKind() in [1, 2]:
            # checkbox/radiobutton items reserve room for the check image
            w, h = self._owner.GetCheckboxImageSize()
            h += 4
            self._gi._rectCheck.width = w
            self._gi._rectCheck.height = h

            self._gi._rectAll.width += 4 + w
            if h > self._gi._rectAll.height:
                self._gi._rectAll.height = h

        # in list mode the highlight always covers the whole cell
        self._gi._rectHighlight.width = self._gi._rectAll.width
        self._gi._rectHighlight.height = self._gi._rectAll.height

    elif mode == ULC_REPORT:
        raise Exception("unexpected call to SetSize")

    else:
        raise Exception("unknown mode")
def SetPosition(self, x, y, spacing):
    """
    Sets the line position, laying out the icon, label, check and highlight
    rectangles inside the cached :class:`GeometryInfo`.

    :param `x`: the current `x` coordinate;
    :param `y`: the current `y` coordinate;
    :param `spacing`: the spacing between items, in pixels.

    :raise: `Exception` in report mode (positions are computed elsewhere) or
     for an unknown mode.
    """
    item = self._items[0]
    mode = self.GetMode()

    if mode in [ULC_ICON, ULC_SMALL_ICON]:
        self._gi._rectAll.x = x
        self._gi._rectAll.y = y

        if item.HasImage():
            # floor division: wx.Rect coordinates are integers, and the
            # original ``/ 2`` (written for Python 2) yields floats on Python 3
            self._gi._rectIcon.x = self._gi._rectAll.x + 4 + \
                (self._gi._rectAll.width - self._gi._rectIcon.width)//2
            self._gi._rectIcon.y = self._gi._rectAll.y + 4

        if item.HasText():
            if self._gi._rectLabel.width > spacing:
                self._gi._rectLabel.x = self._gi._rectAll.x + 2
            else:
                # centre a narrow label under the icon cell
                self._gi._rectLabel.x = self._gi._rectAll.x + 2 + \
                    (spacing//2) - (self._gi._rectLabel.width//2)

            self._gi._rectLabel.y = self._gi._rectAll.y + self._gi._rectAll.height + 2 - self._gi._rectLabel.height
            self._gi._rectHighlight.x = self._gi._rectLabel.x - 2
            self._gi._rectHighlight.y = self._gi._rectLabel.y - 2
        else:
            self._gi._rectHighlight.x = self._gi._rectIcon.x - 4
            self._gi._rectHighlight.y = self._gi._rectIcon.y - 4

    elif mode == ULC_LIST:
        self._gi._rectAll.x = x
        self._gi._rectAll.y = y

        wcheck = hcheck = 0

        if item.GetKind() in [1, 2]:
            # reserve room on the left for the checkbox/radiobutton image
            wcheck, hcheck = self._owner.GetCheckboxImageSize()
            wcheck += 2
            self._gi._rectCheck.x = self._gi._rectAll.x + 2
            self._gi._rectCheck.y = self._gi._rectAll.y + 2

        self._gi._rectHighlight.x = self._gi._rectAll.x
        self._gi._rectHighlight.y = self._gi._rectAll.y
        self._gi._rectLabel.y = self._gi._rectAll.y + 2

        if item.HasImage():
            self._gi._rectIcon.x = self._gi._rectAll.x + wcheck + 2
            self._gi._rectIcon.y = self._gi._rectAll.y + 2
            self._gi._rectLabel.x = self._gi._rectAll.x + 6 + self._gi._rectIcon.width + wcheck
        else:
            self._gi._rectLabel.x = self._gi._rectAll.x + 2 + wcheck

    elif mode == ULC_REPORT:
        raise Exception("unexpected call to SetPosition")

    else:
        raise Exception("unknown mode")
def InitItems(self, num):
    """
    Initializes the list of items.

    :param `num`: the initial number of (empty) item slots to create.
    """
    # append `num` fresh item-data slots, one per column
    self._items.extend(UltimateListItemData(self._owner) for _ in range(num))
def SetItem(self, index, info):
    """
    Sets information about the item, delegating to the item data at `index`.

    :param `index`: the index of the item;
    :param `info`: an instance of :class:`UltimateListItem`.
    """
    item = self._items[index]
    item.SetItem(info)
def GetItem(self, index, info):
    """
    Returns information about the item, delegating to the item data at `index`.

    :param `index`: the index of the item;
    :param `info`: an instance of :class:`UltimateListItem` to fill in.
    """
    item = self._items[index]
    return item.GetItem(info)
def GetText(self, index):
"""
Returns the item text at the position `index`.
:param `index`: the index of the item.
"""
item = self._items[index]
return item.GetText()
def SetText(self, index, s):
"""
Sets the item text at the position `index`.
:param `index`: the index of the item;
:param `s`: the new item text.
"""
item = self._items[index]
item.SetText(s)
def GetToolTip(self, index):
    """
    Returns the item tooltip at the position `index`.

    :param `index`: the index of the item.
    """
    item = self._items[index]
    return item.GetToolTip()
def SetToolTip(self, index, s):
    """
    Sets the item tooltip at the position `index`.

    :param `index`: the index of the item;
    :param `s`: the new item tooltip.
    """
    item = self._items[index]
    item.SetToolTip(s)
def SetImage(self, index, image):
    """
    Sets the zero-based indexes of the images associated with the item into the
    image list.

    :param `index`: the index of the item;
    :param `image`: a Python list with the zero-based indexes of the images
     associated with the item into the image list.
    """
    item = self._items[index]
    item.SetImage(image)
def GetImage(self, index=0):
    """
    Returns a Python list with the zero-based indexes of the images associated
    with the item into the image list.

    :param `index`: the index of the item (defaults to the first column).
    """
    item = self._items[index]
    return item.GetImage()
def Check(self, index, checked=True):
"""
Checks/unchecks an item.
:param `index`: the index of the item;
:param `checked`: ``True`` to check an item, ``False`` to uncheck it.
:note: This method is meaningful only for check and radio items.
"""
item = self._items[index]
item.Check(checked)
def SetKind(self, index, kind=0):
    """
    Sets the item kind.

    :param `index`: the index of the item;
    :param `kind`: may be one of the following integers:

     =============== ==========================
     Item Kind       Description
     =============== ==========================
            0        A normal item
            1        A checkbox-like item
            2        A radiobutton-type item
     =============== ==========================
    """
    item = self._items[index]
    item.SetKind(kind)
def GetKind(self, index=0):
    """
    Returns the item kind.

    :param `index`: the index of the item (defaults to the first column).

    :see: :meth:`~UltimateListLineData.SetKind` for a list of valid item kinds.
    """
    item = self._items[index]
    return item.GetKind()
def IsChecked(self, index):
    """
    Returns whether the item is checked or not.

    :param `index`: the index of the item.
    """
    item = self._items[index]
    return item.IsChecked()
def SetColour(self, index, c):
    """
    Sets the text colour for the item.

    :param `index`: the index of the item;
    :param `c`: an instance of :class:`wx.Colour`.
    """
    item = self._items[index]
    item.SetColour(c)
def GetAttr(self):
    """
    Returns an instance of :class:`UltimateListItemAttr` associated with the first item
    in the line (``None`` when no attributes were set).
    """
    item = self._items[0]
    return item.GetAttr()
def SetAttr(self, attr):
    """
    Sets an instance of :class:`UltimateListItemAttr` to the first item in the line.

    :param `attr`: an instance of :class:`UltimateListItemAttr`, or ``None``.
    """
    item = self._items[0]
    item.SetAttr(attr)
    def SetAttributes(self, dc, attr, highlighted):
        """
        Sets various attributes to the input device context.

        :param `dc`: an instance of :class:`wx.DC`;
        :param `attr`: an instance of :class:`UltimateListItemAttr`;
        :param `highlighted`: ``True`` if the item is highlighted, ``False`` otherwise.

        :return: ``True`` if a solid background brush and a transparent pen have
         been selected into `dc` (the caller should then paint the item
         background itself), ``False`` otherwise.
        """
        listctrl = self._owner.GetParent()
        # fg colour
        # don't use foreground colour for drawing highlighted items - this might
        # make them completely invisible (and there is no way to do bit
        # arithmetics on wxColour, unfortunately)
        if not self._owner.HasAGWFlag(ULC_BORDER_SELECT) and not self._owner.HasAGWFlag(ULC_NO_FULL_ROW_SELECT):
            if highlighted:
                # On Mac the selection text colour depends on the focus state
                if wx.Platform == "__WXMAC__":
                    if self._owner.HasFocus():
                        colText = wx.WHITE
                    else:
                        colText = wx.BLACK
                else:
                    colText = wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)
            else:
                # per-item attribute colour wins over the control colour
                if attr and attr.HasTextColour():
                    colText = attr.GetTextColour()
                else:
                    colText = listctrl.GetForegroundColour()
        elif attr and attr.HasTextColour():
            colText = attr.GetTextColour()
        else:
            colText = listctrl.GetForegroundColour()
        dc.SetTextForeground(colText)
        # font: a per-item attribute font overrides the control-wide one
        if attr and attr.HasFont():
            font = attr.GetFont()
        else:
            font = listctrl.GetFont()
        dc.SetFont(font)
        # bg colour
        hasBgCol = attr and attr.HasBackgroundColour()
        if highlighted or hasBgCol:
            if highlighted:
                dc.SetBrush(self._owner.GetHighlightBrush())
            else:
                dc.SetBrush(wx.Brush(attr.GetBackgroundColour(), wx.BRUSHSTYLE_SOLID))
            dc.SetPen(wx.TRANSPARENT_PEN)
            return True
        return False
    def Draw(self, line, dc):
        """
        Draws the line on the specified device context.

        :param `line`: an instance of :class:`UltimateListLineData`;
        :param `dc`: an instance of :class:`wx.DC`.
        """
        item = self._items[0]
        highlighted = self.IsHighlighted()
        attr = self.GetAttr()
        # appearance options read from the owning widget
        useGradient, gradientStyle = self._owner._usegradients, self._owner._gradientstyle
        useVista = self._owner._vistaselection
        hasFocus = self._owner._hasFocus
        borderOnly = self._owner.HasAGWFlag(ULC_BORDER_SELECT)
        drawn = False
        if self.SetAttributes(dc, attr, highlighted):
            # SetAttributes selected a background brush: paint the background
            drawn = True
            if not borderOnly:
                if useGradient:
                    if gradientStyle == 0:
                        # horizontal gradient
                        self.DrawHorizontalGradient(dc, self._gi._rectAll, hasFocus)
                    else:
                        # vertical gradient
                        self.DrawVerticalGradient(dc, self._gi._rectAll, hasFocus)
                elif useVista:
                    # Vista selection style
                    self.DrawVistaRectangle(dc, self._gi._rectAll, hasFocus)
                else:
                    if highlighted:
                        flags = wx.CONTROL_SELECTED
                        if self._owner.HasFocus() and wx.Platform == "__WXMAC__":
                            flags |= wx.CONTROL_FOCUSED
                        wx.RendererNative.Get().DrawItemSelectionRect(self._owner, dc, self._gi._rectHighlight, flags)
                    else:
                        dc.DrawRectangle(self._gi._rectHighlight)
        else:
            if borderOnly:
                # border-select style: plain background, rounded border added below
                dc.SetBrush(wx.WHITE_BRUSH)
                dc.SetPen(wx.TRANSPARENT_PEN)
                dc.DrawRectangle(self._gi._rectAll)
        if item.GetKind() in [1, 2]:
            # checkbox (1) or radio button (2) item
            rectCheck = self._gi._rectCheck
            self._owner.DrawCheckbox(dc, rectCheck.x, rectCheck.y, item.GetKind(), item.IsChecked(), item.IsEnabled())
        if item.HasImage():
            # centre the image inside our rectangle, this looks nicer when items
            # are aligned in a row
            rectIcon = self._gi._rectIcon
            self._owner.DrawImage(item.GetImage()[0], dc, rectIcon.x, rectIcon.y, True)
        if item.HasText():
            rectLabel = self._gi._rectLabel
            dc.SetClippingRegion(rectLabel)
            dc.DrawText(item.GetText(), rectLabel.x, rectLabel.y)
            dc.DestroyClippingRegion()
        if self._owner.HasAGWFlag(ULC_HOT_TRACKING):
            # hot tracking: outline the line under the mouse in orange
            if line == self._owner._newHotCurrent and not drawn:
                r = wx.Rect(*self._gi._rectAll)
                dc.SetBrush(wx.TRANSPARENT_BRUSH)
                dc.SetPen(wx.Pen(wx.Colour("orange")))
                dc.DrawRoundedRectangle(r, 3)
        if borderOnly and drawn:
            # border-select style: rounded light-blue border around the item
            dc.SetPen(wx.Pen(wx.Colour(0, 191, 255), 2))
            dc.SetBrush(wx.TRANSPARENT_BRUSH)
            r = wx.Rect(*self._gi._rectAll)
            r.x += 1
            r.y += 1
            r.width -= 1
            r.height -= 1
            dc.DrawRoundedRectangle(r, 4)
def HideItemWindow(self, item):
"""
If the input item has a window associated with it, hide it.
:param `item`: an instance of :class:`UltimateListItem`.
"""
wnd = item.GetWindow()
if wnd and wnd.IsShown():
wnd.Hide()
    def DrawInReportMode(self, dc, line, rect, rectHL, highlighted, current, enabled, oldPN, oldBR):
        """
        Draws the line on the specified device context when the parent :class:`UltimateListCtrl`
        is in report mode.

        :param `dc`: an instance of :class:`wx.DC`;
        :param `line`: an instance of :class:`UltimateListLineData`;
        :param `rect`: the item client rectangle;
        :param `rectHL`: the item client rectangle when the item is highlighted;
        :param `highlighted`: ``True`` if the item is highlighted, ``False`` otherwise;
        :param `current`: ``True`` if the item is the current item;
        :param `enabled`: ``True`` if the item is enabled, ``False`` otherwise;
        :param `oldPN`: an instance of :class:`wx.Pen`, to save and restore at the end of
         the drawing;
        :param `oldBR`: an instance of :class:`wx.Brush`, to save and restore at the end of
         the drawing.
        """
        attr = self.GetAttr()
        # appearance options read from the owning widget
        useGradient, gradientStyle = self._owner._usegradients, self._owner._gradientstyle
        useVista = self._owner._vistaselection
        hasFocus = self._owner._hasFocus
        borderOnly = self._owner.HasAGWFlag(ULC_BORDER_SELECT)
        nofullRow = self._owner.HasAGWFlag(ULC_NO_FULL_ROW_SELECT)
        drawn = False
        dc.SetBrush(wx.TRANSPARENT_BRUSH)
        if nofullRow:
            # highlight only the first shown column, not the whole row
            x = rect.x + HEADER_OFFSET_X
            y = rect.y
            height = rect.height
            for col, item in enumerate(self._items):
                width = self._owner.GetColumnWidth(col)
                if self._owner.IsColumnShown(col):
                    paintRect = wx.Rect(x, y, self._owner.GetColumnWidth(col)-2*HEADER_OFFSET_X, rect.height)
                    break
                xOld = x
                x += width
        else:
            paintRect = wx.Rect(*rectHL)
        if self.SetAttributes(dc, attr, highlighted) and enabled:
            # SetAttributes selected a background brush: paint the background
            drawn = True
            if not borderOnly:
                if useGradient:
                    if gradientStyle == 0:
                        # horizontal gradient
                        self.DrawHorizontalGradient(dc, paintRect, hasFocus)
                    else:
                        # vertical gradient
                        self.DrawVerticalGradient(dc, paintRect, hasFocus)
                elif useVista:
                    # Vista selection style
                    self.DrawVistaRectangle(dc, paintRect, hasFocus)
                else:
                    if highlighted:
                        flags = wx.CONTROL_SELECTED
                        if hasFocus:
                            flags |= wx.CONTROL_FOCUSED
                        if current:
                            flags |= wx.CONTROL_CURRENT
                        wx.RendererNative.Get().DrawItemSelectionRect(self._owner, dc, paintRect, flags)
                    else:
                        dc.DrawRectangle(paintRect)
        else:
            if borderOnly:
                # border-select style: plain background for the row
                dc.SetBrush(wx.WHITE_BRUSH)
                dc.SetPen(wx.TRANSPARENT_PEN)
                dc.DrawRectangle(paintRect)
        # now draw every (shown) column of the line
        x = rect.x + HEADER_OFFSET_X
        y = rect.y
        height = rect.height
        boldFont = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        boldFont.SetWeight(wx.FONTWEIGHT_BOLD)
        for col, item in enumerate(self._items):
            if not self._owner.IsColumnShown(col):
                # hidden column: make sure any attached widget is hidden too
                self.HideItemWindow(item)
                continue
            width = self._owner.GetColumnWidth(col)
            xOld = x
            x += width
            if item.GetCustomRenderer():
                # the item draws itself through its custom renderer
                customRect = wx.Rect(xOld-HEADER_OFFSET_X, rect.y, width, rect.height)
                item.GetCustomRenderer().DrawSubItem(dc, customRect, line, highlighted, enabled)
                continue
            overflow = item.GetOverFlow() and item.HasText()
            if item.GetKind() in [1, 2]:
                # We got a checkbox-type item
                ix, iy = self._owner.GetCheckboxImageSize()
                checked = item.IsChecked()
                self._owner.DrawCheckbox(dc, xOld, y + (height-iy+1)/2, item.GetKind(), checked, enabled)
                xOld += ix
                width -= ix
            if item.HasImage():
                # draw all the item's images, left to right
                images = item.GetImage()
                for img in images:
                    ix, iy = self._owner.GetImageSize([img])
                    self._owner.DrawImage(img, dc, xOld, y + (height-iy)/2, enabled)
                    xOld += ix
                    width -= ix
##                if images:
##                    width -= IMAGE_MARGIN_IN_REPORT_MODE - MARGIN_BETWEEN_TEXT_AND_ICON
            wnd = item.GetWindow()
            xSize = 0
            if wnd:
                # position an attached widget at the right edge of the cell
                xSize, ySize = item.GetWindowSize()
                wndx = xOld - HEADER_OFFSET_X + width - xSize - 3
                xa, ya = self._owner.CalcScrolledPosition((0, rect.y))
                wndx += xa
                if rect.height > ySize and not item._expandWin:
                    ya += (rect.height - ySize)/2
            itemRect = wx.Rect(xOld-2*HEADER_OFFSET_X, rect.y, width-xSize-HEADER_OFFSET_X, rect.height)
            if overflow:
                # overflowing text may span into the neighbouring columns
                itemRect = wx.Rect(xOld-2*HEADER_OFFSET_X, rect.y, rectHL.width-xSize-HEADER_OFFSET_X, rect.height)
            dc.SetClippingRegion(itemRect)
            if item.HasBackgroundColour():
                dc.SetBrush(wx.Brush(item.GetBackgroundColour()))
                dc.SetPen(wx.Pen(item.GetBackgroundColour()))
                dc.DrawRectangle(itemRect)
                dc.SetBrush(oldBR)
                dc.SetPen(oldPN)
            if item.HasText():
                coloured = item.HasColour()
                c = dc.GetTextForeground()
                # remember the current text colour/font so they can be restored
                oldTF = wx.Colour(c.Red(),c.Green(),c.Blue())
                oldFT = dc.GetFont()
                font = item.HasFont()
                if not enabled:
                    dc.SetTextForeground(self._owner.GetDisabledTextColour())
                else:
                    if coloured:
                        dc.SetTextForeground(item.GetColour())
                    elif useVista and drawn:
                        dc.SetTextForeground(wx.BLACK)
                if item.IsHyperText():
                    # hypertext items use a dedicated font and link colours
                    dc.SetFont(self._owner.GetHyperTextFont())
                    if item.GetVisited():
                        dc.SetTextForeground(self._owner.GetHyperTextVisitedColour())
                    else:
                        dc.SetTextForeground(self._owner.GetHyperTextNewColour())
                    font = True
                    coloured = True
                else:
                    if font:
                        dc.SetFont(item.GetFont())
                itemRect = wx.Rect(itemRect.x+MARGIN_BETWEEN_TEXT_AND_ICON, itemRect.y, itemRect.width-8, itemRect.height)
                self.DrawTextFormatted(dc, item.GetText(), line, col, itemRect, overflow)
                if coloured:
                    dc.SetTextForeground(oldTF)
                if font:
                    dc.SetFont(oldFT)
            dc.DestroyClippingRegion()
            if wnd:
                if not wnd.IsShown():
                    wnd.Show()
                if item._expandWin:
                    # the widget fills the whole cell (minus a small margin)
                    wRect = wx.Rect(*itemRect)
                    wRect.x += xa + 2
                    wRect.width = width - 8
                    wRect.y = ya + 2
                    wRect.height -= 4
                    if wnd.GetRect() != wRect:
                        wnd.SetRect(wRect)
                else:
                    if wnd.GetPosition() != (wndx, ya):
                        wnd.SetPosition((wndx, ya))
        if self._owner.HasAGWFlag(ULC_HOT_TRACKING):
            # hot tracking: outline the line under the mouse in orange
            if line == self._owner._newHotCurrent and not drawn:
                r = wx.Rect(*paintRect)
                r.y += 1
                r.height -= 1
                dc.SetBrush(wx.TRANSPARENT_BRUSH)
                dc.SetPen(wx.Pen(wx.Colour("orange")))
                dc.DrawRoundedRectangle(r, 3)
                dc.SetPen(oldPN)
        if borderOnly and drawn:
            # border-select style: rounded light-blue border around the row
            dc.SetPen(wx.Pen(wx.Colour(0, 191, 255), 2))
            dc.SetBrush(wx.TRANSPARENT_BRUSH)
            rect = wx.Rect(*paintRect)
            rect.y += 1
            rect.height -= 1
            dc.DrawRoundedRectangle(rect, 3)
            dc.SetPen(oldPN)
    def DrawTextFormatted(self, dc, text, row, col, itemRect, overflow):
        """
        Draws the item text, correctly formatted.

        :param `dc`: an instance of :class:`wx.DC`;
        :param `text`: the item text;
        :param `row`: the line number to which this item belongs to;
        :param `col`: the column number to which this item belongs to;
        :param `itemRect`: the item client rectangle;
        :param `overflow`: ``True`` if the item should overflow into neighboring columns,
         ``False`` otherwise.
        """
        # determine if the string can fit inside the current width
        w, h, dummy = dc.GetFullMultiLineTextExtent(text)
        width = itemRect.width
        # (row, col) pairs whose text had to be truncated are tracked in the
        # owner's _shortItems list
        shortItems = self._owner._shortItems
        tuples = (row, col)
        # it can, draw it using the items alignment
        item = self._owner.GetColumn(col)
        align = item.GetAlign()
        if align == ULC_FORMAT_RIGHT:
            textAlign = wx.ALIGN_RIGHT
        elif align == ULC_FORMAT_CENTER:
            textAlign = wx.ALIGN_CENTER
        else:
            textAlign = wx.ALIGN_LEFT
        if w <= width:
            # the whole text fits: it is no longer a "short item"
            if tuples in shortItems:
                shortItems.remove(tuples)
            dc.DrawLabel(text, itemRect, textAlign|wx.ALIGN_CENTER_VERTICAL)
        else: # otherwise, truncate and add an ellipsis if possible
            if tuples not in shortItems:
                shortItems.append(tuples)
            # determine the base width
            ellipsis = "..."
            base_w, h = dc.GetTextExtent(ellipsis)
            # continue until we have enough space or only one character left
            newText = text.split("\n")
            theText = ""
            for text in newText:
                lenText = len(text)
                drawntext = text
                w, dummy = dc.GetTextExtent(text)
                while lenText > 1:
                    if w + base_w <= width:
                        break
                    # drop the last character and subtract its width
                    w_c, h_c = dc.GetTextExtent(drawntext[-1])
                    drawntext = drawntext[0:-1]
                    lenText -= 1
                    w -= w_c
                # if still not enough space, remove ellipsis characters
                while len(ellipsis) > 0 and w + base_w > width:
                    ellipsis = ellipsis[0:-1]
                    base_w, h = dc.GetTextExtent(ellipsis)
                # NOTE(review): `ellipsis` is shared across the lines of a
                # multi-line item, so once shortened it stays shortened for
                # the following lines - confirm this is intended.
                theText += drawntext + ellipsis + "\n"
            theText = theText.rstrip()
            # now draw the text
            dc.DrawLabel(theText, itemRect, textAlign|wx.ALIGN_CENTER_VERTICAL)
def DrawVerticalGradient(self, dc, rect, hasfocus):
"""
Gradient fill from colour 1 to colour 2 from top to bottom.
:param `dc`: an instance of :class:`wx.DC`;
:param `rect`: the rectangle to be filled with the gradient shading;
:param `hasfocus`: ``True`` if the main :class:`UltimateListCtrl` has focus, ``False``
otherwise.
"""
oldpen = dc.GetPen()
oldbrush = dc.GetBrush()
dc.SetPen(wx.TRANSPARENT_PEN)
# calculate gradient coefficients
if hasfocus:
col2 = self._owner._secondcolour
col1 = self._owner._firstcolour
else:
col2 = self._owner._highlightUnfocusedBrush.GetColour()
col1 = self._owner._highlightUnfocusedBrush2.GetColour()
r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue())
r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue())
flrect = float(rect.height)
rstep = float((r2 - r1)) / flrect
gstep = float((g2 - g1)) / flrect
bstep = float((b2 - b1)) / flrect
rf, gf, bf = 0, 0, 0
for y in range(rect.y, rect.y + rect.height):
currCol = (r1 + rf, g1 + gf, b1 + bf)
dc.SetBrush(wx.Brush(currCol, wx.BRUSHSTYLE_SOLID))
dc.DrawRectangle(rect.x, y, rect.width, 1)
rf = rf + rstep
gf = gf + gstep
bf = bf + bstep
dc.SetPen(oldpen)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(rect)
dc.SetBrush(oldbrush)
def DrawHorizontalGradient(self, dc, rect, hasfocus):
"""
Gradient fill from colour 1 to colour 2 from left to right.
:param `dc`: an instance of :class:`wx.DC`;
:param `rect`: the rectangle to be filled with the gradient shading;
:param `hasfocus`: ``True`` if the main :class:`UltimateListCtrl` has focus, ``False``
otherwise.
"""
oldpen = dc.GetPen()
oldbrush = dc.GetBrush()
dc.SetPen(wx.TRANSPARENT_PEN)
# calculate gradient coefficients
if hasfocus:
col2 = self._owner._secondcolour
col1 = self._owner._firstcolour
else:
col2 = self._owner._highlightUnfocusedBrush.GetColour()
col1 = self._owner._highlightUnfocusedBrush2.GetColour()
r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue())
r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue())
flrect = float(rect.width)
rstep = float((r2 - r1)) / flrect
gstep = float((g2 - g1)) / flrect
bstep = float((b2 - b1)) / flrect
rf, gf, bf = 0, 0, 0
for x in range(rect.x, rect.x + rect.width):
currCol = (int(r1 + rf), int(g1 + gf), int(b1 + bf))
dc.SetBrush(wx.Brush(currCol, wx.BRUSHSTYLE_SOLID))
dc.DrawRectangle(x, rect.y, 1, rect.height)
rf = rf + rstep
gf = gf + gstep
bf = bf + bstep
dc.SetPen(oldpen)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(rect)
dc.SetBrush(oldbrush)
def DrawVistaRectangle(self, dc, rect, hasfocus):
"""
Draws the selected item(s) with the Windows Vista style.
:param `dc`: an instance of :class:`wx.DC`;
:param `rect`: the rectangle to be filled with the gradient shading;
:param `hasfocus`: ``True`` if the main :class:`UltimateListCtrl` has focus, ``False``
otherwise.
"""
if hasfocus:
outer = _rgbSelectOuter
inner = _rgbSelectInner
top = _rgbSelectTop
bottom = _rgbSelectBottom
else:
outer = _rgbNoFocusOuter
inner = _rgbNoFocusInner
top = _rgbNoFocusTop
bottom = _rgbNoFocusBottom
oldpen = dc.GetPen()
oldbrush = dc.GetBrush()
bdrRect = wx.Rect(*rect.Get())
filRect = wx.Rect(*rect.Get())
filRect.Deflate(1,1)
r1, g1, b1 = int(top.Red()), int(top.Green()), int(top.Blue())
r2, g2, b2 = int(bottom.Red()), int(bottom.Green()), int(bottom.Blue())
flrect = float(filRect.height)
if flrect < 1:
flrect = self._owner._lineHeight
rstep = float((r2 - r1)) / flrect
gstep = float((g2 - g1)) / flrect
bstep = float((b2 - b1)) / flrect
rf, gf, bf = 0, 0, 0
dc.SetPen(wx.TRANSPARENT_PEN)
for y in range(filRect.y, filRect.y + filRect.height):
currCol = (r1 + rf, g1 + gf, b1 + bf)
dc.SetBrush(wx.Brush(currCol, wx.BRUSHSTYLE_SOLID))
dc.DrawRectangle(filRect.x, y, filRect.width, 1)
rf = rf + rstep
gf = gf + gstep
bf = bf + bstep
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetPen(wx.Pen(outer))
dc.DrawRoundedRectangle(bdrRect, 3)
bdrRect.Deflate(1, 1)
dc.SetPen(wx.Pen(inner))
dc.DrawRoundedRectangle(bdrRect, 2)
dc.SetPen(oldpen)
dc.SetBrush(oldbrush)
def Highlight(self, on):
"""
Sets the current line as highlighted or not highlighted.
:param `on`: ``True`` to set the current line as highlighted, ``False``
otherwise.
"""
if on == self._highlighted:
return False
self._highlighted = on
return True
def ReverseHighlight(self):
"""
Reverses the line highlighting, switching it off if it was on and vice-versa.
"""
self.Highlight(not self.IsHighlighted())
#-----------------------------------------------------------------------------
# UltimateListHeaderWindow (internal)
#-----------------------------------------------------------------------------
class UltimateListHeaderWindow(wx.Control):
"""
This class holds the header window for :class:`UltimateListCtrl`.
"""
    def __init__(self, win, id, owner, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=0, validator=wx.DefaultValidator,
                 name="UltimateListCtrlcolumntitles", isFooter=False):
        """
        Default class constructor.

        :param `win`: parent window. Must not be ``None``;
        :param `id`: window identifier. A value of -1 indicates a default value;
        :param `owner`: an instance of :class:`UltimateListCtrl`;
        :param `pos`: the control position. A value of (-1, -1) indicates a default position,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `size`: the control size. A value of (-1, -1) indicates a default size,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `style`: the window style;
        :param `validator`: the window validator;
        :param `name`: the window name;
        :param `isFooter`: ``True`` if the :class:`UltimateListHeaderWindow` is in a footer
         position, ``False`` otherwise.
        """
        wx.Control.__init__(self, win, id, pos, size, style|wx.NO_BORDER, validator, name)
        self._isFooter = isFooter
        self._owner = owner
        # cursor management for the column-resize feedback
        self._currentCursor = wx.NullCursor
        self._resizeCursor = wx.Cursor(wx.CURSOR_SIZEWE)
        self._isDragging = False
        # explicitly-set heights; when None the best size is computed
        self._headerHeight = None
        self._footerHeight = None
        # Custom renderer for every column
        self._headerCustomRenderer = None
        # column being resized or -1
        self._column = -1
        # divider line position in logical (unscrolled) coords
        self._currentX = 0
        # minimal position beyond which the divider line can't be dragged in
        # logical coords
        self._minX = 0
        # needs refresh
        self._dirty = False
        self._hasFont = False
        # deferred SetColumnWidth request, applied in OnInternalIdle
        self._sendSetColumnWidth = False
        self._colToSend = -1
        self._widthToSend = 0
        # mouse-interaction state used when drawing the header buttons
        self._leftDown = False
        self._enter = False
        self._currentColumn = -1
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        # swallow background erase events to avoid flicker (double-buffered paint)
        self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
        self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
        self.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)
        self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnterWindow)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeaveWindow)
        if _USE_VISATTR:
            # themed platforms: take the panel's visual attributes
            attr = wx.Panel.GetClassDefaultAttributes()
            self.SetOwnForegroundColour(attr.colFg)
            self.SetOwnBackgroundColour(attr.colBg)
            if not self._hasFont:
                self.SetOwnFont(attr.font)
        else:
            self.SetOwnForegroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOWTEXT))
            self.SetOwnBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
            if not self._hasFont:
                self.SetOwnFont(wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT))
def SetCustomRenderer(self, renderer=None):
"""
Associate a custom renderer with the header - all columns will use it
:param `renderer`: a class able to correctly render header buttons
:note: the renderer class **must** implement the methods `DrawHeaderButton`
and `GetForegroundColor`.
"""
if not self._owner.HasAGWFlag(ULC_REPORT):
raise Exception("Custom renderers can be used on with style = ULC_REPORT")
self._headerCustomRenderer = renderer
    def DoGetBestSize(self):
        """
        Gets the size which best suits the window: for a control, it would be the
        minimal size which doesn't truncate the control, for a panel - the same size
        as it would have after a call to `Fit()`.

        :return: an instance of :class:`wx.Size`; the width (200) is nominal, the
         height is either the explicitly-set header/footer height or one computed
         from the fonts in use and the native header button height.
        """
        # an explicitly-set height wins over the computed one
        if not self._isFooter:
            if self._headerHeight is not None:
                self.GetParent()._headerHeight = self._headerHeight
                return wx.Size(200, self._headerHeight)
        else:
            if self._footerHeight is not None:
                self.GetParent()._footerHeight = self._footerHeight
                return wx.Size(200, self._footerHeight)
        # otherwise derive the height from the fonts and the native renderer
        w, h, d, dummy = self.GetFullTextExtent("Hg")
        maxH = self.GetTextHeight()
        nativeH = wx.RendererNative.Get().GetHeaderButtonHeight(self.GetParent())
        if not self._isFooter:
            maxH = max(max(h, maxH), nativeH)
            maxH += d
            self.GetParent()._headerHeight = maxH
        else:
            # the footer ignores the per-column text height
            maxH = max(h, nativeH)
            maxH += d
            self.GetParent()._footerHeight = maxH
        return wx.Size(200, maxH)
def GetWindowHeight(self):
""" Returns the :class:`UltimateListHeaderWindow` height, in pixels. """
return self.DoGetBestSize()
def IsColumnShown(self, column):
"""
Returns ``True`` if the input column is shown, ``False`` if it is hidden.
:param `column`: an integer specifying the column index.
"""
if column < 0 or column >= self._owner.GetColumnCount():
raise Exception("Invalid column")
return self._owner.IsColumnShown(column)
# shift the DC origin to match the position of the main window horz
# scrollbar: this allows us to always use logical coords
def AdjustDC(self, dc):
"""
Shifts the :class:`wx.DC` origin to match the position of the main window horizontal
scrollbar: this allows us to always use logical coordinates.
:param `dc`: an instance of :class:`wx.DC`.
"""
xpix, dummy = self._owner.GetScrollPixelsPerUnit()
x, dummy = self._owner.GetViewStart()
# account for the horz scrollbar offset
dc.SetDeviceOrigin(-x*xpix, 0)
def GetTextHeight(self):
""" Returns the column text height, in pixels. """
maxH = 0
numColumns = self._owner.GetColumnCount()
dc = wx.ClientDC(self)
for i in range(numColumns):
if not self.IsColumnShown(i):
continue
item = self._owner.GetColumn(i)
if item.GetFont().IsOk():
dc.SetFont(item.GetFont())
else:
dc.SetFont(self.GetFont())
wLabel, hLabel, dummy = dc.GetFullMultiLineTextExtent(item.GetText())
maxH = max(maxH, hLabel)
return maxH
    def OnPaint(self, event):
        """
        Handles the ``wx.EVT_PAINT`` event for :class:`UltimateListHeaderWindow`.

        :param `event`: a :class:`PaintEvent` event to be processed.
        """
        dc = wx.BufferedPaintDC(self)
        # width and height of the entire header window
        w, h = self.GetClientSize()
        w, dummy = self._owner.CalcUnscrolledPosition(w, 0)
        # clear the whole header with the button-face colour
        dc.SetBrush(wx.Brush(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE)))
        dc.SetPen(wx.TRANSPARENT_PEN)
        dc.DrawRectangle(0, -1, w, h+2)
        self.AdjustDC(dc)
        dc.SetBackgroundMode(wx.TRANSPARENT)
        dc.SetTextForeground(self.GetForegroundColour())
        x = HEADER_OFFSET_X
        numColumns = self._owner.GetColumnCount()
        item = UltimateListItem()
        renderer = wx.RendererNative.Get()
        enabled = self.GetParent().IsEnabled()
        virtual = self._owner.IsVirtual()
        isFooter = self._isFooter
        for i in range(numColumns):
            # Reset anything in the dc that a custom renderer might have changed
            dc.SetTextForeground(self.GetForegroundColour())
            if x >= w:
                break
            if not self.IsColumnShown(i):
                continue # do next column if not shown
            item = self._owner.GetColumn(i)
            wCol = item._width
            cw = wCol
            ch = h
            # assemble the native-renderer flags for this header button
            flags = 0
            if not enabled:
                flags |= wx.CONTROL_DISABLED
            # NB: The code below is not really Mac-specific, but since we are close
            # to 2.8 release and I don't have time to test on other platforms, I
            # defined this only for wxMac. If this behavior is desired on
            # other platforms, please go ahead and revise or remove the #ifdef.
            if "__WXMAC__" in wx.PlatformInfo:
                if not virtual and item._mask & ULC_MASK_STATE and item._state & ULC_STATE_SELECTED:
                    flags |= wx.CONTROL_SELECTED
            if i == 0:
                flags |= wx.CONTROL_SPECIAL # mark as first column
            if i == self._currentColumn:
                if self._leftDown:
                    flags |= wx.CONTROL_PRESSED
                else:
                    if self._enter:
                        flags |= wx.CONTROL_CURRENT
            # the width of the rect to draw: make it smaller to fit entirely
            # inside the column rect
            header_rect = wx.Rect(x-1, HEADER_OFFSET_Y-1, cw-1, ch)
            if self._headerCustomRenderer != None:
                self._headerCustomRenderer.DrawHeaderButton(dc, header_rect, flags)
                # The custom renderer will specify the color to draw the header text and buttons
                dc.SetTextForeground(self._headerCustomRenderer.GetForegroundColour())
            elif item._mask & ULC_MASK_RENDERER:
                item.GetCustomRenderer().DrawHeaderButton(dc, header_rect, flags)
                # The custom renderer will specify the color to draw the header text and buttons
                dc.SetTextForeground(item.GetCustomRenderer().GetForegroundColour())
            else:
                renderer.DrawHeaderButton(self, dc, header_rect, flags)
            # see if we have enough space for the column label
            if isFooter:
                if item.GetFooterFont().IsOk():
                    dc.SetFont(item.GetFooterFont())
                else:
                    dc.SetFont(self.GetFont())
            else:
                if item.GetFont().IsOk():
                    dc.SetFont(item.GetFont())
                else:
                    dc.SetFont(self.GetFont())
            wcheck = hcheck = 0
            # footer columns carry their own kind/check state
            kind = (isFooter and [item.GetFooterKind()] or [item.GetKind()])[0]
            checked = (isFooter and [item.IsFooterChecked()] or [item.IsChecked()])[0]
            if kind in [1, 2]:
                # We got a checkbox-type item
                ix, iy = self._owner.GetCheckboxImageSize()
                # We draw it on the left, always
                self._owner.DrawCheckbox(dc, x + HEADER_OFFSET_X, HEADER_OFFSET_Y + (h - 4 - iy)/2, kind, checked, enabled)
                wcheck += ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE
                cw -= ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE
            # for this we need the width of the text
            text = (isFooter and [item.GetFooterText()] or [item.GetText()])[0]
            wLabel, hLabel, dummy = dc.GetFullMultiLineTextExtent(text)
            wLabel += 2*EXTRA_WIDTH
            # and the width of the icon, if any
            image = (isFooter and [item._footerImage] or [item._image])[0]
            if image:
                imageList = self._owner._small_image_list
                if imageList:
                    for img in image:
                        if img >= 0:
                            ix, iy = imageList.GetSize(img)
                            wLabel += ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE
            else:
                imageList = None
            # ignore alignment if there is not enough space anyhow
            align = (isFooter and [item.GetFooterAlign()] or [item.GetAlign()])[0]
            align = (wLabel < cw and [align] or [ULC_FORMAT_LEFT])[0]
            if align == ULC_FORMAT_LEFT:
                xAligned = x + wcheck
            elif align == ULC_FORMAT_RIGHT:
                xAligned = x + cw - wLabel - HEADER_OFFSET_X
            elif align == ULC_FORMAT_CENTER:
                xAligned = x + wcheck + (cw - wLabel)/2
            # if we have an image, draw it on the right of the label
            if imageList:
                for indx, img in enumerate(image):
                    if img >= 0:
                        imageList.Draw(img, dc,
                                       xAligned + wLabel - (ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE)*(indx+1),
                                       HEADER_OFFSET_Y + (h - 4 - iy)/2,
                                       wx.IMAGELIST_DRAW_TRANSPARENT)
                        cw -= ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE
            # draw the text clipping it so that it doesn't overwrite the column
            # boundary
            dc.SetClippingRegion(x, HEADER_OFFSET_Y, cw, h - 4)
            self.DrawTextFormatted(dc, text, wx.Rect(xAligned+EXTRA_WIDTH, HEADER_OFFSET_Y, cw-EXTRA_WIDTH, h-4))
            x += wCol
            dc.DestroyClippingRegion()
        # Fill in what's missing to the right of the columns, otherwise we will
        # leave an unpainted area when columns are removed (and it looks better)
        if x < w:
            header_rect = wx.Rect(x, HEADER_OFFSET_Y, w - x, h)
            if self._headerCustomRenderer != None:
                # Why does the custom renderer need this adjustment??
                header_rect.x = header_rect.x - 1
                header_rect.y = header_rect.y - 1
                self._headerCustomRenderer.DrawHeaderButton(dc, header_rect, wx.CONTROL_SPECIAL)
            else:
                renderer.DrawHeaderButton(self, dc, header_rect, wx.CONTROL_SPECIAL) # mark as last column
    def DrawTextFormatted(self, dc, text, rect):
        """
        Draws the item text, correctly formatted.

        :param `dc`: an instance of :class:`wx.DC`;
        :param `text`: the item text;
        :param `rect`: the item client rectangle.
        """
        # determine if the string can fit inside the current width
        w, h, dummy = dc.GetFullMultiLineTextExtent(text)
        width = rect.width
        if w <= width:
            dc.DrawLabel(text, rect, wx.ALIGN_CENTER_VERTICAL)
        else:
            # determine the base width
            ellipsis = "..."
            base_w, h = dc.GetTextExtent(ellipsis)
            # continue until we have enough space or only one character left
            newText = text.split("\n")
            theText = ""
            for text in newText:
                lenText = len(text)
                drawntext = text
                w, dummy = dc.GetTextExtent(text)
                while lenText > 1:
                    if w + base_w <= width:
                        break
                    # drop the last character and subtract its width
                    w_c, h_c = dc.GetTextExtent(drawntext[-1])
                    drawntext = drawntext[0:-1]
                    lenText -= 1
                    w -= w_c
                # if still not enough space, remove ellipsis characters
                while len(ellipsis) > 0 and w + base_w > width:
                    ellipsis = ellipsis[0:-1]
                    base_w, h = dc.GetTextExtent(ellipsis)
                # NOTE(review): `ellipsis` is shared across the lines of a
                # multi-line label, so once shortened it stays shortened for
                # the following lines - confirm this is intended.
                theText += drawntext + ellipsis + "\n"
            theText = theText.rstrip()
            dc.DrawLabel(theText, rect, wx.ALIGN_CENTER_VERTICAL)
def OnInternalIdle(self):
"""
This method is normally only used internally, but sometimes an application
may need it to implement functionality that should not be disabled by an
application defining an `OnIdle` handler in a derived class.
This method may be used to do delayed painting, for example, and most
implementations call :meth:`wx.Window.UpdateWindowUI` in order to send update events
to the window in idle time.
"""
wx.Control.OnInternalIdle(self)
if self._isFooter:
return
if self._sendSetColumnWidth:
self._owner.SetColumnWidth(self._colToSend, self._widthToSend)
self._sendSetColumnWidth = False
def DrawCurrent(self):
""" Force the redrawing of the column window. """
self._sendSetColumnWidth = True
self._colToSend = self._column
self._widthToSend = self._currentX - self._minX
def OnMouse(self, event):
"""
Handles the ``wx.EVT_MOUSE_EVENTS`` event for :class:`UltimateListHeaderWindow`.
:param `event`: a :class:`MouseEvent` event to be processed.
"""
# we want to work with logical coords
x, dummy = self._owner.CalcUnscrolledPosition(event.GetX(), 0)
y = event.GetY()
columnX, columnY = x, y
if self._isDragging:
self.SendListEvent(wxEVT_COMMAND_LIST_COL_DRAGGING, event.GetPosition())
# we don't draw the line beyond our window, but we allow dragging it
# there
w, dummy = self.GetClientSize()
w, dummy = self._owner.CalcUnscrolledPosition(w, 0)
w -= 6
# erase the line if it was drawn
if self._currentX < w:
self.DrawCurrent()
if event.ButtonUp():
self.ReleaseMouse()
self._isDragging = False
self._dirty = True
self._owner.SetColumnWidth(self._column, self._currentX - self._minX)
self.SendListEvent(wxEVT_COMMAND_LIST_COL_END_DRAG, event.GetPosition())
else:
if x > self._minX + 7:
self._currentX = x
else:
self._currentX = self._minX + 7
# draw in the new location
if self._currentX < w:
self.DrawCurrent()
else: # not dragging
self._minX = 0
hit_border = False
# end of the current column
xpos = 0
# find the column where this event occurred
countCol = self._owner.GetColumnCount()
broken = False
tipCol = -1
for col in range(countCol):
if not self.IsColumnShown(col):
continue
xpos += self._owner.GetColumnWidth(col)
self._column = col
if abs(x-xpos) < 3 and y < 22:
# near the column border
hit_border = True
broken = True
tipCol = col
break
if x < xpos:
# inside the column
broken = True
tipCol = col
break
self._minX = xpos
if not broken:
self._column = -1
if tipCol >= 0:
# First check to see if we have a tooltip to display
colItem = self._owner.GetColumn(col)
if colItem.GetToolTip() != "":
self.SetToolTip(colItem.GetToolTip())
else:
self.SetToolTip("")
if event.LeftUp():
self._leftDown = False
self.Refresh()
if event.LeftDown() or event.RightUp():
if hit_border and event.LeftDown():
if not self._isFooter:
if self.SendListEvent(wxEVT_COMMAND_LIST_COL_BEGIN_DRAG,
event.GetPosition()):
self._isDragging = True
self._currentX = x
self.CaptureMouse()
self.DrawCurrent()
#else: column resizing was vetoed by the user code
else: # click on a column
# record the selected state of the columns
if event.LeftDown():
for i in range(self._owner.GetColumnCount()):
if not self.IsColumnShown(col):
continue
colItem = self._owner.GetColumn(i)
state = colItem.GetState()
if i == self._column:
colItem.SetState(state | ULC_STATE_SELECTED)
theX = x
else:
colItem.SetState(state & ~ULC_STATE_SELECTED)
self._leftDown = True
self._owner.SetColumn(i, colItem)
x += self._owner.GetColumnWidth(i)
if self.HandleColumnCheck(self._column, event.GetPosition()):
return
if not self._isFooter:
self.SendListEvent((event.LeftDown() and [wxEVT_COMMAND_LIST_COL_CLICK] or \
[wxEVT_COMMAND_LIST_COL_RIGHT_CLICK])[0], event.GetPosition())
else:
self.SendListEvent((event.LeftDown() and [wxEVT_COMMAND_LIST_FOOTER_CLICK] or \
[wxEVT_COMMAND_LIST_FOOTER_RIGHT_CLICK])[0], event.GetPosition())
self._leftDown = True
self._currentColumn = self._column
elif event.Moving():
setCursor = False
if not self._isFooter:
if hit_border:
setCursor = self._currentCursor == wx.STANDARD_CURSOR
self._currentCursor = self._resizeCursor
else:
setCursor = self._currentCursor != wx.STANDARD_CURSOR
self._currentCursor = wx.STANDARD_CURSOR
if setCursor:
self.SetCursor(self._currentCursor)
else:
column = self.HitTestColumn(columnX, columnY)
self._enter = True
self._currentColumn = column
if _VERSION_STRING < "2.9":
leftDown = wx.GetMouseState().LeftDown()
else:
leftDown = wx.GetMouseState().LeftIsDown()
self._leftDown = leftDown
self.Refresh()
elif event.ButtonDClick():
self.HandleColumnCheck(self._column, event.GetPosition())
def HandleColumnCheck(self, column, pos):
    """
    Handles the case in which a column contains a checkbox-like item.

    :param `column`: the column index;
    :param `pos`: the mouse position.

    :return: ``True`` if the click landed on (and toggled) the column
     checkbox, ``False`` otherwise.
    """

    if column < 0 or column >= self._owner.GetColumnCount():
        return False

    colItem = self._owner.GetColumn(column)
    # Let's see if it is a checkbox-type item
    kind = (self._isFooter and [colItem.GetFooterKind()] or [colItem.GetKind()])[0]
    if kind not in [1, 2]:
        return False

    # Accumulate the widths of the shown columns preceding `column` to find
    # its left edge. FIX: the original compared against self._column here
    # (and passed self._column to AutoCheckChild/AutoToggleChild below),
    # silently ignoring the `column` argument; use the parameter so the
    # method works for any column, not only the last one clicked.
    x = HEADER_OFFSET_X
    theX = x   # fallback: the original left theX unbound if the column was hidden
    for i in range(self._owner.GetColumnCount()):
        if not self.IsColumnShown(i):
            continue
        if i == column:
            theX = x
            break
        x += self._owner.GetColumnWidth(i)

    parent = self.GetParent()
    w, h = self.GetClientSize()
    ix, iy = self._owner.GetCheckboxImageSize()

    # Centre the checkbox vertically. Use floor division: "/" yields a
    # float under Python 3 and wx.Rect requires integer coordinates.
    rect = wx.Rect(theX + HEADER_OFFSET_X, HEADER_OFFSET_Y + (h - 4 - iy)//2, ix, iy)

    if rect.Contains(pos):
        # User clicked on the checkbox: ask permission first (CHECKING
        # event may be vetoed by user code).
        evt = (self._isFooter and [wxEVT_COMMAND_LIST_FOOTER_CHECKING] or [wxEVT_COMMAND_LIST_COL_CHECKING])[0]
        if self.SendListEvent(evt, pos):
            # No veto for the item checking
            if self._isFooter:
                isChecked = colItem.IsFooterChecked()
                colItem.CheckFooter(not isChecked)
            else:
                isChecked = colItem.IsChecked()
                colItem.Check(not isChecked)

            self._owner.SetColumn(column, colItem)
            self.RefreshRect(rect)

            if self._isFooter:
                return True

            # Optionally propagate the new state to the column's subitems.
            if parent.HasAGWFlag(ULC_AUTO_CHECK_CHILD):
                self._owner.AutoCheckChild(isChecked, column)
            elif parent.HasAGWFlag(ULC_AUTO_TOGGLE_CHILD):
                self._owner.AutoToggleChild(column)

            evt = (self._isFooter and [wxEVT_COMMAND_LIST_FOOTER_CHECKED] or [wxEVT_COMMAND_LIST_COL_CHECKED])[0]
            self.SendListEvent(evt, pos)

        return True

    return False
def OnEnterWindow(self, event):
    """
    Handles the ``wx.EVT_ENTER_WINDOW`` event for :class:`UltimateListHeaderWindow`.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """

    # Translate the global mouse position into unscrolled client
    # coordinates and find out which column header is under the pointer.
    screenPos = self.ScreenToClient(wx.GetMousePosition())
    x, y = self._owner.CalcUnscrolledPosition(*screenPos)
    column = self.HitTestColumn(x, y)

    # wx.MouseState.LeftDown() was renamed to LeftIsDown() in wxPython 2.9.
    if _VERSION_STRING < "2.9":
        self._leftDown = wx.GetMouseState().LeftDown()
    else:
        self._leftDown = wx.GetMouseState().LeftIsDown()

    self._enter = 0 <= column < self._owner.GetColumnCount()
    self._currentColumn = column
    self.Refresh()
def OnLeaveWindow(self, event):
    """
    Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`UltimateListHeaderWindow`.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """

    # The pointer left the header: drop all hover/pressed state and repaint.
    self._enter = self._leftDown = False
    self._currentColumn = -1
    self.Refresh()
def HitTestColumn(self, x, y):
    """
    HitTest method for column headers.

    :param `x`: the mouse `x` position;
    :param `y`: the mouse `y` position.

    :return: The column index if any column client rectangle contains the mouse
     position, ``wx.NOT_FOUND`` otherwise.
    """

    # Walk the shown columns left to right, keeping a running right edge;
    # the first column whose right edge reaches `x` is the hit.
    rightEdge = 0
    for col in range(self._owner.GetColumnCount()):
        if not self.IsColumnShown(col):
            continue

        rightEdge += self._owner.GetColumnWidth(col)
        if x <= rightEdge:
            return col

    return -1
def OnSetFocus(self, event):
    """
    Handles the ``wx.EVT_SET_FOCUS`` event for :class:`UltimateListHeaderWindow`.

    :param `event`: a :class:`FocusEvent` event to be processed.
    """

    # Focus belongs to the list itself, never to the header: hand it back
    # to the owner and force an immediate repaint.
    owner = self._owner
    owner.SetFocusIgnoringChildren()
    owner.Update()
def SendListEvent(self, eventType, pos):
    """
    Sends a :class:`UltimateListEvent` for the parent window.

    :param `eventType`: the event type;
    :param `pos`: an instance of :class:`wx.Point`.

    :return: ``True`` if the event was not processed or was allowed,
     ``False`` if it was processed and vetoed.
    """

    parent = self.GetParent()
    listEvent = UltimateListEvent(eventType, parent.GetId())
    listEvent.SetEventObject(parent)
    listEvent.m_pointDrag = pos

    # the position should be relative to the parent window, not
    # this one for compatibility with MSW and common sense: the
    # user code doesn't know anything at all about this header
    # window, so why should it get positions relative to it?
    listEvent.m_pointDrag.y -= self.GetSize().y
    listEvent.m_col = self._column

    processed = parent.GetEventHandler().ProcessEvent(listEvent)
    return (not processed) or listEvent.IsAllowed()
def GetOwner(self):
    """ Returns the header window owner, an instance of :class:`UltimateListCtrl`. """
    # Plain accessor for the back-reference stored on the header window.
    return self._owner
#-----------------------------------------------------------------------------
# UltimateListRenameTimer (internal)
#-----------------------------------------------------------------------------
class UltimateListRenameTimer(wx.Timer):
    """ Timer used for enabling in-place edit. """

    def __init__(self, owner):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        :param `owner`: an instance of :class:`UltimateListCtrl`.
        """

        wx.Timer.__init__(self)
        self._owner = owner   # back-reference used when the timer fires


    def Notify(self):
        """ The timer has expired. """

        # Called by wx when the timeout elapses: start the in-place edit.
        self._owner.OnRenameTimer()
#-----------------------------------------------------------------------------
# UltimateListTextCtrl (internal)
#-----------------------------------------------------------------------------
class UltimateListTextCtrl(ExpandoTextCtrl):
    """
    Control used for in-place edit.

    This is a subclass of :class:`~wx.lib.expando.ExpandoTextCtrl` as :class:`UltimateListCtrl`
    supports multiline text items.

    :note: To add a newline character in a multiline item, press ``Shift`` + ``Enter``
     as the ``Enter`` key alone is consumed by :class:`UltimateListCtrl` to finish
     the editing and ``Ctrl`` + ``Enter`` is consumed by the platform for tab navigation.
    """

    def __init__(self, owner, itemEdit):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        :param `owner`: the control parent (an instance of :class:`UltimateListCtrl` );
        :param `itemEdit`: an instance of :class:`UltimateListItem`.
        """

        # _startValue lets AcceptChanges() detect the "nothing changed"
        # case; _currentValue tracks the text after every keystroke.
        self._startValue = owner.GetItemText(itemEdit)
        self._currentValue = self._startValue

        self._itemEdited = itemEdit

        self._owner = owner
        # _finished/_aboutToFinish guard against double-finishing the edit
        # (e.g. EVT_KILL_FOCUS arriving after Enter was already handled).
        self._finished = False
        self._aboutToFinish = False

        # Position the edit control exactly over the item's label,
        # converting to scrolled (physical) coordinates.
        rectLabel = owner.GetLineLabelRect(itemEdit)
        rectLabel.x, rectLabel.y = self._owner.CalcScrolledPosition(rectLabel.x, rectLabel.y)
        xSize, ySize = rectLabel.width + 10, rectLabel.height

        expandoStyle = wx.WANTS_CHARS
        # Border style differs per platform for a native look.
        if wx.Platform in ["__WXGTK__", "__WXMAC__"]:
            expandoStyle |= wx.SIMPLE_BORDER
        else:
            expandoStyle |= wx.SUNKEN_BORDER

        ExpandoTextCtrl.__init__(self, owner, -1, self._startValue, wx.Point(rectLabel.x, rectLabel.y),
                                 wx.Size(xSize, ySize), expandoStyle)

        self.Bind(wx.EVT_CHAR, self.OnChar)
        self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
        self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)


    def AcceptChanges(self):
        """ Accepts/refuses the changes made by the user. """

        value = self.GetValue()

        if value == self._startValue:
            # nothing changed, always accept
            # when an item remains unchanged, the owner
            # needs to be notified that the user decided
            # not to change the tree item label, and that
            # the edit has been cancelled
            self._owner.OnRenameCancelled(self._itemEdited)
            return True

        if not self._owner.OnRenameAccept(self._itemEdited, value):
            # vetoed by the user
            return False

        # accepted, do rename the item
        self._owner.SetItemText(self._itemEdited, value)

        # A change in the number of text lines invalidates the cached row
        # heights, so force a re-measure and a repaint.
        if value.count("\n") != self._startValue.count("\n"):
            self._owner.ResetLineDimensions()
            self._owner.Refresh()

        return True


    def Finish(self):
        """ Finish editing. """

        try:
            if not self._finished:
                self._finished = True
                # Give focus back to the list and let the owner destroy
                # this text control.
                self._owner.SetFocusIgnoringChildren()
                self._owner.ResetTextControl()
        except RuntimeError:
            # The underlying C++ widget may already have been destroyed.
            return


    def OnChar(self, event):
        """
        Handles the ``wx.EVT_CHAR`` event for :class:`UltimateListTextCtrl`.

        :param `event`: a :class:`KeyEvent` event to be processed.
        """

        keycode = event.GetKeyCode()
        shiftDown = event.ShiftDown()

        if keycode in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
            if shiftDown:
                # Shift+Enter inserts a newline into a multiline item.
                event.Skip()
            else:
                self._aboutToFinish = True
                self.SetValue(self._currentValue)
                # Notify the owner about the changes
                self.AcceptChanges()
                # Even if vetoed, close the control (consistent with MSW)
                wx.CallAfter(self.Finish)

        elif keycode == wx.WXK_ESCAPE:
            self.StopEditing()

        else:
            event.Skip()


    def OnKeyUp(self, event):
        """
        Handles the ``wx.EVT_KEY_UP`` event for :class:`UltimateListTextCtrl`.

        :param `event`: a :class:`KeyEvent` event to be processed.
        """

        if not self._finished:

            # auto-grow the textctrl:
            parentSize = self._owner.GetSize()
            myPos = self.GetPosition()
            mySize = self.GetSize()

            dc = wx.ClientDC(self)
            # Measure with an extra "M" so there is room for the next character.
            sx, sy, dummy = dc.GetFullMultiLineTextExtent(self.GetValue() + "M")
            if myPos.x + sx > parentSize.x:
                sx = parentSize.x - myPos.x
            if mySize.x > sx:
                sx = mySize.x

            self.SetSize((sx, -1))
            self._currentValue = self.GetValue()

        event.Skip()


    def OnKillFocus(self, event):
        """
        Handles the ``wx.EVT_KILL_FOCUS`` event for :class:`UltimateListTextCtrl`.

        :param `event`: a :class:`FocusEvent` event to be processed.
        """

        if not self._finished and not self._aboutToFinish:

            # We must finish regardless of success, otherwise we'll get
            # focus problems:
            if not self.AcceptChanges():
                self._owner.OnRenameCancelled(self._itemEdited)

        # We must let the native text control handle focus, too, otherwise
        # it could have problems with the cursor (e.g., in wxGTK).
        event.Skip()
        wx.CallAfter(self.Finish)


    def StopEditing(self):
        """ Suddenly stops the editing. """

        # Cancel: notify the owner, then tear the control down.
        self._owner.OnRenameCancelled(self._itemEdited)
        self.Finish()
#-----------------------------------------------------------------------------
# UltimateListMainWindow (internal)
#-----------------------------------------------------------------------------
class UltimateListMainWindow(wx.ScrolledWindow):
"""
This is the main widget implementation of :class:`UltimateListCtrl`.
"""
def __init__(self, parent, id, pos=wx.DefaultPosition,
             size=wx.DefaultSize, style=0, agwStyle=0, name="listctrlmainwindow"):
    """
    Default class constructor.

    :param `parent`: parent window. Must not be ``None``;
    :param `id`: window identifier. A value of -1 indicates a default value;
    :param `pos`: the control position. A value of (-1, -1) indicates a default position,
     chosen by either the windowing system or wxPython, depending on platform;
    :param `size`: the control size. A value of (-1, -1) indicates a default size,
     chosen by either the windowing system or wxPython, depending on platform;
    :param `style`: the underlying :class:`ScrolledWindow` window style;
    :param `agwStyle`: the AGW-specific window style; can be almost any combination of the following
     bits:

     =============================== =========== ====================================================================================================
     Window Styles                   Hex Value   Description
     =============================== =========== ====================================================================================================
     ``ULC_VRULES``                          0x1 Draws light vertical rules between rows in report mode.
     ``ULC_HRULES``                          0x2 Draws light horizontal rules between rows in report mode.
     ``ULC_ICON``                            0x4 Large icon view, with optional labels.
     ``ULC_SMALL_ICON``                      0x8 Small icon view, with optional labels.
     ``ULC_LIST``                           0x10 Multicolumn list view, with optional small icons. Columns are computed automatically, i.e. you don't set columns as in ``ULC_REPORT``. In other words, the list wraps, unlike a :class:`ListBox`.
     ``ULC_REPORT``                         0x20 Single or multicolumn report view, with optional header.
     ``ULC_ALIGN_TOP``                      0x40 Icons align to the top. Win32 default, Win32 only.
     ``ULC_ALIGN_LEFT``                     0x80 Icons align to the left.
     ``ULC_AUTOARRANGE``                   0x100 Icons arrange themselves. Win32 only.
     ``ULC_VIRTUAL``                       0x200 The application provides items text on demand. May only be used with ``ULC_REPORT``.
     ``ULC_EDIT_LABELS``                   0x400 Labels are editable: the application will be notified when editing starts.
     ``ULC_NO_HEADER``                     0x800 No header in report mode.
     ``ULC_NO_SORT_HEADER``               0x1000 No Docs.
     ``ULC_SINGLE_SEL``                   0x2000 Single selection (default is multiple).
     ``ULC_SORT_ASCENDING``               0x4000 Sort in ascending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
     ``ULC_SORT_DESCENDING``              0x8000 Sort in descending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
     ``ULC_TILE``                        0x10000 Each item appears as a full-sized icon with a label of one or more lines beside it (partially implemented).
     ``ULC_NO_HIGHLIGHT``                0x20000 No highlight when an item is selected.
     ``ULC_STICKY_HIGHLIGHT``            0x40000 Items are selected by simply hovering on them, with no need to click on them.
     ``ULC_STICKY_NOSELEVENT``           0x80000 Don't send a selection event when using ``ULC_STICKY_HIGHLIGHT`` style.
     ``ULC_SEND_LEFTCLICK``             0x100000 Send a left click event when an item is selected.
     ``ULC_HAS_VARIABLE_ROW_HEIGHT``    0x200000 The list has variable row heights.
     ``ULC_AUTO_CHECK_CHILD``           0x400000 When a column header has a checkbox associated, auto-check all the subitems in that column.
     ``ULC_AUTO_TOGGLE_CHILD``          0x800000 When a column header has a checkbox associated, toggle all the subitems in that column.
     ``ULC_AUTO_CHECK_PARENT``         0x1000000 Only meaningful foe checkbox-type items: when an item is checked/unchecked its column header item is checked/unchecked as well.
     ``ULC_SHOW_TOOLTIPS``             0x2000000 Show tooltips for ellipsized items/subitems (text too long to be shown in the available space) containing the full item/subitem text.
     ``ULC_HOT_TRACKING``              0x4000000 Enable hot tracking of items on mouse motion.
     ``ULC_BORDER_SELECT``             0x8000000 Changes border colour whan an item is selected, instead of highlighting the item.
     ``ULC_TRACK_SELECT``             0x10000000 Enables hot-track selection in a list control. Hot track selection means that an item is automatically selected when the cursor remains over the item for a certain period of time. The delay is retrieved on Windows using the `win32api` call `win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)`, and is defaulted to 400ms on other platforms. This style applies to all views of `UltimateListCtrl`.
     ``ULC_HEADER_IN_ALL_VIEWS``      0x20000000 Show column headers in all view modes.
     ``ULC_NO_FULL_ROW_SELECT``       0x40000000 When an item is selected, the only the item in the first column is highlighted.
     ``ULC_FOOTER``                   0x80000000 Show a footer too (only when header is present).
     ``ULC_USER_ROW_HEIGHT``         0x100000000 Allows to set a custom row height (one value for all the items, only in report mode).
     =============================== =========== ====================================================================================================

    :param `name`: the window name.
    """

    wx.ScrolledWindow.__init__(self, parent, id, pos, size, style|wx.HSCROLL|wx.VSCROLL, name)

    # the list of column objects
    self._columns = []

    # the array of all line objects for a non virtual list control (for the
    # virtual list control we only ever use self._lines[0])
    self._lines = []

    # currently focused item or -1
    self._current = -1

    # the number of lines per page
    self._linesPerPage = 0

    # Automatically resized column - this column expands to fill the width of the window
    self._resizeColumn = -1
    self._resizeColMinWidth = None

    # this flag is set when something which should result in the window
    # redrawing happens (i.e. an item was added or deleted, or its appearance
    # changed) and OnPaint() doesn't redraw the window while it is set which
    # allows to minimize the number of repaintings when a lot of items are
    # being added. The real repainting occurs only after the next OnIdle()
    # call
    self._dirty = False

    self._parent = parent
    # Init() creates the rest of the instance state (timers, caches,
    # colours); it must run before the EVT_TIMER binding below.
    self.Init()

    # Brushes used to paint the selection, depending on whether the
    # control currently has the focus.
    self._highlightBrush = wx.Brush(wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT), wx.BRUSHSTYLE_SOLID)
    btnshadow = wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNSHADOW)
    self._highlightUnfocusedBrush = wx.Brush(btnshadow, wx.BRUSHSTYLE_SOLID)
    # Derive a darker shade of the shadow colour for the secondary
    # unfocused-selection brush.
    r, g, b = btnshadow.Red(), btnshadow.Green(), btnshadow.Blue()
    backcolour = (max((r >> 1) - 20, 0),
                  max((g >> 1) - 20, 0),
                  max((b >> 1) - 20, 0))
    backcolour = wx.Colour(backcolour[0], backcolour[1], backcolour[2])
    self._highlightUnfocusedBrush2 = wx.Brush(backcolour)

    self.SetScrollbars(0, 0, 0, 0, 0, 0)

    # Inherit the standard wx.ListCtrl visual attributes.
    attr = wx.ListCtrl.GetClassDefaultAttributes()
    self.SetOwnForegroundColour(attr.colFg)
    self.SetOwnBackgroundColour(attr.colBg)
    self.SetOwnFont(attr.font)

    self.Bind(wx.EVT_PAINT, self.OnPaint)
    self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
    self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
    self.Bind(wx.EVT_CHILD_FOCUS, self.OnChildFocus)
    self.Bind(wx.EVT_CHAR, self.OnChar)
    self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
    self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
    self.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)
    self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)
    self.Bind(wx.EVT_SCROLLWIN, self.OnScroll)
    # _hoverTimer was created in Init() above, so this binding is safe.
    self.Bind(wx.EVT_TIMER, self.OnHoverTimer, self._hoverTimer)
def Init(self):
    """ Initializes the :class:`UltimateListMainWindow` widget. """

    self._dirty = True
    self._countVirt = 0
    self._lineFrom = None
    self._lineTo = -1
    self._linesPerPage = 0
    self._headerWidth = 0
    self._lineHeight = 0
    self._userLineHeight = None

    self._small_image_list = None
    self._normal_image_list = None
    self._small_spacing = 30
    self._normal_spacing = 40

    self._hasFocus = False
    self._dragCount = 0
    self._isCreated = False

    self._lastOnSame = False
    self._renameTimer = UltimateListRenameTimer(self)
    self._textctrl = None

    self._current = -1
    self._lineLastClicked = -1
    self._lineSelectSingleOnUp = -1
    self._lineBeforeLastClicked = -1

    self._dragStart = wx.Point(-1, -1)
    self._aColWidths = []

    self._selStore = SelectionStore()
    self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)

    # Background image settings
    self._backgroundImage = None
    self._imageStretchStyle = _StyleTile

    # Disabled items colour
    # (FIX: the original assigned self._disabledColour twice; the duplicate
    # assignment further down has been removed.)
    self._disabledColour = wx.Colour(180, 180, 180)

    # Gradient selection colours
    # (FIX: the original used a stray chained assignment
    # "self._firstcolour = colour = wx....", leaving an unused local.)
    self._firstcolour = wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT)
    self._secondcolour = wx.WHITE
    self._usegradients = False
    self._gradientstyle = 1   # Vertical Gradient

    # Vista Selection Styles
    self._vistaselection = False

    self.SetImageListCheck(16, 16)

    # Hyperlinks things
    normalFont = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
    self._hypertextfont = wx.Font(normalFont.GetPointSize(), normalFont.GetFamily(),
                                  normalFont.GetStyle(), wx.FONTWEIGHT_NORMAL, True,
                                  normalFont.GetFaceName(), normalFont.GetEncoding())
    self._hypertextnewcolour = wx.BLUE
    self._hypertextvisitedcolour = wx.Colour(200, 47, 200)
    self._isonhyperlink = False

    self._itemWithWindow = []
    self._hasWindows = False
    self._shortItems = []

    self._isDragging = False
    self._cursor = wx.STANDARD_CURSOR

    image = GetdragcursorImage()

    # since this image didn't come from a .cur file, tell it where the hotspot is
    image.SetOption(wx.IMAGE_OPTION_CUR_HOTSPOT_X, 1)
    image.SetOption(wx.IMAGE_OPTION_CUR_HOTSPOT_Y, 1)

    # make the image into a cursor
    self._dragCursor = wx.Cursor(image)
    self._dragItem = None
    self._dropTarget = None

    self._oldHotCurrent = None
    self._newHotCurrent = None

    self._waterMark = None

    # Timer driving the ULC_TRACK_SELECT / hot-tracking behaviour; bound
    # by __init__ after Init() returns.
    self._hoverTimer = wx.Timer(self, wx.ID_ANY)
    self._hoverItem = -1
def GetMainWindowOfCompositeControl(self):
    """ Returns the :class:`UltimateListMainWindow` parent. """
    # Composite-control protocol: the parent UltimateListCtrl is the
    # "main window" from wx's point of view.
    return self.GetParent()
def DoGetBestSize(self):
    """
    Gets the size which best suits the window: for a control, it would be the
    minimal size which doesn't truncate the control, for a panel - the same size
    as it would have after a call to `Fit()`.
    """
    # A fixed, modest minimum; the effective size is dictated by the sizer.
    return wx.Size(100, 80)
def HasAGWFlag(self, flag):
    """
    Returns ``True`` if the window has the given `flag` bit set.

    :param `flag`: the bit to check.

    :see: :meth:`UltimateListCtrl.SetSingleStyle() <UltimateListCtrl.SetSingleStyle>` for a list of valid flags.
    """
    # AGW flags live on the top-level UltimateListCtrl; delegate the query.
    return self._parent.HasAGWFlag(flag)
def IsColumnShown(self, column):
    """
    Returns ``True`` if the input column is shown, ``False`` if it is hidden.

    :param `column`: an integer specifying the column index.
    """

    colItem = self.GetColumn(column)
    return colItem.IsShown()
# return True if this is a virtual list control
def IsVirtual(self):
    """ Returns ``True`` if the window has the ``ULC_VIRTUAL`` style set. """
    # Virtual controls fetch item data on demand via the OnGetItem* callbacks.
    return self.HasAGWFlag(ULC_VIRTUAL)
# return True if the control is in report mode
def InReportView(self):
    """ Returns ``True`` if the window is in report mode. """
    # Simple AGW style-flag query.
    return self.HasAGWFlag(ULC_REPORT)
def InTileView(self):
    """
    Returns ``True`` if the window is in tile mode (partially implemented).

    .. todo:: Fully implement tile view for :class:`UltimateListCtrl`.
    """
    # Tile view support is only partial; see the todo above.
    return self.HasAGWFlag(ULC_TILE)
# return True if we are in single selection mode, False if multi sel
def IsSingleSel(self):
    """ Returns ``True`` if we are in single selection mode, ``False`` if multi selection. """
    # Simple AGW style-flag query.
    return self.HasAGWFlag(ULC_SINGLE_SEL)
def HasFocus(self):
    """ Returns ``True`` if the window has focus. """
    # _hasFocus is maintained by the OnSetFocus/OnKillFocus handlers.
    return self._hasFocus
# do we have a header window?
def HasHeader(self):
    """ Returns ``True`` if the header window is shown. """

    # Report/tile views show a header unless ULC_NO_HEADER disables it;
    # ULC_HEADER_IN_ALL_VIEWS forces one in every view mode.
    inHeaderedView = (self.InReportView() or self.InTileView()) and not self.HasAGWFlag(ULC_NO_HEADER)
    return bool(inHeaderedView or self.HasAGWFlag(ULC_HEADER_IN_ALL_VIEWS))
# do we have a footer window?
def HasFooter(self):
    """ Returns ``True`` if the footer window is shown. """

    # A footer requires both a visible header and the ULC_FOOTER style.
    return bool(self.HasHeader() and self.HasAGWFlag(ULC_FOOTER))
# toggle the line state and refresh it
def ReverseHighlight(self, line):
    """
    Toggles the line state and refreshes it.

    :param `line`: an instance of :class:`UltimateListLineData`.
    """

    newState = not self.IsHighlighted(line)
    self.HighlightLine(line, newState)
    self.RefreshLine(line)
def SetUserLineHeight(self, height):
    """
    Sets a custom value for the :class:`UltimateListMainWindow` item height.

    :param `height`: the custom height for all the items, in pixels.

    :note: This method can be used only with ``ULC_REPORT`` and ``ULC_USER_ROW_HEIGHT`` styles set.
    """

    # Guard clause: the feature only exists with both styles set.
    if not (self.HasAGWFlag(ULC_REPORT) and self.HasAGWFlag(ULC_USER_ROW_HEIGHT)):
        raise Exception("SetUserLineHeight can only be used with styles ULC_REPORT and ULC_USER_ROW_HEIGHT set.")

    self._userLineHeight = height
def GetUserLineHeight(self):
    """
    Returns the custom value for the :class:`UltimateListMainWindow` item height, if previously set with
    :meth:`~UltimateListMainWindow.SetUserLineHeight`.

    :note: This method can be used only with ``ULC_REPORT`` and ``ULC_USER_ROW_HEIGHT`` styles set.
    """

    # Guard clause: the feature only exists with both styles set.
    if not (self.HasAGWFlag(ULC_REPORT) and self.HasAGWFlag(ULC_USER_ROW_HEIGHT)):
        raise Exception("GetUserLineHeight can only be used with styles ULC_REPORT and ULC_USER_ROW_HEIGHT set.")

    return self._userLineHeight
# get the size of the total line rect
def GetLineSize(self, line):
    """
    Returns the size of the total line client rectangle.

    :param `line`: an instance of :class:`UltimateListLineData`.
    """

    rect = self.GetLineRect(line)
    return rect.GetSize()
# bring the current item into view
def MoveToFocus(self):
    """ Brings the current item into view. """
    # Scroll so that the line at self._current becomes visible.
    self.MoveToItem(self._current)
def GetColumnCount(self):
    """ Returns the total number of columns in the :class:`UltimateListCtrl`. """
    # One column object per column is kept in self._columns.
    return len(self._columns)
def GetItemText(self, item):
    """
    Returns the item text.

    :param `item`: an instance of :class:`UltimateListItem`.
    """

    # Build a text-only query item and let GetItem() fill in the value.
    info = UltimateListItem()
    info._mask = ULC_MASK_TEXT
    info._itemId = item

    info = self.GetItem(info)
    return info._text
def SetItemText(self, item, value):
    """
    Sets the item text.

    :param `item`: an instance of :class:`UltimateListItem`;
    :param `value`: the new item text.
    """

    # Build a text-only item description and hand it to SetItem().
    info = UltimateListItem()
    info._mask = ULC_MASK_TEXT
    info._itemId = item
    info._text = value
    self.SetItem(info)
def IsEmpty(self):
    """ Returns ``True`` if the window has no items in it. """
    # An item count of zero means the control is empty.
    return self.GetItemCount() == 0
def ResetCurrent(self):
    """ Resets the current item to ``None``. """
    # -1 is the sentinel for "no current item" (see HasCurrent()).
    self.ChangeCurrent(-1)
def HasCurrent(self):
    """
    Returns ``True`` if the current item has been set, either programmatically
    or by user intervention.
    """
    # _current holds the focused item index, or -1 when unset.
    return self._current != -1
# override base class virtual to reset self._lineHeight when the font changes
def SetFont(self, font):
    """
    Overridden base class virtual to reset the line height when the font changes.

    :param `font`: a valid :class:`wx.Font` object.

    :note: Overridden from :class:`ScrolledWindow`.
    """

    if not wx.ScrolledWindow.SetFont(self, font):
        # The base class rejected the font; keep the cached metrics.
        return False

    # Invalidate cached line metrics so they are re-measured with the new font.
    self._lineHeight = 0
    self.ResetLineDimensions()

    return True
def ResetLineDimensions(self, force=False):
    """
    Resets the line dimensions, so that client rectangles and positions are
    recalculated.

    :param `force`: ``True`` to reset all line dimensions.
    """

    # Only meaningful when rows can differ in height (non-virtual report
    # view with ULC_HAS_VARIABLE_ROW_HEIGHT), unless a reset is forced.
    variableRows = (self.HasAGWFlag(ULC_REPORT) and
                    self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT) and
                    not self.IsVirtual())

    if variableRows or force:
        for row in range(self.GetItemCount()):
            self.GetLine(row).ResetDimensions()
# these are for UltimateListLineData usage only
# get the backpointer to the list ctrl
def GetListCtrl(self):
    """ Returns the parent widget, an instance of :class:`UltimateListCtrl`. """
    return self.GetParent()
# get the brush to use for the item highlighting
def GetHighlightBrush(self):
    """ Returns the brush to use for the item highlighting. """

    # The selection colour depends on whether the control has the focus.
    if self._hasFocus:
        return self._highlightBrush

    return self._highlightUnfocusedBrush
# get the line data for the given index
def GetLine(self, n):
    """
    Returns the line data for the given index.

    :param `n`: the line index.
    """

    if self.IsVirtual():
        # Virtual controls keep a single cached line: refresh its
        # contents for row `n` and always return slot 0.
        self.CacheLineData(n)
        return self._lines[0]

    return self._lines[n]
# force us to recalculate the range of visible lines
def ResetVisibleLinesRange(self, reset=False):
    """
    Forces us to recalculate the range of visible lines.

    :param `reset`: ``True`` to reset all line dimensions, which will then be
     recalculated.
    """

    # Invalidate the cached first-visible-line index.
    self._lineFrom = -1

    if reset and self.IsShownOnScreen():
        self.ResetLineDimensions()
# Called on EVT_SIZE to resize the _resizeColumn to fill the width of the window
def ResizeColumns(self):
    """
    If ``ULC_AUTOSIZE_FILL`` was passed to :meth:`UltimateListCtrl.SetColumnWidth() <UltimateListCtrl.SetColumnWidth>` then
    that column's width will be expanded to fill the window on a resize event.

    Called by :meth:`UltimateListCtrl.OnSize() <UltimateListCtrl.OnSize>` when the window is resized.
    """

    if not self:  # Avoid RuntimeError on Mac
        return

    if self._resizeColumn == -1:
        return

    numCols = self.GetColumnCount()
    if numCols == 0:
        return  # Nothing to resize.

    resizeCol = self._resizeColumn

    # Remember the column's initial width the first time through so it is
    # never shrunk below that. (FIX: was "== None"; use identity test.)
    if self._resizeColMinWidth is None:
        self._resizeColMinWidth = self.GetColumnWidth(resizeCol)

    # We're showing the vertical scrollbar -> allow for scrollbar width
    # NOTE: on GTK, the scrollbar is included in the client size, but on
    # Windows it is not included
    listWidth = self.GetClientSize().width
    if wx.Platform != '__WXMSW__':
        if self.GetItemCount() > self.GetCountPerPage():
            scrollWidth = wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X)
            listWidth = listWidth - scrollWidth

    # Width of all shown columns except the resizable one.
    # (FIX: removed the unused local "resizeColWidth".)
    totColWidth = 0
    for col in range(numCols):
        if col != resizeCol and self.IsColumnShown(col):
            totColWidth = totColWidth + self.GetColumnWidth(col)

    if totColWidth + self._resizeColMinWidth > listWidth:
        # We haven't got the width to show the last column at its minimum
        # width -> set it to its minimum width and allow the horizontal
        # scrollbar to show.
        self.SetColumnWidth(resizeCol, self._resizeColMinWidth)
        return

    # Resize the last column to take up the remaining available space.
    self.SetColumnWidth(resizeCol, listWidth - totColWidth)
# get the colour to be used for drawing the rules
def GetRuleColour(self):
    """ Returns the colour to be used for drawing the horizontal and vertical rules. """
    # Use the theme's 3D-light system colour so rules match the platform look.
    return wx.SystemSettings.GetColour(wx.SYS_COLOUR_3DLIGHT)
def SetReportView(self, inReportView):
    """
    Sets whether :class:`UltimateListCtrl` is in report view or not.

    :param `inReportView`: ``True`` to set :class:`UltimateListCtrl` in report view, ``False``
     otherwise.
    """
    # Propagate the view mode to every line so they lay themselves out
    # accordingly.
    for line in self._lines:
        line.SetReportView(inReportView)
def CacheLineData(self, line):
    """
    Saves the current line attributes.

    :param `line`: an instance of :class:`UltimateListLineData`.

    :note: This method is used only if the :class:`UltimateListCtrl` has the ``ULC_VIRTUAL``
     style set.
    """

    listctrl = self.GetListCtrl()
    cached = self.GetDummyLine()

    # Pull every column's data for this row from the user-overridable
    # OnGetItem* callbacks into the single cached dummy line.
    for col in range(self.GetColumnCount()):
        cached.SetText(col, listctrl.OnGetItemText(line, col))
        cached.SetToolTip(col, listctrl.OnGetItemToolTip(line, col))
        cached.SetColour(col, listctrl.OnGetItemTextColour(line, col))
        cached.SetImage(col, listctrl.OnGetItemColumnImage(line, col))
        kind = listctrl.OnGetItemColumnKind(line, col)
        cached.SetKind(col, kind)
        # Only checkbox/radio kinds (> 0) carry a checked state.
        if kind > 0:
            cached.Check(col, listctrl.OnGetItemColumnCheck(line, col))

    cached.SetAttr(listctrl.OnGetItemAttr(line))
def GetDummyLine(self):
    """
    Returns a dummy line.

    :note: This method is used only if the :class:`UltimateListCtrl` has the ``ULC_VIRTUAL``
     style set.
    """

    if self.IsEmpty():
        raise Exception("invalid line index")
    if not self.IsVirtual():
        raise Exception("GetDummyLine() shouldn't be called")

    # we need to recreate the dummy line if the number of columns in the
    # control changed as it would have the incorrect number of fields
    # otherwise
    if self._lines and len(self._lines[0]._items) != self.GetColumnCount():
        self._lines = []

    if not self._lines:
        self._lines.append(UltimateListLineData(self))

    return self._lines[0]
# ----------------------------------------------------------------------------
# line geometry (report mode only)
# ----------------------------------------------------------------------------
def GetLineHeight(self, item=None):
    """
    Returns the line height for a specific item.

    :param `item`: if not ``None``, an instance of :class:`UltimateListItem`.
    """

    # we cache the line height as calling GetTextExtent() is slow

    # A user-forced uniform height overrides everything else.
    if self.HasAGWFlag(ULC_REPORT) and self.HasAGWFlag(ULC_USER_ROW_HEIGHT):
        if self._userLineHeight is not None:
            return self._userLineHeight

    if item is None or not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
        # Uniform-height case: measure once and cache in self._lineHeight.
        if not self._lineHeight:
            dc = wx.ClientDC(self)
            dc.SetFont(self.GetFont())
            dummy, y = dc.GetTextExtent("H")
            if self._small_image_list and self._small_image_list.GetImageCount():
                iw, ih = self._small_image_list.GetSize(0)
                y = max(y, ih)

            y += EXTRA_HEIGHT
            self._lineHeight = y + LINE_SPACING

        return self._lineHeight

    else:
        # Variable-height case: measure this particular line (text,
        # windows, images, custom renderers in every column) and cache the
        # result on the line object itself.
        line = self.GetLine(item)
        LH = line.GetHeight()
        if LH != -1:
            return LH

        dc = wx.ClientDC(self)

        allTextY = 0

        for col, items in enumerate(line._items):

            if items.GetCustomRenderer():
                # A custom renderer dictates its own height for this column.
                allTextY = max(allTextY, items.GetCustomRenderer().GetLineHeight())
                continue

            if items.HasFont():
                dc.SetFont(items.GetFont())
            else:
                dc.SetFont(self.GetFont())

            text_x, text_y, dummy = dc.GetFullMultiLineTextExtent(items.GetText())
            allTextY = max(text_y, allTextY)

            if items.GetWindow():
                # Embedded widgets can be taller than the text.
                xSize, ySize = items.GetWindowSize()
                allTextY = max(allTextY, ySize)

            if self._small_image_list and self._small_image_list.GetImageCount():
                for img in items._image:
                    iw, ih = self._small_image_list.GetSize(img)
                    allTextY = max(allTextY, ih)

        allTextY += EXTRA_HEIGHT
        line.SetHeight(allTextY)

        return allTextY
def GetLineY(self, line):
    """
    Returns the line `y` position.

    :param `line`: an instance of :class:`UltimateListLineData`.
    """

    # Virtual controls have uniform line heights, so the position is a
    # simple multiplication.
    if self.IsVirtual():
        return LINE_SPACING + line*self.GetLineHeight()

    lineItem = self.GetLine(line)
    cachedY = lineItem.GetY()
    if cachedY != -1:
        # Position computed (and cached) by a previous call.
        return cachedY

    # Sum the heights of all preceding lines, then cache the result.
    total = sum(self.GetLineHeight(l) for l in range(line))
    lineItem.SetY(LINE_SPACING + total)
    return LINE_SPACING + total
def GetLineRect(self, line):
    """
    Returns the line client rectangle.

    :param `line`: an instance of :class:`UltimateListLineData`.
    """

    if not self.InReportView():
        # Non-report views pre-compute the rectangle during layout.
        return self.GetLine(line)._gi._rectAll

    return wx.Rect(HEADER_OFFSET_X, self.GetLineY(line),
                   self.GetHeaderWidth(), self.GetLineHeight(line))
def GetLineLabelRect(self, line, col=0):
    """
    Returns the line client rectangle for the item text only.
    Note this is the full column width unless an image or
    checkbox exists. It is not the width of the text itself

    :param `line`: an instance of :class:`UltimateListLineData`;
    :param `col`: the column index (defaults to 0).
    """

    if not self.InReportView():
        return self.GetLine(line)._gi._rectLabel

    image_x = 0
    image_width = 0
    # Skip over the full width of every column preceding `col`.
    for c in range(col):
        image_x += self.GetColumnWidth(c)

    item = self.GetLine(line)
    if item.HasImage(col):
        # The label starts after the item image, which also narrows it.
        ix, iy = self.GetImageSize(item.GetImage(col))
        image_x += ix
        image_width = ix

    if item.GetKind(col) in [1, 2]:
        # Checkbox/radio kinds shift and narrow the label as well.
        image_x += self.GetCheckboxImageSize()[0]
        image_width += self.GetCheckboxImageSize()[0]

    rect = wx.Rect(image_x + HEADER_OFFSET_X, self.GetLineY(line), self.GetColumnWidth(col) - image_width, self.GetLineHeight(line))
    return rect
def GetLineIconRect(self, line):
    """
    Return the client rectangle of the item image of *line*.

    :param `line`: the line index.
    """
    if not self.InReportView():
        return self.GetLine(line)._gi._rectIcon
    data = self.GetLine(line)
    xpos = HEADER_OFFSET_X
    # A checkbox, if present, sits to the left of the icon.
    if data.GetKind() in [1, 2]:
        xpos += self.GetCheckboxImageSize()[0]
    iw, ih = self.GetImageSize(data.GetImage())
    return wx.Rect(xpos, self.GetLineY(line), iw, ih)
def GetLineCheckboxRect(self, line):
    """
    Return the client rectangle of the checkbox image of *line*.

    :param `line`: the line index.
    """
    if not self.InReportView():
        return self.GetLine(line)._gi._rectCheck
    ld = self.GetLine(line)
    LH = self.GetLineHeight(line)
    wcheck, hcheck = self.GetCheckboxImageSize()
    # Centre the checkbox vertically. Use floor division: under Python 3
    # true division yields floats, which wx.Rect rejects (Phoenix requires
    # int arguments); '//' also reproduces the old Python 2 behaviour.
    rect = wx.Rect(HEADER_OFFSET_X, self.GetLineY(line) + LH//2 - hcheck//2, wcheck, hcheck)
    return rect
def GetLineHighlightRect(self, line):
    """
    Return the rectangle painted when *line* is highlighted.

    :param `line`: the line index.
    """
    if self.InReportView():
        return self.GetLineRect(line)
    return self.GetLine(line)._gi._rectHighlight
def HitTestLine(self, line, x, y):
    """
    HitTest method for a :class:`UltimateListCtrl` line.

    :param `line`: an instance of :class:`UltimateListLineData`;
    :param `x`: the mouse `x` position;
    :param `y`: the mouse `y` position.

    :return: a tuple ``(item, flags)``: the item hit (or ``None``) and a hit
     flag, which can be one of the following bits:

     =============================== ========= ================================
     HitTest Flag                    Hex Value Description
     =============================== ========= ================================
     ``ULC_HITTEST_ABOVE``                 0x1 Above the client area
     ``ULC_HITTEST_BELOW``                 0x2 Below the client area
     ``ULC_HITTEST_NOWHERE``               0x4 In the client area but below the last item
     ``ULC_HITTEST_ONITEM``              0x2a0 Anywhere on the item (text, icon, checkbox image)
     ``ULC_HITTEST_ONITEMICON``           0x20 On the bitmap associated with an item
     ``ULC_HITTEST_ONITEMLABEL``          0x80 On the label (string) associated with an item
     ``ULC_HITTEST_ONITEMRIGHT``         0x100 In the area to the right of an item
     ``ULC_HITTEST_ONITEMSTATEICON``     0x200 On the state icon for a list view item that is in a user-defined state
     ``ULC_HITTEST_TOLEFT``              0x400 To the left of the client area
     ``ULC_HITTEST_TORIGHT``             0x800 To the right of the client area
     ``ULC_HITTEST_ONITEMCHECK``        0x1000 On the item checkbox (if any)
     =============================== ========= ================================
    """

    ld = self.GetLine(line)

    if self.InReportView():# and not self.IsVirtual():
        # Report mode: walk the visible columns, testing the label,
        # checkbox and hyperlink areas of each in turn.
        lineY = self.GetLineY(line)
        xstart = HEADER_OFFSET_X

        for col, item in enumerate(ld._items):
            if not self.IsColumnShown(col):
                continue
            width = self.GetColumnWidth(col)
            xOld = xstart
            xstart += width
            ix = 0
            #if (line, col) in self._shortItems:
            #rect = wx.Rect(xOld, lineY, width, self.GetLineHeight(line))
            rect = self.GetLineLabelRect(line,col)
            if rect.Contains((x, y)):
                newItem = self.GetParent().GetItem(line, col)
                return newItem, ULC_HITTEST_ONITEMLABEL

            if item.GetKind() in [1, 2]:
                # We got a checkbox-type item
                ix, iy = self.GetCheckboxImageSize()
                LH = self.GetLineHeight(line)
                # NOTE(review): LH/2 - iy/2 is a float under Python 3;
                # wx.Rect may reject it -- confirm against the wx version in use.
                rect = wx.Rect(xOld, lineY + LH/2 - iy/2, ix, iy)
                if rect.Contains((x, y)):
                    newItem = self.GetParent().GetItem(line, col)
                    return newItem, ULC_HITTEST_ONITEMCHECK

            if item.IsHyperText():
                # Only the actual text extent of a hyperlink is clickable.
                start, end = self.GetItemTextSize(item)
                label_rect = self.GetLineLabelRect(line, col)
                rect = wx.Rect(xOld+start, lineY, min(end, label_rect.width), self.GetLineHeight(line))
                if rect.Contains((x, y)):
                    newItem = self.GetParent().GetItem(line, col)
                    return newItem, ULC_HITTEST_ONITEMLABEL

            xOld += ix

    # Fall through: icon test applies in every mode.
    if ld.HasImage() and self.GetLineIconRect(line).Contains((x, y)):
        return self.GetParent().GetItem(line), ULC_HITTEST_ONITEMICON

    # VS: Testing for "ld.HasText() || InReportView()" instead of
    # "ld.HasText()" is needed to make empty lines in report view
    # possible
    if ld.HasText() or self.InReportView():

        if self.InReportView():
            rect = self.GetLineRect(line)
        else:
            checkRect = self.GetLineCheckboxRect(line)
            if checkRect.Contains((x, y)):
                return self.GetParent().GetItem(line), ULC_HITTEST_ONITEMCHECK
            rect = self.GetLineLabelRect(line)

        if rect.Contains((x, y)):
            return self.GetParent().GetItem(line), ULC_HITTEST_ONITEMLABEL

    # Last resort: anywhere on the full line rectangle.
    rect = self.GetLineRect(line)
    if rect.Contains((x, y)):
        return self.GetParent().GetItem(line), ULC_HITTEST_ONITEM

    return None, 0
# ----------------------------------------------------------------------------
# highlight (selection) handling
# ----------------------------------------------------------------------------
def IsHighlighted(self, line):
    """
    Return ``True`` if *line* is currently selected (highlighted).

    :param `line`: the line index.
    """
    if self.IsVirtual():
        # Virtual controls keep selection state in the selection store.
        return self._selStore.IsSelected(line)
    return self.GetLine(line).IsHighlighted()
def HighlightLines(self, lineFrom, lineTo, highlight=True):
    """
    Select or deselect the inclusive range [*lineFrom*, *lineTo*].

    :param `lineFrom`: first line of the range;
    :param `lineTo`: last line of the range;
    :param `highlight`: ``True`` to select the lines, ``False`` otherwise.
    """
    if not self.IsVirtual():
        # Non-virtual: toggle each line and repaint the ones that changed.
        for line in range(lineFrom, lineTo+1):
            if self.HighlightLine(line, highlight):
                self.RefreshLine(line)
        return

    changed = self._selStore.SelectRange(lineFrom, lineTo, highlight)
    if changed:
        # Only a few lines changed state -- repaint just those.
        for line in changed:
            self.RefreshLine(line)
    else:
        # Too many changes to enumerate; repaint the whole range.
        self.RefreshLines(lineFrom, lineTo)
def HighlightLine(self, line, highlight=True):
    """
    Select or deselect a single line.

    :param `line`: the line index;
    :param `highlight`: ``True`` to select, ``False`` to deselect.

    :return: ``True`` if the line's selection state actually changed.
    """
    if self.IsVirtual():
        changed = self._selStore.SelectItem(line, highlight)
    else:
        changed = self.GetLine(line).Highlight(highlight)

    # Sticky highlight with selection events suppressed stays silent.
    quiet = self.HasAGWFlag(ULC_STICKY_HIGHLIGHT) and self.HasAGWFlag(ULC_STICKY_NOSELEVENT)
    if changed and not quiet:
        evtType = wxEVT_COMMAND_LIST_ITEM_SELECTED if highlight else wxEVT_COMMAND_LIST_ITEM_DESELECTED
        self.SendNotify(line, evtType)

    return changed
def RefreshLine(self, line):
    """
    Schedule a repaint of *line* if it is at least partly visible.

    :param `line`: the line index.
    """
    if self.InReportView():
        first, last = self.GetVisibleLinesRange()
        # Off-screen lines need no repaint.
        if not first <= line <= last:
            return
    rect = self.GetLineRect(line)
    rect.x, rect.y = self.CalcScrolledPosition(rect.x, rect.y)
    self.RefreshRect(rect)
def RefreshLines(self, lineFrom, lineTo):
    """
    Schedule a repaint of the inclusive range [*lineFrom*, *lineTo*].

    :param `lineFrom`: first line to refresh;
    :param `lineTo`: last line to refresh.
    """
    if not self.InReportView():
        # TODO: this should be optimized...
        for line in range(lineFrom, lineTo+1):
            self.RefreshLine(line)
        return

    # Clamp the range to the lines actually on screen.
    first, last = self.GetVisibleLinesRange()
    lineFrom = max(lineFrom, first)
    lineTo = min(lineTo, last)

    top = self.GetLineY(lineFrom)
    rect = wx.Rect(0, top, self.GetClientSize().x,
                   self.GetLineY(lineTo) - top + self.GetLineHeight(lineTo))
    rect.x, rect.y = self.CalcScrolledPosition(rect.x, rect.y)
    self.RefreshRect(rect)
def RefreshAfter(self, lineFrom):
    """
    Schedule a repaint of every line from *lineFrom* to the window bottom.

    :param `lineFrom`: first line to refresh.
    """
    if not self.InReportView():
        # TODO: how to do it more efficiently?
        self._dirty = True
        return

    first, last = self.GetVisibleLinesRange()
    if lineFrom < first:
        lineFrom = first
    elif lineFrom > last:
        # Entirely below the visible area -- nothing to repaint now.
        return

    rect = wx.Rect()
    rect.x = 0
    rect.y = self.GetLineY(lineFrom)
    rect.x, rect.y = self.CalcScrolledPosition(rect.x, rect.y)
    size = self.GetClientSize()
    rect.width = size.x
    # Refresh down to the bottom edge of the window.
    rect.height = size.y - rect.y
    self.RefreshRect(rect)
def RefreshSelected(self):
    """ Schedule a repaint of the focused line and every selected line. """
    if self.IsEmpty():
        return

    if self.InReportView():
        first, last = self.GetVisibleLinesRange()
    else:
        first, last = 0, self.GetItemCount() - 1

    if self.HasCurrent() and first <= self._current <= last:
        self.RefreshLine(self._current)

    for line in range(first, last+1):
        # Works even when self._current == -1: the inequality never matches.
        if line != self._current and self.IsHighlighted(line):
            self.RefreshLine(line)
def HideWindows(self):
    """ Hides the windows associated to the items. Used internally. """
    for item in self._itemWithWindow:
        window = item.GetWindow()
        if window:
            window.Hide()
def OnPaint(self, event):
    """
    Handles the ``wx.EVT_PAINT`` event for :class:`UltimateListMainWindow`.

    Paints background, watermark, all (visible) lines, optional horizontal
    and vertical rules, and the focus rectangle.

    :param `event`: a :class:`PaintEvent` event to be processed.
    """

    # Note: a wxPaintDC must be constructed even if no drawing is
    # done (a Windows requirement).
    dc = wx.BufferedPaintDC(self)
    dc.SetBackgroundMode(wx.TRANSPARENT)
    self.PrepareDC(dc)
    dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
    dc.SetPen(wx.TRANSPARENT_PEN)
    dc.Clear()
    self.TileBackground(dc)
    self.PaintWaterMark(dc)

    if self.IsEmpty():
        # nothing to draw or not the moment to draw it
        return

    if self._dirty:
        # delay the repainting until we calculate all the items positions
        self.RecalculatePositions(False)

    useVista, useGradient = self._vistaselection, self._usegradients
    dev_x, dev_y = self.CalcScrolledPosition(0, 0)
    dc.SetFont(self.GetFont())

    if self.InReportView():
        visibleFrom, visibleTo = self.GetVisibleLinesRange()

        # mrcs: draw additional items (one extra line above and below the
        # strictly visible range, so partially-scrolled lines paint too)
        if visibleFrom > 0:
            visibleFrom -= 1
        if visibleTo < self.GetItemCount() - 1:
            visibleTo += 1

        xOrig = dc.LogicalToDeviceX(0)
        yOrig = dc.LogicalToDeviceY(0)

        # tell the caller cache to cache the data
        if self.IsVirtual():
            evCache = UltimateListEvent(wxEVT_COMMAND_LIST_CACHE_HINT, self.GetParent().GetId())
            evCache.SetEventObject(self.GetParent())
            evCache.m_oldItemIndex = visibleFrom
            evCache.m_itemIndex = visibleTo
            self.GetParent().GetEventHandler().ProcessEvent(evCache)

        no_highlight = self.HasAGWFlag(ULC_NO_HIGHLIGHT)

        for line in range(visibleFrom, visibleTo+1):
            rectLine = self.GetLineRect(line)
            if not self.IsExposed(rectLine.x + xOrig, rectLine.y + yOrig, rectLine.width, rectLine.height):
                # don't redraw unaffected lines to avoid flicker
                continue
            theLine = self.GetLine(line)
            enabled = theLine.GetItem(0, CreateListItem(line, 0)).IsEnabled()
            oldPN, oldBR = dc.GetPen(), dc.GetBrush()
            theLine.DrawInReportMode(dc, line, rectLine,
                                     self.GetLineHighlightRect(line),
                                     self.IsHighlighted(line) and not no_highlight,
                                     line==self._current, enabled, oldPN, oldBR)

        if self.HasAGWFlag(ULC_HRULES):
            pen = wx.Pen(self.GetRuleColour(), 1, wx.PENSTYLE_SOLID)
            clientSize = self.GetClientSize()
            # Don't draw the first one
            start = (visibleFrom > 0 and [visibleFrom] or [1])[0]
            dc.SetPen(pen)
            dc.SetBrush(wx.TRANSPARENT_BRUSH)
            for i in range(start, visibleTo+1):
                lineY = self.GetLineY(i)
                dc.DrawLine(0 - dev_x, lineY, clientSize.x - dev_x, lineY)
            # Draw last horizontal rule
            if visibleTo == self.GetItemCount() - 1:
                lineY = self.GetLineY(visibleTo) + self.GetLineHeight(visibleTo)
                dc.SetPen(pen)
                dc.SetBrush(wx.TRANSPARENT_BRUSH)
                dc.DrawLine(0 - dev_x, lineY, clientSize.x - dev_x , lineY)

        # Draw vertical rules if required
        if self.HasAGWFlag(ULC_VRULES) and not self.IsEmpty():
            pen = wx.Pen(self.GetRuleColour(), 1, wx.PENSTYLE_SOLID)
            firstItemRect = self.GetItemRect(visibleFrom)
            lastItemRect = self.GetItemRect(visibleTo)
            x = firstItemRect.GetX()
            dc.SetPen(pen)
            dc.SetBrush(wx.TRANSPARENT_BRUSH)
            for col in range(self.GetColumnCount()):
                if not self.IsColumnShown(col):
                    continue
                colWidth = self.GetColumnWidth(col)
                x += colWidth
                x_pos = x - dev_x
                if col < self.GetColumnCount()-1:
                    x_pos -= 2
                dc.DrawLine(x_pos, firstItemRect.GetY() - 1 - dev_y, x_pos, lastItemRect.GetBottom() + 1 - dev_y)

    else: # !report
        for i in range(self.GetItemCount()):
            self.GetLine(i).Draw(i, dc)

    if wx.Platform not in ["__WXMAC__", "__WXGTK__"]:
        # Don't draw rect outline under Mac at all.
        # Draw it elsewhere on GTK
        if self.HasCurrent():
            if self._hasFocus and not self.HasAGWFlag(ULC_NO_HIGHLIGHT) and not useVista and not useGradient \
               and not self.HasAGWFlag(ULC_BORDER_SELECT) and not self.HasAGWFlag(ULC_NO_FULL_ROW_SELECT):
                dc.SetPen(wx.BLACK_PEN)
                dc.SetBrush(wx.TRANSPARENT_BRUSH)
                dc.DrawRectangle(self.GetLineHighlightRect(self._current))
def OnEraseBackground(self, event):
    """
    Handles the ``wx.EVT_ERASE_BACKGROUND`` event for :class:`UltimateListMainWindow`.

    :param `event`: a :class:`EraseEvent` event to be processed.

    :note: This method is intentionally empty to reduce flicker: all
     background painting happens in :meth:`OnPaint` via a buffered DC.
    """
    pass
def TileBackground(self, dc):
    """
    Fill the client area by tiling the background image, if one is set.

    :param `dc`: the :class:`wx.DC` to draw on.

    .. todo:: Support background images also in stretch and centered modes.
    """
    if not self._backgroundImage:
        return

    if self._imageStretchStyle != _StyleTile:
        # Stretch/centered modes are deliberately not handled here:
        # attempts to do so (here or in OnPaint) caused heavy flickering.
        return

    client = self.GetClientSize()
    bmp_w = self._backgroundImage.GetWidth()
    bmp_h = self._backgroundImage.GetHeight()

    for xpos in range(0, client.width, bmp_w):
        for ypos in range(0, client.height, bmp_h):
            dc.DrawBitmap(self._backgroundImage, xpos, ypos, True)
def PaintWaterMark(self, dc):
    """
    Draw the watermark bitmap in the bottom-right corner, if one is set.

    :param `dc`: the :class:`wx.DC` to draw on.

    .. todo:: Better support for this is needed.
    """
    if not self._waterMark:
        return

    width, height = self.CalcUnscrolledPosition(*self.GetClientSize())
    # Keep a 5 pixel margin from the bottom-right corner.
    xpos = width - self._waterMark.GetWidth() - 5
    ypos = height - self._waterMark.GetHeight() - 5
    dc.DrawBitmap(self._waterMark, xpos, ypos, True)
def HighlightAll(self, on=True):
    """
    Select or deselect every line, honouring single-selection mode.

    :param `on`: ``True`` to select all lines, ``False`` to deselect them.
    """
    if not self.IsSingleSel():
        # Multi-selection: delegate to the range helper.
        if not self.IsEmpty():
            self.HighlightLines(0, self.GetItemCount() - 1, on)
        return

    if on:
        raise Exception("can't do this in a single sel control")

    # Single selection: at most one line can be lit -- just turn it off.
    if self.HasCurrent() and self.IsHighlighted(self._current):
        self.HighlightLine(self._current, False)
        self.RefreshLine(self._current)
def OnChildFocus(self, event):
    """
    Handles the ``wx.EVT_CHILD_FOCUS`` event for :class:`UltimateListMainWindow`.

    :param `event`: a :class:`ChildFocusEvent` event to be processed.

    .. note::

       This method is intentionally empty to prevent the default handler in
       :class:`ScrolledWindow` from needlessly scrolling the window when the edit
       control is dismissed.
    """

    # Do nothing here. This prevents the default handler in wx.ScrolledWindow
    # from needlessly scrolling the window when the edit control is
    # dismissed. See ticket #9563.

    pass
def SendNotify(self, line, command, point=wx.DefaultPosition):
    """
    Build and dispatch a :class:`UltimateListEvent` to the parent control.

    :param `line`: the index of the line the event refers to;
    :param `command`: the event type to send;
    :param `point`: the mouse position, for event types that carry one.

    :return: ``True`` if the event was allowed (i.e. not vetoed).
    """
    le = UltimateListEvent(command, self.GetParent().GetId())
    le.SetEventObject(self.GetParent())
    le.m_itemIndex = line

    # Only position-carrying events get a drag point.
    if point != wx.DefaultPosition:
        le.m_pointDrag = point

    # Don't fetch line info for virtual list controls: the main program has
    # it anyhow, and doing so would touch every line -- visible or not --
    # which is precisely what virtual mode is meant to avoid. line == -1
    # happens e.g. for wxEVT_COMMAND_LIST_ITEM_FOCUSED with no item.
    if not self.IsVirtual() and line != -1:
        self.GetLine(line).GetItem(0, le.m_item)

    self.GetParent().GetEventHandler().ProcessEvent(le)
    return le.IsAllowed()
def ChangeCurrent(self, current):
    """
    Make *current* the focused line and notify the parent.

    :param `current`: the index of the new focused line.
    """
    self._current = current

    # A pending "slow click" rename applies to the previously focused
    # item, so it must not fire once the focus has moved.
    if self._renameTimer.IsRunning():
        self._renameTimer.Stop()

    self.SendNotify(current, wxEVT_COMMAND_LIST_ITEM_FOCUSED)
def EditLabel(self, item):
    """
    Starts editing an item label, emitting ``EVT_LIST_BEGIN_LABEL_EDIT``
    (which user code may veto).

    :param `item`: the index of the item to edit.

    :return: the :class:`UltimateListTextCtrl` used for editing, or ``None``.
    """

    if item < 0 or item >= self.GetItemCount():
        raise Exception("wrong index in UltimateListCtrl.EditLabel()")

    le = UltimateListEvent(wxEVT_COMMAND_LIST_BEGIN_LABEL_EDIT, self.GetParent().GetId())
    le.SetEventObject(self.GetParent())
    le.m_itemIndex = item
    data = self.GetLine(item)
    le.m_item = data.GetItem(0, le.m_item)

    # Note: the edit control is created before the event is processed.
    self._textctrl = UltimateListTextCtrl(self, item)

    if self.GetParent().GetEventHandler().ProcessEvent(le) and not le.IsAllowed():
        # vetoed by user code
        return

    # We have to call this here because the label in question might just have
    # been added and no screen update taken place.
    if self._dirty:
        wx.SafeYield()

    # Pending events dispatched by wx.SafeYield might have changed the item
    # count
    if item >= self.GetItemCount():
        return None

    # modified
    self._textctrl.SetFocus()

    return self._textctrl
def OnRenameTimer(self):
    """ The timer for renaming has expired. Start editing the focused item. """

    # The timer is only ever started after a click on the current item,
    # so a missing focus here indicates a logic error elsewhere.
    if not self.HasCurrent():
        raise Exception("unexpected rename timer")

    self.EditLabel(self._current)
def OnRenameAccept(self, itemEdit, value):
    """
    Called by :class:`UltimateListTextCtrl` to accept an in-place edit and
    emit the ``EVT_LIST_END_LABEL_EDIT`` event.

    :param `itemEdit`: the index of the edited item;
    :param `value`: the new label text.

    :return: ``True`` unless a handler processed and vetoed the change.
    """
    endEvent = UltimateListEvent(wxEVT_COMMAND_LIST_END_LABEL_EDIT, self.GetParent().GetId())
    endEvent.SetEventObject(self.GetParent())
    endEvent.m_itemIndex = itemEdit

    lineData = self.GetLine(itemEdit)
    endEvent.m_item = lineData.GetItem(0, endEvent.m_item)
    endEvent.m_item._text = value

    processed = self.GetParent().GetEventHandler().ProcessEvent(endEvent)
    return not processed or endEvent.IsAllowed()
def OnRenameCancelled(self, itemEdit):
    """
    Called by :class:`UltimateListTextCtrl` to cancel an in-place edit and
    emit the ``EVT_LIST_END_LABEL_EDIT`` event with the edit-cancelled
    flag set.

    :param `itemEdit`: the index of the item whose edit was cancelled.
    """

    # let owner know that the edit was cancelled
    le = UltimateListEvent(wxEVT_COMMAND_LIST_END_LABEL_EDIT, self.GetParent().GetId())

    le.SetEditCanceled(True)

    le.SetEventObject(self.GetParent())
    le.m_itemIndex = itemEdit

    data = self.GetLine(itemEdit)
    le.m_item = data.GetItem(0, le.m_item)
    # Deliver through the parent's event handler, consistently with
    # SendNotify() and OnRenameAccept(): the event object is the parent,
    # so handlers bound on the parent would otherwise never receive the
    # cancel notification.
    self.GetParent().GetEventHandler().ProcessEvent(le)
def OnMouse(self, event):
    """
    Handles the ``wx.EVT_MOUSE_EVENTS`` event for :class:`UltimateListMainWindow`.

    Dispatches motion (hover/tooltips/hot-tracking), drag and drop, and all
    click variants (left/middle/right, single/double) to the appropriate
    selection and notification logic.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """

    if wx.Platform == "__WXMAC__":
        # On wxMac we can't depend on the EVT_KILL_FOCUS event to properly
        # shutdown the edit control when the mouse is clicked elsewhere on the
        # listctrl because the order of events is different (or something like
        # that,) so explicitly end the edit if it is active.
        if event.LeftDown() and self._textctrl:
            self._textctrl.AcceptChanges()
            self._textctrl.Finish()

    if event.LeftDown():
        self.SetFocusIgnoringChildren()

    # Give the parent first crack at the raw event.
    event.SetEventObject(self.GetParent())
    if self.GetParent().GetEventHandler().ProcessEvent(event):
        return

    if event.GetEventType() == wx.wxEVT_MOUSEWHEEL:
        # let the base handle mouse wheel events.
        self.Refresh()
        event.Skip()
        return

    if self.IsEmpty():
        # Empty control: only a right-click context menu is meaningful.
        if event.RightDown():
            self.SendNotify(-1, wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK, event.GetPosition())
            evtCtx = wx.ContextMenuEvent(wx.wxEVT_CONTEXT_MENU, self.GetParent().GetId(),
                                         self.ClientToScreen(event.GetPosition()))
            evtCtx.SetEventObject(self.GetParent())
            self.GetParent().GetEventHandler().ProcessEvent(evtCtx)
        return

    if self._dirty:
        return

    if not (event.Dragging() or event.ButtonDown() or event.LeftUp() or \
            event.ButtonDClick() or event.Moving() or event.RightUp()):
        return

    x = event.GetX()
    y = event.GetY()
    x, y = self.CalcUnscrolledPosition(x, y)

    # where did we hit it (if we did)?
    hitResult = 0
    newItem = None
    count = self.GetItemCount()

    if self.InReportView():
        if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
            # Uniform row height: the line index is pure arithmetic.
            current = y // self.GetLineHeight()
            if current < count:
                newItem, hitResult = self.HitTestLine(current, x, y)
            else:
                return
        else:
            for current in range(count):
                newItem, hitResult = self.HitTestLine(current, x, y)
                if hitResult:
                    break
    else:
        # TODO: optimize it too! this is less simple than for report view but
        # enumerating all items is still not a way to do it!!
        for current in range(count):
            newItem, hitResult = self.HitTestLine(current, x, y)
            if hitResult:
                break

    theItem = None

    if not self.IsVirtual():
        theItem = CreateListItem(current, 0)
        theItem = self.GetItem(theItem)

    if event.GetEventType() == wx.wxEVT_MOTION and not event.Dragging():

        # Plain motion: hover-select timer, hyperlink cursor, sticky
        # highlight, tooltips and hot tracking. No click processing.
        if current >= 0 and current < count and self.HasAGWFlag(ULC_TRACK_SELECT) and not self._hoverTimer.IsRunning():
            self._hoverItem = current
            self._hoverTimer.Start(HOVER_TIME, wx.TIMER_ONE_SHOT)

        if newItem and newItem.IsHyperText() and (hitResult & ULC_HITTEST_ONITEMLABEL) and theItem and theItem.IsEnabled():
            self.SetCursor(wx.Cursor(wx.CURSOR_HAND))
            self._isonhyperlink = True
        else:
            if self._isonhyperlink:
                self.SetCursor(wx.Cursor(wx.CURSOR_ARROW))
                self._isonhyperlink = False

        if self.HasAGWFlag(ULC_STICKY_HIGHLIGHT) and hitResult:
            if not self.IsHighlighted(current):
                self.HighlightAll(False)
                self.ChangeCurrent(current)
                self.ReverseHighlight(self._current)

        if self.HasAGWFlag(ULC_SHOW_TOOLTIPS):
            if newItem and hitResult & ULC_HITTEST_ONITEMLABEL:
                r, c = (newItem._itemId, newItem._col)
                line = self.GetLine(r)
                tt = line.GetToolTip(c)
                if tt and not tt == "":
                    if self.GetToolTip() and self.GetToolTip().GetTip() != tt:
                        self.SetToolTip(tt)
                elif (r, c) in self._shortItems: # if the text didn't fit in the column
                    text = newItem.GetText()
                    if self.GetToolTip() and self.GetToolTip().GetTip() != text:
                        self.SetToolTip(text)
                else:
                    self.SetToolTip("")
            else:
                self.SetToolTip("")

        if self.HasAGWFlag(ULC_HOT_TRACKING):
            if hitResult:
                if self._oldHotCurrent != current:
                    if self._oldHotCurrent is not None:
                        self.RefreshLine(self._oldHotCurrent)
                    self._newHotCurrent = current
                    self.RefreshLine(self._newHotCurrent)
                    self._oldHotCurrent = current

        event.Skip()
        return

    if event.Dragging():

        if not self._isDragging:

            # Not yet dragging: require a valid, enabled item under the
            # initial click and three motion events before starting.
            if self._lineLastClicked == -1 or not hitResult or not theItem or not theItem.IsEnabled():
                return

            if self._dragCount == 0:
                # we have to report the raw, physical coords as we want to be
                # able to call HitTest(event.m_pointDrag) from the user code to
                # get the item being dragged
                self._dragStart = event.GetPosition()

            self._dragCount += 1

            if self._dragCount != 3:
                return

            command = (event.RightIsDown() and [wxEVT_COMMAND_LIST_BEGIN_RDRAG] or [wxEVT_COMMAND_LIST_BEGIN_DRAG])[0]
            le = UltimateListEvent(command, self.GetParent().GetId())
            le.SetEventObject(self.GetParent())
            le.m_itemIndex = self._lineLastClicked
            le.m_pointDrag = self._dragStart
            self.GetParent().GetEventHandler().ProcessEvent(le)

            # we're going to drag this item
            self._isDragging = True
            self._dragItem = current

            # remember the old cursor because we will change it while
            # dragging
            self._oldCursor = self._cursor
            self.SetCursor(self._dragCursor)

        else:

            if current != self._dropTarget:

                self.SetCursor(self._dragCursor)
                # unhighlight the previous drop target
                if self._dropTarget is not None:
                    self.RefreshLine(self._dropTarget)

                move = current
                if self._dropTarget:
                    move = (current > self._dropTarget and [current+1] or [current-1])[0]

                self._dropTarget = current
                self.MoveToItem(move)

            else:

                if self._dragItem == current:
                    self.SetCursor(wx.Cursor(wx.CURSOR_NO_ENTRY))

            if self.HasAGWFlag(ULC_REPORT) and self._dragItem != current:
                self.DrawDnDArrow()

        return

    else:
        self._dragCount = 0

    # Disabled items swallow clicks (but still end a pending drag).
    if theItem and not theItem.IsEnabled():
        self.DragFinish(event)
        event.Skip()
        return

    if not hitResult:
        # outside of any item
        if event.RightDown():
            self.SendNotify(-1, wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK, event.GetPosition())
            evtCtx = wx.ContextMenuEvent(wx.wxEVT_CONTEXT_MENU, self.GetParent().GetId(),
                                         self.ClientToScreen(event.GetPosition()))
            evtCtx.SetEventObject(self.GetParent())
            self.GetParent().GetEventHandler().ProcessEvent(evtCtx)
        else:
            self.HighlightAll(False)
            self.DragFinish(event)

        return

    forceClick = False

    if event.ButtonDClick():

        if self._renameTimer.IsRunning():
            self._renameTimer.Stop()

        self._lastOnSame = False

        if current == self._lineLastClicked:

            self.SendNotify(current, wxEVT_COMMAND_LIST_ITEM_ACTIVATED)

            if newItem and newItem.GetKind() in [1, 2] and (hitResult & ULC_HITTEST_ONITEMCHECK):
                self.CheckItem(newItem, not self.IsItemChecked(newItem))

            return

        else:

            # The first click was on another item, so don't interpret this as
            # a double click, but as a simple click instead
            forceClick = True

    if event.LeftUp():

        if self.DragFinish(event):
            return

        if self._lineSelectSingleOnUp != - 1:
            # select single line
            self.HighlightAll(False)
            self.ReverseHighlight(self._lineSelectSingleOnUp)

        if self._lastOnSame:
            if (current == self._current) and (hitResult == ULC_HITTEST_ONITEMLABEL) and self.HasAGWFlag(ULC_EDIT_LABELS):
                if not self.InReportView() or self.GetLineLabelRect(current).Contains((x, y)):
                    # This wx.SYS_DCLICK_MSEC is not yet wrapped in wxPython...
                    # dclick = wx.SystemSettings.GetMetric(wx.SYS_DCLICK_MSEC)
                    # m_renameTimer->Start(dclick > 0 ? dclick : 250, True)
                    self._renameTimer.Start(250, True)

        self._lastOnSame = False
        self._lineSelectSingleOnUp = -1

    elif event.RightUp():

        if self.DragFinish(event):
            return

    else:

        # This is necessary, because after a DnD operation in
        # from and to ourself, the up event is swallowed by the
        # DnD code. So on next non-up event (which means here and
        # now) self._lineSelectSingleOnUp should be reset.
        self._lineSelectSingleOnUp = -1

    if event.RightDown():

        if self.SendNotify(current, wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK, event.GetPosition()):

            self._lineBeforeLastClicked = self._lineLastClicked
            self._lineLastClicked = current

            # If the item is already selected, do not update the selection.
            # Multi-selections should not be cleared if a selected item is clicked.
            if not self.IsHighlighted(current):
                self.HighlightAll(False)
                self.ChangeCurrent(current)
                self.ReverseHighlight(self._current)

            # Allow generation of context menu event
            event.Skip()

    elif event.MiddleDown():
        self.SendNotify(current, wxEVT_COMMAND_LIST_ITEM_MIDDLE_CLICK)

    elif event.LeftDown() or forceClick:

        self._lineBeforeLastClicked = self._lineLastClicked
        self._lineLastClicked = current

        oldCurrent = self._current
        oldWasSelected = self.IsHighlighted(self._current)

        cmdModifierDown = event.CmdDown()

        if self.IsSingleSel() or not (cmdModifierDown or event.ShiftDown()):

            if self.IsSingleSel() or not self.IsHighlighted(current):
                self.HighlightAll(False)
                self.ChangeCurrent(current)
                self.ReverseHighlight(self._current)

            else: # multi sel & current is highlighted & no mod keys
                self._lineSelectSingleOnUp = current
                self.ChangeCurrent(current) # change focus

        else: # multi sel & either ctrl or shift is down

            if cmdModifierDown:
                self.ChangeCurrent(current)
                self.ReverseHighlight(self._current)

            elif event.ShiftDown():
                self.ChangeCurrent(current)

                lineFrom, lineTo = oldCurrent, current
                shift = 0

                if lineTo < lineFrom:
                    lineTo = lineFrom
                    lineFrom = self._current

                if not self.IsHighlighted(lineFrom):
                    shift = 1

                for i in range(lineFrom+1, lineTo+1):
                    if self.IsHighlighted(i):
                        self.HighlightLine(i, False)
                        self.RefreshLine(i)
                        lineTo -= 1

                self.HighlightLines(lineFrom, lineTo+shift)

            else: # !ctrl, !shift

                # test in the enclosing if should make it impossible
                raise Exception("how did we get here?")

        if newItem:
            if event.LeftDown():
                if newItem.GetKind() in [1, 2] and (hitResult & ULC_HITTEST_ONITEMCHECK):
                    self.CheckItem(newItem, not self.IsItemChecked(newItem))
                if newItem.IsHyperText():
                    self.SetItemVisited(newItem, True)
                    self.HandleHyperLink(newItem)

        if self._current != oldCurrent:
            self.RefreshLine(oldCurrent)

        # forceClick is only set if the previous click was on another item
        self._lastOnSame = not forceClick and (self._current == oldCurrent) and oldWasSelected

        if self.HasAGWFlag(ULC_STICKY_HIGHLIGHT) and self.HasAGWFlag(ULC_STICKY_NOSELEVENT) and self.HasAGWFlag(ULC_SEND_LEFTCLICK):
            self.SendNotify(current, wxEVT_COMMAND_LIST_ITEM_LEFT_CLICK, event.GetPosition())
def DrawDnDArrow(self):
    """ Draws a drag and drop visual representation of an arrow. """
    dc = wx.ClientDC(self)
    lineY = self.GetLineY(self._dropTarget)
    width = self.GetTotalWidth()

    dc.SetPen(wx.Pen(wx.BLACK, 2))
    x, y = self.CalcScrolledPosition(HEADER_OFFSET_X, lineY+2*HEADER_OFFSET_Y)

    # Small triangles capping each end of the insertion line.
    leftCap = [wx.Point(x+1, y-2), wx.Point(x+1, y+4), wx.Point(x+4, y+1)]
    rightCap = [wx.Point(x+width-1, y-2), wx.Point(x+width-1, y+4), wx.Point(x+width-4, y+1)]

    dc.DrawPolygon(leftCap)
    dc.DrawPolygon(rightCap)
    dc.DrawLine(x, y+1, width, y+1)
def DragFinish(self, event):
    """
    Terminate a drag and drop run, emitting ``EVT_LIST_END_DRAG``.

    :param `event`: the :class:`MouseEvent` that ended the drag.

    :return: ``True`` if a drag was actually in progress, ``False`` otherwise.
    """
    if not self._isDragging:
        return False

    # Reset the drag bookkeeping and restore the normal cursor.
    self._isDragging = False
    self._dragCount = 0
    self._dragItem = None
    self.SetCursor(self._oldCursor)
    self.Refresh()

    endEvent = UltimateListEvent(wxEVT_COMMAND_LIST_END_DRAG, self.GetParent().GetId())
    endEvent.SetEventObject(self.GetParent())
    endEvent.m_itemIndex = self._dropTarget
    endEvent.m_pointDrag = event.GetPosition()
    self.GetParent().GetEventHandler().ProcessEvent(endEvent)

    return True
def HandleHyperLink(self, item):
    """
    Emit ``EVT_LIST_ITEM_HYPERLINK`` if *item* is a hypertext item.

    :param `item`: an instance of :class:`UltimateListItem`.
    """
    if not self.IsItemHyperText(item):
        return
    self.SendNotify(item._itemId, wxEVT_COMMAND_LIST_ITEM_HYPERLINK)
def OnHoverTimer(self, event):
    """
    Hover-select timer fired: select the line still under the mouse.

    :param `event`: a :class:`TimerEvent` event to be processed.
    """
    x, y = self.ScreenToClient(wx.GetMousePosition())
    x, y = self.CalcUnscrolledPosition(x, y)
    item, hitResult = self.HitTestLine(self._hoverItem, x, y)

    # Only act if the mouse is still over the line recorded at hover start.
    if not (item and item._itemId == self._hoverItem):
        return
    if self.IsHighlighted(self._hoverItem):
        return

    # Sticky highlight with selection events suppressed stays silent.
    quiet = self.HasAGWFlag(ULC_STICKY_HIGHLIGHT) and self.HasAGWFlag(ULC_STICKY_NOSELEVENT)
    if not quiet:
        self.SendNotify(self._hoverItem, wxEVT_COMMAND_LIST_ITEM_SELECTED)

    self.HighlightAll(False)
    self.ChangeCurrent(self._hoverItem)
    self.ReverseHighlight(self._current)
def MoveToItem(self, item):
    """
    Scrolls the control so that *item* becomes visible.

    :param `item`: the index of the item to scroll into view.
    """

    if item == -1:
        return

    if item >= self.GetItemCount():
        item = self.GetItemCount() - 1

    rect = self.GetLineRect(item)
    client_w, client_h = self.GetClientSize()
    hLine = self.GetLineHeight(item)

    view_x = SCROLL_UNIT_X*self.GetScrollPos(wx.HORIZONTAL)
    view_y = hLine*self.GetScrollPos(wx.VERTICAL)

    if self.InReportView():

        # the next we need the range of lines shown it might be different, so
        # recalculate it
        self.ResetVisibleLinesRange()

        if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):

            # Floor division throughout: Python 3 true division would pass
            # floats to Scroll(), which expects integer scroll units
            # ('//' also matches the old Python 2 integer semantics).
            if rect.y < view_y:
                self.Scroll(-1, rect.y//hLine)
            if rect.y+rect.height+5 > view_y+client_h:
                self.Scroll(-1, (rect.y+rect.height-client_h+hLine)//hLine)

            if wx.Platform == "__WXMAC__":
                # At least on Mac the visible lines value will get reset inside of
                # Scroll *before* it actually scrolls the window because of the
                # Update() that happens there, so it will still have the wrong value.
                # So let's reset it again and wait for it to be recalculated in the
                # next paint event.  I would expect this problem to show up in wxGTK
                # too but couldn't duplicate it there.  Perhaps the order of events
                # is different...  --Robin
                self.ResetVisibleLinesRange()

        else:

            view_y = SCROLL_UNIT_Y*self.GetScrollPos(wx.VERTICAL)
            start_y, height = rect.y, rect.height

            # NOTE(review): these while-conditions are the inverse of the
            # guarding ifs, so the loops never iterate -- kept as-is to
            # preserve the long-standing scrolling behaviour.
            if start_y < view_y:
                while start_y > view_y:
                    start_y -= SCROLL_UNIT_Y
                self.Scroll(-1, start_y//SCROLL_UNIT_Y)

            if start_y + height > view_y + client_h:
                while start_y + height < view_y + client_h:
                    start_y += SCROLL_UNIT_Y
                self.Scroll(-1, (start_y+height-client_h+SCROLL_UNIT_Y)//SCROLL_UNIT_Y)

    else: # !report

        # -1 leaves the scroll position on that axis unchanged.
        sx = sy = -1

        if rect.x-view_x < 5:
            sx = (rect.x - 5)//SCROLL_UNIT_X
        if rect.x+rect.width-5 > view_x+client_w:
            sx = (rect.x + rect.width - client_w + SCROLL_UNIT_X)//SCROLL_UNIT_X

        if rect.y-view_y < 5:
            sy = (rect.y - 5)//hLine
        if rect.y + rect.height - 5 > view_y + client_h:
            sy = (rect.y + rect.height - client_h + hLine)//hLine

        self.Scroll(sx, sy)
# ----------------------------------------------------------------------------
# keyboard handling
# ----------------------------------------------------------------------------
def GetNextActiveItem(self, item, down=True):
"""
Returns the next active item. Used Internally at present.
:param `item`: an instance of :class:`UltimateListItem`;
:param `down`: ``True`` to search downwards for an active item, ``False``
to search upwards.
"""
count = self.GetItemCount()
initialItem = item
while 1:
if item >= count or item < 0:
return initialItem
listItem = CreateListItem(item, 0)
listItem = self.GetItem(listItem, 0)
if listItem.IsEnabled():
return item
item = (down and [item+1] or [item-1])[0]
    def OnArrowChar(self, newCurrent, event):
        """
        Handles the keyboard arrows key events.

        :param `newCurrent`: an integer specifying the index of the item which
         should become the new current (focused) item;
        :param `event`: a :class:`KeyEvent` event to be processed.
        """

        oldCurrent = self._current
        # skip over disabled items, scanning in the direction of the movement
        newCurrent = self.GetNextActiveItem(newCurrent, newCurrent > oldCurrent)

        # in single selection we just ignore Shift as we can't select several
        # items anyhow
        if event.ShiftDown() and not self.IsSingleSel():

            self.ChangeCurrent(newCurrent)

            # refresh the old focus to remove it
            self.RefreshLine(oldCurrent)

            # select all the items between the old and the new one
            if oldCurrent > newCurrent:
                # normalize the range; after ChangeCurrent() above,
                # self._current already holds the original newCurrent,
                # so this is effectively a swap
                newCurrent = oldCurrent
                oldCurrent = self._current

            self.HighlightLines(oldCurrent, newCurrent)

        else: # !shift

            # all previously selected items are unselected unless ctrl is held
            # in a multi-selection control
            if not event.ControlDown() or self.IsSingleSel():
                self.HighlightAll(False)

            self.ChangeCurrent(newCurrent)

            # refresh the old focus to remove it
            self.RefreshLine(oldCurrent)

            if not event.ControlDown() or self.IsSingleSel():
                self.HighlightLine(self._current, True)
                self.RefreshLine(self._current)

        # scroll so that the new current item is visible
        self.MoveToFocus()
def OnKeyDown(self, event):
"""
Handles the ``wx.EVT_KEY_DOWN`` event for :class:`UltimateListMainWindow`.
:param `event`: a :class:`KeyEvent` event to be processed.
"""
parent = self.GetParent()
# we propagate the key event upwards
ke = event.Clone()
ke.SetEventObject(parent)
if parent.GetEventHandler().ProcessEvent(ke):
event.Skip()
return
event.Skip()
def OnKeyUp(self, event):
"""
Handles the ``wx.EVT_KEY_UP`` event for :class:`UltimateListMainWindow`.
:param `event`: a :class:`KeyEvent` event to be processed.
"""
parent = self.GetParent()
# we propagate the key event upwards
ke = event.Clone()
ke.SetEventObject(parent)
if parent.GetEventHandler().ProcessEvent(ke):
return
event.Skip()
    def OnChar(self, event):
        """
        Handles the ``wx.EVT_CHAR`` event for :class:`UltimateListMainWindow`.

        Dispatches navigation keys (arrows, page, home/end) to
        :meth:`~UltimateListMainWindow.OnArrowChar`, activates items on
        space/enter and lets the parent intercept everything else.

        :param `event`: a :class:`KeyEvent` event to be processed.
        """

        parent = self.GetParent()

        # a virtual control without items has nothing to navigate
        if self.IsVirtual() and self.GetItemCount() == 0:
            event.Skip()
            return

        # we send a list_key event up
        if self.HasCurrent():
            le = UltimateListEvent(wxEVT_COMMAND_LIST_KEY_DOWN, self.GetParent().GetId())
            le.m_itemIndex = self._current
            le.m_item = self.GetLine(self._current).GetItem(0, le.m_item)
            le.m_code = event.GetKeyCode()
            le.SetEventObject(parent)
            parent.GetEventHandler().ProcessEvent(le)

        keyCode = event.GetKeyCode()

        # navigation keys are always handled here; anything else may first be
        # consumed by the parent
        if keyCode not in [wx.WXK_UP, wx.WXK_DOWN, wx.WXK_RIGHT, wx.WXK_LEFT, \
                           wx.WXK_PAGEUP, wx.WXK_PAGEDOWN, wx.WXK_END, wx.WXK_HOME]:

            # propagate the char event upwards
            ke = event.Clone()
            ke.SetEventObject(parent)
            if parent.GetEventHandler().ProcessEvent(ke):
                return

        if event.GetKeyCode() == wx.WXK_TAB:
            # hand TAB over to the standard keyboard navigation machinery
            nevent = wx.NavigationKeyEvent()
            nevent.SetWindowChange(event.ControlDown())
            nevent.SetDirection(not event.ShiftDown())
            nevent.SetEventObject(self.GetParent().GetParent())
            nevent.SetCurrentFocus(self._parent)
            if self.GetParent().GetParent().GetEventHandler().ProcessEvent(nevent):
                return

        # no current item -> nothing to do
        if not self.HasCurrent():
            event.Skip()
            return

        keyCode = event.GetKeyCode()

        if keyCode == wx.WXK_UP:
            if self._current > 0:
                self.OnArrowChar(self._current - 1, event)

                if self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
                    self._dirty = True

        elif keyCode == wx.WXK_DOWN:
            if self._current < self.GetItemCount() - 1:
                self.OnArrowChar(self._current + 1, event)

                if self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
                    self._dirty = True

        elif keyCode == wx.WXK_END:
            if not self.IsEmpty():
                self.OnArrowChar(self.GetItemCount() - 1, event)
                self._dirty = True

        elif keyCode == wx.WXK_HOME:
            if not self.IsEmpty():
                self.OnArrowChar(0, event)
                self._dirty = True

        elif keyCode == wx.WXK_PAGEUP:
            # in report view jump a full page; otherwise to the top of the page
            steps = (self.InReportView() and [self._linesPerPage - 1] or [self._current % self._linesPerPage])[0]

            index = self._current - steps
            if index < 0:
                index = 0

            self.OnArrowChar(index, event)
            self._dirty = True

        elif keyCode == wx.WXK_PAGEDOWN:
            # in report view jump a full page; otherwise to the bottom of the page
            steps = (self.InReportView() and [self._linesPerPage - 1] or [self._linesPerPage - (self._current % self._linesPerPage) - 1])[0]

            index = self._current + steps
            count = self.GetItemCount()

            if index >= count:
                index = count - 1

            self.OnArrowChar(index, event)
            self._dirty = True

        elif keyCode == wx.WXK_LEFT:
            # horizontal movement only makes sense outside report view
            if not self.InReportView():

                index = self._current - self._linesPerPage
                if index < 0:
                    index = 0

                self.OnArrowChar(index, event)

        elif keyCode == wx.WXK_RIGHT:
            if not self.InReportView():

                index = self._current + self._linesPerPage

                count = self.GetItemCount()
                if index >= count:
                    index = count - 1

                self.OnArrowChar(index, event)

        elif keyCode == wx.WXK_SPACE:
            if self.IsSingleSel():

                if event.ControlDown():
                    self.ReverseHighlight(self._current)
                else: # normal space press
                    self.SendNotify(self._current, wxEVT_COMMAND_LIST_ITEM_ACTIVATED)

            else:
                # select it in ReverseHighlight() below if unselected
                self.ReverseHighlight(self._current)

        elif keyCode in [wx.WXK_RETURN, wx.WXK_EXECUTE, wx.WXK_NUMPAD_ENTER]:
            self.SendNotify(self._current, wxEVT_COMMAND_LIST_ITEM_ACTIVATED)

        else:
            event.Skip()
# ----------------------------------------------------------------------------
# focus handling
# ----------------------------------------------------------------------------
def OnSetFocus(self, event):
"""
Handles the ``wx.EVT_SET_FOCUS`` event for :class:`UltimateListMainWindow`.
:param `event`: a :class:`FocusEvent` event to be processed.
"""
if self.GetParent():
event = wx.FocusEvent(wx.wxEVT_SET_FOCUS, self.GetParent().GetId())
event.SetEventObject(self.GetParent())
if self.GetParent().GetEventHandler().ProcessEvent(event):
return
# wxGTK sends us EVT_SET_FOCUS events even if we had never got
# EVT_KILL_FOCUS before which means that we finish by redrawing the items
# which are already drawn correctly resulting in horrible flicker - avoid
# it
if not self._hasFocus:
self._hasFocus = True
self.Refresh()
def OnKillFocus(self, event):
"""
Handles the ``wx.EVT_KILL_FOCUS`` event for :class:`UltimateListMainWindow`.
:param `event`: a :class:`FocusEvent` event to be processed.
"""
if self.GetParent():
event = wx.FocusEvent(wx.wxEVT_KILL_FOCUS, self.GetParent().GetId())
event.SetEventObject(self.GetParent())
if self.GetParent().GetEventHandler().ProcessEvent(event):
return
self._hasFocus = False
self.Refresh()
def DrawImage(self, index, dc, x, y, enabled):
"""
Draws one of the item images.
:param `index`: the index of the image inside the image list;
:param `dc`: an instance of :class:`wx.DC`;
:param `x`: the x position where to draw the image;
:param `y`: the y position where to draw the image;
:param `enabled`: ``True`` if the item is enabled, ``False`` if it is disabled.
"""
if self.HasAGWFlag(ULC_ICON) and self._normal_image_list:
imgList = (enabled and [self._normal_image_list] or [self._normal_grayed_image_list])[0]
imgList.Draw(index, dc, x, y, wx.IMAGELIST_DRAW_TRANSPARENT)
elif self.HasAGWFlag(ULC_SMALL_ICON) and self._small_image_list:
imgList = (enabled and [self._small_image_list] or [self._small_grayed_image_list])[0]
imgList.Draw(index, dc, x, y, wx.IMAGELIST_DRAW_TRANSPARENT)
elif self.HasAGWFlag(ULC_LIST) and self._small_image_list:
imgList = (enabled and [self._small_image_list] or [self._small_grayed_image_list])[0]
imgList.Draw(index, dc, x, y, wx.IMAGELIST_DRAW_TRANSPARENT)
elif self.InReportView() and self._small_image_list:
imgList = (enabled and [self._small_image_list] or [self._small_grayed_image_list])[0]
imgList.Draw(index, dc, x, y, wx.IMAGELIST_DRAW_TRANSPARENT)
def DrawCheckbox(self, dc, x, y, kind, checked, enabled):
"""
Draws the item checkbox/radiobutton image.
:param `dc`: an instance of :class:`wx.DC`;
:param `x`: the x position where to draw the image;
:param `y`: the y position where to draw the image;
:param `kind`: may be one of the following integers:
=============== ==========================
Item Kind Description
=============== ==========================
0 A normal item
1 A checkbox-like item
2 A radiobutton-type item
=============== ==========================
:param `checked`: ``True`` if the item is checked, ``False`` otherwise;
:param `enabled`: ``True`` if the item is enabled, ``False`` if it is disabled.
"""
imgList = (enabled and [self._image_list_check] or [self._grayed_check_list])[0]
if kind == 1:
# checkbox
index = (checked and [0] or [1])[0]
else:
# radiobutton
index = (checked and [2] or [3])[0]
imgList.Draw(index, dc, x, y, wx.IMAGELIST_DRAW_TRANSPARENT)
def GetCheckboxImageSize(self):
""" Returns the checkbox/radiobutton image size. """
bmp = self._image_list_check.GetBitmap(0)
return bmp.GetWidth(), bmp.GetHeight()
def GetImageSize(self, index):
"""
Returns the image size for the item.
:param `index`: the image index.
"""
width = height = 0
if self.HasAGWFlag(ULC_ICON) and self._normal_image_list:
for indx in index:
w, h = self._normal_image_list.GetSize(indx)
width += w + MARGIN_BETWEEN_TEXT_AND_ICON
height = max(height, h)
elif self.HasAGWFlag(ULC_SMALL_ICON) and self._small_image_list:
for indx in index:
w, h = self._small_image_list.GetSize(indx)
width += w + MARGIN_BETWEEN_TEXT_AND_ICON
height = max(height, h)
elif self.HasAGWFlag(ULC_LIST) and self._small_image_list:
for indx in index:
w, h = self._small_image_list.GetSize(indx)
width += w + MARGIN_BETWEEN_TEXT_AND_ICON
height = max(height, h)
elif self.InReportView() and self._small_image_list:
for indx in index:
w, h = self._small_image_list.GetSize(indx)
width += w + MARGIN_BETWEEN_TEXT_AND_ICON
height = max(height, h)
return width, height
def GetTextLength(self, s):
"""
Returns the text width for the input string.
:param `s`: the string to measure.
"""
dc = wx.ClientDC(self)
dc.SetFont(self.GetFont())
lw, lh, dummy = dc.GetFullMultiLineTextExtent(s)
return lw + AUTOSIZE_COL_MARGIN
def SetImageList(self, imageList, which):
"""
Sets the image list associated with the control.
:param `imageList`: an instance of :class:`wx.ImageList` or an instance of :class:`PyImageList`;
:param `which`: one of ``wx.IMAGE_LIST_NORMAL``, ``wx.IMAGE_LIST_SMALL``,
``wx.IMAGE_LIST_STATE`` (the last is unimplemented).
:note: Using :class:`PyImageList` enables you to have images of different size inside the
image list. In your derived class, instead of doing this::
imageList = wx.ImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
You should do this::
imageList = PyImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
"""
self._dirty = True
if isinstance(imageList, PyImageList):
# We have a custom PyImageList with variable image sizes
cls = PyImageList
else:
cls = wx.ImageList
# calc the spacing from the icon size
width = height = 0
if imageList and imageList.GetImageCount():
width, height = imageList.GetSize(0)
if which == wx.IMAGE_LIST_NORMAL:
self._normal_image_list = imageList
self._normal_grayed_image_list = cls(width, height, True, 0)
for ii in range(imageList.GetImageCount()):
bmp = imageList.GetBitmap(ii)
newbmp = MakeDisabledBitmap(bmp)
self._normal_grayed_image_list.Add(newbmp)
self._normal_spacing = width + 8
if which == wx.IMAGE_LIST_SMALL:
self._small_image_list = imageList
self._small_spacing = width + 14
self._small_grayed_image_list = cls(width, height, True, 0)
for ii in range(imageList.GetImageCount()):
bmp = imageList.GetBitmap(ii)
newbmp = MakeDisabledBitmap(bmp)
self._small_grayed_image_list.Add(newbmp)
self._lineHeight = 0 # ensure that the line height will be recalc'd
self.ResetLineDimensions()
def SetImageListCheck(self, sizex, sizey, imglist=None):
"""
Sets the checkbox/radiobutton image list.
:param `sizex`: the width of the bitmaps in the `imglist`;
:param `sizey`: the height of the bitmaps in the `imglist`;
:param `imglist`: an instance of :class:`wx.ImageList`.
"""
# Image list to hold disabled versions of each control
self._grayed_check_list = wx.ImageList(sizex, sizey, True, 0)
if imglist is None:
self._image_list_check = wx.ImageList(sizex, sizey)
# Get the Checkboxes
self._image_list_check.Add(self.GetControlBmp(checkbox=True,
checked=True,
enabled=True,
x=sizex, y=sizey))
self._grayed_check_list.Add(self.GetControlBmp(checkbox=True,
checked=True,
enabled=False,
x=sizex, y=sizey))
self._image_list_check.Add(self.GetControlBmp(checkbox=True,
checked=False,
enabled=True,
x=sizex, y=sizey))
self._grayed_check_list.Add(self.GetControlBmp(checkbox=True,
checked=False,
enabled=False,
x=sizex, y=sizey))
# Get the Radio Buttons
self._image_list_check.Add(self.GetControlBmp(checkbox=False,
checked=True,
enabled=True,
x=sizex, y=sizey))
self._grayed_check_list.Add(self.GetControlBmp(checkbox=False,
checked=True,
enabled=False,
x=sizex, y=sizey))
self._image_list_check.Add(self.GetControlBmp(checkbox=False,
checked=False,
enabled=True,
x=sizex, y=sizey))
self._grayed_check_list.Add(self.GetControlBmp(checkbox=False,
checked=False,
enabled=False,
x=sizex, y=sizey))
else:
sizex, sizey = imglist.GetSize(0)
self._image_list_check = imglist
for ii in range(self._image_list_check.GetImageCount()):
bmp = self._image_list_check.GetBitmap(ii)
newbmp = MakeDisabledBitmap(bmp)
self._grayed_check_list.Add(newbmp)
self._dirty = True
if imglist:
self.RecalculatePositions()
def GetControlBmp(self, checkbox=True, checked=False, enabled=True, x=16, y=16):
"""
Returns a native looking checkbox or radio button bitmap.
:param `checkbox`: ``True`` to get a checkbox image, ``False`` for a radiobutton
one;
:param `checked`: ``True`` if the control is marked, ``False`` if it is not;
:param `enabled`: ``True`` if the control is enabled, ``False`` if it is not;
:param `x`: the width of the bitmap, in pixels;
:param `y`: the height of the bitmap, in pixels.
"""
bmp = wx.Bitmap(x, y)
mdc = wx.MemoryDC(bmp)
mdc.SetBrush(wx.BLACK_BRUSH)
mdc.Clear()
render = wx.RendererNative.Get()
if checked:
flag = wx.CONTROL_CHECKED
else:
flag = 0
if not enabled:
flag |= wx.CONTROL_DISABLED
if checkbox:
render.DrawCheckBox(self, mdc, (0, 0, x, y), flag)
else:
if _VERSION_STRING < "2.9":
render.DrawRadioButton(self, mdc, (0, 0, x, y), flag)
else:
render.DrawRadioBitmap(self, mdc, (0, 0, x, y), flag)
mdc.SelectObject(wx.NullBitmap)
return bmp
def SetItemSpacing(self, spacing, isSmall=False):
"""
Sets the spacing between item texts and icons.
:param `spacing`: the spacing between item texts and icons, in pixels;
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
"""
self._dirty = True
if isSmall:
self._small_spacing = spacing
else:
self._normal_spacing = spacing
def GetItemSpacing(self, isSmall=False):
"""
Returns the spacing between item texts and icons, in pixels.
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
"""
return (isSmall and [self._small_spacing] or [self._normal_spacing])[0]
# ----------------------------------------------------------------------------
# columns
# ----------------------------------------------------------------------------
    def SetColumn(self, col, item):
        """
        Sets information about this column.

        :param `col`: an integer specifying the column index;
        :param `item`: an instance of :class:`UltimateListItem`.

        :note: if `item` carries a width of ``ULC_AUTOSIZE_USEHEADER``, the
         width is resolved (in place, on the caller's `item`) to the pixel
         length of the column label.
        """

        column = self._columns[col]

        if item._width == ULC_AUTOSIZE_USEHEADER:
            item._width = self.GetTextLength(item._text)

        column.SetItem(item)

        headerWin = self.GetListCtrl()._headerWin
        if headerWin:
            headerWin._dirty = True

        self._dirty = True

        # invalidate the cached header width as it has to be recalculated
        self._headerWidth = 0
    def SetColumnWidth(self, col, width):
        """
        Sets the column width.

        :param `col`: an integer specifying the column index;
        :param `width`: can be a width in pixels or ``wx.LIST_AUTOSIZE`` (-1) or
         ``wx.LIST_AUTOSIZE_USEHEADER`` (-2) or ``ULC_AUTOSIZE_FILL`` (-3).
         ``wx.LIST_AUTOSIZE`` will resize the column to the length of its longest
         item. ``wx.LIST_AUTOSIZE_USEHEADER`` will resize the column to the
         length of the header (Win32) or 80 pixels (other platforms).
         ``ULC_AUTOSIZE_FILL`` will resize the column fill the remaining width
         of the window.

        :note: In small or normal icon view, col must be -1, and the column width
         is set for all columns.
        """

        if col < 0:
            raise Exception("invalid column index")

        if not self.InReportView() and not self.InTileView() and not self.HasAGWFlag(ULC_HEADER_IN_ALL_VIEWS):
            raise Exception("SetColumnWidth() can only be called in report/tile modes or with the ULC_HEADER_IN_ALL_VIEWS flag set.")

        self._dirty = True

        headerWin = self.GetListCtrl()._headerWin
        footerWin = self.GetListCtrl()._footerWin

        if headerWin:
            headerWin._dirty = True
        if footerWin:
            footerWin._dirty = True

        column = self._columns[col]
        count = self.GetItemCount()

        if width == ULC_AUTOSIZE_FILL:
            # remember this column as the one absorbing leftover width; keep
            # its current width for now (it is stretched later on)
            width = self.GetColumnWidth(col)
            if width == 0:
                width = WIDTH_COL_DEFAULT
            self._resizeColumn = col

        elif width == ULC_AUTOSIZE_USEHEADER:
            # wide enough for the header label ...
            width = self.GetTextLength(column.GetText())
            width += 2*EXTRA_WIDTH

            # ... plus a checkbox/radiobutton, if the header carries one
            if column.GetKind() in [1, 2]:
                ix, iy = self._owner.GetCheckboxImageSize()
                width += ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE

            # check for column header's image availability
            images = column.GetImage()
            for img in images:
                if self._small_image_list:
                    ix, iy = self._small_image_list.GetSize(img)
                    width += ix + HEADER_IMAGE_MARGIN_IN_REPORT_MODE

        elif width == ULC_AUTOSIZE:

            if self.IsVirtual() or not self.InReportView():
                # TODO: determine the max width somehow...
                width = WIDTH_COL_DEFAULT
            else: # !virtual
                maxW = AUTOSIZE_COL_MARGIN

                # if the cached column width isn't valid then recalculate it
                # by measuring every item of the column
                if self._aColWidths[col]._bNeedsUpdate:
                    for i in range(count):
                        line = self.GetLine(i)
                        itemData = line._items[col]

                        item = UltimateListItem()
                        item = itemData.GetItem(item)
                        itemWidth = self.GetItemWidthWithImage(item)
                        if itemWidth > maxW and not item._overFlow:
                            maxW = itemWidth

                    self._aColWidths[col]._bNeedsUpdate = False
                    self._aColWidths[col]._nMaxWidth = maxW

                maxW = self._aColWidths[col]._nMaxWidth
                width = maxW + AUTOSIZE_COL_MARGIN

        column.SetWidth(width)

        # invalidate the cached header/footer widths as they have to be recalculated
        self._headerWidth = 0
        self._footerWidth = 0

        if footerWin:
            footerWin.Refresh()
def GetHeaderWidth(self):
""" Returns the header window width, in pixels. """
if not self._headerWidth:
count = self.GetColumnCount()
for col in range(count):
if not self.IsColumnShown(col):
continue
self._headerWidth += self.GetColumnWidth(col)
if self.HasAGWFlag(ULC_FOOTER):
self._footerWidth = self._headerWidth
return self._headerWidth
def GetColumn(self, col):
"""
Returns information about this column.
:param `col`: an integer specifying the column index.
"""
item = UltimateListItem()
column = self._columns[col]
item = column.GetItem(item)
return item
def GetColumnWidth(self, col):
"""
Returns the column width for the input column.
:param `col`: an integer specifying the column index.
"""
column = self._columns[col]
return column.GetWidth()
def GetTotalWidth(self):
""" Returns the total width of the columns in :class:`UltimateListCtrl`. """
width = 0
for column in self._columns:
width += column.GetWidth()
return width
# ----------------------------------------------------------------------------
# item state
# ----------------------------------------------------------------------------
    def SetItem(self, item):
        """
        Sets information about the item.

        :param `item`: an instance of :class:`UltimateListItemData`.

        :raise: `Exception` when the item index is out of range.
        """

        id = item._itemId
        if id < 0 or id >= self.GetItemCount():
            raise Exception("invalid item index in SetItem")

        if not self.IsVirtual():
            line = self.GetLine(id)
            line.SetItem(item._col, item)

            # Set item state if user wants
            if item._mask & ULC_MASK_STATE:
                self.SetItemState(item._itemId, item._state, item._state)

            if self.InReportView():
                # update the Max Width Cache if needed
                width = self.GetItemWidthWithImage(item)

                if width > self._aColWidths[item._col]._nMaxWidth:
                    self._aColWidths[item._col]._nMaxWidth = width
                    self._aColWidths[item._col]._bNeedsUpdate = True

                if self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
                    # row height may change with the new content
                    line.ResetDimensions()

        # update the item on screen
        if self.InReportView():
            rectItem = self.GetItemRect(id)
            self.RefreshRect(rectItem)
    def SetItemStateAll(self, state, stateMask):
        """
        Sets the item state flags for all the items.

        :param `state`: any combination of the following bits:

         ============================ ========= ==============================
         State Bits                   Hex Value Description
         ============================ ========= ==============================
         ``ULC_STATE_DONTCARE``             0x0 Don't care what the state is
         ``ULC_STATE_DROPHILITED``          0x1 The item is highlighted to receive a drop event
         ``ULC_STATE_FOCUSED``              0x2 The item has the focus
         ``ULC_STATE_SELECTED``             0x4 The item is selected
         ``ULC_STATE_CUT``                  0x8 The item is in the cut state
         ``ULC_STATE_DISABLED``            0x10 The item is disabled
         ``ULC_STATE_FILTERED``            0x20 The item has been filtered
         ``ULC_STATE_INUSE``               0x40 The item is in use
         ``ULC_STATE_PICKED``              0x80 The item has been picked
         ``ULC_STATE_SOURCE``             0x100 The item is a drag and drop source
         ============================ ========= ==============================

        :param `stateMask`: the bitmask for the state flag.

        :note: The valid state flags are influenced by the value of the state mask.
        """

        if self.IsEmpty():
            return

        # first deal with selection
        if stateMask & ULC_STATE_SELECTED:

            # set/clear select state
            if self.IsVirtual():
                # optimized version for virtual listctrl.
                self._selStore.SelectRange(0, self.GetItemCount() - 1, state==ULC_STATE_SELECTED)
                self.Refresh()

            elif state & ULC_STATE_SELECTED:
                # select every item, one by one
                count = self.GetItemCount()
                for i in range(count):
                    self.SetItemState(i, ULC_STATE_SELECTED, ULC_STATE_SELECTED)

            else:
                # clear for non virtual (somewhat optimized by using GetNextItem())
                i = -1
                while 1:
                    i += 1
                    # stop as soon as there is no selected item at or past i
                    if self.GetNextItem(i, ULC_NEXT_ALL, ULC_STATE_SELECTED) == -1:
                        break
                    self.SetItemState(i, 0, ULC_STATE_SELECTED)

        if self.HasCurrent() and state == 0 and stateMask & ULC_STATE_FOCUSED:
            # unfocus all: only one item can be focussed, so clearing focus for
            # all items is simply clearing focus of the focussed item.
            self.SetItemState(self._current, state, stateMask)

        #(setting focus to all items makes no sense, so it is not handled here.)
    def SetItemState(self, litem, state, stateMask):
        """
        Sets the item state flags for the input item.

        :param `litem`: the index of the item; if defaulted to -1, the state flag
         will be set for all the items;
        :param `state`: the item state flag;
        :param `stateMask`: the bitmask for the state flag.

        :raise: `Exception` when the item index is out of range.

        :see: :meth:`~UltimateListMainWindow.SetItemStateAll` for a list of valid state flags.
        """

        if litem == -1:
            self.SetItemStateAll(state, stateMask)
            return

        if litem < 0 or litem >= self.GetItemCount():
            raise Exception("invalid item index in SetItemState")

        oldCurrent = self._current
        item = litem    # safe because of the check above

        # do we need to change the focus?
        if stateMask & ULC_STATE_FOCUSED:

            if state & ULC_STATE_FOCUSED:
                # don't do anything if this item is already focused
                if item != self._current:
                    self.ChangeCurrent(item)

                    if oldCurrent != - 1:
                        # in single-selection mode the old current item also
                        # loses its selection
                        if self.IsSingleSel():
                            self.HighlightLine(oldCurrent, False)

                        self.RefreshLine(oldCurrent)

                    self.RefreshLine(self._current)

            else: # unfocus
                # don't do anything if this item is not focused
                if item == self._current:
                    self.ResetCurrent()

                    if self.IsSingleSel():
                        # we must unselect the old current item as well or we
                        # might end up with more than one selected item in a
                        # single selection control
                        self.HighlightLine(oldCurrent, False)

                    self.RefreshLine(oldCurrent)

        # do we need to change the selection state?
        if stateMask & ULC_STATE_SELECTED:

            on = (state & ULC_STATE_SELECTED) != 0

            if self.IsSingleSel():
                if on:
                    # selecting the item also makes it the focused one in the
                    # single sel mode
                    if self._current != item:
                        self.ChangeCurrent(item)

                        if oldCurrent != - 1:
                            self.HighlightLine(oldCurrent, False)
                            self.RefreshLine(oldCurrent)

                else: # off
                    # only the current item may be selected anyhow
                    if item != self._current:
                        return

            if self.HighlightLine(item, on):
                self.RefreshLine(item)
def GetItemState(self, item, stateMask):
"""
Returns the item state flags for the input item.
:param `item`: the index of the item;
:param `stateMask`: the bitmask for the state flag.
:see: :meth:`~UltimateListMainWindow.SetItemStateAll` for a list of valid state flags.
"""
if item < 0 or item >= self.GetItemCount():
raise Exception("invalid item index in GetItemState")
ret = ULC_STATE_DONTCARE
if stateMask & ULC_STATE_FOCUSED:
if item == self._current:
ret |= ULC_STATE_FOCUSED
if stateMask & ULC_STATE_SELECTED:
if self.IsHighlighted(item):
ret |= ULC_STATE_SELECTED
return ret
def GetItem(self, item, col=0):
"""
Returns the information about the input item.
:param `item`: an instance of :class:`UltimateListItem`;
:param `col`: the column to which the item belongs to.
"""
if item._itemId < 0 or item._itemId >= self.GetItemCount():
raise Exception("invalid item index in GetItem")
line = self.GetLine(item._itemId)
item = line.GetItem(col, item)
# Get item state if user wants it
if item._mask & ULC_MASK_STATE:
item._state = self.GetItemState(item._itemId, ULC_STATE_SELECTED | ULC_STATE_FOCUSED)
return item
    def CheckItem(self, item, checked=True, sendEvent=True):
        """
        Actually checks/unchecks an item, sending (eventually) the two
        events ``EVT_LIST_ITEM_CHECKING`` / ``EVT_LIST_ITEM_CHECKED``.

        :param `item`: an instance of :class:`UltimateListItem`;
        :param `checked`: ``True`` to check an item, ``False`` to uncheck it;
        :param `sendEvent`: ``True`` to send a {UltimateListEvent}, ``False`` otherwise.

        :note: This method is meaningful only for checkbox-like and radiobutton-like items.
        :note: The ``CHECKING`` event is vetoable: if the parent's handler
         returns ``True`` the change is abandoned.
        """

        # Should we raise an error here?!?
        if item.GetKind() == 0 or not item.IsEnabled():
            return

        if sendEvent:
            # give the parent a chance to veto the change
            parent = self.GetParent()
            le = UltimateListEvent(wxEVT_COMMAND_LIST_ITEM_CHECKING, parent.GetId())
            le.m_itemIndex = item._itemId
            le.m_item = item
            le.SetEventObject(parent)

            if parent.GetEventHandler().ProcessEvent(le):
                # Blocked by user
                return

        item.Check(checked)
        self.SetItem(item)
        self.RefreshLine(item._itemId)

        if not sendEvent:
            return

        # notify the parent that the change went through
        # (`parent` is defined above: sendEvent is True on this path)
        le = UltimateListEvent(wxEVT_COMMAND_LIST_ITEM_CHECKED, parent.GetId())
        le.m_itemIndex = item._itemId
        le.m_item = item
        le.SetEventObject(parent)
        parent.GetEventHandler().ProcessEvent(le)
    def AutoCheckChild(self, isChecked, column):
        """
        Checks/unchecks all the items.

        :param `isChecked`: the checked state to invert: every item is set to
         ``not isChecked``;
        :param `column`: the column to which the items belongs to.

        :note: This method is meaningful only for checkbox-like and radiobutton-like items.
        """

        for indx in range(self.GetItemCount()):
            item = CreateListItem(indx, column)
            newItem = self.GetItem(item, column)
            # NOTE(review): the applied state is `not isChecked` — presumably
            # callers pass the column header's *current* state and each row
            # receives the flipped value; confirm against the header-window
            # callers before relying on the polarity.
            self.CheckItem(newItem, not isChecked, False)
    def AutoToggleChild(self, column):
        """
        Toggles all the checkbox-like items.

        :param `column`: the column to which the items belongs to.

        :note: This method is meaningful only for checkbox-like and radiobutton-like items.
        """

        for indx in range(self.GetItemCount()):
            item = CreateListItem(indx, column)
            newItem = self.GetItem(item, column)

            # only checkbox-like items (kind == 1) are toggled
            if newItem.GetKind() != 1:
                continue

            # NOTE(review): `item.IsChecked()` is read from the object passed
            # into GetItem — this presumably aliases `newItem` (GetItem fills
            # the passed item in place); verify, otherwise the toggle would
            # read a blank item's state.
            self.CheckItem(newItem, not item.IsChecked(), False)
def IsItemChecked(self, item):
"""
Returns whether an item is checked or not.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.IsChecked()
def IsItemEnabled(self, item):
"""
Returns whether an item is enabled or not.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.IsEnabled()
def EnableItem(self, item, enable=True):
"""
Enables/disables an item.
:param `item`: an instance of :class:`UltimateListItem`;
:param `enable`: ``True`` to enable the item, ``False`` otherwise.
"""
item = self.GetItem(item, 0)
if item.IsEnabled() == enable:
return False
item.Enable(enable)
wnd = item.GetWindow()
# Handles the eventual window associated to the item
if wnd:
wnd.Enable(enable)
self.SetItem(item)
return True
def GetItemKind(self, item):
"""
Returns the item kind.
:param `item`: an instance of :class:`UltimateListItem`.
:see: :meth:`~UltimateListMainWindow.SetItemKind` for a list of valid item kinds.
"""
item = self.GetItem(item, item._col)
return item.GetKind()
def SetItemKind(self, item, kind):
"""
Sets the item kind.
:param `item`: an instance of :class:`UltimateListItem`;
:param `kind`: may be one of the following integers:
=============== ==========================
Item Kind Description
=============== ==========================
0 A normal item
1 A checkbox-like item
2 A radiobutton-type item
=============== ==========================
"""
item = self.GetItem(item, item._col)
item.SetKind(kind)
self.SetItem(item)
return True
def IsItemHyperText(self, item):
"""
Returns whether an item is hypertext or not.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.IsHyperText()
def SetItemHyperText(self, item, hyper=True):
"""
Sets whether the item is hypertext or not.
:param `item`: an instance of :class:`UltimateListItem`;
:param `hyper`: ``True`` to have an item with hypertext behaviour, ``False`` otherwise.
"""
item = self.GetItem(item, item._col)
item.SetHyperText(hyper)
self.SetItem(item)
return True
def GetHyperTextFont(self):
"""Returns the font used to render an hypertext item."""
return self._hypertextfont
def SetHyperTextFont(self, font):
"""
Sets the font used to render hypertext items.
:param `font`: a valid :class:`wx.Font` instance.
"""
self._hypertextfont = font
self._dirty = True
def SetHyperTextNewColour(self, colour):
"""
Sets the colour used to render a non-visited hypertext item.
:param `colour`: a valid :class:`wx.Colour` instance.
"""
self._hypertextnewcolour = colour
self._dirty = True
def GetHyperTextNewColour(self):
""" Returns the colour used to render a non-visited hypertext item. """
return self._hypertextnewcolour
def SetHyperTextVisitedColour(self, colour):
"""
Sets the colour used to render a visited hypertext item.
:param `colour`: a valid :class:`wx.Colour` instance.
"""
self._hypertextvisitedcolour = colour
self._dirty = True
def GetHyperTextVisitedColour(self):
""" Returns the colour used to render a visited hypertext item. """
return self._hypertextvisitedcolour
def SetItemVisited(self, item, visited=True):
"""
Sets whether an hypertext item was visited.
:param `item`: an instance of :class:`UltimateListItem`;
:param `visited`: ``True`` to mark an hypertext item as visited, ``False`` otherwise.
"""
newItem = self.GetItem(item, item._col)
newItem.SetVisited(visited)
self.SetItem(newItem)
return True
def GetItemVisited(self, item):
"""
Returns whether an hypertext item was visited.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.GetVisited()
def GetItemWindow(self, item):
"""
Returns the window associated to the item (if any).
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.GetWindow()
    def SetItemWindow(self, item, wnd, expand=False):
        """
        Sets the window for the given item.

        :param `item`: an instance of :class:`UltimateListItem`;
        :param `wnd`: if not ``None``, a non-toplevel window to be displayed next to
         the item;
        :param `expand`: ``True`` to expand the column where the item/subitem lives,
         so that the window will be fully visible.

        :raise: `Exception` when the control is not in report mode with the
         ``ULC_HAS_VARIABLE_ROW_HEIGHT`` style.
        """

        if not self.InReportView() or not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
            raise Exception("Widgets are only allowed in report mode and with the ULC_HAS_VARIABLE_ROW_HEIGHT style.")

        item = self.GetItem(item, item._col)

        if wnd is not None:
            self._hasWindows = True
            if item not in self._itemWithWindow:
                # first widget on this item: just track it
                self._itemWithWindow.append(item)
            else:
                # item already had a widget: drop the old one first
                self.DeleteItemWindow(item)
        else:
            # wnd is None: detach any existing widget
            self.DeleteItemWindow(item)

        item.SetWindow(wnd, expand)
        self.SetItem(item)
        self.RecalculatePositions()
        self.Refresh()
def DeleteItemWindow(self, item):
"""
Deletes the window associated to an item (if any).
:param `item`: an instance of :class:`UltimateListItem`.
"""
if item.GetWindow() is None:
return
item.DeleteWindow()
if item in self._itemWithWindow:
self._itemWithWindow.remove(item)
self.SetItem(item)
self.RecalculatePositions()
def GetItemWindowEnabled(self, item):
"""
Returns whether the window associated to the item is enabled.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.GetWindowEnabled()
def SetItemWindowEnabled(self, item, enable=True):
"""
Enables/disables the window associated to the item.
:param `item`: an instance of :class:`UltimateListItem`;
:param `enable`: ``True`` to enable the associated window, ``False`` to
disable it.
"""
item = self.GetItem(item, item._col)
item.SetWindowEnabled(enable)
self.SetItem(item)
self.Refresh()
def SetColumnCustomRenderer(self, col=0, renderer=None):
"""
Associate a custom renderer to this column's header
:param `col`: the column index.
:param `renderer`: a class able to correctly render the input item.
:note: the renderer class **must** implement the methods `DrawHeaderButton`
and `GetForegroundColor`.
"""
self._columns[col].SetCustomRenderer(renderer)
def GetColumnCustomRenderer(self, col):
"""
Returns the custom renderer used to draw the column header
:param `col`: the column index.
"""
return self._columns[col].GetCustomRenderer()
def GetItemCustomRenderer(self, item):
"""
Returns the custom renderer used to draw the input item (if any).
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.GetCustomRenderer()
def SetItemCustomRenderer(self, item, renderer=None):
"""
Associate a custom renderer to this item.
:param `item`: an instance of :class:`UltimateListItem`;
:param `renderer`: a class able to correctly render the item.
:note: the renderer class **must** implement the methods `DrawSubItem`,
`GetLineHeight` and `GetSubItemWidth`.
"""
item = self.GetItem(item, item._col)
item.SetCustomRenderer(renderer)
self.SetItem(item)
self.ResetLineDimensions()
self.Refresh()
def GetItemOverFlow(self, item):
"""
Returns if the item is in the overflow state.
An item/subitem may overwrite neighboring items/subitems if its text would
not normally fit in the space allotted to it.
:param `item`: an instance of :class:`UltimateListItem`.
"""
item = self.GetItem(item, item._col)
return item.GetOverFlow()
def SetItemOverFlow(self, item, over=True):
"""
Sets the item in the overflow/non overflow state.
An item/subitem may overwrite neighboring items/subitems if its text would
not normally fit in the space allotted to it.
:param `item`: an instance of :class:`UltimateListItem`;
:param `over`: ``True`` to set the item in a overflow state, ``False`` otherwise.
"""
item = self.GetItem(item, item._col)
item.SetOverFlow(over)
self.SetItem(item)
self.Refresh()
# ----------------------------------------------------------------------------
# item count
# ----------------------------------------------------------------------------
def GetItemCount(self):
""" Returns the number of items in the :class:`UltimateListCtrl`. """
return (self.IsVirtual() and [self._countVirt] or [len(self._lines)])[0]
def SetItemCount(self, count):
"""
This method can only be used with virtual :class:`UltimateListCtrl`. It is used to
indicate to the control the number of items it contains. After calling it,
the main program should be ready to handle calls to various item callbacks
(such as :meth:`UltimateListCtrl.OnGetItemText() <UltimateListCtrl.OnGetItemText>`) for all items in the range from 0 to `count`.
:param `count`: the total number of items in :class:`UltimateListCtrl`.
"""
self._selStore.SetItemCount(count)
self._countVirt = count
self.ResetVisibleLinesRange()
# scrollbars must be reset
self._dirty = True
def GetSelectedItemCount(self):
""" Returns the number of selected items in :class:`UltimateListCtrl`. """
# deal with the quick case first
if self.IsSingleSel():
return (self.HasCurrent() and [self.IsHighlighted(self._current)] or [False])[0]
# virtual controls remmebers all its selections itself
if self.IsVirtual():
return self._selStore.GetSelectedCount()
# TODO: we probably should maintain the number of items selected even for
# non virtual controls as enumerating all lines is really slow...
countSel = 0
count = self.GetItemCount()
for line in range(count):
if self.GetLine(line).IsHighlighted():
countSel += 1
return countSel
# ----------------------------------------------------------------------------
# item position/size
# ----------------------------------------------------------------------------
    def GetViewRect(self):
        """
        Returns the rectangle taken by all items in the control. In other words,
        if the controls client size were equal to the size of this rectangle, no
        scrollbars would be needed and no free space would be left.

        :note: This function only works in the icon and small icon views, not in
         list or report views.
        """

        if self.HasAGWFlag(ULC_LIST):
            raise Exception("UltimateListCtrl.GetViewRect() not implemented for list view")

        # we need to find the longest/tallest label
        xMax = yMax = 0
        count = self.GetItemCount()

        if count:
            for i in range(count):
                # we need logical, not physical, coordinates here, so use
                # GetLineRect() instead of GetItemRect()
                r = self.GetLineRect(i)
                x, y = r.GetRight(), r.GetBottom()

                if x > xMax:
                    xMax = x
                if y > yMax:
                    yMax = y

        # some fudge needed to make it look prettier
        xMax += 2*EXTRA_BORDER_X
        yMax += 2*EXTRA_BORDER_Y

        # account for the scrollbars if necessary
        # NOTE(review): a single pass is done here -- showing one scrollbar may
        # in turn require the other; confirm this matches the intent
        sizeAll = self.GetClientSize()
        if xMax > sizeAll.x:
            yMax += wx.SystemSettings.GetMetric(wx.SYS_HSCROLL_Y)
        if yMax > sizeAll.y:
            xMax += wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X)

        return wx.Rect(0, 0, xMax, yMax)
    def GetSubItemRect(self, item, subItem):
        """
        Returns the rectangle representing the size and position, in physical coordinates,
        of the given subitem, i.e. the part of the row `item` in the column `subItem`.

        :param `item`: the row in which the item lives;
        :param `subItem`: the column in which the item lives. If set equal to the special
         value ``ULC_GETSUBITEMRECT_WHOLEITEM`` the return value is the same as for
         :meth:`~UltimateListMainWindow.GetItemRect`.

        :note: This method is only meaningful when the :class:`UltimateListCtrl` is in the
         report mode.
        """

        if not self.InReportView() and subItem == ULC_GETSUBITEMRECT_WHOLEITEM:
            raise Exception("GetSubItemRect only meaningful in report view")

        if item < 0 or item >= self.GetItemCount():
            raise Exception("invalid item in GetSubItemRect")

        # ensure that we're laid out, otherwise we could return nonsense
        if self._dirty:
            self.RecalculatePositions(True)

        # whole-row rectangle in logical coordinates
        rect = self.GetLineRect(item)

        # Adjust rect to specified column
        if subItem != ULC_GETSUBITEMRECT_WHOLEITEM:
            if subItem < 0 or subItem >= self.GetColumnCount():
                raise Exception("invalid subItem in GetSubItemRect")

            # shift past all preceding columns, then clamp to this column's width
            for i in range(subItem):
                rect.x += self.GetColumnWidth(i)

            rect.width = self.GetColumnWidth(subItem)

        # convert from logical to physical (scrolled) coordinates
        rect.x, rect.y = self.CalcScrolledPosition(rect.x, rect.y)

        return rect
def GetItemRect(self, item):
"""
Returns the rectangle representing the item's size and position, in physical
coordinates.
:param `item`: the row in which the item lives.
"""
return self.GetSubItemRect(item, ULC_GETSUBITEMRECT_WHOLEITEM)
def GetItemPosition(self, item):
"""
Returns the position of the item, in icon or small icon view.
:param `item`: the row in which the item lives.
"""
rect = self.GetItemRect(item)
return wx.Point(rect.x, rect.y)
# ----------------------------------------------------------------------------
# geometry calculation
# ----------------------------------------------------------------------------
def RecalculatePositions(self, noRefresh=False):
"""
Recalculates all the items positions, and sets the scrollbars positions
too.
:param `noRefresh`: ``True`` to avoid calling `Refresh`, ``False`` otherwise.
"""
count = self.GetItemCount()
if self.HasAGWFlag(ULC_ICON) and self._normal_image_list:
iconSpacing = self._normal_spacing
elif self.HasAGWFlag(ULC_SMALL_ICON) and self._small_image_list:
iconSpacing = self._small_spacing
else:
iconSpacing = 0
# Note that we do not call GetClientSize() here but
# GetSize() and subtract the border size for sunken
# borders manually. This is technically incorrect,
# but we need to know the client area's size WITHOUT
# scrollbars here. Since we don't know if there are
# any scrollbars, we use GetSize() instead. Another
# solution would be to call SetScrollbars() here to
# remove the scrollbars and call GetClientSize() then,
# but this might result in flicker and - worse - will
# reset the scrollbars to 0 which is not good at all
# if you resize a dialog/window, but don't want to
# reset the window scrolling. RR.
# Furthermore, we actually do NOT subtract the border
# width as 2 pixels is just the extra space which we
# need around the actual content in the window. Other-
# wise the text would e.g. touch the upper border. RR.
clientWidth, clientHeight = self.GetSize()
if self.InReportView():
self.ResetVisibleLinesRange()
if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
# all lines have the same height and we scroll one line per step
lineHeight = self.GetLineHeight()
entireHeight = count*lineHeight + LINE_SPACING
decrement = 0
if entireHeight > self.GetClientSize()[1]:
decrement = SCROLL_UNIT_X
self._linesPerPage = clientHeight//lineHeight
self.SetScrollbars(SCROLL_UNIT_X, lineHeight,
(self.GetHeaderWidth()-decrement)/SCROLL_UNIT_X,
(entireHeight + lineHeight - 1)/lineHeight,
self.GetScrollPos(wx.HORIZONTAL),
self.GetScrollPos(wx.VERTICAL),
True)
else:
if count > 0:
entireHeight = self.GetLineY(count-1) + self.GetLineHeight(count-1) + LINE_SPACING
lineFrom, lineTo = self.GetVisibleLinesRange()
self._linesPerPage = lineTo - lineFrom + 1
else:
lineHeight = self.GetLineHeight()
entireHeight = count*lineHeight + LINE_SPACING
self._linesPerPage = clientHeight/lineHeight
decrement = 0
if entireHeight > self.GetClientSize()[1]:
decrement = SCROLL_UNIT_X
self.SetScrollbars(SCROLL_UNIT_X, SCROLL_UNIT_Y,
(self.GetHeaderWidth()-decrement)/SCROLL_UNIT_X,
(entireHeight + SCROLL_UNIT_Y - 1)/SCROLL_UNIT_Y,
self.GetScrollPos(wx.HORIZONTAL),
self.GetScrollPos(wx.VERTICAL),
True)
else: # !report
dc = wx.ClientDC(self)
dc.SetFont(self.GetFont())
lineHeight = self.GetLineHeight()
# we have 3 different layout strategies: either layout all items
# horizontally/vertically (ULC_ALIGN_XXX styles explicitly given) or
# to arrange them in top to bottom, left to right (don't ask me why
# not the other way round...) order
if self.HasAGWFlag(ULC_ALIGN_LEFT | ULC_ALIGN_TOP):
x = EXTRA_BORDER_X
y = EXTRA_BORDER_Y
widthMax = 0
for i in range(count):
line = self.GetLine(i)
line.CalculateSize(dc, iconSpacing)
line.SetPosition(x, y, iconSpacing)
sizeLine = self.GetLineSize(i)
if self.HasAGWFlag(ULC_ALIGN_TOP):
if sizeLine.x > widthMax:
widthMax = sizeLine.x
y += sizeLine.y
else: # ULC_ALIGN_LEFT
x += sizeLine.x + MARGIN_BETWEEN_ROWS
if self.HasAGWFlag(ULC_ALIGN_TOP):
# traverse the items again and tweak their sizes so that they are
# all the same in a row
for i in range(count):
line = self.GetLine(i)
line._gi.ExtendWidth(widthMax)
self.SetScrollbars(SCROLL_UNIT_X, lineHeight,
(x + SCROLL_UNIT_X)/SCROLL_UNIT_X,
(y + lineHeight)/lineHeight,
self.GetScrollPos(wx.HORIZONTAL),
self.GetScrollPos(wx.VERTICAL),
True)
else: # "flowed" arrangement, the most complicated case
# at first we try without any scrollbars, if the items don't fit into
# the window, we recalculate after subtracting the space taken by the
# scrollbar
entireWidth = 0
for tries in range(2):
entireWidth = 2*EXTRA_BORDER_X
if tries == 1:
# Now we have decided that the items do not fit into the
# client area, so we need a scrollbar
entireWidth += SCROLL_UNIT_X
x = EXTRA_BORDER_X
y = EXTRA_BORDER_Y
maxWidthInThisRow = 0
self._linesPerPage = 0
currentlyVisibleLines = 0
for i in range(count):
currentlyVisibleLines += 1
line = self.GetLine(i)
line.CalculateSize(dc, iconSpacing)
line.SetPosition(x, y, iconSpacing)
sizeLine = self.GetLineSize(i)
if maxWidthInThisRow < sizeLine.x:
maxWidthInThisRow = sizeLine.x
y += sizeLine.y
if currentlyVisibleLines > self._linesPerPage:
self._linesPerPage = currentlyVisibleLines
if y + sizeLine.y >= clientHeight:
currentlyVisibleLines = 0
y = EXTRA_BORDER_Y
maxWidthInThisRow += MARGIN_BETWEEN_ROWS
x += maxWidthInThisRow
entireWidth += maxWidthInThisRow
maxWidthInThisRow = 0
# We have reached the last item.
if i == count - 1:
entireWidth += maxWidthInThisRow
if tries == 0 and entireWidth + SCROLL_UNIT_X > clientWidth:
clientHeight -= wx.SystemSettings.GetMetric(wx.SYS_HSCROLL_Y)
self._linesPerPage = 0
break
if i == count - 1:
break # Everything fits, no second try required.
self.SetScrollbars(SCROLL_UNIT_X, lineHeight,
(entireWidth + SCROLL_UNIT_X)/SCROLL_UNIT_X,
0,
self.GetScrollPos(wx.HORIZONTAL),
0,
True)
self._dirty = False
if not noRefresh:
self.RefreshAll()
def RefreshAll(self):
""" Refreshes the entire :class:`UltimateListCtrl`. """
self._dirty = False
self.Refresh()
headerWin = self.GetListCtrl()._headerWin
if headerWin and headerWin._dirty:
headerWin._dirty = False
headerWin.Refresh()
def UpdateCurrent(self):
""" Updates the current line selection. """
if not self.HasCurrent() and not self.IsEmpty():
self.ChangeCurrent(0)
def GetNextItem(self, item, geometry=ULC_NEXT_ALL, state=ULC_STATE_DONTCARE):
"""
Searches for an item with the given `geometry` or `state`, starting from `item`
but excluding the `item` itself.
:param `item`: the item at which starting the search. If set to -1, the first
item that matches the specified flags will be returned.
:param `geometry`: can be one of:
=================== ========= =================================
Geometry Flag Hex Value Description
=================== ========= =================================
``ULC_NEXT_ABOVE`` 0x0 Searches for an item above the specified item
``ULC_NEXT_ALL`` 0x1 Searches for subsequent item by index
``ULC_NEXT_BELOW`` 0x2 Searches for an item below the specified item
``ULC_NEXT_LEFT`` 0x3 Searches for an item to the left of the specified item
``ULC_NEXT_RIGHT`` 0x4 Searches for an item to the right of the specified item
=================== ========= =================================
:param `state`: any combination of the following bits:
============================ ========= ==============================
State Bits Hex Value Description
============================ ========= ==============================
``ULC_STATE_DONTCARE`` 0x0 Don't care what the state is
``ULC_STATE_DROPHILITED`` 0x1 The item is highlighted to receive a drop event
``ULC_STATE_FOCUSED`` 0x2 The item has the focus
``ULC_STATE_SELECTED`` 0x4 The item is selected
``ULC_STATE_CUT`` 0x8 The item is in the cut state
``ULC_STATE_DISABLED`` 0x10 The item is disabled
``ULC_STATE_FILTERED`` 0x20 The item has been filtered
``ULC_STATE_INUSE`` 0x40 The item is in use
``ULC_STATE_PICKED`` 0x80 The item has been picked
``ULC_STATE_SOURCE`` 0x100 The item is a drag and drop source
============================ ========= ==============================
:return: The first item with given `state` following `item` or -1 if no such item found.
:note: This function may be used to find all selected items in the
control like this::
item = -1
while 1:
item = listctrl.GetNextItem(item, ULC_NEXT_ALL, ULC_STATE_SELECTED)
if item == -1:
break
# This item is selected - do whatever is needed with it
wx.LogMessage("Item %ld is selected."%item)
"""
ret = item
maxI = self.GetItemCount()
# notice that we start with the next item (or the first one if item == -1)
# and this is intentional to allow writing a simple loop to iterate over
# all selected items
ret += 1
if ret == maxI:
# this is not an error because the index was ok initially, just no
# such item
return -1
if not state:
# any will do
return ret
for line in range(ret, maxI):
if state & ULC_STATE_FOCUSED and line == self._current:
return line
if state & ULC_STATE_SELECTED and self.IsHighlighted(line):
return line
return -1
# ----------------------------------------------------------------------------
# deleting stuff
# ----------------------------------------------------------------------------
    def DeleteItem(self, lindex):
        """
        Deletes the specified item.

        :param `lindex`: the index of the item to delete.

        :note: This function sends the ``EVT_LIST_DELETE_ITEM`` event for the item
         being deleted.
        """

        count = self.GetItemCount()

        if lindex < 0 or lindex >= self.GetItemCount():
            raise Exception("invalid item index in DeleteItem")

        # we don't need to adjust the index for the previous items
        if self.HasCurrent() and self._current >= lindex:
            # if the current item is being deleted, we want the next one to
            # become selected - unless there is no next one - so don't adjust
            # self._current in this case
            if self._current != lindex or self._current == count - 1:
                self._current -= 1

        if self.InReportView():
            # mark the Column Max Width cache as dirty if the items in the line
            # we're deleting contain the Max Column Width
            line = self.GetLine(lindex)
            item = UltimateListItem()

            for i in range(len(self._columns)):
                itemData = line._items[i]
                item = itemData.GetItem(item)
                itemWidth = self.GetItemWidthWithImage(item)
                # the deleted cell may have been (one of) the widest in its
                # column: the cached max width can no longer be trusted
                if itemWidth >= self._aColWidths[i]._nMaxWidth:
                    self._aColWidths[i]._bNeedsUpdate = True

                # also destroy any widget embedded in the deleted cell
                if item.GetWindow():
                    self.DeleteItemWindow(item)

            self.ResetVisibleLinesRange(True)
            # NOTE(review): this discards the current-item adjustment done
            # above whenever we are in report view -- confirm this is intended
            self._current = -1

        self.SendNotify(lindex, wxEVT_COMMAND_LIST_DELETE_ITEM)

        if self.IsVirtual():
            self._countVirt -= 1
            self._selStore.OnItemDelete(lindex)
        else:
            self._lines.pop(lindex)

        # we need to refresh the (vert) scrollbar as the number of items changed
        self._dirty = True
        self._lineHeight = 0
        self.ResetLineDimensions(True)
        self.RecalculatePositions()
        self.RefreshAfter(lindex)
def DeleteColumn(self, col):
"""
Deletes the specified column.
:param `col`: the index of the column to delete.
"""
self._columns.pop(col)
self._dirty = True
if not self.IsVirtual():
# update all the items
for i in range(len(self._lines)):
line = self.GetLine(i)
line._items.pop(col)
if self.InReportView(): # we only cache max widths when in Report View
self._aColWidths.pop(col)
# invalidate it as it has to be recalculated
self._headerWidth = 0
    def DoDeleteAllItems(self):
        """ Actually performs the deletion of all the items. """

        if self.IsEmpty():
            # nothing to do - in particular, don't send the event
            return

        self.ResetCurrent()

        # to make the deletion of all items faster, we don't send the
        # notifications for each item deletion in this case but only one event
        # for all of them: this is compatible with wxMSW and documented in
        # DeleteAllItems() description
        event = UltimateListEvent(wxEVT_COMMAND_LIST_DELETE_ALL_ITEMS, self.GetParent().GetId())
        event.SetEventObject(self.GetParent())
        self.GetParent().GetEventHandler().ProcessEvent(event)

        if self.IsVirtual():
            self._countVirt = 0
            self._selStore.Clear()

        if self.InReportView():
            self.ResetVisibleLinesRange(True)
            # every cached column max width is now stale
            for i in range(len(self._aColWidths)):
                self._aColWidths[i]._bNeedsUpdate = True

        # destroy any widgets embedded in the items (iterate over a copy:
        # DeleteItemWindow() mutates self._itemWithWindow)
        for item in self._itemWithWindow[:]:
            if item.GetWindow():
                self.DeleteItemWindow(item)

        self._lines = []
        self._itemWithWindow = []
        self._hasWindows = False
def DeleteAllItems(self):
"""
Deletes all items in the :class:`UltimateListCtrl`.
:note: This function does not send the ``EVT_LIST_DELETE_ITEM`` event because
deleting many items from the control would be too slow then (unlike :meth:`~UltimateListMainWindow.DeleteItem`).
"""
self.DoDeleteAllItems()
self.RecalculatePositions()
def DeleteEverything(self):
""" Deletes all items in the :class:`UltimateListCtrl`, resetting column widths to zero. """
self.DeleteAllItems()
count = len(self._columns)
for n in range(count):
self.DeleteColumn(0)
self.RecalculatePositions()
self.GetListCtrl().Refresh()
# ----------------------------------------------------------------------------
# scanning for an item
# ----------------------------------------------------------------------------
def EnsureVisible(self, index):
"""
Ensures this item is visible.
:param `index`: the index of the item to scroll into view.
"""
if index < 0 or index >= self.GetItemCount():
raise Exception("invalid item index in EnsureVisible")
# We have to call this here because the label in question might just have
# been added and its position is not known yet
if self._dirty:
self.RecalculatePositions(True)
self.MoveToItem(index)
def FindItem(self, start, string, partial=False):
"""
Find an item whose label matches this string.
:param `start`: the starting point of the input `string` or the beginning
if `start` is -1;
:param `string`: the string to look for matches;
:param `partial`: if ``True`` then this method will look for items which
begin with `string`.
:note: The string comparison is case insensitive.
"""
if start < 0:
start = 0
str_upper = string.upper()
count = self.GetItemCount()
for i in range(start, count):
line = self.GetLine(i)
text = line.GetText(0)
line_upper = text.upper()
if not partial:
if line_upper == str_upper:
return i
else:
if line_upper.find(str_upper) == 0:
return i
return wx.NOT_FOUND
def FindItemData(self, start, data):
"""
Find an item whose data matches this data.
:param `start`: the starting point of the input `data` or the beginning
if `start` is -1;
:param `data`: the data to look for matches.
"""
if start < 0:
start = 0
count = self.GetItemCount()
for i in range(start, count):
line = self.GetLine(i)
item = UltimateListItem()
item = line.GetItem(0, item)
if item._data == data:
return i
return wx.NOT_FOUND
    def FindItemAtPos(self, pt):
        """
        Finds the item nearest to the given position.

        :param `pt`: an instance of :class:`wx.Point`.

        :note: the item index is *estimated* by linear interpolation between
         the first visible line and the position of the last item, i.e. it
         assumes roughly uniform line heights rather than hit-testing each
         line individually.
        """

        topItem, dummy = self.GetVisibleLinesRange()
        # position of the last item, used as the interpolation endpoint
        p = self.GetItemPosition(self.GetItemCount()-1)
        if p.y == 0:
            # all items share the same y origin; the top visible item is as
            # good an answer as any
            return topItem

        id = int(math.floor(pt.y*float(self.GetItemCount()-topItem-1)/p.y+topItem))
        if id >= 0 and id < self.GetItemCount():
            return id

        return wx.NOT_FOUND
    def HitTest(self, x, y):
        """
        HitTest method for a :class:`UltimateListCtrl`.

        :param `x`: the mouse `x` position;
        :param `y`: the mouse `y` position.

        :return: a tuple ``(item, flags)``; ``(wx.NOT_FOUND, None)`` when no
         item is hit.

        :see: :meth:`~UltimateListMainWindow.HitTestLine` for a list of return flags.
        """

        # work in logical (unscrolled) coordinates
        x, y = self.CalcUnscrolledPosition(x, y)
        count = self.GetItemCount()

        if self.InReportView():

            if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
                # fixed row height: the candidate line is a simple division
                current = y // self.GetLineHeight()
                if current < count:
                    newItem, flags = self.HitTestLine(current, x, y)
                    if flags:
                        return current, flags
            else:
                # variable row heights: scan from the first visible line down
                for current in range(self._lineFrom, count):
                    newItem, flags = self.HitTestLine(current, x, y)
                    if flags:
                        return current, flags

        else:
            # TODO: optimize it too! this is less simple than for report view but
            # enumerating all items is still not a way to do it!!
            for current in range(count):
                newItem, flags = self.HitTestLine(current, x, y)
                if flags:
                    return current, flags

        return wx.NOT_FOUND, None
# ----------------------------------------------------------------------------
# adding stuff
# ----------------------------------------------------------------------------
    def InsertItem(self, item):
        """
        Inserts an item into :class:`UltimateListCtrl`.

        :param `item`: an instance of :class:`UltimateListItem`; its
         ``_itemId`` determines the insertion index (clamped to the current
         item count).

        :note: cannot be used with virtual controls, which only report a count.
        """

        if self.IsVirtual():
            raise Exception("can't be used with virtual control")

        count = self.GetItemCount()

        if item._itemId < 0:
            raise Exception("invalid item index")

        CheckVariableRowHeight(self, item._text)

        # clamp the requested index so the item is appended at most
        if item._itemId > count:
            item._itemId = count

        id = item._itemId
        self._dirty = True

        if self.InReportView():
            self.ResetVisibleLinesRange(True)

            # calculate the width of the item and adjust the max column width
            pWidthInfo = self._aColWidths[item.GetColumn()]
            width = self.GetItemWidthWithImage(item)
            item.SetWidth(width)
            if width > pWidthInfo._nMaxWidth:
                pWidthInfo._nMaxWidth = width

        line = UltimateListLineData(self)
        line.SetItem(item._col, item)
        self._lines.insert(id, line)

        self._dirty = True

        # If an item is selected at or below the point of insertion, we need to
        # increment the member variables because the current row's index has gone
        # up by one
        if self.HasCurrent() and self._current >= id:
            self._current += 1

        self.SendNotify(id, wxEVT_COMMAND_LIST_INSERT_ITEM)

        self.RefreshLines(id, self.GetItemCount() - 1)
    def InsertColumn(self, col, item):
        """
        Inserts a column into :class:`UltimateListCtrl`.

        :param `col`: the column index at which we wish to insert a new column;
         an out-of-range index appends the column at the end;
        :param `item`: an instance of :class:`UltimateListItem`.

        :return: the index at which the column has been inserted.

        :note: This method is meaningful only if :class:`UltimateListCtrl` has
         the ``ULC_REPORT`` or the ``ULC_TILE`` styles set (or shows headers in
         all views); otherwise nothing is inserted.
        """

        self._dirty = True

        if self.InReportView() or self.InTileView() or self.HasAGWFlag(ULC_HEADER_IN_ALL_VIEWS):

            # auto-size the header to its label when requested
            if item._width == ULC_AUTOSIZE_USEHEADER:
                item._width = self.GetTextLength(item._text)

            column = UltimateListHeaderData(item)
            colWidthInfo = ColWidthInfo()

            # insert in place only for a valid in-range index, append otherwise
            insert = (col >= 0) and (col < len(self._columns))

            if insert:
                self._columns.insert(col, column)
                self._aColWidths.insert(col, colWidthInfo)
                idx = col
            else:
                self._columns.append(column)
                self._aColWidths.append(colWidthInfo)
                idx = len(self._columns)-1

            if not self.IsVirtual():
                # update all the items: every row gains a (blank) cell
                for i in range(len(self._lines)):
                    line = self.GetLine(i)
                    data = UltimateListItemData(self)
                    if insert:
                        line._items.insert(col, data)
                    else:
                        line._items.append(data)

            # invalidate it as it has to be recalculated
            self._headerWidth = 0
            return idx
    def GetItemWidthWithImage(self, item):
        """
        Returns the item width, in pixels, considering the item text and its images.

        :param `item`: an instance of :class:`UltimateListItem`.
        """

        # a custom renderer knows its own width, short-circuit everything else
        if item.GetCustomRenderer():
            return item.GetCustomRenderer().GetSubItemWidth()

        width = 0
        dc = wx.ClientDC(self)

        # measure with the item's own font if it has one
        if item.GetFont().IsOk():
            font = item.GetFont()
        else:
            font = self.GetFont()

        dc.SetFont(font)

        # kinds 1 and 2 carry a checkbox/radio bitmap before the text
        if item.GetKind() in [1, 2]:
            ix, iy = self.GetCheckboxImageSize()
            width += ix

        if item.GetImage():
            ix, iy = self.GetImageSize(item.GetImage())
            width += ix + IMAGE_MARGIN_IN_REPORT_MODE

        if item.GetText():
            w, h, dummy = dc.GetFullMultiLineTextExtent(item.GetText())
            width += w

        # leave a small gap after any embedded window
        if item.GetWindow():
            width += item._windowsize.x + 5

        return width
    def GetItemTextSize(self, item):
        """
        Returns the horizontal extent of the item text, in pixels.

        :param `item`: an instance of :class:`UltimateListItem`.

        :return: a tuple ``(start, end)`` where `start` is the x-offset at
         which the text begins (after any checkbox and image) and `end` is the
         width of the text itself.
        """

        # width/ix/iy are initialised for symmetry with GetItemWidthWithImage();
        # only start and end are actually returned
        width = ix = iy = start = end = 0
        dc = wx.ClientDC(self)

        if item.HasFont():
            font = item.GetFont()
        else:
            font = self.GetFont()

        dc.SetFont(font)

        # kinds 1 and 2 carry a checkbox/radio bitmap before the text
        if item.GetKind() in [1, 2]:
            ix, iy = self.GetCheckboxImageSize()
            start += ix

        if item.GetImage():
            ix, iy = self.GetImageSize(item.GetImage())
            start += ix + IMAGE_MARGIN_IN_REPORT_MODE

        if item.GetText():
            w, h, dummy = dc.GetFullMultiLineTextExtent(item.GetText())
            end = w

        return start, end
# ----------------------------------------------------------------------------
# sorting
# ----------------------------------------------------------------------------
def OnCompareItems(self, line1, line2):
"""
Returns whether 2 lines have the same index.
Override this function in the derived class to change the sort order of the items
in the :class:`UltimateListCtrl`. The function should return a negative, zero or positive
value if the first line is less than, equal to or greater than the second one.
:param `line1`: an instance of :class:`UltimateListLineData`;
:param `line2`: another instance of :class:`UltimateListLineData`.
:note: The base class version compares lines by their index.
"""
item = UltimateListItem()
item1 = line1.GetItem(0, item)
item = UltimateListItem()
item2 = line2.GetItem(0, item)
data1 = item1._data
data2 = item2._data
if self.__func:
return self.__func(item1, item2)
else:
return (data1 > data2) - (data1 < data2)
    def SortItems(self, func):
        """
        Call this function to sort the items in the :class:`UltimateListCtrl`. Sorting is done
        using the specified function `func`. This function must have the
        following prototype::

            def OnCompareItems(self, item1, item2):

                DoSomething(item1, item2)
                # function code

        It is called each time when the two items must be compared and should return 0
        if the items are equal, negative value if the first item is less than the second
        one and positive value if the first one is greater than the second one.

        :param `func`: the function to use to compare two items, or ``None`` to
         fall back to the default :meth:`~UltimateListMainWindow.OnCompareItems`
         data comparison.
        """

        self.HighlightAll(False)
        self.ResetCurrent()

        # embedded widgets would be drawn at stale positions during the sort
        if self._hasWindows:
            self.HideWindows()

        # stash the comparison function for OnCompareItems (name-mangled attribute)
        if not func:
            self.__func = None
        else:
            self.__func = func

        # OnCompareItems is cmp()-style, adapt it for the key-based sort
        self._lines.sort(key=cmp_to_key(self.OnCompareItems))

        if self.IsShownOnScreen():
            self._dirty = True
            self._lineHeight = 0
            self.ResetLineDimensions(True)

        self.RecalculatePositions(True)
# ----------------------------------------------------------------------------
# scrolling
# ----------------------------------------------------------------------------
    def OnScroll(self, event):
        """
        Handles the ``wx.EVT_SCROLLWIN`` event for :class:`UltimateListMainWindow`.

        :param `event`: a :class:`ScrollEvent` event to be processed.
        """

        # let the default scrolling take place first
        event.Skip()

        # update our idea of which lines are shown when we redraw the window the
        # next time
        self.ResetVisibleLinesRange()

        if self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
            # defer the re-layout until after the scroll has been processed
            wx.CallAfter(self.RecalculatePositions, True)

        if event.GetOrientation() == wx.HORIZONTAL:
            # header and footer windows do not scroll with us; repaint them so
            # their columns stay aligned with the list content
            lc = self.GetListCtrl()
            if self.HasHeader():
                lc._headerWin.Refresh()
                lc._headerWin.Update()
            if self.HasFooter():
                lc._footerWin.Refresh()
                lc._footerWin.Update()
def GetCountPerPage(self):
"""
Returns the number of items that can fit vertically in the visible area
of the :class:`UltimateListCtrl` (list or report view) or the total number of
items in the list control (icon or small icon view).
"""
if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
if not self._linesPerPage:
self._linesPerPage = self.GetClientSize().y/self.GetLineHeight()
return self._linesPerPage
visibleFrom, visibleTo = self.GetVisibleLinesRange()
self._linesPerPage = visibleTo - visibleFrom + 1
return self._linesPerPage
    def GetVisibleLinesRange(self):
        """
        Returns the range of visible items on screen as a ``(lineFrom, lineTo)``
        tuple; ``(-1, -1)`` for an empty control.

        :note: This method can be used only if :class:`UltimateListCtrl` has the ``ULC_REPORT``
         style set. The result is cached in ``self._lineFrom``/``self._lineTo``
         and only recomputed after :meth:`~UltimateListMainWindow.ResetVisibleLinesRange`.
        """

        if not self.InReportView():
            raise Exception("this is for report mode only")

        # _lineFrom == -1 means the cached range has been invalidated
        if self._lineFrom == -1:

            count = self.GetItemCount()

            if count:

                if self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
                    # variable heights: locate the first line crossing the
                    # scrolled-to y position by a linear scan
                    view_x, view_y = self.GetViewStart()
                    view_y *= SCROLL_UNIT_Y

                    for i in range(0, count):
                        rc = self.GetLineY(i)
                        if rc > view_y:
                            self._lineFrom = i - 1
                            break

                    if self._lineFrom < 0:
                        self._lineFrom = 0

                    self._lineTo = self._lineFrom
                    clientWidth, clientHeight = self.GetClientSize()

                    # extend the range until a line ends below the client area
                    # (with a small 5px tolerance)
                    for i in range(self._lineFrom, count):
                        rc = self.GetLineY(i) + self.GetLineHeight(i)
                        if rc > view_y + clientHeight - 5:
                            break
                        self._lineTo += 1

                else:
                    # No variable row height
                    self._lineFrom = self.GetScrollPos(wx.VERTICAL)

                    # this may happen if SetScrollbars() hadn't been called yet
                    if self._lineFrom >= count:
                        self._lineFrom = count - 1

                    self._lineTo = self._lineFrom + self._linesPerPage

                    # we redraw one extra line but this is needed to make the redrawing
                    # logic work when there is a fractional number of lines on screen
                    if self._lineTo >= count:
                        self._lineTo = count - 1

            else: # empty control
                self._lineFrom = -1
                self._lineTo = -1

        return self._lineFrom, self._lineTo
def ResetTextControl(self):
""" Called by :class:`UltimateListTextCtrl` when it marks itself for deletion."""
self._textctrl.Destroy()
self._textctrl = None
self.RecalculatePositions()
self.Refresh()
def SetFirstGradientColour(self, colour=None):
"""
Sets the first gradient colour for gradient-style selections.
:param `colour`: if not ``None``, a valid :class:`wx.Colour` instance. Otherwise,
the colour is taken from the system value ``wx.SYS_COLOUR_HIGHLIGHT``.
"""
if colour is None:
colour = wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT)
self._firstcolour = colour
if self._usegradients:
self.RefreshSelected()
def SetSecondGradientColour(self, colour=None):
"""
Sets the second gradient colour for gradient-style selections.
:param `colour`: if not ``None``, a valid :class:`wx.Colour` instance. Otherwise,
the colour generated is a slightly darker version of the :class:`UltimateListCtrl`
background colour.
"""
if colour is None:
# No colour given, generate a slightly darker from the
# UltimateListCtrl background colour
colour = self.GetBackgroundColour()
r, g, b = int(colour.Red()), int(colour.Green()), int(colour.Blue())
colour = ((r >> 1) + 20, (g >> 1) + 20, (b >> 1) + 20)
colour = wx.Colour(colour[0], colour[1], colour[2])
self._secondcolour = colour
if self._usegradients:
self.RefreshSelected()
    def GetFirstGradientColour(self):
        """ Returns the first gradient colour for gradient-style selections. """

        # Set via SetFirstGradientColour(); a wx.Colour instance.
        return self._firstcolour

    def GetSecondGradientColour(self):
        """ Returns the second gradient colour for gradient-style selections. """

        # Set via SetSecondGradientColour(); a wx.Colour instance.
        return self._secondcolour
def EnableSelectionGradient(self, enable=True):
"""
Globally enables/disables drawing of gradient selections.
:param `enable`: ``True`` to enable gradient-style selections, ``False``
to disable it.
:note: Calling this method disables any Vista-style selection previously
enabled.
"""
self._usegradients = enable
self._vistaselection = False
self.RefreshSelected()
def SetGradientStyle(self, vertical=0):
"""
Sets the gradient style for gradient-style selections.
:param `vertical`: 0 for horizontal gradient-style selections, 1 for vertical
gradient-style selections.
"""
# 0 = Horizontal, 1 = Vertical
self._gradientstyle = vertical
if self._usegradients:
self.RefreshSelected()
    def GetGradientStyle(self):
        """
        Returns the gradient style for gradient-style selections.

        :return: 0 for horizontal gradient-style selections, 1 for vertical
         gradient-style selections.
        """

        # Set via SetGradientStyle(); defaults are established elsewhere.
        return self._gradientstyle
def EnableSelectionVista(self, enable=True):
"""
Globally enables/disables drawing of Windows Vista selections.
:param `enable`: ``True`` to enable Vista-style selections, ``False`` to
disable it.
:note: Calling this method disables any gradient-style selection previously
enabled.
"""
self._usegradients = False
self._vistaselection = enable
self.RefreshSelected()
    def SetBackgroundImage(self, image):
        """
        Sets the :class:`UltimateListCtrl` background image.

        :param `image`: if not ``None``, an instance of :class:`wx.Bitmap`.

        :note: At present, the background image can only be used in "tile" mode.

        .. todo:: Support background images also in stretch and centered modes.
        """

        # Stored for use by the paint handler; a full repaint shows it.
        self._backgroundImage = image
        self.Refresh()

    def GetBackgroundImage(self):
        """
        Returns the :class:`UltimateListCtrl` background image (if any).

        :note: At present, the background image can only be used in "tile" mode.

        .. todo:: Support background images also in stretch and centered modes.
        """

        return self._backgroundImage
    def SetWaterMark(self, watermark):
        """
        Sets the :class:`UltimateListCtrl` watermark image to be displayed in the bottom
        right part of the window.

        :param `watermark`: if not ``None``, an instance of :class:`wx.Bitmap`.

        .. todo:: Better support for this is needed.
        """

        # Stored for use by the paint handler; a full repaint shows it.
        self._waterMark = watermark
        self.Refresh()

    def GetWaterMark(self):
        """
        Returns the :class:`UltimateListCtrl` watermark image (if any), displayed in the
        bottom right part of the window.

        .. todo:: Better support for this is needed.
        """

        return self._waterMark
    def SetDisabledTextColour(self, colour):
        """
        Sets the items disabled colour.

        :param `colour`: an instance of :class:`wx.Colour`.
        """

        # Disabled items colour
        self._disabledColour = colour
        self.Refresh()

    def GetDisabledTextColour(self):
        """ Returns the items disabled colour. """

        return self._disabledColour
def ScrollList(self, dx, dy):
"""
Scrolls the :class:`UltimateListCtrl`.
:param `dx`: if in icon, small icon or report view mode, specifies the number
of pixels to scroll. If in list view mode, `dx` specifies the number of
columns to scroll.
:param `dy`: always specifies the number of pixels to scroll vertically.
"""
if not self.InReportView():
# TODO: this should work in all views but is not implemented now
return False
top, bottom = self.GetVisibleLinesRange()
if bottom == -1:
return 0
self.ResetVisibleLinesRange()
if not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
hLine = self.GetLineHeight()
self.Scroll(-1, top + dy/hLine)
else:
self.Scroll(-1, top + dy/SCROLL_UNIT_Y)
if wx.Platform == "__WXMAC__":
# see comment in MoveToItem() for why we do this
self.ResetVisibleLinesRange()
return True
# -------------------------------------------------------------------------------------
# UltimateListCtrl
# -------------------------------------------------------------------------------------
class UltimateListCtrl(wx.Control):
"""
UltimateListCtrl is a class that mimics the behaviour of :class:`ListCtrl`, with almost
the same base functionalities plus some more enhancements. This class does
not rely on the native control, as it is a full owner-drawn list control.
"""
    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style=0, agwStyle=0, validator=wx.DefaultValidator, name="UltimateListCtrl"):
        """
        Default class constructor.

        :param `parent`: parent window. Must not be ``None``;
        :param `id`: window identifier. A value of -1 indicates a default value;
        :param `pos`: the control position. A value of (-1, -1) indicates a default position,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `size`: the control size. A value of (-1, -1) indicates a default size,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `style`: the underlying :class:`wx.Control` window style;
        :param `agwStyle`: the AGW-specific window style; can be almost any combination of the following
         bits:

         =============================== =========== ====================================================================================================
         Window Styles                   Hex Value   Description
         =============================== =========== ====================================================================================================
         ``ULC_VRULES``                          0x1 Draws light vertical rules between rows in report mode.
         ``ULC_HRULES``                          0x2 Draws light horizontal rules between rows in report mode.
         ``ULC_ICON``                            0x4 Large icon view, with optional labels.
         ``ULC_SMALL_ICON``                      0x8 Small icon view, with optional labels.
         ``ULC_LIST``                           0x10 Multicolumn list view, with optional small icons. Columns are computed automatically, i.e. you don't set columns as in ``ULC_REPORT``. In other words, the list wraps, unlike a :class:`ListBox`.
         ``ULC_REPORT``                         0x20 Single or multicolumn report view, with optional header.
         ``ULC_ALIGN_TOP``                      0x40 Icons align to the top. Win32 default, Win32 only.
         ``ULC_ALIGN_LEFT``                     0x80 Icons align to the left.
         ``ULC_AUTOARRANGE``                   0x100 Icons arrange themselves. Win32 only.
         ``ULC_VIRTUAL``                       0x200 The application provides items text on demand. May only be used with ``ULC_REPORT``.
         ``ULC_EDIT_LABELS``                   0x400 Labels are editable: the application will be notified when editing starts.
         ``ULC_NO_HEADER``                     0x800 No header in report mode.
         ``ULC_NO_SORT_HEADER``               0x1000 No Docs.
         ``ULC_SINGLE_SEL``                   0x2000 Single selection (default is multiple).
         ``ULC_SORT_ASCENDING``               0x4000 Sort in ascending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
         ``ULC_SORT_DESCENDING``              0x8000 Sort in descending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
         ``ULC_TILE``                        0x10000 Each item appears as a full-sized icon with a label of one or more lines beside it (partially implemented).
         ``ULC_NO_HIGHLIGHT``                0x20000 No highlight when an item is selected.
         ``ULC_STICKY_HIGHLIGHT``            0x40000 Items are selected by simply hovering on them, with no need to click on them.
         ``ULC_STICKY_NOSELEVENT``           0x80000 Don't send a selection event when using ``ULC_STICKY_HIGHLIGHT`` style.
         ``ULC_SEND_LEFTCLICK``             0x100000 Send a left click event when an item is selected.
         ``ULC_HAS_VARIABLE_ROW_HEIGHT``    0x200000 The list has variable row heights.
         ``ULC_AUTO_CHECK_CHILD``           0x400000 When a column header has a checkbox associated, auto-check all the subitems in that column.
         ``ULC_AUTO_TOGGLE_CHILD``          0x800000 When a column header has a checkbox associated, toggle all the subitems in that column.
         ``ULC_AUTO_CHECK_PARENT``         0x1000000 Only meaningful for checkbox-type items: when an item is checked/unchecked its column header item is checked/unchecked as well.
         ``ULC_SHOW_TOOLTIPS``             0x2000000 Show tooltips for ellipsized items/subitems (text too long to be shown in the available space) containing the full item/subitem text.
         ``ULC_HOT_TRACKING``              0x4000000 Enable hot tracking of items on mouse motion.
         ``ULC_BORDER_SELECT``             0x8000000 Changes border colour when an item is selected, instead of highlighting the item.
         ``ULC_TRACK_SELECT``             0x10000000 Enables hot-track selection in a list control. Hot track selection means that an item is automatically selected when the cursor remains over the item for a certain period of time. The delay is retrieved on Windows using the `win32api` call `win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)`, and is defaulted to 400ms on other platforms. This style applies to all views of `UltimateListCtrl`.
         ``ULC_HEADER_IN_ALL_VIEWS``      0x20000000 Show column headers in all view modes.
         ``ULC_NO_FULL_ROW_SELECT``       0x40000000 When an item is selected, only the item in the first column is highlighted.
         ``ULC_FOOTER``                   0x80000000 Show a footer too (only when header is present).
         ``ULC_USER_ROW_HEIGHT``         0x100000000 Allows to set a custom row height (one value for all the items, only in report mode).
         =============================== =========== ====================================================================================================

        :param `validator`: the window validator;
        :param `name`: the window name.
        """

        self._imageListNormal = None
        self._imageListSmall = None
        self._imageListState = None

        # Reject mutually-incompatible AGW style combinations up front.
        if not agwStyle & ULC_MASK_TYPE:
            raise Exception("UltimateListCtrl style should have exactly one mode bit set")

        if not (agwStyle & ULC_REPORT) and agwStyle & ULC_HAS_VARIABLE_ROW_HEIGHT:
            raise Exception("Style ULC_HAS_VARIABLE_ROW_HEIGHT can only be used in report, non-virtual mode")

        if agwStyle & ULC_STICKY_HIGHLIGHT and agwStyle & ULC_TRACK_SELECT:
            raise Exception("Styles ULC_STICKY_HIGHLIGHT and ULC_TRACK_SELECT can not be combined")

        if agwStyle & ULC_NO_HEADER and agwStyle & ULC_HEADER_IN_ALL_VIEWS:
            raise Exception("Styles ULC_NO_HEADER and ULC_HEADER_IN_ALL_VIEWS can not be combined")

        if agwStyle & ULC_USER_ROW_HEIGHT and (agwStyle & ULC_REPORT) == 0:
            raise Exception("Style ULC_USER_ROW_HEIGHT can be used only with ULC_REPORT")

        wx.Control.__init__(self, parent, id, pos, size, style|wx.CLIP_CHILDREN, validator, name)

        # Child windows; the header and footer are created on demand below.
        self._mainWin = None
        self._headerWin = None
        self._footerWin = None

        self._headerHeight = wx.RendererNative.Get().GetHeaderButtonHeight(self)
        self._footerHeight = self._headerHeight

        # NOTE(review): border flags are stripped before being handed to the
        # main window -- presumably the border belongs to this container, not
        # to the inner list window; on wxGTK a themed border is forced.
        if wx.Platform == "__WXGTK__":
            style &= ~wx.BORDER_MASK
            style |= wx.BORDER_THEME
        else:
            if style & wx.BORDER_THEME:
                style -= wx.BORDER_THEME

        self._agwStyle = agwStyle
        if style & wx.SUNKEN_BORDER:
            style -= wx.SUNKEN_BORDER

        self._mainWin = UltimateListMainWindow(self, wx.ID_ANY, wx.Point(0, 0), wx.DefaultSize, style, agwStyle)

        # The main window fills the control; header/footer are prepended or
        # appended to this sizer as needed.
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(self._mainWin, 1, wx.GROW)
        self.SetSizer(sizer)

        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)

        self.CreateOrDestroyHeaderWindowAsNeeded()
        self.CreateOrDestroyFooterWindowAsNeeded()

        self.SetInitialSize(size)
        wx.CallAfter(self.Layout)
def CreateOrDestroyHeaderWindowAsNeeded(self):
""" Creates or destroys the header window depending on the window style flags. """
needs_header = self.HasHeader()
has_header = self._headerWin is not None
if needs_header == has_header:
return
if needs_header:
self._headerWin = UltimateListHeaderWindow(self, wx.ID_ANY, self._mainWin,
wx.Point(0, 0),
wx.DefaultSize,
wx.TAB_TRAVERSAL, isFooter=False)
# ----------------------------------------------------
# How do you translate all this blah-blah to wxPython?
# ----------------------------------------------------
#if defined( __WXMAC__ ) && wxOSX_USE_COCOA_OR_CARBON
# wxFont font
#if wxOSX_USE_ATSU_TEXT
# font.MacCreateFromThemeFont( kThemeSmallSystemFont )
#else
# font.MacCreateFromUIFont( kCTFontSystemFontType )
#endif
# m_headerWin->SetFont( font )
#endif
self.GetSizer().Prepend(self._headerWin, 0, wx.GROW)
else:
self.GetSizer().Detach(self._headerWin)
self._headerWin.Destroy()
self._headerWin = None
def CreateOrDestroyFooterWindowAsNeeded(self):
""" Creates or destroys the footer window depending on the window style flags. """
needs_footer = self.HasFooter()
has_footer = self._footerWin is not None
if needs_footer == has_footer:
return
if needs_footer:
self._footerWin = UltimateListHeaderWindow(self, wx.ID_ANY, self._mainWin,
wx.Point(0, 0),
wx.DefaultSize,
wx.TAB_TRAVERSAL, isFooter=True)
# ----------------------------------------------------
# How do you translate all this blah-blah to wxPython?
# ----------------------------------------------------
#if defined( __WXMAC__ ) && wxOSX_USE_COCOA_OR_CARBON
# wxFont font
#if wxOSX_USE_ATSU_TEXT
# font.MacCreateFromThemeFont( kThemeSmallSystemFont )
#else
# font.MacCreateFromUIFont( kCTFontSystemFontType )
#endif
# m_headerWin->SetFont( font )
#endif
self.GetSizer().Add(self._footerWin, 0, wx.GROW)
else:
self.GetSizer().Detach(self._footerWin)
self._footerWin.Destroy()
self._footerWin = None
    def HasHeader(self):
        """ Returns ``True`` if :class:`UltimateListCtrl` has a header window. """

        # Delegated: the main window derives this from the AGW style flags.
        return self._mainWin.HasHeader()

    def HasFooter(self):
        """ Returns ``True`` if :class:`UltimateListCtrl` has a footer window. """

        return self._mainWin.HasFooter()

    def GetDefaultBorder(self):
        """ Returns the default window border. """

        return wx.BORDER_THEME
def SetSingleStyle(self, style, add=True):
"""
Adds or removes a single window style.
:param `style`: can be one of the following bits:
=============================== =========== ====================================================================================================
Window Styles Hex Value Description
=============================== =========== ====================================================================================================
``ULC_VRULES`` 0x1 Draws light vertical rules between rows in report mode.
``ULC_HRULES`` 0x2 Draws light horizontal rules between rows in report mode.
``ULC_ICON`` 0x4 Large icon view, with optional labels.
``ULC_SMALL_ICON`` 0x8 Small icon view, with optional labels.
``ULC_LIST`` 0x10 Multicolumn list view, with optional small icons. Columns are computed automatically, i.e. you don't set columns as in ``ULC_REPORT``. In other words, the list wraps, unlike a :class:`ListBox`.
``ULC_REPORT`` 0x20 Single or multicolumn report view, with optional header.
``ULC_ALIGN_TOP`` 0x40 Icons align to the top. Win32 default, Win32 only.
``ULC_ALIGN_LEFT`` 0x80 Icons align to the left.
``ULC_AUTOARRANGE`` 0x100 Icons arrange themselves. Win32 only.
``ULC_VIRTUAL`` 0x200 The application provides items text on demand. May only be used with ``ULC_REPORT``.
``ULC_EDIT_LABELS`` 0x400 Labels are editable: the application will be notified when editing starts.
``ULC_NO_HEADER`` 0x800 No header in report mode.
``ULC_NO_SORT_HEADER`` 0x1000 No Docs.
``ULC_SINGLE_SEL`` 0x2000 Single selection (default is multiple).
``ULC_SORT_ASCENDING`` 0x4000 Sort in ascending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
``ULC_SORT_DESCENDING`` 0x8000 Sort in descending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
``ULC_TILE`` 0x10000 Each item appears as a full-sized icon with a label of one or more lines beside it (partially implemented).
``ULC_NO_HIGHLIGHT`` 0x20000 No highlight when an item is selected.
``ULC_STICKY_HIGHLIGHT`` 0x40000 Items are selected by simply hovering on them, with no need to click on them.
``ULC_STICKY_NOSELEVENT`` 0x80000 Don't send a selection event when using ``ULC_STICKY_HIGHLIGHT`` style.
``ULC_SEND_LEFTCLICK`` 0x100000 Send a left click event when an item is selected.
``ULC_HAS_VARIABLE_ROW_HEIGHT`` 0x200000 The list has variable row heights.
``ULC_AUTO_CHECK_CHILD`` 0x400000 When a column header has a checkbox associated, auto-check all the subitems in that column.
``ULC_AUTO_TOGGLE_CHILD`` 0x800000 When a column header has a checkbox associated, toggle all the subitems in that column.
``ULC_AUTO_CHECK_PARENT`` 0x1000000 Only meaningful foe checkbox-type items: when an item is checked/unchecked its column header item is checked/unchecked as well.
``ULC_SHOW_TOOLTIPS`` 0x2000000 Show tooltips for ellipsized items/subitems (text too long to be shown in the available space) containing the full item/subitem text.
``ULC_HOT_TRACKING`` 0x4000000 Enable hot tracking of items on mouse motion.
``ULC_BORDER_SELECT`` 0x8000000 Changes border colour whan an item is selected, instead of highlighting the item.
``ULC_TRACK_SELECT`` 0x10000000 Enables hot-track selection in a list control. Hot track selection means that an item is automatically selected when the cursor remains over the item for a certain period of time. The delay is retrieved on Windows using the `win32api` call `win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)`, and is defaulted to 400ms on other platforms. This style applies to all views of `UltimateListCtrl`.
``ULC_HEADER_IN_ALL_VIEWS`` 0x20000000 Show column headers in all view modes.
``ULC_NO_FULL_ROW_SELECT`` 0x40000000 When an item is selected, the only the item in the first column is highlighted.
``ULC_FOOTER`` 0x80000000 Show a footer too (only when header is present).
=============================== =========== ====================================================================================================
:param `add`: ``True`` to add the window style, ``False`` to remove it.
:note: The style ``ULC_VIRTUAL`` can not be set/unset after construction.
"""
if style & ULC_VIRTUAL:
raise Exception("ULC_VIRTUAL can't be [un]set")
flag = self.GetAGWWindowStyleFlag()
if add:
if style & ULC_MASK_TYPE:
flag &= ~(ULC_MASK_TYPE | ULC_VIRTUAL)
if style & ULC_MASK_ALIGN:
flag &= ~ULC_MASK_ALIGN
if style & ULC_MASK_SORT:
flag &= ~ULC_MASK_SORT
if add:
flag |= style
else:
flag &= ~style
# some styles can be set without recreating everything (as happens in
# SetAGWWindowStyleFlag() which calls ListMainWindow.DeleteEverything())
if not style & ~(ULC_HRULES | ULC_VRULES):
self.Refresh()
self.SetAGWWindowStyleFlag(self, flag)
else:
self.SetAGWWindowStyleFlag(flag)
    def GetAGWWindowStyleFlag(self):
        """
        Returns the :class:`UltimateListCtrl` AGW-specific style flag.

        :return: an integer bitmask of ``ULC_*`` style bits.

        :see: :meth:`~UltimateListCtrl.SetAGWWindowStyleFlag` for a list of possible style flags.
        """

        return self._agwStyle
    def SetAGWWindowStyleFlag(self, style):
        """
        Sets the :class:`UltimateListCtrl` AGW-specific style flag.

        :param `style`: the AGW-specific window style; can be almost any combination of the following
         bits:

         =============================== =========== ====================================================================================================
         Window Styles                   Hex Value   Description
         =============================== =========== ====================================================================================================
         ``ULC_VRULES``                          0x1 Draws light vertical rules between rows in report mode.
         ``ULC_HRULES``                          0x2 Draws light horizontal rules between rows in report mode.
         ``ULC_ICON``                            0x4 Large icon view, with optional labels.
         ``ULC_SMALL_ICON``                      0x8 Small icon view, with optional labels.
         ``ULC_LIST``                           0x10 Multicolumn list view, with optional small icons. Columns are computed automatically, i.e. you don't set columns as in ``ULC_REPORT``. In other words, the list wraps, unlike a :class:`ListBox`.
         ``ULC_REPORT``                         0x20 Single or multicolumn report view, with optional header.
         ``ULC_ALIGN_TOP``                      0x40 Icons align to the top. Win32 default, Win32 only.
         ``ULC_ALIGN_LEFT``                     0x80 Icons align to the left.
         ``ULC_AUTOARRANGE``                   0x100 Icons arrange themselves. Win32 only.
         ``ULC_VIRTUAL``                       0x200 The application provides items text on demand. May only be used with ``ULC_REPORT``.
         ``ULC_EDIT_LABELS``                   0x400 Labels are editable: the application will be notified when editing starts.
         ``ULC_NO_HEADER``                     0x800 No header in report mode.
         ``ULC_NO_SORT_HEADER``               0x1000 No Docs.
         ``ULC_SINGLE_SEL``                   0x2000 Single selection (default is multiple).
         ``ULC_SORT_ASCENDING``               0x4000 Sort in ascending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
         ``ULC_SORT_DESCENDING``              0x8000 Sort in descending order. (You must still supply a comparison callback in :meth:`ListCtrl.SortItems`.)
         ``ULC_TILE``                        0x10000 Each item appears as a full-sized icon with a label of one or more lines beside it (partially implemented).
         ``ULC_NO_HIGHLIGHT``                0x20000 No highlight when an item is selected.
         ``ULC_STICKY_HIGHLIGHT``            0x40000 Items are selected by simply hovering on them, with no need to click on them.
         ``ULC_STICKY_NOSELEVENT``           0x80000 Don't send a selection event when using ``ULC_STICKY_HIGHLIGHT`` style.
         ``ULC_SEND_LEFTCLICK``             0x100000 Send a left click event when an item is selected.
         ``ULC_HAS_VARIABLE_ROW_HEIGHT``    0x200000 The list has variable row heights.
         ``ULC_AUTO_CHECK_CHILD``           0x400000 When a column header has a checkbox associated, auto-check all the subitems in that column.
         ``ULC_AUTO_TOGGLE_CHILD``          0x800000 When a column header has a checkbox associated, toggle all the subitems in that column.
         ``ULC_AUTO_CHECK_PARENT``         0x1000000 Only meaningful for checkbox-type items: when an item is checked/unchecked its column header item is checked/unchecked as well.
         ``ULC_SHOW_TOOLTIPS``             0x2000000 Show tooltips for ellipsized items/subitems (text too long to be shown in the available space) containing the full item/subitem text.
         ``ULC_HOT_TRACKING``              0x4000000 Enable hot tracking of items on mouse motion.
         ``ULC_BORDER_SELECT``             0x8000000 Changes border colour when an item is selected, instead of highlighting the item.
         ``ULC_TRACK_SELECT``             0x10000000 Enables hot-track selection in a list control. Hot track selection means that an item is automatically selected when the cursor remains over the item for a certain period of time. The delay is retrieved on Windows using the `win32api` call `win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)`, and is defaulted to 400ms on other platforms. This style applies to all views of `UltimateListCtrl`.
         ``ULC_HEADER_IN_ALL_VIEWS``      0x20000000 Show column headers in all view modes.
         ``ULC_NO_FULL_ROW_SELECT``       0x40000000 When an item is selected, only the item in the first column is highlighted.
         ``ULC_FOOTER``                   0x80000000 Show a footer too (only when header is present).
         ``ULC_USER_ROW_HEIGHT``         0x100000000 Allows to set a custom row height (one value for all the items, only in report mode).
         =============================== =========== ====================================================================================================
        """

        # NOTE(review): this validates the *new* style against the *old* flag
        # (HasAGWFlag reads self._agwStyle before it is reassigned below), so
        # setting REPORT and HAS_VARIABLE_ROW_HEIGHT together on a non-report
        # control raises -- confirm whether ``not style & ULC_REPORT`` was meant.
        if style & ULC_HAS_VARIABLE_ROW_HEIGHT and not self.HasAGWFlag(ULC_REPORT):
            raise Exception("ULC_HAS_VARIABLE_ROW_HEIGHT style can be used only in report mode")

        wasInReportView = self.HasAGWFlag(ULC_REPORT)

        self._agwStyle = style
        if self._mainWin:
            inReportView = (style & ULC_REPORT) != 0

            if inReportView != wasInReportView:
                # we need to notify the main window about this change as it must
                # update its data structures
                self._mainWin.SetReportView(inReportView)

            self.CreateOrDestroyHeaderWindowAsNeeded()
            self.CreateOrDestroyFooterWindowAsNeeded()
            self.GetSizer().Layout()

        # NOTE(review): this block accesses self._mainWin without the guard
        # used above -- would raise AttributeError if called before the main
        # window exists.
        if style & ULC_HAS_VARIABLE_ROW_HEIGHT:
            self._mainWin.ResetLineDimensions()
            self._mainWin.ResetVisibleLinesRange()

        self.Refresh()
    def HasAGWFlag(self, flag):
        """
        Returns ``True`` if the window has the given flag bit set.

        :param `flag`: the window style to check.

        :return: a truthy value (the masked bits) if set, 0 otherwise.

        :see: :meth:`~UltimateListCtrl.SetAGWWindowStyleFlag` for a list of valid window styles.
        """

        return self._agwStyle & flag
    def SetUserLineHeight(self, height):
        """
        Sets a custom value for the :class:`UltimateListCtrl` item height.

        :param `height`: the custom height for all the items, in pixels.

        :note: This method can be used only with ``ULC_REPORT`` and ``ULC_USER_ROW_HEIGHT`` styles set.
        """

        # Silently ignored before the main window is created.
        if self._mainWin:
            self._mainWin.SetUserLineHeight(height)

    def GetUserLineHeight(self):
        """
        Returns the custom value for the :class:`UltimateListCtrl` item height, if previously set with
        :meth:`~UltimateListCtrl.SetUserLineHeight`.

        :note: This method can be used only with ``ULC_REPORT`` and ``ULC_USER_ROW_HEIGHT`` styles set.
        """

        # Implicitly returns None before the main window is created.
        if self._mainWin:
            return self._mainWin.GetUserLineHeight()
    def GetColumn(self, col):
        """
        Returns information about this column.

        :param `col`: an integer specifying the column index.

        :return: an instance of :class:`UltimateListItem` describing the column.
        """

        return self._mainWin.GetColumn(col)

    def SetColumn(self, col, item):
        """
        Sets information about this column.

        :param `col`: an integer specifying the column index;
        :param `item`: an instance of :class:`UltimateListItem`.
        """

        self._mainWin.SetColumn(col, item)
        return True
    def GetColumnWidth(self, col):
        """
        Returns the column width for the input column.

        :param `col`: an integer specifying the column index.
        """

        return self._mainWin.GetColumnWidth(col)

    def SetColumnWidth(self, col, width):
        """
        Sets the column width.

        :param `col`: an integer specifying the column index;
        :param `width`: can be a width in pixels or ``wx.LIST_AUTOSIZE`` (-1) or
         ``wx.LIST_AUTOSIZE_USEHEADER`` (-2) or ``LIST_AUTOSIZE_FILL`` (-3).
         ``wx.LIST_AUTOSIZE`` will resize the column to the length of its longest
         item. ``wx.LIST_AUTOSIZE_USEHEADER`` will resize the column to the
         length of the header (Win32) or 80 pixels (other platforms).
         ``LIST_AUTOSIZE_FILL`` will resize the column fill the remaining width
         of the window.

        :note: In small or normal icon view, col must be -1, and the column width
         is set for all columns.
        """

        self._mainWin.SetColumnWidth(col, width)
        return True
    def GetCountPerPage(self):
        """
        Returns the number of items that can fit vertically in the visible area
        of the :class:`UltimateListCtrl` (list or report view) or the total number of
        items in the list control (icon or small icon view).
        """

        return self._mainWin.GetCountPerPage()  # different from Windows ?
    def GetItem(self, itemOrId, col=0):
        """
        Returns the information about the input item.

        :param `itemOrId`: an instance of :class:`UltimateListItem` or an integer specifying
         the item index;
        :param `col`: the column to which the item belongs to.
        """

        # Normalise an integer index into an UltimateListItem before delegating.
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItem(item, col)

    def SetItem(self, info):
        """
        Sets the information about the input item.

        :param `info`: an instance of :class:`UltimateListItem`.
        """

        self._mainWin.SetItem(info)
        return True
def SetStringItem(self, index, col, label, imageIds=[], it_kind=0):
"""
Sets a string or image at the given location.
:param `index`: the item index;
:param `col`: the column to which the item belongs to;
:param `label`: the item text;
:param `imageIds`: a Python list containing the image indexes for the
images associated to this item;
:param `it_kind`: the item kind. May be one of the following integers:
=============== ==========================
Item Kind Description
=============== ==========================
0 A normal item
1 A checkbox-like item
2 A radiobutton-type item
=============== ==========================
"""
info = UltimateListItem()
info._text = label
info._mask = ULC_MASK_TEXT
if it_kind:
info._mask |= ULC_MASK_KIND
info._kind = it_kind
info._itemId = index
info._col = col
for ids in to_list(imageIds):
if ids > -1:
info._image.append(ids)
if info._image:
info._mask |= ULC_MASK_IMAGE
self._mainWin.SetItem(info)
return index
    def GetItemState(self, item, stateMask):
        """
        Returns the item state flags for the input item.

        :param `item`: the index of the item;
        :param `stateMask`: the bitmask for the state flag.

        :see: :meth:`~UltimateListCtrl.SetItemState` for a list of valid state flags.
        """

        return self._mainWin.GetItemState(item, stateMask)
    def SetItemState(self, item, state, stateMask):
        """
        Sets the item state flags for the input item.

        :param `item`: the index of the item; if defaulted to -1, the state flag
         will be set for all the items;
        :param `state`: any combination of the following bits:

         ============================ ========= ==============================
         State Bits                   Hex Value Description
         ============================ ========= ==============================
         ``ULC_STATE_DONTCARE``             0x0 Don't care what the state is
         ``ULC_STATE_DROPHILITED``          0x1 The item is highlighted to receive a drop event
         ``ULC_STATE_FOCUSED``              0x2 The item has the focus
         ``ULC_STATE_SELECTED``             0x4 The item is selected
         ``ULC_STATE_CUT``                  0x8 The item is in the cut state
         ``ULC_STATE_DISABLED``            0x10 The item is disabled
         ``ULC_STATE_FILTERED``            0x20 The item has been filtered
         ``ULC_STATE_INUSE``               0x40 The item is in use
         ``ULC_STATE_PICKED``              0x80 The item has been picked
         ``ULC_STATE_SOURCE``             0x100 The item is a drag and drop source
         ============================ ========= ==============================

        :param `stateMask`: the bitmask for the state flag.
        """

        self._mainWin.SetItemState(item, state, stateMask)
        return True
def SetItemImage(self, item, image, selImage=-1):
"""
Sets a Python list of image indexes associated with the item.
:param `item`: an integer specifying the item index;
:param `image`: a Python list of indexes into the image list associated
with the :class:`UltimateListCtrl`. In report view, this only sets the images
for the first column;
:param `selImage`: not used at present.
"""
return self.SetItemColumnImage(item, 0, image)
def SetItemColumnImage(self, item, column, image):
"""
Sets a Python list of image indexes associated with the item in the input
column.
:param `item`: an integer specifying the item index;
:param `column`: the column to which the item belongs to;
:param `image`: a Python list of indexes into the image list associated
with the :class:`UltimateListCtrl`.
"""
info = UltimateListItem()
info._image = to_list(image)
info._mask = ULC_MASK_IMAGE
info._itemId = item
info._col = column
self._mainWin.SetItem(info)
return True
    def GetItemText(self, item):
        """
        Returns the item text.

        :param `item`: an instance of :class:`UltimateListItem` or an integer specifying
         the item index.
        """

        return self._mainWin.GetItemText(item)

    def SetItemText(self, item, text):
        """
        Sets the item text.

        :param `item`: an instance of :class:`UltimateListItem` or an integer specifying
         the item index;
        :param `text`: the new item text.
        """

        self._mainWin.SetItemText(item, text)
def GetItemData(self, item):
"""
Gets the application-defined data associated with this item.
:param `item`: an integer specifying the item index.
"""
info = UltimateListItem()
info._mask = ULC_MASK_DATA
info._itemId = item
self._mainWin.GetItem(info)
return info._data
def SetItemData(self, item, data):
"""
Sets the application-defined data associated with this item.
:param `item`: an integer specifying the item index;
:param `data`: the data to be associated with the input item.
:note: This function cannot be used to associate pointers with
the control items, use :meth:`~UltimateListCtrl.SetItemPyData` instead.
"""
info = UltimateListItem()
info._mask = ULC_MASK_DATA
info._itemId = item
info._data = data
self._mainWin.SetItem(info)
return True
def GetItemPyData(self, item):
"""
Returns the data for the item, which can be any Python object.
:param `item`: an integer specifying the item index.
:note: Please note that Python data is associated with the item and not
with subitems.
"""
info = UltimateListItem()
info._mask = ULC_MASK_PYDATA
info._itemId = item
self._mainWin.GetItem(info)
return info._pyData
def SetItemPyData(self, item, pyData):
"""
Sets the data for the item, which can be any Python object.
:param `item`: an integer specifying the item index;
:param `pyData`: any Python object.
:note: Please note that Python data is associated with the item and not
with subitems.
"""
info = UltimateListItem()
info._mask = ULC_MASK_PYDATA
info._itemId = item
info._pyData = pyData
self._mainWin.SetItem(info)
return True
SetPyData = SetItemPyData
GetPyData = GetItemPyData
def GetViewRect(self):
"""
Returns the rectangle taken by all items in the control. In other words,
if the controls client size were equal to the size of this rectangle, no
scrollbars would be needed and no free space would be left.
:note: This function only works in the icon and small icon views, not in
list or report views.
"""
return self._mainWin.GetViewRect()
def GetItemRect(self, item, code=ULC_RECT_BOUNDS):
"""
Returns the rectangle representing the item's size and position, in physical
coordinates.
:param `item`: the row in which the item lives;
:param `code`: one of ``ULC_RECT_BOUNDS``, ``ULC_RECT_ICON``, ``ULC_RECT_LABEL``.
"""
return self.GetSubItemRect(item, ULC_GETSUBITEMRECT_WHOLEITEM, code)
def GetSubItemRect(self, item, subItem, code):
"""
Returns the rectangle representing the size and position, in physical coordinates,
of the given subitem, i.e. the part of the row `item` in the column `subItem`.
:param `item`: the row in which the item lives;
:param `subItem`: the column in which the item lives. If set equal to the special
value ``ULC_GETSUBITEMRECT_WHOLEITEM`` the return value is the same as for
:meth:`~UltimateListCtrl.GetItemRect`;
:param `code`: one of ``ULC_RECT_BOUNDS``, ``ULC_RECT_ICON``, ``ULC_RECT_LABEL``.
:note: This method is only meaningful when the :class:`UltimateListCtrl` is in the
report mode.
"""
rect = self._mainWin.GetSubItemRect(item, subItem)
if self._mainWin.HasHeader():
rect.y += self._headerHeight + 1
return rect
def GetItemPosition(self, item):
"""
Returns the position of the item, in icon or small icon view.
:param `item`: the row in which the item lives.
"""
return self._mainWin.GetItemPosition(item)
def SetItemPosition(self, item, pos):
"""
Sets the position of the item, in icon or small icon view.
:param `item`: the row in which the item lives;
:param `pos`: the item position.
:note: This method is currently unimplemented and does nothing.
"""
return False
def GetItemCount(self):
""" Returns the number of items in the :class:`UltimateListCtrl`. """
return self._mainWin.GetItemCount()
def GetColumnCount(self):
""" Returns the total number of columns in the :class:`UltimateListCtrl`. """
return self._mainWin.GetColumnCount()
def SetItemSpacing(self, spacing, isSmall=False):
"""
Sets the spacing between item texts and icons.
:param `spacing`: the spacing between item texts and icons, in pixels;
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
"""
self._mainWin.SetItemSpacing(spacing, isSmall)
def GetItemSpacing(self, isSmall=False):
"""
Returns the spacing between item texts and icons, in pixels.
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
"""
return self._mainWin.GetItemSpacing(isSmall)
def SetItemTextColour(self, item, col):
"""
Sets the item text colour.
:param `item`: the index of the item;
:param `col`: a valid :class:`wx.Colour` object.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
info.SetTextColour(col)
self._mainWin.SetItem(info)
def GetItemTextColour(self, item):
"""
Returns the item text colour.
:param `item`: the index of the item.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
return info.GetTextColour()
def SetItemBackgroundColour(self, item, col):
"""
Sets the item background colour.
:param `item`: the index of the item;
:param `col`: a valid :class:`wx.Colour` object.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
info.SetBackgroundColour(col)
self._mainWin.SetItem(info)
def GetItemBackgroundColour(self, item):
"""
Returns the item background colour.
:param `item`: the index of the item.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
return info.GetBackgroundColour()
def SetItemFont(self, item, f):
"""
Sets the item font.
:param `item`: the index of the item;
:param `f`: a valid :class:`wx.Font` object.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
info.SetFont(f)
info.SetBackgroundColour(self.GetItemBackgroundColour(item))
self._mainWin.SetItem(info)
def GetItemFont(self, item):
"""
Returns the item font.
:param `item`: the index of the item.
"""
info = UltimateListItem()
info._itemId = item
info = self._mainWin.GetItem(info)
return info.GetFont()
def GetSelectedItemCount(self):
""" Returns the number of selected items in :class:`UltimateListCtrl`. """
return self._mainWin.GetSelectedItemCount()
def GetTextColour(self):
""" Returns the :class:`UltimateListCtrl` foreground colour. """
return self.GetForegroundColour()
def SetTextColour(self, col):
"""
Sets the :class:`UltimateListCtrl` foreground colour.
:param `col`: a valid :class:`wx.Colour` object.
"""
self.SetForegroundColour(col)
def GetTopItem(self):
""" Gets the index of the topmost visible item when in list or report view. """
top, dummy = self._mainWin.GetVisibleLinesRange()
return top
def GetNextItem(self, item, geometry=ULC_NEXT_ALL, state=ULC_STATE_DONTCARE):
"""
Searches for an item with the given `geometry` or `state`, starting from `item`
but excluding the `item` itself.
:param `item`: the item at which starting the search. If set to -1, the first
item that matches the specified flags will be returned.
:param `geometry`: can be one of:
=================== ========= =================================
Geometry Flag Hex Value Description
=================== ========= =================================
``ULC_NEXT_ABOVE`` 0x0 Searches for an item above the specified item
``ULC_NEXT_ALL`` 0x1 Searches for subsequent item by index
``ULC_NEXT_BELOW`` 0x2 Searches for an item below the specified item
``ULC_NEXT_LEFT`` 0x3 Searches for an item to the left of the specified item
``ULC_NEXT_RIGHT`` 0x4 Searches for an item to the right of the specified item
=================== ========= =================================
:param `state`: any combination of the following bits:
============================ ========= ==============================
State Bits Hex Value Description
============================ ========= ==============================
``ULC_STATE_DONTCARE`` 0x0 Don't care what the state is
``ULC_STATE_DROPHILITED`` 0x1 The item is highlighted to receive a drop event
``ULC_STATE_FOCUSED`` 0x2 The item has the focus
``ULC_STATE_SELECTED`` 0x4 The item is selected
``ULC_STATE_CUT`` 0x8 The item is in the cut state
``ULC_STATE_DISABLED`` 0x10 The item is disabled
``ULC_STATE_FILTERED`` 0x20 The item has been filtered
``ULC_STATE_INUSE`` 0x40 The item is in use
``ULC_STATE_PICKED`` 0x80 The item has been picked
``ULC_STATE_SOURCE`` 0x100 The item is a drag and drop source
============================ ========= ==============================
:return: The first item with given `state` following `item` or -1 if no such item found.
:note: This function may be used to find all selected items in the
control like this::
item = -1
while 1:
item = listctrl.GetNextItem(item, ULC_NEXT_ALL, ULC_STATE_SELECTED)
if item == -1:
break
# This item is selected - do whatever is needed with it
wx.LogMessage("Item %ld is selected."%item)
"""
return self._mainWin.GetNextItem(item, geometry, state)
def GetImageList(self, which):
"""
Returns the image list associated with the control.
:param `which`: one of ``wx.IMAGE_LIST_NORMAL``, ``wx.IMAGE_LIST_SMALL``,
``wx.IMAGE_LIST_STATE`` (the last is unimplemented).
:note:
As :class:`UltimateListCtrl` allows you to use a standard :class:`wx.ImageList` or
:class:`PyImageList`, the returned object depends on which kind of image list you
chose.
"""
if which == wx.IMAGE_LIST_NORMAL:
return self._imageListNormal
elif which == wx.IMAGE_LIST_SMALL:
return self._imageListSmall
elif which == wx.IMAGE_LIST_STATE:
return self._imageListState
return None
def SetImageList(self, imageList, which):
"""
Sets the image list associated with the control.
:param `imageList`: an instance of :class:`wx.ImageList` or an instance of :class:`PyImageList`;
:param `which`: one of ``wx.IMAGE_LIST_NORMAL``, ``wx.IMAGE_LIST_SMALL``,
``wx.IMAGE_LIST_STATE`` (the last is unimplemented).
:note: Using :class:`PyImageList` enables you to have images of different size inside the
image list. In your derived class, instead of doing this::
imageList = wx.ImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
You should do this::
imageList = PyImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
"""
if which == wx.IMAGE_LIST_NORMAL:
self._imageListNormal = imageList
elif which == wx.IMAGE_LIST_SMALL:
self._imageListSmall = imageList
elif which == wx.IMAGE_LIST_STATE:
self._imageListState = imageList
self._mainWin.SetImageList(imageList, which)
def AssignImageList(self, imageList, which):
"""
Assigns the image list associated with the control.
:param `imageList`: an instance of :class:`wx.ImageList` or an instance of :class:`PyImageList`;
:param `which`: one of ``wx.IMAGE_LIST_NORMAL``, ``wx.IMAGE_LIST_SMALL``,
``wx.IMAGE_LIST_STATE`` (the last is unimplemented).
:note: Using :class:`PyImageList` enables you to have images of different size inside the
image list. In your derived class, instead of doing this::
imageList = wx.ImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
You should do this::
imageList = PyImageList(16, 16)
imageList.Add(someBitmap)
self.SetImageList(imageList, wx.IMAGE_LIST_SMALL)
"""
self.SetImageList(imageList, which)
def Arrange(self, flag):
"""
Arranges the items in icon or small icon view.
:param `flag`: one of the following bits:
========================== ========= ===============================
Alignment Flag Hex Value Description
========================== ========= ===============================
``ULC_ALIGN_DEFAULT`` 0x0 Default alignment
``ULC_ALIGN_SNAP_TO_GRID`` 0x3 Snap to grid
========================== ========= ===============================
:note: This method is currently unimplemented and does nothing.
"""
return 0
def DeleteItem(self, item):
"""
Deletes the specified item.
:param `item`: the index of the item to delete.
:note: This function sends the ``EVT_LIST_DELETE_ITEM`` event for the item
being deleted.
"""
self._mainWin.DeleteItem(item)
return True
def DeleteAllItems(self):
"""
Deletes all items in the :class:`UltimateListCtrl`.
:note: This function does not send the ``EVT_LIST_DELETE_ITEM`` event because
deleting many items from the control would be too slow then (unlike :meth:`~UltimateListCtrl.DeleteItem`).
"""
self._mainWin.DeleteAllItems()
return True
def DeleteAllColumns(self):
""" Deletes all the column in :class:`UltimateListCtrl`. """
count = len(self._mainWin._columns)
for n in range(count):
self.DeleteColumn(0)
return True
def ClearAll(self):
""" Deletes everything in :class:`UltimateListCtrl`. """
self._mainWin.DeleteEverything()
def DeleteColumn(self, col):
"""
Deletes the specified column.
:param `col`: the index of the column to delete.
"""
self._mainWin.DeleteColumn(col)
return True
def EditLabel(self, item):
"""
Starts editing an item label.
:param `item`: the index of the item to edit.
"""
self._mainWin.EditLabel(item)
    def EnsureVisible(self, item):
        """
        Ensures this item is visible, scrolling if necessary.
        :param `item`: the index of the item to scroll into view.
        """
        self._mainWin.EnsureVisible(item)
        return True
    def FindItem(self, start, str, partial=False):
        """
        Find the first item whose label matches `str`.
        :param `start`: the item index after which searching begins, or the
         beginning if `start` is -1;
        :param `str`: the string to look for matches (this parameter shadows
         the built-in ``str``; the name is kept for backward compatibility
         with keyword callers);
        :param `partial`: if ``True`` then this method will look for items
         whose label merely begins with `str`.
        :note: The string comparison is case insensitive.
        """
        return self._mainWin.FindItem(start, str, partial)
def FindItemData(self, start, data):
"""
Find an item whose data matches this data.
:param `start`: the starting point of the input `data` or the beginning
if `start` is -1;
:param `data`: the data to look for matches.
"""
return self._mainWin.FindItemData(start, data)
    def FindItemAtPos(self, start, pt):
        """
        Find an item nearest this position.
        :param `start`: ignored -- accepted only for signature symmetry with
         the other ``Find*`` methods;
        :param `pt`: an instance of :class:`wx.Point`.
        """
        return self._mainWin.FindItemAtPos(pt)
def HitTest(self, pointOrTuple):
"""
HitTest method for a :class:`UltimateListCtrl`.
:param `pointOrTuple`: an instance of :class:`wx.Point` or a tuple representing
the mouse `x`, `y` position.
:see: :meth:`UltimateListMainWindow.HitTestLine() <UltimateListMainWindow.HitTestLine>` for a list of return flags.
"""
if isinstance(pointOrTuple, wx.Point):
x, y = pointOrTuple.x, pointOrTuple.y
else:
x, y = pointOrTuple
return self._mainWin.HitTest(x, y)
def InsertItem(self, info):
"""
Inserts an item into :class:`UltimateListCtrl`.
:param `info`: an instance of :class:`UltimateListItem`.
"""
self._mainWin.InsertItem(info)
return info._itemId
def InsertStringItem(self, index, label, it_kind=0):
"""
Inserts a string item at the given location.
:param `index`: the index at which we wish to insert the item;
:param `label`: the item text;
:param `it_kind`: the item kind.
:see: :meth:`~UltimateListCtrl.SetStringItem` for a list of valid item kinds.
"""
info = UltimateListItem()
info._text = label
info._mask = ULC_MASK_TEXT
if it_kind:
info._mask |= ULC_MASK_KIND
info._kind = it_kind
info._itemId = index
return self.InsertItem(info)
def InsertImageItem(self, index, imageIds, it_kind=0):
"""
Inserts an image item at the given location.
:param `index`: the index at which we wish to insert the item;
:param `imageIds`: a Python list containing the image indexes for the
images associated to this item;
:param `it_kind`: the item kind.
:see: :meth:`~UltimateListCtrl.SetStringItem` for a list of valid item kinds.
"""
info = UltimateListItem()
info._mask = ULC_MASK_IMAGE
if it_kind:
info._mask |= ULC_MASK_KIND
info._kind = it_kind
info._image = to_list(imageIds)
info._itemId = index
return self.InsertItem(info)
def InsertImageStringItem(self, index, label, imageIds, it_kind=0):
"""
Inserts an image+string item at the given location.
:param `index`: the index at which we wish to insert the item;
:param `label`: the item text;
:param `imageIds`: a Python list containing the image indexes for the
images associated to this item;
:param `it_kind`: the item kind.
:see: :meth:`~UltimateListCtrl.SetStringItem` for a list of valid item kinds.
"""
info = UltimateListItem()
info._text = label
info._image = to_list(imageIds)
info._mask = ULC_MASK_TEXT | ULC_MASK_IMAGE
if it_kind:
info._mask |= ULC_MASK_KIND
info._kind = it_kind
info._itemId = index
return self.InsertItem(info)
def InsertColumnInfo(self, col, item):
"""
Inserts a column into :class:`UltimateListCtrl`.
:param `col`: the column index at which we wish to insert a column;
:param `item`: an instance of :class:`UltimateListItem`.
:return: the index at which the column has been inserted.
"""
if not self._mainWin.InReportView() and not self.HasAGWFlag(ULC_HEADER_IN_ALL_VIEWS) and \
not self._mainWin.InTileView():
raise Exception("Can't add column in non report/tile modes or without the ULC_HEADER_IN_ALL_VIEWS style set")
idx = self._mainWin.InsertColumn(col, item)
if self._headerWin:
self._headerWin.Refresh()
return idx
    def InsertColumn(self, col, heading, format=ULC_FORMAT_LEFT, width=-1):
        """
        Inserts a column into :class:`UltimateListCtrl`.
        :param `col`: the column index at which we wish to insert a column;
        :param `heading`: the header text;
        :param `format`: the column alignment flag. This can be one of the following
         bits:
         ``ULC_FORMAT_LEFT`` (0x0, left-aligned), ``ULC_FORMAT_RIGHT`` (0x1,
         right-aligned), ``ULC_FORMAT_CENTRE``/``ULC_FORMAT_CENTER`` (0x2,
         centre-aligned);
        :param `width`: can be a width in pixels or ``wx.LIST_AUTOSIZE`` (-1) or
         ``wx.LIST_AUTOSIZE_USEHEADER`` (-2) or ``LIST_AUTOSIZE_FILL`` (-3).
         ``wx.LIST_AUTOSIZE`` will resize the column to the length of its longest
         item. ``wx.LIST_AUTOSIZE_USEHEADER`` will resize the column to the
         length of the header (Win32) or 80 pixels (other platforms).
         ``LIST_AUTOSIZE_FILL`` will resize the column fill the remaining width
         of the window.
        :return: the index at which the column has been inserted.
        """
        item = UltimateListItem()
        item._mask = ULC_MASK_TEXT | ULC_MASK_FORMAT | ULC_MASK_FONT
        item._text = heading
        # NOTE(review): ``LIST_AUTOSIZE_FILL`` (-3) fails this test, so
        # ULC_MASK_WIDTH is deliberately not set for it -- presumably the
        # fill width is computed later from the absence of the mask; confirm
        # before changing this condition.
        if width >= -2:
            item._mask |= ULC_MASK_WIDTH
            item._width = width
        item._format = format
        return self.InsertColumnInfo(col, item)
def IsColumnShown(self, column):
"""
Returns ``True`` if the input column is shown, ``False`` if it is hidden.
:param `column`: an integer specifying the column index.
"""
if self._headerWin:
return self._mainWin.IsColumnShown(column)
raise Exception("Showing/hiding columns works only with the header shown")
def SetColumnShown(self, column, shown=True):
"""
Sets the specified column as shown or hidden.
:param `column`: an integer specifying the column index;
:param `shown`: ``True`` to show the column, ``False`` to hide it.
"""
col = self.GetColumn(column)
col._mask |= ULC_MASK_SHOWN
col.SetShown(shown)
self._mainWin.SetColumn(column, col)
self.Update()
def ScrollList(self, dx, dy):
"""
Scrolls the :class:`UltimateListCtrl`.
:param `dx`: if in icon, small icon or report view mode, specifies the number
of pixels to scroll. If in list view mode, `dx` specifies the number of
columns to scroll.
:param `dy`: always specifies the number of pixels to scroll vertically.
"""
return self._mainWin.ScrollList(dx, dy)
# Sort items.
# The return value is a negative number if the first item should precede the second
    # item, a positive number if the second item should precede the first,
# or zero if the two items are equivalent.
def SortItems(self, func=None):
"""
Call this function to sort the items in the :class:`UltimateListCtrl`. Sorting is done
using the specified function `func`. This function must have the
following prototype::
def OnCompareItems(self, line1, line2):
DoSomething(line1, line2)
# function code
It is called each time when the two items must be compared and should return 0
if the items are equal, negative value if the first item is less than the second
one and positive value if the first one is greater than the second one.
:param `func`: the method to use to sort the items. The default is to use the
:meth:`UltimateListMainWindow.OnCompareItems() <UltimateListMainWindow.OnCompareItems>` method.
"""
self._mainWin.SortItems(func)
wx.CallAfter(self.Refresh)
return True
# ----------------------------------------------------------------------------
# event handlers
# ----------------------------------------------------------------------------
    def OnSize(self, event):
        """
        Handles the ``wx.EVT_SIZE`` event for :class:`UltimateListCtrl`.
        :param `event`: a :class:`wx.SizeEvent` event to be processed.
        """
        if not self.IsShownOnScreen():
            # We don't have the proper column sizes until we are visible so
            # use CallAfter to resize the columns on the first display
            if self._mainWin:
                wx.CallAfter(self._mainWin.ResizeColumns)
        if not self._mainWin:
            return
        # We need to override OnSize so that our scrolled
        # window a) does call Layout() to use sizers for
        # positioning the controls but b) does not query
        # the sizer for their size and use that for setting
        # the scrollable area as set that ourselves by
        # calling SetScrollbar() further down.
        self.DoLayout()
def OnSetFocus(self, event):
"""
Handles the ``wx.EVT_SET_FOCUS`` event for :class:`UltimateListCtrl`.
:param `event`: a :class:`FocusEvent` event to be processed.
"""
if self._mainWin:
self._mainWin.SetFocusIgnoringChildren()
self._mainWin.Update()
event.Skip()
def OnInternalIdle(self):
"""
This method is normally only used internally, but sometimes an application
may need it to implement functionality that should not be disabled by an
application defining an `OnIdle` handler in a derived class.
This method may be used to do delayed painting, for example, and most
implementations call :meth:`wx.Window.UpdateWindowUI` in order to send update events
to the window in idle time.
"""
wx.Control.OnInternalIdle(self)
# do it only if needed
if self._mainWin and self._mainWin._dirty:
self._mainWin._shortItems = []
self._mainWin.RecalculatePositions()
# ----------------------------------------------------------------------------
# font/colours
# ----------------------------------------------------------------------------
    def SetBackgroundColour(self, colour):
        """
        Changes the background colour of :class:`UltimateListCtrl`.
        :param `colour`: the colour to be used as the background colour, pass
         :class:`NullColour` to reset to the default colour.
        :note: The background colour is usually painted by the default :class:`EraseEvent`
         event handler function under Windows and automatically under GTK.
        :note: Setting the background colour does not cause an immediate refresh, so
         you may wish to call :meth:`wx.Window.ClearBackground` or :meth:`wx.Window.Refresh` after
         calling this function.
        :note: Overridden from :class:`wx.Control`.
        """
        # NOTE(review): unlike SetForegroundColour/SetFont this never calls
        # the wx.Control base implementation and always returns True --
        # confirm this asymmetry is intentional before relying on the result.
        if self._mainWin:
            self._mainWin.SetBackgroundColour(colour)
            self._mainWin._dirty = True
        return True
    def SetForegroundColour(self, colour):
        """
        Changes the foreground colour of :class:`UltimateListCtrl` and
        propagates it to the main and header windows.
        :param `colour`: the colour to be used as the foreground colour, pass
         :class:`NullColour` to reset to the default colour.
        :return: ``False`` if the base class rejected the colour change.
        :note: Overridden from :class:`wx.Control`.
        """
        if not wx.Control.SetForegroundColour(self, colour):
            return False
        if self._mainWin:
            self._mainWin.SetForegroundColour(colour)
            # Force a recalculation/repaint on the next idle pass.
            self._mainWin._dirty = True
        if self._headerWin:
            self._headerWin.SetForegroundColour(colour)
        return True
    def SetFont(self, font):
        """
        Sets the :class:`UltimateListCtrl` font and propagates it to the main
        and header windows, then refreshes.
        :param `font`: a valid :class:`wx.Font` instance.
        :return: ``False`` if the base class rejected the font change.
        :note: Overridden from :class:`wx.Control`.
        """
        if not wx.Control.SetFont(self, font):
            return False
        if self._mainWin:
            self._mainWin.SetFont(font)
            # Force a recalculation/repaint on the next idle pass.
            self._mainWin._dirty = True
        if self._headerWin:
            self._headerWin.SetFont(font)
        self.Refresh()
        return True
def GetClassDefaultAttributes(self, variant):
"""
Returns the default font and colours which are used by the control. This is
useful if you want to use the same font or colour in your own control as in
a standard control -- which is a much better idea than hard coding specific
colours or fonts which might look completely out of place on the users system,
especially if it uses themes.
This static method is "overridden'' in many derived classes and so calling,
for example, :meth:`Button.GetClassDefaultAttributes` () will typically return the
values appropriate for a button which will be normally different from those
returned by, say, :meth:`ListCtrl.GetClassDefaultAttributes` ().
:note: The :class:`VisualAttributes` structure has at least the fields `font`,
`colFg` and `colBg`. All of them may be invalid if it was not possible to
determine the default control appearance or, especially for the background
colour, if the field doesn't make sense as is the case for `colBg` for the
controls with themed background.
:note: Overridden from :class:`wx.Control`.
"""
attr = wx.VisualAttributes()
attr.colFg = wx.SystemSettings.GetColour(wx.SYS_COLOUR_LISTBOXTEXT)
attr.colBg = wx.SystemSettings.GetColour(wx.SYS_COLOUR_LISTBOX)
attr.font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
return attr
def GetScrolledWin(self):
""" Returns the header window owner. """
return self._headerWin.GetOwner()
# ----------------------------------------------------------------------------
# methods forwarded to self._mainWin
# ----------------------------------------------------------------------------
def SetDropTarget(self, dropTarget):
"""
Associates a drop target with this window.
If the window already has a drop target, it is deleted.
:param `dropTarget`: an instance of :class:`DropTarget`.
:note: Overridden from :class:`wx.Control`.
"""
self._mainWin.SetDropTarget(dropTarget)
def GetDropTarget(self):
"""
Returns the associated drop target, which may be ``None``.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.GetDropTarget()
def SetCursor(self, cursor):
"""
Sets the window's cursor.
:param `cursor`: specifies the cursor that the window should normally display.
The `cursor` may be :class:`NullCursor` in which case the window cursor will be
reset back to default.
:note: The window cursor also sets it for the children of the window implicitly.
:note: Overridden from :class:`wx.Control`.
"""
return (self._mainWin and [self._mainWin.SetCursor(cursor)] or [False])[0]
def GetBackgroundColour(self):
"""
Returns the background colour of the window.
:note: Overridden from :class:`wx.Control`.
"""
return (self._mainWin and [self._mainWin.GetBackgroundColour()] or [wx.NullColour])[0]
def GetForegroundColour(self):
"""
Returns the foreground colour of the window.
:note: Overridden from :class:`wx.Control`.
"""
return (self._mainWin and [self._mainWin.GetForegroundColour()] or [wx.NullColour])[0]
def PopupMenu(self, menu, pos=wx.DefaultPosition):
"""
Pops up the given `menu` at the specified coordinates, relative to this window,
and returns control when the user has dismissed the menu. If a menu item is
selected, the corresponding menu event is generated and will be processed as
usual. If the coordinates are not specified, the current mouse cursor position
is used.
:param `menu`: an instance of :class:`wx.Menu` to pop up;
:param `pos`: the position where the menu will appear.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.PopupMenu(menu, pos)
def ClientToScreen(self, pointOrTuple):
"""
Converts to screen coordinates from coordinates relative to this window.
:param `pointOrTuple`: an instance of :class:`wx.Point` or a tuple representing the
`x`, `y` coordinates for this point.
:return: the coordinates relative to the screen.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.ClientToScreen(*pointOrTuple)
def ClientToScreenXY(self, x, y):
"""
Converts to screen coordinates from coordinates relative to this window.
:param `x`: an integer specifying the `x` client coordinate;
:param `y`: an integer specifying the `y` client coordinate.
:return: the coordinates relative to the screen.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.ClientToScreen(x, y)
def ScreenToClient(self, pointOrTuple):
"""
Converts from screen to client window coordinates.
:param `pointOrTuple`: an instance of :class:`wx.Point` or a tuple representing the
`x`, `y` coordinates for this point.
:return: the coordinates relative to this window.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.ScreenToClient(*pointOrTuple)
def ScreenToClientXY(self, x, y):
"""
Converts from screen to client window coordinates.
:param `x`: an integer specifying the `x` screen coordinate;
:param `y`: an integer specifying the `y` screen coordinate.
:return: the coordinates relative to this window.
:note: Overridden from :class:`wx.Control`.
"""
return self._mainWin.ScreenToClient(x, y)
def SetFocus(self):
""" This sets the window to receive keyboard input. """
# The test in window.cpp fails as we are a composite
# window, so it checks against "this", but not self._mainWin.
if wx.Window.FindFocus() != self:
self._mainWin.SetFocusIgnoringChildren()
def DoGetBestSize(self):
"""
Gets the size which best suits the window: for a control, it would be the
minimal size which doesn't truncate the control, for a panel - the same size
as it would have after a call to `Fit()`.
"""
# Something is better than nothing...
# 100x80 is what the MSW version will get from the default
# wx.Control.DoGetBestSize
return wx.Size(100, 80)
# ----------------------------------------------------------------------------
# virtual list control support
# ----------------------------------------------------------------------------
def OnGetItemText(self, item, col):
"""
This function **must** be overloaded in the derived class for a control with
``ULC_VIRTUAL`` style. It should return the string containing the text of
the given column for the specified item.
:param `item`: an integer specifying the item index;
:param `col`: the column index to which the item belongs to.
"""
# this is a pure virtual function, in fact - which is not really pure
# because the controls which are not virtual don't need to implement it
raise Exception("UltimateListCtrl.OnGetItemText not supposed to be called")
def OnGetItemTextColour(self, item, col):
"""
This function **must** be overloaded in the derived class for a control with
``ULC_VIRTUAL`` style. It should return a :class:`wx.Colour` object or ``None`` for
the default color.
:param `item`: an integer specifying the item index;
:param `col`: the column index to which the item belongs to.
"""
# this is a pure virtual function, in fact - which is not really pure
# because the controls which are not virtual don't need to implement it
raise Exception("UltimateListCtrl.OnGetItemTextColour not supposed to be called")
def OnGetItemToolTip(self, item, col):
"""
This function **must** be overloaded in the derived class for a control with
``ULC_VIRTUAL`` style. It should return the string containing the text of
the tooltip for the specified item.
:param `item`: an integer specifying the item index;
:param `col`: the column index to which the item belongs to.
"""
# this is a pure virtual function, in fact - which is not really pure
# because the controls which are not virtual don't need to implement it
raise Exception("UltimateListCtrl.OnGetItemToolTip not supposed to be called")
def OnGetItemImage(self, item):
"""
This function **must** be overloaded in the derived class for a control with
``ULC_VIRTUAL`` style having an image list (if the control doesn't have an
image list, it is not necessary to overload it). It should return a Python
list of indexes representing the images associated to the input item or an
empty list for no images.
:param `item`: an integer specifying the item index;
:note: In a control with ``ULC_REPORT`` style, :meth:`~UltimateListCtrl.OnGetItemImage` only gets called
for the first column of each line.
:note: The base class version always returns an empty Python list.
"""
if self.GetImageList(wx.IMAGE_LIST_SMALL):
raise Exception("List control has an image list, OnGetItemImage should be overridden.")
return []
    def OnGetItemColumnImage(self, item, column=0):
        """
        This function **must** be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` and ``ULC_REPORT`` style. It should return a Python list of
        indexes representing the images associated to the input item or an empty list
        for no images.
        :param `item`: an integer specifying the item index.
        :note: The base class version always returns an empty Python list.
        """
        # Only the first column falls back to the single-column hook; other
        # columns carry no images unless the derived class overrides this.
        if column == 0:
            return self.OnGetItemImage(item)
        return []
    def OnGetItemAttr(self, item):
        """
        This function may be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` style. It should return the attribute for the specified
        item or ``None`` to use the default appearance parameters.
        :param `item`: an integer specifying the item index.
        :note:
        :class:`UltimateListCtrl` will not delete the pointer or keep a reference of it.
        You can return the same :class:`UltimateListItemAttr` pointer for every
        :meth:`~UltimateListCtrl.OnGetItemAttr` call.
        :note: The base class version always returns ``None``.
        """
        # NOTE(review): valid indices are 0..GetItemCount()-1, so the `>` below
        # lets `item == GetItemCount()` through — presumably upstream behaviour;
        # TODO confirm before tightening to `>=`.
        if item < 0 or item > self.GetItemCount():
            raise Exception("Invalid item index in OnGetItemAttr()")
        # no attributes by default
        return None
    def OnGetItemCheck(self, item):
        """
        This function may be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` style. It should return whether a checkbox-like item or
        a radiobutton-like item is checked or unchecked.
        :param `item`: an integer specifying the item index.
        :note: The base class version always returns an empty list.
        """
        return []
    def OnGetItemColumnCheck(self, item, column=0):
        """
        This function **must** be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` and ``ULC_REPORT`` style. It should return whether a
        checkbox-like item or a radiobutton-like item in the column header is checked
        or unchecked.
        :param `item`: an integer specifying the item index.
        :note: The base class version always returns an empty Python list.
        """
        # Same delegation pattern as OnGetItemColumnImage: column 0 reuses the
        # single-column hook, every other column reports "no check state".
        if column == 0:
            return self.OnGetItemCheck(item)
        return []
    def OnGetItemKind(self, item):
        """
        This function **must** be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` style. It should return the item kind for the input item.
        :param `item`: an integer specifying the item index.
        :note: The base class version always returns 0 (a standard item).
        :see: :meth:`~UltimateListCtrl.SetItemKind` for a list of valid item kinds.
        """
        return 0
    def OnGetItemColumnKind(self, item, column=0):
        """
        This function **must** be overloaded in the derived class for a control with
        ``ULC_VIRTUAL`` style. It should return the item kind for the input item in
        the header window.
        :param `item`: an integer specifying the item index;
        :param `column`: the column index.
        :note: The base class version always returns 0 (a standard item).
        :see: :meth:`~UltimateListCtrl.SetItemKind` for a list of valid item kinds.
        """
        if column == 0:
            return self.OnGetItemKind(item)
        return 0
    def SetItemCount(self, count):
        """
        Sets the total number of items we handle.
        :param `count`: the total number of items we handle.
        """
        # Virtual mode is mandatory: a non-virtual control owns its items and
        # derives the count from them.
        if not self._mainWin.IsVirtual():
            raise Exception("This is for virtual controls only")
        self._mainWin.SetItemCount(count)
    def RefreshItem(self, item):
        """
        Redraws the given item.
        :param `item`: an integer specifying the item index;
        :note: This is only useful for the virtual list controls as without calling
        this function the displayed value of the item doesn't change even when the
        underlying data does change.
        """
        self._mainWin.RefreshLine(item)
    def RefreshItems(self, itemFrom, itemTo):
        """
        Redraws the items between `itemFrom` and `itemTo`.
        The starting item must be less than or equal to the ending one.
        Just as :meth:`~UltimateListCtrl.RefreshItem` this is only useful for virtual list controls
        :param `itemFrom`: the first index of the refresh range;
        :param `itemTo`: the last index of the refresh range.
        """
        self._mainWin.RefreshLines(itemFrom, itemTo)
    #
    # Generic UltimateListCtrl is more or less a container for two other
    # windows which drawings are done upon. These are namely
    # 'self._headerWin' and 'self._mainWin'.
    # Here we override 'virtual wxWindow::Refresh()' to mimic the
    # behaviour UltimateListCtrl has under wxMSW.
    #
    def Refresh(self, eraseBackground=True, rect=None):
        """
        Causes this window, and all of its children recursively (except under wxGTK1
        where this is not implemented), to be repainted.
        :param `eraseBackground`: If ``True``, the background will be erased;
        :param `rect`: If not ``None``, only the given rectangle will be treated as damaged.
        :note: Note that repainting doesn't happen immediately but only during the next
        event loop iteration, if you need to update the window immediately you should
        use :meth:`~UltimateListCtrl.Update` instead.
        :note: Overridden from :class:`wx.Control`.
        """
        if not rect:
            # The easy case, no rectangle specified.
            if self._headerWin:
                self._headerWin.Refresh(eraseBackground)
            if self._mainWin:
                self._mainWin.Refresh(eraseBackground)
        else:
            # Refresh the header window
            if self._headerWin:
                rectHeader = self._headerWin.GetRect()
                rectHeader.Intersect(rect)
                if rectHeader.GetWidth() and rectHeader.GetHeight():
                    # Translate the damaged rect into header-local coordinates
                    # before forwarding it.
                    x, y = self._headerWin.GetPosition()
                    rectHeader.OffsetXY(-x, -y)
                    self._headerWin.Refresh(eraseBackground, rectHeader)
            # Refresh the main window
            if self._mainWin:
                rectMain = self._mainWin.GetRect()
                rectMain.Intersect(rect)
                if rectMain.GetWidth() and rectMain.GetHeight():
                    # Same translation for the main window.
                    x, y = self._mainWin.GetPosition()
                    rectMain.OffsetXY(-x, -y)
                    self._mainWin.Refresh(eraseBackground, rectMain)
    def Update(self):
        """
        Calling this method immediately repaints the invalidated area of the window
        and all of its children recursively while this would usually only happen when
        the flow of control returns to the event loop.
        :note: This function doesn't invalidate any area of the window so nothing
        happens if nothing has been invalidated (i.e. marked as requiring a redraw).
        Use :meth:`~UltimateListCtrl.Refresh` first if you want to immediately redraw the window unconditionally.
        :note: Overridden from :class:`wx.Control`.
        """
        # Reset the cached visible-line range so the repaint recomputes it.
        self._mainWin.ResetVisibleLinesRange(True)
        wx.Control.Update(self)
def GetEditControl(self):
"""
Returns a pointer to the edit :class:`UltimateListTextCtrl` if the item is being edited or
``None`` otherwise (it is assumed that no more than one item may be edited
simultaneously).
"""
retval = None
if self._mainWin:
retval = self._mainWin.GetEditControl()
return retval
    def Select(self, idx, on=True):
        """
        Selects/deselects an item.
        :param `idx`: the index of the item to select;
        :param `on`: ``True`` to select the item, ``False`` to deselect it.
        """
        item = CreateListItem(idx, 0)
        item = self._mainWin.GetItem(item, 0)
        # Disabled items silently ignore selection requests.
        if not item.IsEnabled():
            return
        if on:
            state = ULC_STATE_SELECTED
        else:
            state = 0
        self.SetItemState(idx, state, ULC_STATE_SELECTED)
    def Focus(self, idx):
        """
        Focus and show the given item.
        :param `idx`: the index of the item to be focused.
        """
        self.SetItemState(idx, ULC_STATE_FOCUSED, ULC_STATE_FOCUSED)
        self.EnsureVisible(idx)
    def GetFocusedItem(self):
        """ Returns the currently focused item or -1 if none is focused. """
        return self.GetNextItem(-1, ULC_NEXT_ALL, ULC_STATE_FOCUSED)
    def GetFirstSelected(self):
        """ Return first selected item, or -1 when none is selected. """
        # Starting the scan from -1 yields the first match.
        return self.GetNextSelected(-1)
    def GetNextSelected(self, item):
        """
        Returns subsequent selected items, or -1 when no more are selected.
        :param `item`: the index of the item.
        """
        return self.GetNextItem(item, ULC_NEXT_ALL, ULC_STATE_SELECTED)
    def IsSelected(self, idx):
        """
        Returns ``True`` if the item is selected.
        :param `idx`: the index of the item to check for selection.
        """
        return (self.GetItemState(idx, ULC_STATE_SELECTED) & ULC_STATE_SELECTED) != 0
    def IsItemChecked(self, itemOrId, col=0):
        """
        Returns whether an item is checked or not.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.IsItemChecked(item)
    def IsItemEnabled(self, itemOrId, col=0):
        """
        Returns whether an item is enabled or not.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.IsItemEnabled(item)
    def GetItemKind(self, itemOrId, col=0):
        """
        Returns the item kind.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        :see: :meth:`~UltimateListCtrl.SetItemKind` for a list of valid item kinds.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemKind(item)
    def SetItemKind(self, itemOrId, col=0, kind=0):
        """
        Sets the item kind.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `kind`: may be one of the following integers:
        =============== ==========================
        Item Kind     Description
        =============== ==========================
               0       A normal item
               1       A checkbox-like item
               2       A radiobutton-type item
        =============== ==========================
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.SetItemKind(item, kind)
    def EnableItem(self, itemOrId, col=0, enable=True):
        """
        Enables/disables an item.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `enable`: ``True`` to enable the item, ``False`` otherwise.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.EnableItem(item, enable)
    def IsItemHyperText(self, itemOrId, col=0):
        """
        Returns whether an item is hypertext or not.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.IsItemHyperText(item)
    def SetItemHyperText(self, itemOrId, col=0, hyper=True):
        """
        Sets whether the item is hypertext or not.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `hyper`: ``True`` to have an item with hypertext behaviour, ``False`` otherwise.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.SetItemHyperText(item, hyper)
    def SetColumnToolTip(self, col, tip):
        """
        Sets the tooltip for the column header
        :param `col`: the column index;
        :param `tip`: the tooltip text
        """
        # Read-modify-write: fetch the column item, change it, push it back.
        item = self.GetColumn(col)
        item.SetToolTip(tip)
        self.SetColumn(col, item)
    def SetColumnImage(self, col, image):
        """
        Sets one or more images to the specified column.
        :param `col`: the column index;
        :param `image`: a Python list containing the image indexes for the
        images associated to this column item.
        """
        item = self.GetColumn(col)
        # preserve all other attributes too
        item.SetMask(ULC_MASK_STATE |
                     ULC_MASK_TEXT |
                     ULC_MASK_IMAGE |
                     ULC_MASK_DATA |
                     ULC_SET_ITEM |
                     ULC_MASK_WIDTH |
                     ULC_MASK_FORMAT |
                     ULC_MASK_FONTCOLOUR |
                     ULC_MASK_FONT |
                     ULC_MASK_BACKCOLOUR |
                     ULC_MASK_KIND |
                     ULC_MASK_CHECK
                     )
        item.SetImage(image)
        self.SetColumn(col, item)
    def ClearColumnImage(self, col):
        """
        Clears all the images in the specified column.
        :param `col`: the column index;
        """
        # -1 is the conventional "no image" index.
        self.SetColumnImage(col, -1)
    def Append(self, entry):
        """
        Append an item to the :class:`UltimateListCtrl`.
        :param `entry`: should be a sequence with an item for each column.
        """
        # NOTE(review): an empty/falsy `entry` returns None (no row added) —
        # presumably intentional; verify against callers.
        if entry:
            pos = self.GetItemCount()
            self.InsertStringItem(pos, six.u(entry[0]))
            for i in range(1, len(entry)):
                self.SetStringItem(pos, i, six.u(entry[i]))
            return pos
    def SetFirstGradientColour(self, colour=None):
        """
        Sets the first gradient colour for gradient-style selections.
        :param `colour`: if not ``None``, a valid :class:`wx.Colour` instance. Otherwise,
        the colour is taken from the system value ``wx.SYS_COLOUR_HIGHLIGHT``.
        """
        self._mainWin.SetFirstGradientColour(colour)
    def SetSecondGradientColour(self, colour=None):
        """
        Sets the second gradient colour for gradient-style selections.
        :param `colour`: if not ``None``, a valid :class:`wx.Colour` instance. Otherwise,
        the colour generated is a slightly darker version of the :class:`UltimateListCtrl`
        background colour.
        """
        self._mainWin.SetSecondGradientColour(colour)
    def GetFirstGradientColour(self):
        """ Returns the first gradient colour for gradient-style selections. """
        return self._mainWin.GetFirstGradientColour()
    def GetSecondGradientColour(self):
        """ Returns the second gradient colour for gradient-style selections. """
        return self._mainWin.GetSecondGradientColour()
    def EnableSelectionGradient(self, enable=True):
        """
        Globally enables/disables drawing of gradient selections.
        :param `enable`: ``True`` to enable gradient-style selections, ``False``
        to disable it.
        :note: Calling this method disables any Vista-style selection previously
        enabled.
        """
        self._mainWin.EnableSelectionGradient(enable)
    def SetGradientStyle(self, vertical=0):
        """
        Sets the gradient style for gradient-style selections.
        :param `vertical`: 0 for horizontal gradient-style selections, 1 for vertical
        gradient-style selections.
        """
        self._mainWin.SetGradientStyle(vertical)
    def GetGradientStyle(self):
        """
        Returns the gradient style for gradient-style selections.
        :return: 0 for horizontal gradient-style selections, 1 for vertical
        gradient-style selections.
        """
        return self._mainWin.GetGradientStyle()
    def EnableSelectionVista(self, enable=True):
        """
        Globally enables/disables drawing of Windows Vista selections.
        :param `enable`: ``True`` to enable Vista-style selections, ``False`` to
        disable it.
        :note: Calling this method disables any gradient-style selection previously
        enabled.
        """
        self._mainWin.EnableSelectionVista(enable)
    def SetBackgroundImage(self, image=None):
        """
        Sets the :class:`UltimateListCtrl` background image.
        :param `image`: if not ``None``, an instance of :class:`wx.Bitmap`.
        :note: At present, the background image can only be used in "tile" mode.
        .. todo:: Support background images also in stretch and centered modes.
        """
        self._mainWin.SetBackgroundImage(image)
    def GetBackgroundImage(self):
        """
        Returns the :class:`UltimateListCtrl` background image (if any).
        :note: At present, the background image can only be used in "tile" mode.
        .. todo:: Support background images also in stretch and centered modes.
        """
        return self._mainWin.GetBackgroundImage()
    def SetWaterMark(self, watermark=None):
        """
        Sets the :class:`UltimateListCtrl` watermark image to be displayed in the bottom
        right part of the window.
        :param `watermark`: if not ``None``, an instance of :class:`wx.Bitmap`.
        .. todo:: Better support for this is needed.
        """
        self._mainWin.SetWaterMark(watermark)
    def GetWaterMark(self):
        """
        Returns the :class:`UltimateListCtrl` watermark image (if any), displayed in the
        bottom right part of the window.
        .. todo:: Better support for this is needed.
        """
        return self._mainWin.GetWaterMark()
    def SetDisabledTextColour(self, colour):
        """
        Sets the items disabled colour.
        :param `colour`: an instance of :class:`wx.Colour`.
        """
        self._mainWin.SetDisabledTextColour(colour)
    def GetDisabledTextColour(self):
        """ Returns the items disabled colour. """
        return self._mainWin.GetDisabledTextColour()
    def GetHyperTextFont(self):
        """ Returns the font used to render an hypertext item. """
        return self._mainWin.GetHyperTextFont()
    def SetHyperTextFont(self, font):
        """
        Sets the font used to render hypertext items.
        :param `font`: a valid :class:`wx.Font` instance.
        """
        self._mainWin.SetHyperTextFont(font)
    def SetHyperTextNewColour(self, colour):
        """
        Sets the colour used to render a non-visited hypertext item.
        :param `colour`: a valid :class:`wx.Colour` instance.
        """
        self._mainWin.SetHyperTextNewColour(colour)
    def GetHyperTextNewColour(self):
        """ Returns the colour used to render a non-visited hypertext item. """
        return self._mainWin.GetHyperTextNewColour()
    def SetHyperTextVisitedColour(self, colour):
        """
        Sets the colour used to render a visited hypertext item.
        :param `colour`: a valid :class:`wx.Colour` instance.
        """
        self._mainWin.SetHyperTextVisitedColour(colour)
    def GetHyperTextVisitedColour(self):
        """ Returns the colour used to render a visited hypertext item. """
        return self._mainWin.GetHyperTextVisitedColour()
    def SetItemVisited(self, itemOrId, col=0, visited=True):
        """
        Sets whether an hypertext item was visited or not.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `visited`: ``True`` to mark an hypertext item as visited, ``False`` otherwise.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.SetItemVisited(item, visited)
    def GetItemVisited(self, itemOrId, col=0):
        """
        Returns whether an hypertext item was visited.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemVisited(item)
    def GetItemWindow(self, itemOrId, col=0):
        """
        Returns the window associated to the item (if any).
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemWindow(item)
    def SetItemWindow(self, itemOrId, col=0, wnd=None, expand=False):
        """
        Sets the window for the given item.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `wnd`: a non-toplevel window to be displayed next to the item;
        :param `expand`: ``True`` to expand the column where the item/subitem lives,
        so that the window will be fully visible.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.SetItemWindow(item, wnd, expand)
    def DeleteItemWindow(self, itemOrId, col=0):
        """
        Deletes the window associated to an item (if any).
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.DeleteItemWindow(item)
    def GetItemWindowEnabled(self, itemOrId, col=0):
        """
        Returns whether the window associated to the item is enabled.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemWindowEnabled(item)
    def SetItemWindowEnabled(self, itemOrId, col=0, enable=True):
        """
        Enables/disables the window associated to the item.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to;
        :param `enable`: ``True`` to enable the associated window, ``False`` to disable it.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.SetItemWindowEnabled(item, enable)
    def GetItemCustomRenderer(self, itemOrId, col=0):
        """
        Returns the custom renderer used to draw the input item (if any).
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemCustomRenderer(item)
def SetHeaderCustomRenderer(self, renderer=None):
"""
Associate a custom renderer with the header - all columns will use it.
:param `renderer`: a class able to correctly render header buttons
:note: the renderer class **must** implement the methods `DrawHeaderButton`
and `GetForegroundColor`.
"""
if not self.HasAGWFlag(ULC_REPORT):
raise Exception("Custom renderers can be used on with style = ULC_REPORT")
self._headerWin.SetCustomRenderer(renderer)
def SetFooterCustomRenderer(self, renderer=None):
"""
Associate a custom renderer with the footer - all columns will use it.
:param `renderer`: a class able to correctly render header buttons
:note: the renderer class **must** implement the methods `DrawHeaderButton`
and `GetForegroundColor`.
"""
if not self.HasAGWFlag(ULC_REPORT) or not self.HasAGWFlag(ULC_FOOTER):
raise Exception("Custom renderers can only be used on with style = ULC_REPORT | ULC_FOOTER")
self._footerWin.SetCustomRenderer(renderer)
def SetColumnCustomRenderer(self, col=0, renderer=None):
"""
Associate a custom renderer to this column's header.
:param `col`: the column index.
:param `renderer`: a class able to correctly render the input item.
:note: the renderer class **must** implement the methods `DrawHeaderButton`
and `GetForegroundColor`.
"""
if not self.HasAGWFlag(ULC_REPORT):
raise Exception("Custom renderers can be used on with style = ULC_REPORT")
return self._mainWin.SetCustomRenderer(col, renderer)
def SetItemCustomRenderer(self, itemOrId, col=0, renderer=None):
"""
Associate a custom renderer to this item.
:param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
:param `col`: the column index to which the input item belongs to;
:param `renderer`: a class able to correctly render the input item.
:note: the renderer class **must** implement the methods `DrawSubItem`,
`GetLineHeight` and `GetSubItemWidth`.
"""
if not self.HasAGWFlag(ULC_REPORT) or not self.HasAGWFlag(ULC_HAS_VARIABLE_ROW_HEIGHT):
raise Exception("Custom renderers can be used on with style = ULC_REPORT | ULC_HAS_VARIABLE_ROW_HEIGHT")
item = CreateListItem(itemOrId, col)
return self._mainWin.SetItemCustomRenderer(item, renderer)
def SetItemOverFlow(self, itemOrId, col=0, over=True):
"""
Sets the item in the overflow/non overflow state.
An item/subitem may overwrite neighboring items/subitems if its text would
not normally fit in the space allotted to it.
:param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
:param `col`: the column index to which the input item belongs to;
:param `over`: ``True`` to set the item in a overflow state, ``False`` otherwise.
"""
if not self.HasAGWFlag(ULC_REPORT) or self._mainWin.IsVirtual():
raise Exception("Overflowing items can be used only in report, non-virtual mode")
item = CreateListItem(itemOrId, col)
return self._mainWin.SetItemOverFlow(item, over)
    def GetItemOverFlow(self, itemOrId, col=0):
        """
        Returns if the item is in the overflow state.
        An item/subitem may overwrite neighboring items/subitems if its text would
        not normally fit in the space allotted to it.
        :param `itemOrId`: an instance of :class:`UltimateListItem` or the item index;
        :param `col`: the column index to which the input item belongs to.
        """
        item = CreateListItem(itemOrId, col)
        return self._mainWin.GetItemOverFlow(item)
    def IsVirtual(self):
        """ Returns ``True`` if the :class:`UltimateListCtrl` has the ``ULC_VIRTUAL`` style set. """
        return self._mainWin.IsVirtual()
    def GetScrollPos(self):
        """
        Returns the scrollbar position.
        :note: This method is forwarded to :class:`UltimateListMainWindow`.
        """
        # Gracefully degrade to 0 while the main window does not exist yet.
        if self._mainWin:
            return self._mainWin.GetScrollPos()
        return 0
    def SetScrollPos(self, orientation, pos, refresh=True):
        """
        Sets the scrollbar position.
        :param `orientation`: determines the scrollbar whose position is to be set.
        May be ``wx.HORIZONTAL`` or ``wx.VERTICAL``;
        :param `pos`: the scrollbar position in scroll units;
        :param `refresh`: ``True`` to redraw the scrollbar, ``False`` otherwise.
        :note: This method is forwarded to :class:`UltimateListMainWindow`.
        """
        if self._mainWin:
            self._mainWin.SetScrollPos(orientation, pos, refresh)
    def GetScrollThumb(self):
        """
        Returns the scrollbar size in pixels.
        :note: This method is forwarded to :class:`UltimateListMainWindow`.
        """
        if self._mainWin:
            return self._mainWin.GetScrollThumb()
        return 0
    def GetScrollRange(self):
        """
        Returns the scrollbar range in pixels.
        :note: This method is forwarded to :class:`UltimateListMainWindow`.
        """
        if self._mainWin:
            return self._mainWin.GetScrollRange()
        return 0
    def SetHeaderHeight(self, height):
        """
        Sets the :class:`UltimateListHeaderWindow` height, in pixels. This overrides the default
        header window size derived from :class:`RendererNative`. If `height` is ``None``, the
        default behaviour is restored.
        :param `height`: the header window height, in pixels (if it is ``None``, the default
        height obtained using :class:`RendererNative` is used).
        """
        if not self._headerWin:
            return
        # None restores the renderer-derived default; anything else must be >= 1.
        if height is not None and height < 1:
            raise Exception("Invalid height passed to SetHeaderHeight: %s"%repr(height))
        self._headerWin._headerHeight = height
        self.DoLayout()
    def GetHeaderHeight(self):
        """ Returns the :class:`UltimateListHeaderWindow` height, in pixels. """
        if not self._headerWin:
            return -1
        return self._headerWin.GetWindowHeight()
    def SetFooterHeight(self, height):
        """
        Sets the :class:`UltimateListHeaderWindow` height, in pixels. This overrides the default
        footer window size derived from :class:`RendererNative`. If `height` is ``None``, the
        default behaviour is restored.
        :param `height`: the footer window height, in pixels (if it is ``None``, the default
        height obtained using :class:`RendererNative` is used).
        """
        if not self._footerWin:
            return
        if height is not None and height < 1:
            raise Exception("Invalid height passed to SetFooterHeight: %s"%repr(height))
        self._footerWin._footerHeight = height
        self.DoLayout()
def GetFooterHeight(self):
""" Returns the :class:`UltimateListHeaderWindow` height, in pixels. """
if not self._footerWin:
return -1
return self._headerWin.GetWindowHeight()
    def DoLayout(self):
        """
        Layouts the header, main and footer windows. This is an auxiliary method to avoid code
        duplication.
        """
        self.Layout()
        # Recompute column widths, visible-line cache, item positions and
        # scrollbars before repainting the decorations.
        self._mainWin.ResizeColumns()
        self._mainWin.ResetVisibleLinesRange(True)
        self._mainWin.RecalculatePositions()
        self._mainWin.AdjustScrollbars()
        if self._headerWin:
            self._headerWin.Refresh()
        if self._footerWin:
            self._footerWin.Refresh()
| gpl-3.0 | -2,917,967,209,524,716,500 | 32.50866 | 459 | 0.561998 | false |
import warnings
import unittest
import sys
import os
import atexit
import numpy as np
from scipy import sparse
from sklearn.utils.deprecation import deprecated
from sklearn.utils.metaestimators import if_delegate_has_method
from sklearn.utils.testing import (
assert_raises,
assert_less,
assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message,
ignore_warnings,
check_docstring_parameters,
assert_allclose_dense_sparse,
assert_raises_regex,
TempMemmap,
create_memmap_backed_data,
_delete_folder)
from sklearn.utils.testing import SkipTest
from sklearn.tree import DecisionTreeClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
def test_assert_less():
    # Sanity-check the comparison helper and its failure mode.
    assert_less(0, 1)
    assert_raises(AssertionError, assert_less, 1, 0)
def test_assert_greater():
    assert_greater(1, 0)
    assert_raises(AssertionError, assert_greater, 0, 1)
def test_assert_less_equal():
    # The boundary case (equal values) must pass.
    assert_less_equal(0, 1)
    assert_less_equal(1, 1)
    assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
    assert_greater_equal(1, 0)
    assert_greater_equal(1, 1)
    assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
    lda = LinearDiscriminantAnalysis()
    tree = DecisionTreeClassifier()
    # Linear Discriminant Analysis doesn't have random state: smoke test
    set_random_state(lda, 3)
    set_random_state(tree, 3)
    assert_equal(tree.random_state, 3)
def test_assert_allclose_dense_sparse():
    x = np.arange(9).reshape(3, 3)
    msg = "Not equal to tolerance "
    y = sparse.csc_matrix(x)
    for X in [x, y]:
        # basic compare
        assert_raise_message(AssertionError, msg, assert_allclose_dense_sparse,
                             X, X * 2)
        assert_allclose_dense_sparse(X, X)
    # Mixing dense and sparse inputs must be rejected outright.
    assert_raise_message(ValueError, "Can only compare two sparse",
                         assert_allclose_dense_sparse, x, y)
    A = sparse.diags(np.ones(5), offsets=0).tocsr()
    B = sparse.csr_matrix(np.ones((1, 5)))
    assert_raise_message(AssertionError, "Arrays are not equal",
                         assert_allclose_dense_sparse, B, A)
def test_assert_raises_msg():
    # assert_raises(msg=...) should surface the message on failure.
    with assert_raises_regex(AssertionError, 'Hello world'):
        with assert_raises(ValueError, msg='Hello world'):
            pass
def test_assert_raise_message():
    def _raise_ValueError(message):
        raise ValueError(message)
    def _no_raise():
        pass
    assert_raise_message(ValueError, "test",
                         _raise_ValueError, "test")
    # Wrong message, wrong exception type, and no exception at all must fail.
    assert_raises(AssertionError,
                  assert_raise_message, ValueError, "something else",
                  _raise_ValueError, "test")
    assert_raises(ValueError,
                  assert_raise_message, TypeError, "something else",
                  _raise_ValueError, "test")
    assert_raises(AssertionError,
                  assert_raise_message, ValueError, "test",
                  _no_raise)
    # multiple exceptions in a tuple
    assert_raises(AssertionError,
                  assert_raise_message, (ValueError, AttributeError),
                  "test", _no_raise)
def test_ignore_warning():
    # This check that ignore_warning decorateur and context manager are working
    # as expected
    def _warning_function():
        warnings.warn("deprecation warning", DeprecationWarning)
    def _multiple_warning_function():
        warnings.warn("deprecation warning", DeprecationWarning)
        warnings.warn("deprecation warning")
    # Check the function directly
    assert_no_warnings(ignore_warnings(_warning_function))
    assert_no_warnings(ignore_warnings(_warning_function,
                                       category=DeprecationWarning))
    # Filtering only one category must let the other one escape.
    assert_warns(DeprecationWarning, ignore_warnings(_warning_function,
                                                     category=UserWarning))
    assert_warns(UserWarning,
                 ignore_warnings(_multiple_warning_function,
                                 category=DeprecationWarning))
    assert_warns(DeprecationWarning,
                 ignore_warnings(_multiple_warning_function,
                                 category=UserWarning))
    assert_no_warnings(ignore_warnings(_warning_function,
                                       category=(DeprecationWarning,
                                                 UserWarning)))
    # Check the decorator
    @ignore_warnings
    def decorator_no_warning():
        _warning_function()
        _multiple_warning_function()
    @ignore_warnings(category=(DeprecationWarning, UserWarning))
    def decorator_no_warning_multiple():
        _multiple_warning_function()
    @ignore_warnings(category=DeprecationWarning)
    def decorator_no_deprecation_warning():
        _warning_function()
    @ignore_warnings(category=UserWarning)
    def decorator_no_user_warning():
        _warning_function()
    @ignore_warnings(category=DeprecationWarning)
    def decorator_no_deprecation_multiple_warning():
        _multiple_warning_function()
    @ignore_warnings(category=UserWarning)
    def decorator_no_user_multiple_warning():
        _multiple_warning_function()
    assert_no_warnings(decorator_no_warning)
    assert_no_warnings(decorator_no_warning_multiple)
    assert_no_warnings(decorator_no_deprecation_warning)
    assert_warns(DeprecationWarning, decorator_no_user_warning)
    assert_warns(UserWarning, decorator_no_deprecation_multiple_warning)
    assert_warns(DeprecationWarning, decorator_no_user_multiple_warning)
    # Check the context manager
    def context_manager_no_warning():
        with ignore_warnings():
            _warning_function()
    def context_manager_no_warning_multiple():
        with ignore_warnings(category=(DeprecationWarning, UserWarning)):
            _multiple_warning_function()
    def context_manager_no_deprecation_warning():
        with ignore_warnings(category=DeprecationWarning):
            _warning_function()
    def context_manager_no_user_warning():
        with ignore_warnings(category=UserWarning):
            _warning_function()
    def context_manager_no_deprecation_multiple_warning():
        with ignore_warnings(category=DeprecationWarning):
            _multiple_warning_function()
    def context_manager_no_user_multiple_warning():
        with ignore_warnings(category=UserWarning):
            _multiple_warning_function()
    # The context-manager form must behave exactly like the decorator form.
    assert_no_warnings(context_manager_no_warning)
    assert_no_warnings(context_manager_no_warning_multiple)
    assert_no_warnings(context_manager_no_deprecation_warning)
    assert_warns(DeprecationWarning, context_manager_no_user_warning)
    assert_warns(UserWarning, context_manager_no_deprecation_multiple_warning)
    assert_warns(DeprecationWarning, context_manager_no_user_multiple_warning)
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
# `clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
    """Check that assert_warns resets the global warning filters."""
    def test_warn(self):
        def f():
            warnings.warn("yo")
            return 3
        # Test that assert_warns is not impacted by externally set
        # filters and is reset internally.
        # This is because `clean_warning_registry()` is called internally by
        # assert_warns and clears all previous filters.
        warnings.simplefilter("ignore", UserWarning)
        assert_equal(assert_warns(UserWarning, f), 3)
        # Test that the warning registry is empty after assert_warns
        assert_equal(sys.modules['warnings'].filters, [])
        assert_raises(AssertionError, assert_no_warnings, f)
        assert_equal(assert_no_warnings(lambda x: x, 1), 1)
    def test_warn_wrong_warning(self):
        def f():
            warnings.warn("yo", DeprecationWarning)
        failed = False
        # Save the filters so they can be restored even if the check fails.
        filters = sys.modules['warnings'].filters[:]
        try:
            try:
                # Should raise an AssertionError
                assert_warns(UserWarning, f)
                failed = True
            except AssertionError:
                pass
        finally:
            sys.modules['warnings'].filters = filters
        if failed:
            raise AssertionError("wrong warning caught by assert_warn")
# Tests for docstrings:
def f_ok(a, b):
"""Function f
Parameters
----------
a : int
Parameter a
b : float
Parameter b
Returns
-------
c : list
Parameter c
"""
c = a + b
return c
def f_bad_sections(a, b):
"""Function f
Parameters
----------
a : int
Parameter a
b : float
Parameter b
Results
-------
c : list
Parameter c
"""
c = a + b
return c
def f_bad_order(b, a):
"""Function f
Parameters
----------
a : int
Parameter a
b : float
Parameter b
Returns
-------
c : list
Parameter c
"""
c = a + b
return c
def f_missing(a, b):
    """Function f
    Parameters
    ----------
    a : int
        Parameter a
    Returns
    -------
    c : list
        Parameter c
    """
    # Deliberate defect: parameter 'b' is undocumented; the tests expect
    # the message "arg mismatch: ['b']".
    c = a + b
    return c
def f_check_param_definition(a, b, c, d, e):
    """Function f
    Parameters
    ----------
    a: int
        Parameter a
    b:
        Parameter b
    c :
        Parameter c
    d:int
        Parameter d
    e
        No typespec is allowed without colon
    """
    # Each parameter above is deliberately malformed in a different way:
    # 'a: int' and 'b:' lack a space before the colon, 'c :' has an empty
    # type spec, 'd:int' has no spaces at all, and 'e' has no colon.
    # test_check_docstring_parameters asserts the exact error strings.
    return a + b + c + d
class Klass(object):
    # Fixture class for method-docstring checks. Do not add a docstring
    # to f_missing: its absence is part of what the tests verify.
    def f_missing(self, X, y):
        # Deliberately undocumented method: exercises the "arg mismatch"
        # path of check_docstring_parameters.
        pass
    def f_bad_sections(self, X, y):
        """Function f
        Parameter
        ----------
        a : int
            Parameter a
        b : float
            Parameter b
        Results
        -------
        c : list
            Parameter c
        """
        # Deliberate defect: "Parameter" (singular) is an unknown section;
        # the tests assert 'Unknown section Parameter'.
        pass
class MockEst(object):
    # Minimal estimator double used as the delegate of MockMetaEstimator.
    # Its methods are intentionally undocumented: only the delegating
    # wrappers' docstrings are introspected by the tests.
    def __init__(self):
        """MockEstimator"""
    def fit(self, X, y):
        return X
    def predict(self, X):
        return X
    def predict_proba(self, X):
        return X
    def score(self, X):
        return 1.
class MockMetaEstimator(object):
    # The docstrings in this class are introspected by
    # test_check_docstring_parameters. Their defects (wrong parameter
    # names, too-short section underlines) are deliberate — do not "fix"
    # them or the assertions on the produced error messages will break.
    def __init__(self, delegate):
        """MetaEstimator to check if doctest on delegated methods work.
        Parameters
        ---------
        delegate : estimator
            Delegated estimator.
        """
        self.delegate = delegate
    @if_delegate_has_method(delegate=('delegate'))
    def predict(self, X):
        """This is available only if delegate has predict.
        Parameters
        ----------
        y : ndarray
            Parameter y
        """
        # Deliberate defect: documents 'y' while the argument is 'X'.
        return self.delegate.predict(X)
    @if_delegate_has_method(delegate=('delegate'))
    @deprecated("Testing a deprecated delegated method")
    def score(self, X):
        """This is available only if delegate has score.
        Parameters
        ---------
        y : ndarray
            Parameter y
        """
    @if_delegate_has_method(delegate=('delegate'))
    def predict_proba(self, X):
        """This is available only if delegate has predict_proba.
        Parameters
        ---------
        X : ndarray
            Parameter X
        """
        return X
    @deprecated('Testing deprecated function with wrong params')
    def fit(self, X, y):
        """Incorrect docstring but should not be tested"""
def test_check_docstring_parameters():
    """Exercise check_docstring_parameters against the fixtures above."""
    # numpydoc (and a modern Python) is required for docstring parsing;
    # skip rather than fail when it is unavailable.
    try:
        import numpydoc  # noqa
        assert sys.version_info >= (3, 5)
    except (ImportError, AssertionError):
        raise SkipTest(
            "numpydoc is required to test the docstrings")
    # Well-formed docstrings produce no complaints.
    incorrect = check_docstring_parameters(f_ok)
    assert incorrect == []
    incorrect = check_docstring_parameters(f_ok, ignore=['b'])
    assert incorrect == []
    # A missing parameter is tolerated when explicitly ignored.
    incorrect = check_docstring_parameters(f_missing, ignore=['b'])
    assert incorrect == []
    # Unknown section names raise rather than being reported.
    assert_raise_message(RuntimeError, 'Unknown section Results',
                         check_docstring_parameters, f_bad_sections)
    assert_raise_message(RuntimeError, 'Unknown section Parameter',
                         check_docstring_parameters, Klass.f_bad_sections)
    # Each malformed parameter definition yields a specific message.
    incorrect = check_docstring_parameters(f_check_param_definition)
    assert (
        incorrect == [
            "sklearn.utils.tests.test_testing.f_check_param_definition There "
            "was no space between the param name and colon ('a: int')",
            "sklearn.utils.tests.test_testing.f_check_param_definition There "
            "was no space between the param name and colon ('b:')",
            "sklearn.utils.tests.test_testing.f_check_param_definition "
            "Parameter 'c :' has an empty type spec. Remove the colon",
            "sklearn.utils.tests.test_testing.f_check_param_definition There "
            "was no space between the param name and colon ('d:int')",
        ])
    # One expected error-message fragment per defective fixture, in order.
    messages = ["a != b", "arg mismatch: ['b']", "arg mismatch: ['X', 'y']",
                "predict y != X",
                "predict_proba arg mismatch: ['X']",
                "score arg mismatch: ['X']",
                ".fit arg mismatch: ['X', 'y']"]
    mock_meta = MockMetaEstimator(delegate=MockEst())
    for mess, f in zip(messages,
                       [f_bad_order, f_missing, Klass.f_missing,
                        mock_meta.predict, mock_meta.predict_proba,
                        mock_meta.score, mock_meta.fit]):
        incorrect = check_docstring_parameters(f)
        assert len(incorrect) >= 1
        assert mess in incorrect[0], '"%s" not in "%s"' % (mess, incorrect[0])
class RegistrationCounter(object):
    """Stand-in for ``atexit.register`` that counts invocations.

    Each call also asserts that the registered callable is a partial
    wrapping ``_delete_folder`` (the temp-folder cleanup hook).
    """
    def __init__(self):
        # Number of times this fake atexit.register has been called.
        self.nb_calls = 0
    def __call__(self, to_register_func):
        self.nb_calls += 1
        assert to_register_func.func is _delete_folder
def check_memmap(input_array, mmap_data, mmap_mode='r'):
    """Assert that *mmap_data* is a ``np.memmap`` mirroring *input_array*.

    The mapping must be read-only exactly when ``mmap_mode`` is ``'r'``,
    and its contents must equal *input_array* element-wise.
    """
    assert isinstance(mmap_data, np.memmap)
    # Any mode other than plain 'r' implies a writeable mapping.
    assert mmap_data.flags.writeable is (mmap_mode != 'r')
    np.testing.assert_array_equal(input_array, mmap_data)
def test_tempmemmap(monkeypatch):
    """TempMemmap yields a memmap copy and registers folder cleanup."""
    # Intercept atexit.register so we can count cleanup registrations.
    registration_counter = RegistrationCounter()
    monkeypatch.setattr(atexit, 'register', registration_counter)
    input_array = np.ones(3)
    with TempMemmap(input_array) as data:
        check_memmap(input_array, data)
        temp_folder = os.path.dirname(data.filename)
    # On POSIX the folder is removed on context exit; on Windows removal
    # is deferred to atexit because mapped files cannot be deleted.
    if os.name != 'nt':
        assert not os.path.exists(temp_folder)
    assert registration_counter.nb_calls == 1
    # Non-default mmap_mode must be forwarded to the memmap.
    mmap_mode = 'r+'
    with TempMemmap(input_array, mmap_mode=mmap_mode) as data:
        check_memmap(input_array, data, mmap_mode=mmap_mode)
        temp_folder = os.path.dirname(data.filename)
    if os.name != 'nt':
        assert not os.path.exists(temp_folder)
    assert registration_counter.nb_calls == 2
def test_create_memmap_backed_data(monkeypatch):
    """create_memmap_backed_data round-trips arrays, folders and modes."""
    # Intercept atexit.register so we can count cleanup registrations.
    registration_counter = RegistrationCounter()
    monkeypatch.setattr(atexit, 'register', registration_counter)
    input_array = np.ones(3)
    data = create_memmap_backed_data(input_array)
    check_memmap(input_array, data)
    assert registration_counter.nb_calls == 1
    # return_folder=True also yields the backing temp folder path.
    data, folder = create_memmap_backed_data(input_array,
                                             return_folder=True)
    check_memmap(input_array, data)
    assert folder == os.path.dirname(data.filename)
    assert registration_counter.nb_calls == 2
    # Non-default mmap_mode must be forwarded to the memmap.
    mmap_mode = 'r+'
    data = create_memmap_backed_data(input_array, mmap_mode=mmap_mode)
    check_memmap(input_array, data, mmap_mode)
    assert registration_counter.nb_calls == 3
    # A list of arrays yields a list of memmaps, one per input.
    input_list = [input_array, input_array + 1, input_array + 2]
    mmap_data_list = create_memmap_backed_data(input_list)
    for input_array, data in zip(input_list, mmap_data_list):
        check_memmap(input_array, data)
    assert registration_counter.nb_calls == 4
# ---------------------------------------------------------------------------
# (dataset row boundary; extraction metadata removed — previous file:
#  scikit-learn test_testing.py, license bsd-3-clause; next file:
#  rackerlabs/heat-pyrax, pyrax/autoscale.py)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright (c)2013 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import pyrax
from pyrax.client import BaseClient
from pyrax.cloudloadbalancers import CloudLoadBalancer
import pyrax.exceptions as exc
from pyrax.manager import BaseManager
from pyrax.resource import BaseResource
import pyrax.utils as utils
class ScalingGroup(BaseResource):
    """
    Represents an Auto Scale scaling group.

    Most methods are thin conveniences that delegate to this group's
    manager, passing ``self`` as the scaling group. The group's basic
    settings live in the ``groupConfiguration`` dict returned by the API;
    the properties at the bottom of the class expose those entries as
    plain attributes (name, cooldown, metadata, min/max_entities).
    """
    def __init__(self, *args, **kwargs):
        super(ScalingGroup, self).__init__(*args, **kwargs)
        # Raw API attributes hidden from display, and the derived
        # properties used when building the repr().
        self._non_display = ["active", "launchConfiguration", "links",
                "groupConfiguration", "policies", "scalingPolicies"]
        self._repr_properties = ["name", "cooldown", "metadata",
                "min_entities", "max_entities"]
        self._make_policies()
    def _make_policies(self):
        """
        Convert the 'scalingPolicies' dictionary into AutoScalePolicy objects.
        """
        self.policies = [AutoScalePolicy(self.manager, dct, self)
                for dct in self.scalingPolicies]
    def get_state(self):
        """
        Returns the current state of this scaling group.
        """
        return self.manager.get_state(self)
    def pause(self):
        """
        Pauses all execution of the policies for this scaling group.
        """
        return self.manager.pause(self)
    def resume(self):
        """
        Resumes execution of the policies for this scaling group.
        """
        return self.manager.resume(self)
    def update(self, name=None, cooldown=None, min_entities=None,
            max_entities=None, metadata=None):
        """
        Updates this ScalingGroup. One or more of the attributes can be
        specified.
        NOTE: if you specify metadata, it will *replace* any existing metadata.
        If you want to add to it, you either need to pass the complete dict of
        metadata, or call the update_metadata() method.
        """
        return self.manager.update(self, name=name,
                cooldown=cooldown, min_entities=min_entities,
                max_entities=max_entities, metadata=metadata)
    def update_metadata(self, metadata):
        """
        Adds the given metadata dict to the existing metadata for this scaling
        group.
        """
        return self.manager.update_metadata(self, metadata=metadata)
    def get_configuration(self):
        """
        Returns the scaling group configuration in a dictionary.
        """
        return self.manager.get_configuration(self)
    def get_launch_config(self):
        """
        Returns the launch configuration for this scaling group.
        """
        return self.manager.get_launch_config(self)
    def update_launch_config(self, scaling_group, launch_config_type,
            **kwargs):
        """
        Updates the server launch configuration for this scaling group.
        One or more of the available attributes can be specified.
        NOTE: if you specify metadata, it will *replace* any existing metadata.
        If you want to add to it, you either need to pass the complete dict of
        metadata, or call the update_launch_metadata() method.
        """
        # NOTE(review): unlike the sibling methods, this one takes an
        # explicit 'scaling_group' argument instead of passing 'self' to
        # the manager; callers must supply the group (normally this very
        # object) themselves. Kept as-is for backward compatibility.
        return self.manager.update_launch_config(scaling_group,
                launch_config_type, **kwargs)
    def update_launch_metadata(self, metadata):
        """
        Adds the given metadata dict to the existing metadata for this scaling
        group's launch configuration.
        """
        return self.manager.update_launch_metadata(self, metadata)
    def add_policy(self, name, policy_type, cooldown, change=None,
            is_percent=False, desired_capacity=None, args=None):
        """
        Adds a policy with the given values to this scaling group. The
        'change' parameter is treated as an absolute amount, unless
        'is_percent' is True, in which case it is treated as a percentage.
        """
        return self.manager.add_policy(self, name, policy_type, cooldown,
                change=change, is_percent=is_percent,
                desired_capacity=desired_capacity, args=args)
    def list_policies(self):
        """
        Returns a list of all policies defined for this scaling group.
        """
        return self.manager.list_policies(self)
    def get_policy(self, policy):
        """
        Gets the detail for the specified policy.
        """
        return self.manager.get_policy(self, policy)
    def update_policy(self, policy, name=None, policy_type=None, cooldown=None,
            change=None, is_percent=False, desired_capacity=None, args=None):
        """
        Updates the specified policy. One or more of the parameters may be
        specified.
        """
        return self.manager.update_policy(scaling_group=self, policy=policy,
                name=name, policy_type=policy_type, cooldown=cooldown,
                change=change, is_percent=is_percent,
                desired_capacity=desired_capacity, args=args)
    def execute_policy(self, policy):
        """
        Executes the specified policy for this scaling group.
        """
        return self.manager.execute_policy(scaling_group=self, policy=policy)
    def delete_policy(self, policy):
        """
        Deletes the specified policy from this scaling group.
        """
        return self.manager.delete_policy(scaling_group=self, policy=policy)
    def add_webhook(self, policy, name, metadata=None):
        """
        Adds a webhook to the specified policy.
        """
        return self.manager.add_webhook(self, policy, name, metadata=metadata)
    def list_webhooks(self, policy):
        """
        Returns a list of all webhooks for the specified policy.
        """
        return self.manager.list_webhooks(self, policy)
    def update_webhook(self, policy, webhook, name=None, metadata=None):
        """
        Updates the specified webhook. One or more of the parameters may be
        specified.
        """
        return self.manager.update_webhook(scaling_group=self, policy=policy,
                webhook=webhook, name=name, metadata=metadata)
    def update_webhook_metadata(self, policy, webhook, metadata):
        """
        Adds the given metadata dict to the existing metadata for the specified
        webhook.
        """
        return self.manager.update_webhook_metadata(self, policy, webhook,
                metadata)
    def delete_webhook(self, policy, webhook):
        """
        Deletes the specified webhook from the specified policy.
        """
        return self.manager.delete_webhook(self, policy, webhook)
    @property
    def policy_count(self):
        # Number of AutoScalePolicy objects attached to this group.
        return len(self.policies)
    ##################################################################
    # The following property declarations allow access to the base attributes
    # of the ScalingGroup held in the 'groupConfiguration' dict as if they
    # were native attributes.
    ##################################################################
    @property
    def name(self):
        return self.groupConfiguration.get("name")
    @name.setter
    def name(self, val):
        self.groupConfiguration["name"] = val
    @property
    def cooldown(self):
        return self.groupConfiguration.get("cooldown")
    @cooldown.setter
    def cooldown(self, val):
        self.groupConfiguration["cooldown"] = val
    @property
    def metadata(self):
        return self.groupConfiguration.get("metadata")
    @metadata.setter
    def metadata(self, val):
        self.groupConfiguration["metadata"] = val
    @property
    def min_entities(self):
        return self.groupConfiguration.get("minEntities")
    @min_entities.setter
    def min_entities(self, val):
        self.groupConfiguration["minEntities"] = val
    @property
    def max_entities(self):
        return self.groupConfiguration.get("maxEntities")
    @max_entities.setter
    def max_entities(self, val):
        self.groupConfiguration["maxEntities"] = val
##################################################################
class ScalingGroupManager(BaseManager):
def __init__(self, api, resource_class=None, response_key=None,
plural_response_key=None, uri_base=None):
super(ScalingGroupManager, self).__init__(api,
resource_class=resource_class, response_key=response_key,
plural_response_key=plural_response_key, uri_base=uri_base)
    def get_state(self, scaling_group):
        """
        Returns the current state of the specified scaling group as a
        dictionary.

        Keys: "active" (list of active server IDs), "active_capacity",
        "desired_capacity", "pending_capacity", and "paused".
        """
        uri = "/%s/%s/state" % (self.uri_base, utils.get_id(scaling_group))
        resp, resp_body = self.api.method_get(uri)
        data = resp_body["group"]
        ret = {}
        # Flatten the API's camelCase response into snake_case keys, and
        # reduce the "active" server records to just their IDs.
        ret["active"] = [itm["id"] for itm in data["active"]]
        ret["active_capacity"] = data["activeCapacity"]
        ret["desired_capacity"] = data["desiredCapacity"]
        ret["pending_capacity"] = data["pendingCapacity"]
        ret["paused"] = data["paused"]
        return ret
def pause(self, scaling_group):
"""
Pauses all execution of the policies for the specified scaling group.
"""
uri = "/%s/%s/pause" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_post(uri)
return None
def resume(self, scaling_group):
"""
Resumes execution of the policies for the specified scaling group.
"""
uri = "/%s/%s/resume" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_post(uri)
return None
def get_configuration(self, scaling_group):
"""
Returns the scaling group's configuration in a dictionary.
"""
uri = "/%s/%s/config" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_get(uri)
return resp_body.get("groupConfiguration")
def replace(self, scaling_group, name, cooldown, min_entities,
max_entities, metadata=None):
"""
Replace an existing ScalingGroup configuration. All of the attributes
must be specified If you wish to delete any of the optional attributes,
pass them in as None.
"""
body = self._create_group_config_body(name, cooldown, min_entities,
max_entities, metadata=metadata)
group_id = utils.get_id(scaling_group)
uri = "/%s/%s/config" % (self.uri_base, group_id)
resp, resp_body = self.api.method_put(uri, body=body)
def update(self, scaling_group, name=None, cooldown=None,
min_entities=None, max_entities=None, metadata=None):
"""
Updates an existing ScalingGroup. One or more of the attributes can
be specified.
NOTE: if you specify metadata, it will *replace* any existing metadata.
If you want to add to it, you either need to pass the complete dict of
metadata, or call the update_metadata() method.
"""
if not isinstance(scaling_group, ScalingGroup):
scaling_group = self.get(scaling_group)
uri = "/%s/%s/config" % (self.uri_base, scaling_group.id)
if cooldown is None:
cooldown = scaling_group.cooldown
if min_entities is None:
min_entities = scaling_group.min_entities
if max_entities is None:
max_entities = scaling_group.max_entities
body = {"name": name or scaling_group.name,
"cooldown": cooldown,
"minEntities": min_entities,
"maxEntities": max_entities,
"metadata": metadata or scaling_group.metadata,
}
resp, resp_body = self.api.method_put(uri, body=body)
return None
def update_metadata(self, scaling_group, metadata):
"""
Adds the given metadata dict to the existing metadata for the scaling
group.
"""
if not isinstance(scaling_group, ScalingGroup):
scaling_group = self.get(scaling_group)
curr_meta = scaling_group.metadata
curr_meta.update(metadata)
return self.update(scaling_group, metadata=curr_meta)
    def get_launch_config(self, scaling_group):
        """
        Returns the launch configuration for the specified scaling group.

        For 'launch_server' configs the raw API keys are normalized to the
        snake_case names this library accepts (e.g. "flavorRef" ->
        "flavor"); 'launch_stack' configs are returned as-is.
        """
        uri = "/%s/%s/launch" % (self.uri_base, utils.get_id(scaling_group))
        resp, resp_body = self.api.method_get(uri)
        ret = {}
        data = resp_body.get("launchConfiguration")
        ret["type"] = data.get("type")
        args = data.get("args", {})
        if ret['type'] == 'launch_server':
            # Translate API key names back to the parameter names used by
            # create()/update_launch_config(); unknown keys pass through.
            key_map = {
                "OS-DCF:diskConfig": "disk_config",
                "flavorRef": "flavor",
                "imageRef": "image",
            }
            ret['args'] = {}
            ret['args']["load_balancers"] = args.get("loadBalancers")
            for key, value in args.get("server", {}).items():
                norm_key = key_map.get(key, key)
                ret['args'][norm_key] = value
        elif ret['type'] == 'launch_stack':
            ret['args'] = args.get("stack", {})
        return ret
def replace_launch_config(self, scaling_group, launch_config_type,
**kwargs):
"""
Replace an existing launch configuration. All of the attributes must be
specified. If you wish to delete any of the optional attributes, pass
them in as None.
"""
group_id = utils.get_id(scaling_group)
uri = "/%s/%s/launch" % (self.uri_base, group_id)
body = self._create_launch_config_body(launch_config_type,
**kwargs)
resp, resp_body = self.api.method_put(uri, body=body)
def _update_server_launch_config_body(
self, scaling_group, server_name=None, image=None, flavor=None,
disk_config=None, metadata=None, personality=None, networks=None,
load_balancers=None, key_name=None, config_drive=False,
user_data=None):
if not isinstance(scaling_group, ScalingGroup):
scaling_group = self.get(scaling_group)
largs = scaling_group.launchConfiguration.get("args", {})
srv_args = largs.get("server", {})
lb_args = largs.get("loadBalancers", {})
flav = flavor or srv_args.get("flavorRef")
dconf = disk_config or srv_args.get("OS-DCF:diskConfig", "AUTO")
if personality is None:
personality = srv_args.get("personality", [])
cfg_drv = config_drive or srv_args.get("config_drive")
if user_data:
user_data = base64.b64encode(user_data)
usr_data = user_data or srv_args.get("user_data")
update_metadata = metadata or srv_args.get("metadata")
body = {"type": "launch_server",
"args": {
"server": {
"name": server_name or srv_args.get("name"),
"imageRef": image or srv_args.get("imageRef"),
"flavorRef": flav,
"OS-DCF:diskConfig": dconf,
"networks": networks or srv_args.get("networks"),
},
"loadBalancers": load_balancers or lb_args,
},
}
bas = body["args"]["server"]
if cfg_drv:
bas["config_drive"] = cfg_drv
if usr_data:
bas["user_data"] = usr_data
if personality:
bas["personality"] = self._encode_personality(personality)
if update_metadata:
bas["metadata"] = update_metadata
key_name = key_name or srv_args.get("key_name")
if key_name:
bas["key_name"] = key_name
return body
def _update_stack_launch_config_body(self, **kwargs):
return self._create_stack_launch_config_body(**kwargs)
def update_launch_config(self, scaling_group, launch_config_type,
**kwargs):
"""
Updates the server launch configuration for an existing scaling group.
One or more of the available attributes can be specified.
NOTE: if you specify metadata, it will *replace* any existing metadata.
If you want to add to it, you either need to pass the complete dict of
metadata, or call the update_launch_metadata() method.
"""
uri = "/%s/%s/launch" % (self.uri_base, scaling_group.id)
if launch_config_type == 'launch_server':
body = self._update_server_launch_config_body(
scaling_group=scaling_group, **kwargs)
elif launch_config_type == 'launch_stack':
body = self._update_stack_launch_config_body(**kwargs)
resp, resp_body = self.api.method_put(uri, body=body)
def update_launch_metadata(self, scaling_group, metadata):
"""
Adds the given metadata dict to the existing metadata for the scaling
group's launch configuration.
"""
if not isinstance(scaling_group, ScalingGroup):
scaling_group = self.get(scaling_group)
curr_meta = scaling_group.launchConfiguration.get("args", {}).get(
"server", {}).get("metadata", {})
curr_meta.update(metadata)
return self.update_launch_config(scaling_group, metadata=curr_meta)
def add_policy(self, scaling_group, name, policy_type, cooldown,
change=None, is_percent=False, desired_capacity=None, args=None):
"""
Adds a policy with the given values to the specified scaling group. The
'change' parameter is treated as an absolute amount, unless
'is_percent' is True, in which case it is treated as a percentage.
"""
uri = "/%s/%s/policies" % (self.uri_base, utils.get_id(scaling_group))
body = self._create_policy_body(name, policy_type, cooldown,
change=change, is_percent=is_percent,
desired_capacity=desired_capacity, args=args)
# "body" needs to be a list
body = [body]
resp, resp_body = self.api.method_post(uri, body=body)
pol_info = resp_body.get("policies")[0]
return AutoScalePolicy(self, pol_info, scaling_group)
def _create_policy_body(self, name, policy_type, cooldown, change=None,
is_percent=None, desired_capacity=None, args=None):
body = {"name": name, "cooldown": cooldown, "type": policy_type}
if change is not None:
if is_percent:
body["changePercent"] = change
else:
body["change"] = change
if desired_capacity is not None:
body["desiredCapacity"] = desired_capacity
if args is not None:
body["args"] = args
return body
def list_policies(self, scaling_group):
"""
Returns a list of all policies defined for the specified scaling group.
"""
uri = "/%s/%s/policies" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_get(uri)
return [AutoScalePolicy(self, data, scaling_group)
for data in resp_body.get("policies", [])]
def get_policy(self, scaling_group, policy):
"""
Gets the detail for the specified policy.
"""
uri = "/%s/%s/policies/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
resp, resp_body = self.api.method_get(uri)
data = resp_body.get("policy")
return AutoScalePolicy(self, data, scaling_group)
def replace_policy(self, scaling_group, policy, name,
policy_type, cooldown, change=None, is_percent=False,
desired_capacity=None, args=None):
"""
Replace an existing policy. All of the attributes must be specified. If
you wish to delete any of the optional attributes, pass them in as
None.
"""
policy_id = utils.get_id(policy)
group_id = utils.get_id(scaling_group)
uri = "/%s/%s/policies/%s" % (self.uri_base, group_id, policy_id)
body = self._create_policy_body(name=name, policy_type=policy_type,
cooldown=cooldown, change=change, is_percent=is_percent,
desired_capacity=desired_capacity, args=args)
resp, resp_body = self.api.method_put(uri, body=body)
def update_policy(self, scaling_group, policy, name=None, policy_type=None,
cooldown=None, change=None, is_percent=False,
desired_capacity=None, args=None):
"""
Updates the specified policy. One or more of the parameters may be
specified.
"""
uri = "/%s/%s/policies/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
if not isinstance(policy, AutoScalePolicy):
# Received an ID
policy = self.get_policy(scaling_group, policy)
body = {"name": name or policy.name,
"type": policy_type or policy.type,
"cooldown": cooldown or policy.cooldown,
}
if desired_capacity is not None:
body["desiredCapacity"] = desired_capacity
elif change is not None:
if is_percent:
body["changePercent"] = change
else:
body["change"] = change
else:
if getattr(policy, "changePercent", None) is not None:
body["changePercent"] = policy.changePercent
elif getattr(policy, "change", None) is not None:
body["change"] = policy.change
elif getattr(policy, "desiredCapacity", None) is not None:
body["desiredCapacity"] = policy.desiredCapacity
args = args or getattr(policy, "args", None)
if args is not None:
body["args"] = args
resp, resp_body = self.api.method_put(uri, body=body)
return None
def execute_policy(self, scaling_group, policy):
"""
Executes the specified policy for this scaling group.
"""
uri = "/%s/%s/policies/%s/execute" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
resp, resp_body = self.api.method_post(uri)
return None
def delete_policy(self, scaling_group, policy):
"""
Deletes the specified policy from the scaling group.
"""
uri = "/%s/%s/policies/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
resp, resp_body = self.api.method_delete(uri)
def _create_webhook_body(self, name, metadata=None):
if metadata is None:
# If updating a group with existing metadata, metadata MUST be
# passed. Leaving it out causes Otter to return 400.
metadata = {}
body = {"name": name, "metadata": metadata}
return body
def add_webhook(self, scaling_group, policy, name, metadata=None):
"""
Adds a webhook to the specified policy.
"""
uri = "/%s/%s/policies/%s/webhooks" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
body = self._create_webhook_body(name, metadata=metadata)
# "body" needs to be a list
body = [body]
resp, resp_body = self.api.method_post(uri, body=body)
data = resp_body.get("webhooks")[0]
return AutoScaleWebhook(self, data, policy, scaling_group)
def list_webhooks(self, scaling_group, policy):
"""
Returns a list of all webhooks for the specified policy.
"""
uri = "/%s/%s/policies/%s/webhooks" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy))
resp, resp_body = self.api.method_get(uri)
return [AutoScaleWebhook(self, data, policy, scaling_group)
for data in resp_body.get("webhooks", [])]
def get_webhook(self, scaling_group, policy, webhook):
"""
Gets the detail for the specified webhook.
"""
uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy),
utils.get_id(webhook))
resp, resp_body = self.api.method_get(uri)
data = resp_body.get("webhook")
return AutoScaleWebhook(self, data, policy, scaling_group)
def replace_webhook(self, scaling_group, policy, webhook, name,
metadata=None):
"""
Replace an existing webhook. All of the attributes must be specified.
If you wish to delete any of the optional attributes, pass them in as
None.
"""
uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy),
utils.get_id(webhook))
group_id = utils.get_id(scaling_group)
policy_id = utils.get_id(policy)
webhook_id = utils.get_id(webhook)
body = self._create_webhook_body(name, metadata=metadata)
resp, resp_body = self.api.method_put(uri, body=body)
def update_webhook(self, scaling_group, policy, webhook, name=None,
metadata=None):
"""
Updates the specified webhook. One or more of the parameters may be
specified.
"""
uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy),
utils.get_id(webhook))
if not isinstance(webhook, AutoScaleWebhook):
# Received an ID
webhook = self.get_webhook(scaling_group, policy, webhook)
body = {"name": name or webhook.name,
"metadata": metadata or webhook.metadata,
}
resp, resp_body = self.api.method_put(uri, body=body)
webhook.reload()
return webhook
def update_webhook_metadata(self, scaling_group, policy, webhook, metadata):
"""
Adds the given metadata dict to the existing metadata for the specified
webhook.
"""
if not isinstance(webhook, AutoScaleWebhook):
webhook = self.get_webhook(scaling_group, policy, webhook)
curr_meta = webhook.metadata or {}
curr_meta.update(metadata)
return self.update_webhook(scaling_group, policy, webhook,
metadata=curr_meta)
def delete_webhook(self, scaling_group, policy, webhook):
"""
Deletes the specified webhook from the specified policy.
"""
uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base,
utils.get_id(scaling_group), utils.get_id(policy),
utils.get_id(webhook))
resp, resp_body = self.api.method_delete(uri)
return None
    @staticmethod
    def _resolve_lbs(load_balancers):
        """
        Takes either a single LB reference or a list of references and returns
        the dictionary required for creating a Scaling Group.
        References can be either a dict that matches the structure required by
        the autoscale API, a CloudLoadBalancer instance, or the ID of the load
        balancer.

        Raises InvalidLoadBalancer when a reference cannot be resolved.
        """
        lb_args = []
        # Normalize a single reference into a one-element list.
        if not isinstance(load_balancers, list):
            lbs = [load_balancers]
        else:
            lbs = load_balancers
        for lb in lbs:
            if isinstance(lb, dict):
                # Already in API form; pass through unchanged.
                lb_args.append(lb)
            elif isinstance(lb, CloudLoadBalancer):
                lb_args.append({
                    "loadBalancerId": lb.id,
                    "port": lb.port,
                })
            elif isinstance(lb, tuple):
                # (id, port) pair.
                lb_args.append({"loadBalancerId": lb[0],
                        "port": lb[1]})
            else:
                # See if it's an ID for a Load Balancer
                try:
                    instance = pyrax.cloud_loadbalancers.get(lb)
                except Exception:
                    raise exc.InvalidLoadBalancer("Received an invalid "
                            "specification for a Load Balancer: '%s'" % lb)
                lb_args.append({
                    "loadBalancerId": instance.id,
                    "port": instance.port,
                })
        return lb_args
def _encode_personality(self, personality):
"""
Personality files must be base64-encoded before transmitting.
"""
if personality is None:
personality = []
else:
personality = utils.coerce_to_list(personality)
for pfile in personality:
if "contents" in pfile:
pfile["contents"] = base64.b64encode(pfile["contents"])
return personality
def _create_launch_config_body(self, launch_config_type, **kwargs):
if launch_config_type == 'launch_server':
return self._create_server_launch_config_body(**kwargs)
elif launch_config_type == 'launch_stack':
return self._create_stack_launch_config_body(**kwargs)
def _create_body(self, name, cooldown, min_entities, max_entities,
launch_config_type, group_metadata=None,
scaling_policies=None, **kwargs):
"""
Used to create the dict required to create any of the following:
A Scaling Group
"""
group_config = self._create_group_config_body(
name, cooldown, min_entities, max_entities,
metadata=group_metadata)
if scaling_policies is None:
scaling_policies = []
launch_config = self._create_launch_config_body(launch_config_type,
**kwargs)
body = {
"groupConfiguration": group_config,
"launchConfiguration": launch_config,
"scalingPolicies": scaling_policies,
}
return body
def _create_group_config_body(self, name, cooldown, min_entities,
max_entities, metadata=None):
if metadata is None:
# If updating a group with existing metadata, metadata MUST be
# passed. Leaving it out causes Otter to return 400.
metadata = {}
body = {
"name": name,
"cooldown": cooldown,
"minEntities": min_entities,
"maxEntities": max_entities,
"metadata": metadata,
}
return body
def _create_stack_launch_config_body(
self, template=None, template_url=None, disable_rollback=True,
environment=None, files=None, parameters=None, timeout_mins=None):
st_args = {
'template': template,
'template_url': template_url,
'disable_rollback': disable_rollback,
'environment': environment,
'files': files,
'parameters': parameters,
'timeout_mins': timeout_mins
}
st_args = {
k: st_args[k] for k in st_args if st_args[k] is not None}
return {"type": 'launch_stack', "args": {"stack": st_args}}
def _create_server_launch_config_body(
self, server_name=None, image=None, flavor=None, disk_config=None,
metadata=None, personality=None, networks=None,
load_balancers=None, key_name=None, config_drive=False,
user_data=None):
if metadata is None:
metadata = {}
server_args = {
"flavorRef": "%s" % flavor,
"name": server_name,
"imageRef": utils.get_id(image),
}
if metadata is not None:
server_args["metadata"] = metadata
if personality is not None:
server_args["personality"] = self._encode_personality(personality)
if networks is not None:
server_args["networks"] = networks
if disk_config is not None:
server_args["OS-DCF:diskConfig"] = disk_config
if key_name is not None:
server_args["key_name"] = key_name
if config_drive is not False:
server_args['config_drive'] = config_drive
if user_data is not None:
server_args['user_data'] = base64.b64encode(user_data)
if load_balancers is None:
load_balancers = []
load_balancer_args = self._resolve_lbs(load_balancers)
return {"type": 'launch_server',
"args": {"server": server_args,
"loadBalancers": load_balancer_args}}
class AutoScalePolicy(BaseResource):
def __init__(self, manager, info, scaling_group, *args, **kwargs):
super(AutoScalePolicy, self).__init__(manager, info, *args, **kwargs)
if not isinstance(scaling_group, ScalingGroup):
scaling_group = manager.get(scaling_group)
self.scaling_group = scaling_group
self._non_display = ["links", "scaling_group"]
def get(self):
"""
Gets the details for this policy.
"""
return self.manager.get_policy(self.scaling_group, self)
reload = get
def delete(self):
"""
Deletes this policy.
"""
return self.manager.delete_policy(self.scaling_group, self)
def update(self, name=None, policy_type=None, cooldown=None, change=None,
is_percent=False, desired_capacity=None, args=None):
"""
Updates this policy. One or more of the parameters may be
specified.
"""
return self.manager.update_policy(scaling_group=self.scaling_group,
policy=self, name=name, policy_type=policy_type,
cooldown=cooldown, change=change, is_percent=is_percent,
desired_capacity=desired_capacity, args=args)
def execute(self):
"""
Executes this policy.
"""
return self.manager.execute_policy(self.scaling_group, self)
def add_webhook(self, name, metadata=None):
"""
Adds a webhook to this policy.
"""
return self.manager.add_webhook(self.scaling_group, self, name,
metadata=metadata)
def list_webhooks(self):
"""
Returns a list of all webhooks for this policy.
"""
return self.manager.list_webhooks(self.scaling_group, self)
def get_webhook(self, webhook):
"""
Gets the detail for the specified webhook.
"""
return self.manager.get_webhook(self.scaling_group, self, webhook)
def update_webhook(self, webhook, name=None, metadata=None):
"""
Updates the specified webhook. One or more of the parameters may be
specified.
"""
return self.manager.update_webhook(self.scaling_group, policy=self,
webhook=webhook, name=name, metadata=metadata)
def update_webhook_metadata(self, webhook, metadata):
"""
Adds the given metadata dict to the existing metadata for the specified
webhook.
"""
return self.manager.update_webhook_metadata(self.scaling_group, self,
webhook, metadata)
def delete_webhook(self, webhook):
"""
Deletes the specified webhook from this policy.
"""
return self.manager.delete_webhook(self.scaling_group, self, webhook)
class AutoScaleWebhook(BaseResource):
def __init__(self, manager, info, policy, scaling_group, *args, **kwargs):
super(AutoScaleWebhook, self).__init__(manager, info, *args, **kwargs)
if not isinstance(policy, AutoScalePolicy):
policy = manager.get_policy(scaling_group, policy)
self.policy = policy
self._non_display = ["links", "policy"]
def get(self):
return self.policy.get_webhook(self)
reload = get
def update(self, name=None, metadata=None):
"""
Updates this webhook. One or more of the parameters may be specified.
"""
return self.policy.update_webhook(self, name=name, metadata=metadata)
def update_metadata(self, metadata):
"""
Adds the given metadata dict to the existing metadata for this webhook.
"""
return self.policy.update_webhook_metadata(self, metadata)
def delete(self):
"""
Deletes this webhook.
"""
return self.policy.delete_webhook(self)
class AutoScaleClient(BaseClient):
"""
This is the primary class for interacting with AutoScale.
"""
name = "Autoscale"
def _configure_manager(self):
"""
Creates a manager to handle autoscale operations.
"""
self._manager = ScalingGroupManager(self,
resource_class=ScalingGroup, response_key="group",
uri_base="groups")
def get_state(self, scaling_group):
"""
Returns the current state of the specified scaling group.
"""
return self._manager.get_state(scaling_group)
def pause(self, scaling_group):
"""
Pauses all execution of the policies for the specified scaling group.
"""
# NOTE: This is not yet implemented. The code is based on the docs,
# so it should either work or be pretty close.
return self._manager.pause(scaling_group)
def resume(self, scaling_group):
"""
Resumes execution of the policies for the specified scaling group.
"""
# NOTE: This is not yet implemented. The code is based on the docs,
# so it should either work or be pretty close.
return self._manager.resume(scaling_group)
def replace(self, scaling_group, name, cooldown, min_entities,
max_entities, metadata=None):
"""
Replace an existing ScalingGroup configuration. All of the attributes
must be specified. If you wish to delete any of the optional
attributes, pass them in as None.
"""
return self._manager.replace(scaling_group, name, cooldown,
min_entities, max_entities, metadata=metadata)
def update(self, scaling_group, name=None, cooldown=None, min_entities=None,
max_entities=None, metadata=None):
"""
Updates an existing ScalingGroup. One or more of the attributes can be
specified.
NOTE: if you specify metadata, it will *replace* any existing metadata.
If you want to add to it, you either need to pass the complete dict of
metadata, or call the update_metadata() method.
"""
return self._manager.update(scaling_group, name=name, cooldown=cooldown,
min_entities=min_entities, max_entities=max_entities,
metadata=metadata)
def update_metadata(self, scaling_group, metadata):
"""
Adds the given metadata dict to the existing metadata for the scaling
group.
"""
return self._manager.update_metadata(scaling_group, metadata)
def get_configuration(self, scaling_group):
"""
Returns the scaling group's configuration in a dictionary.
"""
return self._manager.get_configuration(scaling_group)
def get_launch_config(self, scaling_group):
"""
Returns the launch configuration for the specified scaling group.
"""
return self._manager.get_launch_config(scaling_group)
def replace_launch_config(self, scaling_group, launch_config_type,
**kwargs):
"""
Replace an existing launch configuration. All of the attributes must be
specified. If you wish to delete any of the optional attributes, pass
them in as None.
"""
return self._manager.replace_launch_config(scaling_group,
launch_config_type, **kwargs)
def update_launch_config(self, scaling_group, launch_config_type,
**kwargs):
"""
Updates the server launch configuration for an existing scaling group.
One or more of the available attributes can be specified.
NOTE: if you specify metadata, it will *replace* any existing metadata.
If you want to add to it, you either need to pass the complete dict of
metadata, or call the update_launch_metadata() method.
"""
return self._manager.update_launch_config(
scaling_group, launch_config_type, **kwargs)
def update_launch_metadata(self, scaling_group, metadata):
"""
Adds the given metadata dict to the existing metadata for the scaling
group's launch configuration.
"""
return self._manager.update_launch_metadata(scaling_group, metadata)
def add_policy(self, scaling_group, name, policy_type, cooldown,
change=None, is_percent=False, desired_capacity=None, args=None):
"""
Adds a policy with the given values to the specified scaling group. The
'change' parameter is treated as an absolute amount, unless
'is_percent' is True, in which case it is treated as a percentage.
"""
return self._manager.add_policy(scaling_group, name, policy_type,
cooldown, change=change, is_percent=is_percent,
desired_capacity=desired_capacity, args=args)
def list_policies(self, scaling_group):
"""
Returns a list of all policies defined for the specified scaling group.
"""
return self._manager.list_policies(scaling_group)
def get_policy(self, scaling_group, policy):
"""
Gets the detail for the specified policy.
"""
return self._manager.get_policy(scaling_group, policy)
def replace_policy(self, scaling_group, policy, name,
policy_type, cooldown, change=None, is_percent=False,
desired_capacity=None, args=None):
"""
Replace an existing policy. All of the attributes must be specified. If
you wish to delete any of the optional attributes, pass them in as
None.
"""
return self._manager.replace_policy(scaling_group, policy, name,
policy_type, cooldown, change=change, is_percent=is_percent,
desired_capacity=desired_capacity, args=args)
def update_policy(self, scaling_group, policy, name=None, policy_type=None,
cooldown=None, change=None, is_percent=False,
desired_capacity=None, args=None):
"""
Updates the specified policy. One or more of the parameters may be
specified.
"""
return self._manager.update_policy(scaling_group, policy, name=name,
policy_type=policy_type, cooldown=cooldown, change=change,
is_percent=is_percent, desired_capacity=desired_capacity,
args=args)
def execute_policy(self, scaling_group, policy):
"""
Executes the specified policy for the scaling group.
"""
return self._manager.execute_policy(scaling_group=scaling_group,
policy=policy)
def delete_policy(self, scaling_group, policy):
"""
Deletes the specified policy from the scaling group.
"""
return self._manager.delete_policy(scaling_group=scaling_group,
policy=policy)
def add_webhook(self, scaling_group, policy, name, metadata=None):
"""
Adds a webhook to the specified policy.
"""
return self._manager.add_webhook(scaling_group, policy, name,
metadata=metadata)
def list_webhooks(self, scaling_group, policy):
"""
Returns a list of all webhooks defined for the specified policy.
"""
return self._manager.list_webhooks(scaling_group, policy)
def get_webhook(self, scaling_group, policy, webhook):
"""
Gets the detail for the specified webhook.
"""
return self._manager.get_webhook(scaling_group, policy, webhook)
def replace_webhook(self, scaling_group, policy, webhook, name,
metadata=None):
"""
Replace an existing webhook. All of the attributes must be specified.
If you wish to delete any of the optional attributes, pass them in as
None.
"""
return self._manager.replace_webhook(scaling_group, policy, webhook,
name, metadata=metadata)
def update_webhook(self, scaling_group, policy, webhook, name=None,
metadata=None):
"""
Updates the specified webhook. One or more of the parameters may be
specified.
"""
return self._manager.update_webhook(scaling_group=scaling_group,
policy=policy, webhook=webhook, name=name, metadata=metadata)
def update_webhook_metadata(self, scaling_group, policy, webhook, metadata):
"""
Adds the given metadata dict to the existing metadata for the specified
webhook.
"""
return self._manager.update_webhook_metadata(scaling_group, policy,
webhook, metadata)
def delete_webhook(self, scaling_group, policy, webhook):
"""
Deletes the specified webhook from the policy.
"""
return self._manager.delete_webhook(scaling_group, policy, webhook)
| apache-2.0 | -1,451,871,668,392,427,000 | 35.531275 | 80 | 0.592731 | false |
gplepage/lsqfit | examples/p-corr.py | 1 | 1909 | """
p-corr.py - Code for "Correlated Parameters"
"""
# Copyright (c) 2017-20 G. Peter Lepage.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version (see <http://www.gnu.org/licenses/>).
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import print_function # makes this work for python2 and 3
import sys
import numpy as np
import gvar as gv
import lsqfit
if sys.argv[1:]:
SHOW_PLOT = eval(sys.argv[1]) # display picture of grid ?
else:
SHOW_PLOT = True
if SHOW_PLOT:
try:
import matplotlib
except ImportError:
SHOW_PLOT = False
def main():
x, y = make_data()
prior = make_prior()
fit = lsqfit.nonlinear_fit(prior=prior, data=(x,y), fcn=fcn)
print(fit)
print('p1/p0 =', fit.p[1] / fit.p[0], 'p3/p2 =', fit.p[3] / fit.p[2])
print('corr(p0,p1) = {:.4f}'.format(gv.evalcorr(fit.p[:2])[1,0]))
if SHOW_PLOT:
fit.qqplot_residuals().show()
def make_data():
x = np.array([
4., 2., 1., 0.5, 0.25, 0.167, 0.125, 0.1, 0.0833, 0.0714, 0.0625
])
y = gv.gvar([
'0.198(14)', '0.216(15)', '0.184(23)', '0.156(44)', '0.099(49)',
'0.142(40)', '0.108(32)', '0.065(26)', '0.044(22)', '0.041(19)',
'0.044(16)'
])
return x, y
def make_prior():
p = gv.gvar(['0(1)', '0(1)', '0(1)', '0(1)'])
p[1] = 20 * p[0] + gv.gvar('0.0(1)') # p[1] correlated with p[0]
return p
def fcn(x, p):
return (p[0] * (x**2 + p[1] * x)) / (x**2 + x * p[2] + p[3])
if __name__ == '__main__':
main()
| gpl-3.0 | -3,654,074,718,163,940,400 | 28.369231 | 75 | 0.584075 | false |
brendanberg/trilobyte | data.py | 1 | 19064 | # An opaque Data class with Base encoding and decoding functionality
# -----------------------------------------------------------------------------
# A collection of classes for storing binary data and converting it into
# various base-encoded strings for text representations useful for
# over-the-wire transmission.
#
# Includes implementations of Base16, Doug Crockford's Base32, Flickr's
# Base58, and Base64 encodings.
#
# Documentation at http://github.com/brendn/Trilobyte
#
# Version 0.6
#
# Written by Brendan Berg
# Copyright Plus or Minus Five, 2012
from __future__ import division
from math import log, ceil
import re
class Data(object):
'''The `Data` class is an opaque data object that uses a byte string as a
backing store. The class provides functions to manipulate data objects and
generate string representations
'''
def __init__(self, string, encoding=None):
if encoding:
self.bytes = encoding.decode(string)
else:
# assert(isinstance(string, str) or isinstance(string, int) or isinstance()
if isinstance(string, str):
self.bytes = string
elif isinstance(string, int) or isinstance(string, long):
num = string
if num < 0:
raise ValueError('Data constructor requires a positive integer')
byteLen = int(ceil(num.bit_length() / 8))
self.bytes = ''.join(chr(num >> (8 * i) & 0xFF) for i in reversed(range(byteLen)))
else:
raise TypeError('Data constructor requires a byte string, int, or long')
def stringWithEncoding(self, encoding, **kwargs):
return encoding.encode(self.bytes, **kwargs)
def __str__(self):
return self.stringWithEncoding(Base64)
def __repr__(self):
encoded = self.stringWithEncoding(Base64)
if '\n' in encoded:
return "Data('''{0}''', Base64)".format(encoded)
else:
return "Data('{0}', Base64)".format(encoded)
def __hex__(self):
return self.stringWithEncoding(Base16)
def __add__(self, other):
return Data(self.bytes + other.bytes)
__concat__ = __add__
def __iadd__(self, other):
self.bytes += other.bytes
return self
def __contains__(self, item):
return item.bytes in self.bytes
def __eq__(self, other):
return self.bytes == other.bytes
def __len__(self):
return len(self.bytes)
def __getitem__(self, key):
return Data(self.bytes[key])
def __setitem__(self, key, value):
if isinstance(key, slice):
start, stop, step = key.indices(len(self.bytes))
if step != 1:
raise TypeError('cannot modify data contents with a stride')
self.bytes = self.bytes[:start] + value.bytes + self.bytes[stop:]
elif isinstance(key, int):
self.bytes = self.bytes[:key] + value.bytes + self.bytes[key+1:]
else:
raise TypeError('data indices must be integers or slices')
class Encoding(object):
'''The `Encoding` class is an abstract base for various encoding types.
It provides generic left-to-right bitwise conversion algorithms for its
subclasses. At a minimum, a subclass must override the `alphabet` and
`base` class properties.
Attempting to instantiate an encoding object will result in a
`NotImplementedError`:
>>> Encoding()
Traceback (most recent call last):
...
NotImplementedError: Encoding classes cannot be instantiated. ...
'''
alphabet = ''
base = 0
replacements = {}
def __init__(self):
raise NotImplementedError(
'Encoding classes cannot be instantiated. Use '
'Data.stringWithEncoding(Encoding) instead.'
)
@classmethod
def decode(clz, string, alphabet=None, ignoreinvalidchars=False):
if not alphabet:
alphabet = clz.alphabet
width = int(log(clz.base, 2))
bytes = ''
window = 0
winOffset = 16 - width
for ch in clz._canonicalRepr(string):
try:
window |= (alphabet.index(ch) << winOffset)
except ValueError:
raise ValueError('Illegal character in input string')
winOffset -= width
if winOffset <= (8 - width):
bytes += chr((window & 0xFF00) >> 8)
window = (window & 0xFF) << 8
winOffset += 8
if window:
# The padding was wrong, so we throw a tantrum
raise ValueError('Illegal input string')
# We assembled the byte string in reverse because it's faster
# to append to a string than to prepend in Python. Reversing a
# string, on the other hand is Super Fast (TM).
return bytes
@classmethod
def encode(
clz,
byteString,
alphabet=None,
linelength=64,
lineseparator='\r\n'
):
if not alphabet:
alphabet = clz.alphabet
width = int(log(clz.base, 2))
string = ''
lineCharCount = 0
window = 0
maskOffset = 8 - width
mask = (2 ** width - 1) << maskOffset
for ch in byteString:
window |= ord(ch)
while maskOffset >= 0:
string += alphabet[(window & mask) >> maskOffset]
lineCharCount += 1
if linelength and lineCharCount == linelength:
string += lineseparator
lineCharCount = 0
if maskOffset - width >= 0:
mask >>= width
maskOffset -= width
else:
break
window &= 0xFF
window <<= 8
mask <<= 8 - width
maskOffset += 8 - width
if maskOffset > 8 - width:
# If there are unencoded characters to the right of the mask, shift
# the mask all the way right and shift the window the remainder of
# the mask width to encode a zero-padded character at the end.
string += alphabet[(window & mask) >> maskOffset]
return string
@classmethod
def _canonicalRepr(clz, string):
for k, v in clz.replacements.iteritems():
string = string.replace(k, v)
return string
class Base16(Encoding):
"Encoder class for your friendly neighborhood hexidecimal numbers."
alphabet = '0123456789ABCDEF'
base = 16
# Discard hyphens, spaces, carriage returns, and new lines from input.
replacements = {
'-': '',
' ': '',
'\r': '',
'\n': '',
'I': '1',
'L': '1',
'O': '0',
'S': '5',
}
@classmethod
def _canonicalRepr(clz, string):
return super(Base16, clz)._canonicalRepr(string.upper())
class Base32(Encoding):
'''Encoder class for Doug Crockford's Base32 encoding. This is not merely
Python's `int(encoded, 32)` since Crockford's spec discusses replacements
for commonly confused characters, rather than a simple extension of the
alphabet used in hexadecimal. For example, the capital letter I, lower
case i, and lower case l could all be mistaken for the numeral 1. This
encoding removes that ambiguity by accepting any of these characters but
converting to a canonical representation for decoding.
http://www.crockford.com/wrmg/base32.html
'''
alphabet = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'
base = 32
replacements = {
'-': '',
' ': '',
'\r': '',
'\n': '',
'I': '1',
'L': '1',
'O': '0'
}
@classmethod
def _canonicalRepr(clz, string):
return super(Base32, clz)._canonicalRepr(string.upper())
class Base64(Encoding):
'''Encoder class for a flexible Base 64 encoding.
'''
alphabet = (
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
)
base = 64
replacements = {
'\r': '',
'\n': '',
' ': '',
'=': ''
}
@classmethod
def encode(clz, byteString, **kwargs):
if 'alphabet' not in kwargs:
highIndexChars = kwargs.get('highindexchars', '+/')
kwargs['alphabet'] = clz.alphabet[:-2] + highIndexChars
string = super(Base64, clz).encode(byteString, **kwargs)
padding = '=' * (4 - ((len(string) % 4) or 4))
return string + padding
# The general algorithm defined in the Encoding base class expects encoding
# characters to have integer widths. Base 58 encoding is approximately 5.857
# bits wide. I know, right? Numbers are weird.
class Base58(Encoding):
'''Encoder class for Flickr's base 58 encoding. Base 58 encoding is similar
to base 32, but includes upper and lower case letters. Upper case I, lower
case l, and upper case O are all excluded from the alphabet. Unlike
Crockford's base 32, base 58 encoding rejects input if it encounters
characters that are not in the alphabet. (Future versions may include a
flag to discard invalid characters.)
http://www.flickr.com/groups/api/discuss/72157616713786392
Because this encoding does not convert a fixed-width window of bits into a
base that is a multiple of two, the conversion process is different from
the encodings seen above.
'''
alphabet = '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'
base = 58
replacements = {
'-': '',
' ': ''
}
@classmethod
def decode(clz, string):
width = int(log(clz.base, 2))
bytes = ''
temp = 0
# Because each digit is not a whole number of bits, we are using
# binary as an intermediary. There should be a better way to do
# this, but this is the best I can find:
# http://forums.xkcd.com/viewtopic.php?f=12&t=69664
for idx, char in enumerate(clz._canonicalRepr(string)[::-1]):
temp += clz.alphabet.index(char) * (58 ** idx)
while temp > 0:
bytes += chr(temp % 256)
temp //= 256
# We assembled the byte string in reverse because it's faster
# to append to a string than to prepend in Python. Reversing a
# string, on the other hand is Super Fast (TM).
return bytes[::-1]
@classmethod
def encode(clz, byteString):
width = int(log(clz.base, 2))
string = ''
temp = 0
for idx, char in enumerate(byteString[::-1]):
temp += ord(char) * (256 ** idx)
while temp > 0:
string += clz.alphabet[temp % 58]
temp //= 58
return string[::-1]
class Phonetic(Encoding):
r'''
Encodes byte strings as sequences of phonetic words and decodes sequences
of words into byte strings.
Each byte is encoded as one of 256 phonetically distinct words that are
easy for a human to speak or read. The word list is based on Zachary
Voase's humanhash word list, but some similar-sounding words have been
replaced with more distinct alternatives.
USAGE REFERENCE
An empty Phonetic-encoded string decodes to an empty byte string.
>>> a = Data('', Phonetic)
>>> hex(a)
''
A zero value byte string encodes to a Phonetic-encoded string as many words
as the byte width of the original string.
>>> b = Data('\x00\x00\x00\x00')
>>> b.stringWithEncoding(Phonetic)
'abacus abacus abacus abacus'
Decoding a Phonetic-encoded string accepts canonical strings (lower case
words separated by spaces).
>>> c = Data('chicken yankee wolfram asparagus', Phonetic)
>>> hex(c)
'26FCF90B'
Decoding a Phonetic-encoded string accepts reasonable deviations from the
canonical string (mixed case, alternative whitespace, hyphens, periods,
and underscores.)
>>> d = Data('table-tennis COFFEE.CUP Twenty_Three', Phonetic)
>>> hex(d)
'D8DC272EE3DF'
Re-encoding the data results in the canonicalized word string.
>>> d.stringWithEncoding(Phonetic)
'table tennis coffee cup twenty three'
Words that are not in the word list are invalid.
>>> Data('bandersnatch washington', Phonetic)
Traceback (most recent call last):
...
ValueError: Illegal input string
>>> Data('sink-hippopotamus-tennessee', Phonetic)
Traceback (most recent call last):
...
ValueError: Illegal input string
'''
wordList = [
'abacus', 'alabama', 'alarmist', 'alaska', 'alpha', 'angel', 'apart',
'april', 'arizona', 'arkansas', 'artist', 'asparagus', 'aspen', 'august',
'autumn', 'avocado', 'bacon', 'bakerloo', 'batman', 'beer', 'berlin',
'beryllium', 'black', 'blimp', 'blossom', 'bluebird', 'bravo', 'bulldog',
'burger', 'butter', 'california', 'carbon', 'cardinal', 'carolina',
'carpet', 'cat', 'ceiling', 'charlie', 'chicken', 'coffee', 'cola', 'cold',
'colorado', 'comet', 'connecticut', 'crazy', 'cup', 'dakota', 'december',
'delaware', 'delta', 'diet', 'don', 'double', 'early', 'earth', 'east',
'echo', 'edward', 'eight', 'eighteen', 'eleven', 'emma', 'enemy', 'equal',
'failed', 'fantail', 'fifteen', 'fillet', 'finland', 'fish', 'five', 'fix',
'floor', 'florida', 'football', 'four', 'fourteen', 'foxtrot', 'freddie',
'friend', 'fruit', 'gee', 'georgia', 'glucose', 'golf', 'green', 'great',
'hamper', 'happy', 'harry', 'hawaii', 'helium', 'high', 'hot', 'hotel',
'hydrogen', 'idaho', 'illinois', 'india', 'indigo', 'ink', 'iowa',
'island', 'item', 'jersey', 'jig', 'jogger', 'juliet', 'july', 'jupiter',
'kansas', 'kentucky', 'kilo', 'king', 'kitten', 'lactose', 'lake', 'lamp',
'lemon', 'leopard', 'lima', 'lion', 'lithium', 'london', 'louisiana',
'low', 'magazine', 'magnesium', 'maine', 'mango', 'march', 'mars',
'maryland', 'massachusetts', 'may', 'mexico', 'michigan', 'mike',
'minnesota', 'mirror', 'mississippi', 'missouri', 'mobile', 'mockingbird',
'monkey', 'montana', 'moon', 'mountain', 'multiply', 'music', 'nebraska',
'neptune', 'network', 'nevada', 'nine', 'nineteen', 'nitrogen', 'north',
'november', 'nuts', 'october', 'ohio', 'oklahoma', 'one', 'orange',
'oranges', 'oregon', 'oscar', 'oven', 'oxygen', 'paper', 'paris', 'pasta',
'pennsylvania', 'pip', 'pizza', 'pluto', 'potato', 'princess', 'purple',
'quebec', 'queen', 'quiet', 'red', 'river', 'robert', 'robin', 'romeo',
'rugby', 'sad', 'salami', 'saturn', 'september', 'seven', 'seventeen',
'shade', 'sierra', 'single', 'sink', 'six', 'sixteen', 'skylark', 'snake',
'social', 'sodium', 'solar', 'south', 'spaghetti', 'speaker', 'spring',
'stairway', 'steak', 'stream', 'summer', 'sweet', 'table', 'tango', 'ten',
'tennessee', 'tennis', 'texas', 'thirteen', 'three', 'timing', 'triple',
'twelve', 'twenty', 'two', 'uncle', 'undone', 'uniform', 'uranium', 'utah',
'vegan', 'venus', 'vermont', 'victor', 'video', 'violet', 'virginia',
'washington', 'west', 'whiskey', 'white', 'william', 'windmill', 'winter',
'wisconsin', 'wolfram', 'wyoming', 'xray', 'yankee', 'yellow', 'zebra',
'zulu'
]
wordMap = {
'abacus': 0x00, 'alabama': 0x01, 'alarmist': 0x02, 'alaska': 0x03,
'alpha': 0x04, 'angel': 0x05, 'apart': 0x06, 'april': 0x07,
'arizona': 0x08, 'arkansas': 0x09, 'artist': 0x0A, 'asparagus': 0x0B,
'aspen': 0x0C, 'august': 0x0D, 'autumn': 0x0E, 'avocado': 0x0F,
'bacon': 0x10, 'bakerloo': 0x11, 'batman': 0x12, 'beer': 0x13,
'berlin': 0x14, 'beryllium': 0x15, 'black': 0x16, 'blimp': 0x17,
'blossom': 0x18, 'bluebird': 0x19, 'bravo': 0x1A, 'bulldog': 0x1B,
'burger': 0x1C, 'butter': 0x1D, 'california': 0x1E, 'carbon': 0x1F,
'cardinal': 0x20, 'carolina': 0x21, 'carpet': 0x22, 'cat': 0x23,
'ceiling': 0x24, 'charlie': 0x25, 'chicken': 0x26, 'coffee': 0x27,
'cola': 0x28, 'cold': 0x29, 'colorado': 0x2A, 'comet': 0x2B,
'connecticut': 0x2C, 'crazy': 0x2D, 'cup': 0x2E, 'dakota': 0x2F,
'december': 0x30, 'delaware': 0x31, 'delta': 0x32, 'diet': 0x33,
'don': 0x34, 'double': 0x35, 'early': 0x36, 'earth': 0x37, 'east': 0x38,
'echo': 0x39, 'edward': 0x3A, 'eight': 0x3B, 'eighteen': 0x3C,
'eleven': 0x3D, 'emma': 0x3E, 'enemy': 0x3F, 'equal': 0x40, 'failed': 0x41,
'fantail': 0x42, 'fifteen': 0x43, 'fillet': 0x44, 'finland': 0x45,
'fish': 0x46, 'five': 0x47, 'fix': 0x48, 'floor': 0x49, 'florida': 0x4A,
'football': 0x4B, 'four': 0x4C, 'fourteen': 0x4D, 'foxtrot': 0x4E,
'freddie': 0x4F, 'friend': 0x50, 'fruit': 0x51, 'gee': 0x52,
'georgia': 0x53, 'glucose': 0x54, 'golf': 0x55, 'green': 0x56,
'great': 0x57, 'hamper': 0x58, 'happy': 0x59, 'harry': 0x5A, 'hawaii': 0x5B,
'helium': 0x5C, 'high': 0x5D, 'hot': 0x5E, 'hotel': 0x5F, 'hydrogen': 0x60,
'idaho': 0x61, 'illinois': 0x62, 'india': 0x63, 'indigo': 0x64,
'ink': 0x65, 'iowa': 0x66, 'island': 0x67, 'item': 0x68, 'jersey': 0x69,
'jig': 0x6A, 'jogger': 0x6B, 'juliet': 0x6C, 'july': 0x6D, 'jupiter': 0x6E,
'kansas': 0x6F, 'kentucky': 0x70, 'kilo': 0x71, 'king': 0x72,
'kitten': 0x73, 'lactose': 0x74, 'lake': 0x75, 'lamp': 0x76, 'lemon': 0x77,
'leopard': 0x78, 'lima': 0x79, 'lion': 0x7A, 'lithium': 0x7B,
'london': 0x7C, 'louisiana': 0x7D, 'low': 0x7E, 'magazine': 0x7F,
'magnesium': 0x80, 'maine': 0x81, 'mango': 0x82, 'march': 0x83,
'mars': 0x84, 'maryland': 0x85, 'massachusetts': 0x86, 'may': 0x87,
'mexico': 0x88, 'michigan': 0x89, 'mike': 0x8A, 'minnesota': 0x8B,
'mirror': 0x8C, 'mississippi': 0x8D, 'missouri': 0x8E, 'mobile': 0x8F,
'mockingbird': 0x90, 'monkey': 0x91, 'montana': 0x92, 'moon': 0x93,
'mountain': 0x94, 'multiply': 0x95, 'music': 0x96, 'nebraska': 0x97,
'neptune': 0x98, 'network': 0x99, 'nevada': 0x9A, 'nine': 0x9B,
'nineteen': 0x9C, 'nitrogen': 0x9D, 'north': 0x9E, 'november': 0x9F,
'nuts': 0xA0, 'october': 0xA1, 'ohio': 0xA2, 'oklahoma': 0xA3, 'one': 0xA4,
'orange': 0xA5, 'oranges': 0xA6, 'oregon': 0xA7, 'oscar': 0xA8,
'oven': 0xA9, 'oxygen': 0xAA, 'paper': 0xAB, 'paris': 0xAC, 'pasta': 0xAD,
'pennsylvania': 0xAE, 'pip': 0xAF, 'pizza': 0xB0, 'pluto': 0xB1,
'potato': 0xB2, 'princess': 0xB3, 'purple': 0xB4, 'quebec': 0xB5,
'queen': 0xB6, 'quiet': 0xB7, 'red': 0xB8, 'river': 0xB9, 'robert': 0xBA,
'robin': 0xBB, 'romeo': 0xBC, 'rugby': 0xBD, 'sad': 0xBE, 'salami': 0xBF,
'saturn': 0xC0, 'september': 0xC1, 'seven': 0xC2, 'seventeen': 0xC3,
'shade': 0xC4, 'sierra': 0xC5, 'single': 0xC6, 'sink': 0xC7, 'six': 0xC8,
'sixteen': 0xC9, 'skylark': 0xCA, 'snake': 0xCB, 'social': 0xCC,
'sodium': 0xCD, 'solar': 0xCE, 'south': 0xCF, 'spaghetti': 0xD0,
'speaker': 0xD1, 'spring': 0xD2, 'stairway': 0xD3, 'steak': 0xD4,
'stream': 0xD5, 'summer': 0xD6, 'sweet': 0xD7, 'table': 0xD8,
'tango': 0xD9, 'ten': 0xDA, 'tennessee': 0xDB, 'tennis': 0xDC,
'texas': 0xDD, 'thirteen': 0xDE, 'three': 0xDF, 'timing': 0xE0,
'triple': 0xE1, 'twelve': 0xE2, 'twenty': 0xE3, 'two': 0xE4, 'uncle': 0xE5,
'undone': 0xE6, 'uniform': 0xE7, 'uranium': 0xE8, 'utah': 0xE9,
'vegan': 0xEA, 'venus': 0xEB, 'vermont': 0xEC, 'victor': 0xED,
'video': 0xEE, 'violet': 0xEF, 'virginia': 0xF0, 'washington': 0xF1,
'west': 0xF2, 'whiskey': 0xF3, 'white': 0xF4, 'william': 0xF5,
'windmill': 0xF6, 'winter': 0xF7, 'wisconsin': 0xF8, 'wolfram': 0xF9,
'wyoming': 0xFA, 'xray': 0xFB, 'yankee': 0xFC, 'yellow': 0xFD,
'zebra': 0xFE, 'zulu': 0xFF
}
@classmethod
def setWordList(clz, wordList):
if len(wordList) != 256:
raise Exception()
clz.wordList = list(wordList)
clz.wordMap = {word: count for count, word in enumerate(wordList)}
@classmethod
def decode(clz, string):
string = clz._canonicalRepr(string)
wordlist = re.findall(r'\w+', string)
result = ''
for word in wordlist:
if word not in clz.wordMap:
raise ValueError('Illegal input string')
result += chr(clz.wordMap[word])
return result
@classmethod
def encode(clz, byteString):
result = []
for ch in byteString:
result.append(clz.wordList[ord(ch)])
return ' '.join(result)
@classmethod
def _canonicalRepr(clz, string):
return re.sub(r'[\W_]', ' ', string).lower()
if __name__ == '__main__':
import doctest
options = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
doctest.testmod(optionflags=options)
doctest.testfile('README.md', optionflags=options, globs=globals())
doctest.testfile('documentation/custom_encoding.md', optionflags=options, globs=globals())
doctest.testfile('tests/tests.md', optionflags=options, globs=globals())
| bsd-3-clause | -3,907,504,601,309,232,000 | 32.328671 | 91 | 0.648919 | false |
cpatrick/comic-django | django/filetransfers/views.py | 1 | 9577 | import pdb
import posixpath
import re
import os
try:
from urllib.parse import unquote
except ImportError: # Python 2
from urllib import unquote
from exceptions import Exception
from django.core.files import File
from django.core.files.storage import DefaultStorage
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden
from django.shortcuts import get_object_or_404, render
from django.http import Http404
from django.utils.translation import ugettext as _, ugettext_noop
#from django.views.generic.simple import direct_to_template
from filetransfers.forms import UploadForm
# FIXME : Sjoerd: comicmodels and filetransfers are being merged here. How to keep original Filetransfers seperate from this?
# Right now I feel as though I am entangeling things.. come back to this later
from filetransfers.api import prepare_upload, serve_file
from comicmodels.models import FileSystemDataset,ComicSite,UploadModel,ComicSiteModel
from django.conf import settings
def upload_handler(request):
    """Render the generic upload form and handle submitted files.

    On a valid POST the file is saved and the client is redirected back
    to this view (post/redirect/get pattern).  On GET -- or when
    validation fails -- the form is rendered together with the list of
    all existing uploads.
    """
    view_url = reverse('filetransfers.views.upload_handler')
    if request.method == 'POST':
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(view_url)
        # Fall through with the *bound* form so validation errors are
        # displayed instead of being silently discarded.
    else:
        form = UploadForm()

    upload_url, upload_data = prepare_upload(request, view_url)
    return render(request, 'upload/upload.html',
                  {'form': form, 'upload_url': upload_url,
                   'upload_data': upload_data,
                   'uploads': UploadModel.objects.all()})
def download_handler(request, pk):
    """Serve the file of upload ``pk`` as an attachment (forced download)."""
    upload = get_object_or_404(UploadModel, pk=pk)
    return serve_file(request, upload.file, save_as=True)
def uploadedfileserve_handler(request, pk):
    """ Serve a file through django, for displaying images etc. """
    upload = get_object_or_404(UploadModel, pk=pk)
    #if request.user.has_perm("comicmodels.view_ComicSiteModel"):
    if not upload.can_be_viewed_by(request.user):
        return HttpResponse("You do not have permission to view this.")
    # Inline display (save_as=False) so browsers can render images etc.
    return serve_file(request, upload.file, save_as=False)
def download_handler_dataset_file(request, project_name, dataset_title,filename):
    """offer file for download based on filename and dataset

    Looks up the dataset by project short name and dataset title, joins
    ``filename`` onto the dataset folder and streams it back as an
    attachment.
    """
    dataset = FileSystemDataset.objects.get(comicsite__short_name=project_name,title=dataset_title)
    filefolder = dataset.get_full_folder_path()
    filepath = os.path.join(filefolder,filename)
    # BUGFIX: open in binary mode -- dataset files may be arbitrary
    # binary data and text mode can corrupt them.
    f = open(filepath, 'rb')
    file = File(f) # create django file object
    return serve_file(request, file, save_as=True)
def download_handler_file(request, filepath):
    """offer file for download based on filepath relative to django root"""
    # BUGFIX: open in binary mode -- the file may not be text and text
    # mode can corrupt the download.
    f = open(filepath, 'rb')
    file = File(f) # create django file object
    return serve_file(request, file, save_as=True)
def delete_handler(request, pk):
    """Delete an upload (stored file + database record) on POST and
    redirect to the project's upload page.

    Non-POST requests fall through and return None (matching the
    original behaviour).
    """
    if request.method == 'POST':
        upload = get_object_or_404(UploadModel, pk=pk)
        comicsitename = upload.comicsite.short_name
        try:
            # Best effort: a missing/broken file object must not block
            # deletion of the database record.
            upload.file.delete()
        except Exception:
            # Was a bare "except:" with a pointless "finally: pass";
            # narrowed so SystemExit/KeyboardInterrupt are not swallowed.
            pass
        upload.delete()
        return HttpResponseRedirect(reverse('comicmodels.views.upload_handler',kwargs={'site_short_name':comicsitename}))
def can_access(user,path,project_name,override_permission=""):
    """ Does this user have permission to access folder path which is part of
    project named project_name?
    Override permission can be used to make certain folders servable through
    code even though this would not be allowed otherwise
    """
    if override_permission == "":
        required = _required_permission(user,path,project_name)
    else:
        choices = [x[0] for x in ComicSiteModel.PERMISSIONS_CHOICES]
        if not override_permission in choices:
            # BUGFIX: was "raise Exeption(... % (..., needed))" -- both the
            # exception class and the variable were undefined names, so a
            # bad argument raised NameError instead of this message.
            raise Exception("input parameters should be one of [%s], "
                            "found '%s' " % (",".join(choices),
                                             override_permission))
        required = override_permission

    if required == ComicSiteModel.ALL:
        return True
    elif required == ComicSiteModel.REGISTERED_ONLY:
        project = ComicSite.objects.get(short_name=project_name)
        # participants of the project may see registered-only content
        return bool(project.is_participant(user))
    elif required == ComicSiteModel.ADMIN_ONLY:
        project = ComicSite.objects.get(short_name=project_name)
        return bool(project.is_admin(user))
    else:
        return False
def _sufficient_permission(needed,granted):
    """ Return true if granted permission is greater than or equal to needed
    permission.
    """
    # BUGFIX: "CoomicSiteModel" and "Exeption" were misspelled, so this
    # function could never run without raising NameError.
    choices = [x[0] for x in ComicSiteModel.PERMISSIONS_CHOICES]
    if not needed in choices:
        raise Exception("input parameters should be one of [%s], found '%s' " % (",".join(choices),needed))
    if not granted in choices:
        raise Exception("input parameters should be one of [%s], found '%s' " % (",".join(choices),granted))

    # NOTE(review): the original compared weights the other way round
    # (needed >= granted), contradicting the docstring.  Assuming a higher
    # weight means a stronger permission, this is the documented test --
    # confirm the intended ordering of PERMISSION_WEIGHTS.
    if ComicSiteModel.PERMISSION_WEIGHTS[granted] >= ComicSiteModel.PERMISSION_WEIGHTS[needed]:
        return True
    else:
        return False
def _required_permission(user,path,project_name):
    """ Given a file path on local filesystem, which permission level is needed
    to view this?

    Returns one of ComicSiteModel.ALL / REGISTERED_ONLY / ADMIN_ONLY based
    on which configured top-level folder the path falls under.
    """
    #some config checking.
    # TODO : check this once at server start but not every time this method is
    #        called. It is too late to throw this error once a user clicks
    #        something.
    if not hasattr(settings,"COMIC_PUBLIC_FOLDER_NAME"):
        # BUGFIX: ImproperlyConfigured was used without being imported,
        # turning a configuration error into a NameError (import added to
        # the module's import block).
        raise ImproperlyConfigured("Don't know from which folder serving publiv files"
                                   "is allowed. Please add a setting like "
                                   "'COMIC_PUBLIC_FOLDER_NAME = \"public_html\""
                                   " to your .conf file." )
    if not hasattr(settings,"COMIC_REGISTERED_ONLY_FOLDER_NAME"):
        raise ImproperlyConfigured("Don't know from which folder serving protected files"
                                   "is allowed. Please add a setting like "
                                   "'COMIC_REGISTERED_ONLY_FOLDER_NAME = \"datasets\""
                                   " to your .conf file." )

    # Extra folders can be declared public through configuration.
    if hasattr(settings,"COMIC_ADDITIONAL_PUBLIC_FOLDER_NAMES"):
        if startwith_any(path,settings.COMIC_ADDITIONAL_PUBLIC_FOLDER_NAMES):
            return ComicSiteModel.ALL

    if path.startswith(settings.COMIC_PUBLIC_FOLDER_NAME):
        return ComicSiteModel.ALL
    elif path.startswith(settings.COMIC_REGISTERED_ONLY_FOLDER_NAME):
        return ComicSiteModel.REGISTERED_ONLY
    else:
        # anything outside the known public/registered folders is
        # restricted to project admins
        return ComicSiteModel.ADMIN_ONLY
def startwith_any(path,start_options):
    """ Return true if path starts with any of the strings in string array start_options
    """
    return any(path.startswith(prefix) for prefix in start_options)
def serve(request, project_name, path, document_root=None,override_permission=""):
    """
    Serve static file for a given project.

    This is meant as a replacement for the inefficient debug only
    'django.views.static.serve' way of serving files under /media urls.
    """
    if document_root == None:
        document_root = settings.MEDIA_ROOT

    # Normalise the requested path and drop empty, '.' and '..' parts so a
    # request cannot escape the document root.
    path = posixpath.normpath(unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root,project_name, newpath)

    storage = DefaultStorage()
    if not storage.exists(fullpath):
        # On case sensitive filesystems you can have problems if the project
        # nameurl in the url is not exactly the same case as the filepath.
        # find the correct case for projectname then.
        if not ComicSite.objects.filter(short_name=project_name).exists():
            # BUGFIX: the original compared the QuerySet to a plain list
            # ("projectlist == []"), which is never true, so the 404 for a
            # missing project could never fire.
            raise Http404(_("project '%s' does not exist" % project_name ))
        projectlist = ComicSite.objects.filter(short_name=project_name)
        project_name = projectlist[0].short_name
        fullpath = os.path.join(document_root,project_name, newpath)
        if not storage.exists(fullpath):
            raise Http404(_('"%(path)s" does not exist') % {'path': fullpath})

    if can_access(request.user,path,project_name,override_permission):
        f = storage.open(fullpath, 'rb')
        file = File(f) # create django file object
        # NOTE(review): save_as=True forces a download, which contradicts
        # the original comment about showing images directly -- confirm
        # which behaviour is intended.
        return serve_file(request, file, save_as=True)
    else:
        return HttpResponseForbidden("This file is not available without "
                                     "credentials")
| apache-2.0 | -8,936,553,819,921,376,000 | 36.120155 | 125 | 0.6454 | false |
larsborn/hetzner-zonefiles | dns.py | 1 | 3978 | import requests
import re
import os
import sys
from BeautifulSoup import BeautifulSoup
base_url = 'https://robot.your-server.de'
zonefiledir = 'zonefiles'
def log(msg, level = ''):
    # Minimal console logger; ``level`` is a free-form tag shown in
    # brackets before the message.  (Python 2 print statement.)
    print '[%s] %s' % (level, msg)
def login(username, password):
    """Log into the Hetzner robot web interface.

    Returns the session cookies on success, or False when the
    credentials were rejected.
    """
    login_form_url = base_url + '/login'
    login_url = base_url + '/login/check'
    # First GET fetches the login form (and its session cookie), which is
    # then carried along with the credentials POST.
    r = requests.get(login_form_url)
    r = requests.post(login_url, data={'user': username, 'password': password}, cookies=r.cookies)
    # ugly: the hetzner status code is always 200 (delivering the login form
    # as an "error message")
    if 'Herzlich Willkommen auf Ihrer' not in r.text:
        return False
    # The cookies of interest live on the redirect in the history chain.
    return r.history[0].cookies
def list_zonefile_ids(cookies):
    """Scrape all DNS zones from the robot UI.

    Walks the paginated zone index until a page adds no new entries and
    returns a dict mapping zone id -> domain name.
    """
    ret = {}
    last_count = -1
    page = 1
    # Stop once a page contributes no new zones (last page reached).
    while last_count != len(ret):
        last_count = len(ret)
        dns_url = base_url + '/dns/index/page/%i' % page
        r = requests.get(dns_url, cookies=cookies)
        soup = BeautifulSoup(r.text, convertEntities=BeautifulSoup.HTML_ENTITIES)
        # Each zone is rendered as a collapsible box; the zone id is only
        # available inside the box's onclick JavaScript snippet.
        boxes = soup.findAll('table', attrs={'class': 'box_title'})
        for box in boxes:
            expandBoxJavascript = dict(box.attrs)['onclick']
            zoneid = _javascript_to_zoneid(expandBoxJavascript)
            td = box.find('td', attrs={'class': 'title'})
            domain = td.renderContents()
            ret[zoneid] = domain
        page += 1
    return ret
def get_zonefile(cookies, id):
    """Fetch the raw zonefile text for DNS zone ``id``."""
    response = requests.get(base_url + '/dns/update/id/%i' % id, cookies=cookies)
    markup = BeautifulSoup(response.text, convertEntities=BeautifulSoup.HTML_ENTITIES)
    # The zonefile is delivered inside the page's <textarea>.
    return markup.find('textarea').renderContents()
def write_zonefile(cookies, id, zonefile):
    """Submit a new zonefile for zone ``id``; True when accepted."""
    response = requests.post(
        base_url + '/dns/update',
        cookies=cookies,
        data={'id': id, 'zonefile': zonefile}
    )
    # ugly: the hetzner status code is always 200 (delivering the login form
    # as an "error message")
    return 'Vielen Dank' in response.text
def logout(cookies):
    """End the robot session; True when the request succeeded."""
    response = requests.get(base_url + '/login/logout', cookies=cookies)
    return response.status_code == 200
def _javascript_to_zoneid(s):
r = re.compile('\'(\d+)\'')
m = r.search(s)
if not m: return False
return int(m.group(1))
def print_usage():
    # Command-line help text.  (Python 2 print statement.)
    print 'Usage: %s [download|update] <username> <password>' % sys.argv[0]
# Script entry point: log in, then either download every zonefile into
# ``zonefiledir`` or push the local zonefiles back to Hetzner.
if len(sys.argv) != 4:
    print_usage()
    exit()

command = sys.argv[1]
username = sys.argv[2]
password = sys.argv[3]

log('Logging in...')
cookies = login(username, password)
if not cookies:
    print 'Cannot login'
    exit()

if command == 'download':
    log('Requesting list of zonefiles...')
    # NOTE: shadows the builtin name ``list``.
    list = list_zonefile_ids(cookies)
    log('Found %i zonefiles.' % len(list))
    for zoneid, domain in list.iteritems():
        log('Loading zonefile for %s...' % domain)
        zonefile = get_zonefile(cookies, zoneid)
        # Files are named "<zoneid>_<domain>.txt" so updates can recover
        # the zone id from the filename.
        filename = os.path.join(zonefiledir, '%i_%s.txt' % (zoneid, domain))
        log('Saving zonefile to %s...' % filename)
        f = open(filename, 'w+')
        f.write(zonefile)
        f.close()
elif command == 'update':
    for file in os.listdir(zonefiledir):
        # Zone id is the leading "<id>_" part of the stored filename.
        domainid = int(file.split('_')[0])
        filename = os.path.join(zonefiledir, file)
        log('Updating zonefile %s' % filename)
        f = open(filename)
        zonefile = ''.join(f.readlines())
        f.close()
        success = write_zonefile(cookies, domainid, zonefile)
        if success:
            log('Update successfull.')
        else:
            log('Error updating')
else:
    log('Invalid command "%s"' % command)
    print_usage()

log('Logging out')
logout(cookies)
| mit | 5,876,934,618,418,820,000 | 26.414286 | 98 | 0.578431 | false |
misterresse/astrid-tp | tests/mail/tests.py | 1 | 1766 | # -*- coding: utf-8 -*-
# Copyright the original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__authors__ = [
'"John Resse" <[email protected]>'
]
from tp import core
from tp.builders import text
from tp.processors import smtp
def build():
    """Produce the test requests: one valid address and one invalid one."""
    recipients = [
        {'name': 'lion', 'email': '[email protected]'},
        {'name': 'invalid', 'email': 'invalid'},
    ]
    return [core.MetaRequest(data) for data in recipients]
def process(request):
    """Render the mail subject and body for one recipient request."""
    template = ('亲爱的 {0},您好:\n'
                '我公司在进行一项关于个人收入的调查......'
                '请勿回复此邮件\n{1}')
    recipient = request.get('email')
    return {
        'to': recipient,
        'subject': '来自Python邮件客户端的测试邮件',
        'msg': template.format(request.get('name'), recipient),
    }
class Process2(object):
    """Pass-through processor that logs the outgoing mail target."""

    def process(self, request):
        # Same console output as before, built with str.format.
        print('Sending to {0}, {1}'.format(request['to'], request['subject']))
        return request
if __name__ == '__main__':
    # Wire up the scheduler: builders create requests, processors run in
    # order (render -> log -> send via QQ SMTP).
    # NOTE(review): SMTP credentials are hard-coded below -- move to
    # configuration / environment.
    s = core.Scheduler()
    s.add_requests_builders([build, text.TextFileRequestsBuilder('mail/test.csv', text.build_meta_request_csv)])
s.add_request_processors([process, Process2(), smtp.QQSTMPSendMailProcessor('1174751315', 'nnqqgmdwykcnhhjc')])
s.run() | apache-2.0 | -8,088,331,116,841,964,000 | 32.62 | 115 | 0.656548 | false |
lizhuangzi/QSQuantificationCode | QSQuantifier/DataGetter.py | 1 | 4473 | #!/usr/bin python
# coding=utf-8
# This Module provides some functions about getting stock's data.
import DataConnection as dc
import datetime
import numpy as np
import db_func
LEAST_DATE = '2016-01-01 00:00:00'
def string_toDatetime(timestr):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a datetime object."""
    fmt = "%Y-%m-%d %H:%M:%S"
    return datetime.datetime.strptime(timestr, fmt)
def last_days(current_time,num = 1):
    """Return ``current_time`` shifted ``num`` days into the past."""
    return current_time - datetime.timedelta(days=num)
def add_zero_forsomeproperties(query_properties):
    """Pad the limit series (and pre_close, when tracked) with a 0.0 entry."""
    for key in ('high_limit', 'low_limit'):
        query_properties[key].append(0.0)
    if 'pre_close' in query_properties:
        query_properties['pre_close'].append(0.0)
# make statistics for datas
def make_statisticsByProperty(tuple_result,query_properties=None,allcount=0):
    """Aggregate one bucket of tick rows into the per-period series.

    tuple_result     -- sequence of tick dicts with 'newest' (price),
                        'deal_amount' and 'deal_money' keys.
    query_properties -- dict of property name -> list; one entry is
                        appended per requested property.
    allcount         -- total number of periods; once the 'close' series
                        holds that many entries the limit series are
                        padded with zeroes.
    """
    # BUGFIX: the default used to be a mutable "{}" shared between calls;
    # use a None sentinel instead.
    if query_properties is None:
        query_properties = {}
    qpitks = query_properties.keys()
    volume = 0.0
    money = 0.0
    opens = 0.0
    close = 0.0
    if len(tuple_result)!=0:
        opens = tuple_result[0]['newest']
        close = tuple_result[-1]['newest']
    hasmoney = 'money' in qpitks
    hasavg = 'avg' in qpitks
    haslow = 'low' in qpitks
    hashigh = 'high' in qpitks
    has_pre_close = 'pre_close' in qpitks
    # BUGFIX: 'low' used to start at 0.0, so for (positive) prices the
    # reported low was always 0.0.  Seed both extremes from the first tick.
    if len(tuple_result)!=0:
        high = tuple_result[0]['newest']
        low = tuple_result[0]['newest']
    else:
        high = 0.0
        low = 0.0
    avg = 0.0
    for x in tuple_result:
        volume += x['deal_amount']
        money += x['deal_money']
        if high<x['newest']:
            high = x['newest']
        if low>x['newest']:
            low = x['newest']
    if volume != 0:
        avg = round(money/volume,2)
    if hasmoney:
        query_properties['money'].append(money)
    if hasavg:
        query_properties['avg'].append(avg)
    if haslow:
        query_properties['low'].append(low)
    if hashigh:
        query_properties['high'].append(high)
    close_arr = query_properties['close']
    # Only once a previous period exists can the +/-10% price limits and
    # pre_close be derived.
    if len(close_arr)!=0:
        last_close = close
        if has_pre_close:
            query_properties['pre_close'].append(last_close)
        high_limit = round(last_close + last_close*0.1,2)
        low_limit = round(last_close - last_close*0.1,2)
        query_properties['high_limit'].append(high_limit)
        query_properties['low_limit'].append(low_limit)
    query_properties['close'].append(close)
    query_properties['open'].append(opens)
    query_properties['volume'].append(volume)
    # close_arr aliases query_properties['close'], so this length check
    # sees the append above.
    if len(close_arr) == allcount:
        add_zero_forsomeproperties(query_properties)
def serach_timelist(collection,query_time,unit,count,skip_paused=True,query_properties={}):
    """Walk backwards from ``query_time`` collecting ``count`` periods.

    unit -- 'd' (days), 'm' (minutes) or 's' (seconds).
    Fills ``query_properties`` via make_statisticsByProperty and returns
    the list of period boundary datetimes.

    NOTE(review): ``query_properties={}`` is a mutable default argument
    shared between calls -- callers must always pass their own dict.
    """
    time_list = []
    unit_type = -1
    if unit == 'd':
        unit_type = 0
    elif unit == 'm':
        unit_type = 1
    elif unit == 's':
        unit_type = 2
    else:
        print('The parameter unit is illegal')
        return
    i= 0; t=0
    while i < count+1:
        if unit_type == 0:
            temp = query_time + datetime.timedelta(days = -1)
            #if it is the loop to search day.
            if t == 0:
                # First daily step: clamp the window to trading hours
                # 09:00-22:00 of the previous day.
                tempstr = str(temp)
                strArr = tempstr.split(" ")
                newStr = strArr[0]+" "+"09:00:00"
                temp = string_toDatetime(newStr)
                query_time = string_toDatetime(strArr[0] +" "+"22:00:00")
        elif unit_type == 1:
            temp = query_time + datetime.timedelta(minutes = -1)
        else:
            temp = query_time + datetime.timedelta(seconds = -1)
        # beyound threshold
        if temp < string_toDatetime(LEAST_DATE):
            # No data exists before LEAST_DATE; pad the limit series and
            # stop early.
            add_zero_forsomeproperties(query_properties)
            print('Warning: There is no early datas')
            break;
        results = db_func.query_dataThreshold(collection,str(query_time),str(temp))
        # no skip
        if (skip_paused and unit_type==0 and results.count()==0) == False:
            # Count this period unless it is a paused trading day that
            # the caller asked to skip.
            i += 1
            time_list.append(temp)
            make_statisticsByProperty(tuple(results.clone()), query_properties,count+1)
        del(results)
        query_time = temp
        t += 1
    return time_list
def attribute_history(security,current_time,count,unit='d',fields=['open','close','high_limit','low_limit','volume'],skip_paused=True,fq='pre'):
    """Fetch ``count`` historical periods of ``fields`` for a security.

    Returns a dict mapping field name -> numpy array, or None when the
    database connection fails.  ``fq`` is accepted but unused here.

    NOTE(review): ``fields`` is a mutable default list and the MongoDB
    host is hard-coded -- both worth fixing.
    """
    db = dc.startConnection('192.168.69.54',27017)
    if db == None:
        return
    # get collection from db by security
    collection = db[security]
    #Convert
    query_time= string_toDatetime(current_time)
    query_properties = {x:[] for x in fields}
    #Return a dictionary key:datetime,value:tuple(query datas)
    cds = serach_timelist(collection,query_time,unit,count,skip_paused,query_properties)
    #show Search days
    for x in xrange(0, count if (len(cds)==count+1) else len(cds)):
        print "Searching time is %s" %cds[x]
    #finall result dictionary
    query_result = {}
    # change all value to numpy.ndataArray
    for k,v in query_properties.iteritems():
        if len(cds)==count+1:
            # One extra period was collected (for pre_close/limits);
            # drop the oldest entry.
            v = v[0:-1]
        query_result[k] = np.array(v)
    return query_result
| apache-2.0 | -8,739,037,170,679,827,000 | 22.919786 | 144 | 0.678068 | false |
openstack/networking-l2gw | networking_l2gw/tests/unit/services/l2gateway/common/test_tunnel_calls.py | 1 | 2611 | # Copyright (c) 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from neutron.plugins.ml2.drivers.l2pop import rpc as l2pop_rpc
from neutron.plugins.ml2.drivers import type_tunnel
from neutron.plugins.ml2 import managers
from neutron.plugins.ml2 import rpc as rpc
from neutron.tests import base
from networking_l2gw.services.l2gateway.common import tunnel_calls
class TestTunnelCalls(base.BaseTestCase):
    """Unit tests for the l2gateway tunnel helper calls."""

    def setUp(self):
        super(TestTunnelCalls, self).setUp()
        # TypeManager is patched for the whole test (no explicit stop,
        # matching previous behaviour).
        mock.patch.object(managers, 'TypeManager').start()
        self.tunnel_call = tunnel_calls.Tunnel_Calls()
        self.context = mock.MagicMock()

    def test_trigger_tunnel_sync(self):
        with mock.patch.object(rpc, 'RpcCallbacks'), \
                mock.patch.object(type_tunnel.TunnelRpcCallbackMixin,
                                  'tunnel_sync') as tunnel_sync_mock:
            self.tunnel_call.trigger_tunnel_sync(self.context, 'fake_ip')
            tunnel_sync_mock.assert_called_with(
                self.context, tunnel_ip='fake_ip', tunnel_type='vxlan')

    def test_trigger_l2pop_sync(self):
        entry = "fake_fdb_entry"
        with mock.patch.object(l2pop_rpc.L2populationAgentNotifyAPI,
                               'add_fdb_entries') as add_fdb_mock:
            self.tunnel_call.trigger_l2pop_sync(self.context, entry)
            add_fdb_mock.assert_called_with(self.context, entry)

    def test_trigger_l2pop_delete(self):
        entry = "fake_fdb_entry"
        host = 'fake_host'
        with mock.patch.object(l2pop_rpc.L2populationAgentNotifyAPI,
                               'remove_fdb_entries') as remove_fdb_mock:
            self.tunnel_call.trigger_l2pop_delete(self.context, entry, host)
            remove_fdb_mock.assert_called_with(self.context, entry, host)
| apache-2.0 | -2,214,235,675,204,278,000 | 42.516667 | 76 | 0.634623 | false |
jpvanhal/flask-xuacompatible | setup.py | 1 | 1057 | """
Flask-XUACompatible
-------------------
This is a simple Flask extension that sets X-UA-Compatible HTTP header for
all responses.
"""
from setuptools import setup
# Distribution metadata for this single-module Flask extension.
setup(
    name='flask-xuacompatible',
    version='0.1.0',
    url='https://github.com/jpvanhal/flask-xuacompatible',
    license='BSD',
    author='Janne Vanhala',
    author_email='[email protected]',
    description='Sets X-UA-Compatible HTTP header in your Flask application.',
    # long_description is taken from the module docstring at the top of
    # this file.
    long_description=__doc__,
    py_modules=['flask_xuacompatible'],
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    install_requires=['Flask'],
    test_suite='test_xuacompatible.suite',
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
| bsd-3-clause | 1,511,387,754,708,708,000 | 28.361111 | 78 | 0.64333 | false |
DirectXMan12/nova-hacking | nova/tests/api/openstack/compute/contrib/test_server_start_stop.py | 1 | 3702 | # Copyright (c) 2012 Midokura Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mox
import webob
from nova.api.openstack.compute.contrib import server_start_stop
from nova.compute import api as compute_api
from nova import db
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
def fake_instance_get(self, context, instance_id):
    """Stub DB lookup returning a minimal fake instance dict."""
    inst = fakes.stub_instance(id=1, uuid=instance_id)
    inst.update({
        'created_at': None,
        'deleted_at': None,
        'updated_at': None,
        'deleted': 0,
        'info_cache': {'network_info': 'foo',
                       'instance_uuid': inst['uuid']},
    })
    return inst
def fake_start_stop_not_ready(self, context, instance):
    # Stub for compute_api start/stop that always reports the instance
    # as not ready.
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
class ServerStartStopTest(test.TestCase):
    """Tests for the os-server-start-stop extension controller."""

    def setUp(self):
        super(ServerStartStopTest, self).setUp()
        self.controller = server_start_stop.ServerStartStopActionController()

    def test_start(self):
        # Stub the DB lookup and expect exactly one compute API start call.
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        body = dict(start="")
        self.controller._start_server(req, 'test_inst', body)

    def test_start_not_ready(self):
        # InstanceNotReady from the compute API must map to HTTP 409.
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, 'test_inst', body)

    def test_stop(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        body = dict(stop="")
        self.controller._stop_server(req, 'test_inst', body)

    def test_stop_not_ready(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        # NOTE(review): body says "start" although this exercises stop;
        # the controller appears not to read the body key -- confirm.
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, 'test_inst', body)

    def test_start_with_bogus_id(self):
        # No DB stub installed, so the instance lookup fails -> 404.
        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._start_server, req, 'test_inst', body)

    def test_stop_with_bogus_id(self):
        req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._stop_server, req, 'test_inst', body)
| apache-2.0 | -2,595,194,122,138,429,400 | 38.806452 | 78 | 0.667747 | false |
valkyriesavage/invenio | modules/bibedit/lib/bibedit_engine.py | 1 | 51248 | ## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0103
"""Invenio BibEdit Engine."""
__revision__ = "$Id"
from invenio.bibedit_config import CFG_BIBEDIT_AJAX_RESULT_CODES, \
CFG_BIBEDIT_JS_CHECK_SCROLL_INTERVAL, CFG_BIBEDIT_JS_HASH_CHECK_INTERVAL, \
CFG_BIBEDIT_JS_CLONED_RECORD_COLOR, \
CFG_BIBEDIT_JS_CLONED_RECORD_COLOR_FADE_DURATION, \
CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR, \
CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR_FADE_DURATION, \
CFG_BIBEDIT_JS_NEW_CONTENT_COLOR, \
CFG_BIBEDIT_JS_NEW_CONTENT_COLOR_FADE_DURATION, \
CFG_BIBEDIT_JS_NEW_CONTENT_HIGHLIGHT_DELAY, \
CFG_BIBEDIT_JS_STATUS_ERROR_TIME, CFG_BIBEDIT_JS_STATUS_INFO_TIME, \
CFG_BIBEDIT_JS_TICKET_REFRESH_DELAY, CFG_BIBEDIT_MAX_SEARCH_RESULTS, \
CFG_BIBEDIT_TAG_FORMAT, CFG_BIBEDIT_AJAX_RESULT_CODES_REV, \
CFG_BIBEDIT_AUTOSUGGEST_TAGS, CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS,\
CFG_BIBEDIT_KEYWORD_TAXONOMY, CFG_BIBEDIT_KEYWORD_TAG, \
CFG_BIBEDIT_KEYWORD_RDFLABEL
from invenio.config import CFG_SITE_LANG, CFG_DEVEL_SITE
from invenio.bibedit_dblayer import get_name_tags_all, reserve_record_id, \
get_related_hp_changesets, get_hp_update_xml, delete_hp_change, \
get_record_last_modification_date, get_record_revision_author, \
get_marcxml_of_record_revision, delete_related_holdingpen_changes, \
get_record_revisions
from invenio.bibedit_utils import cache_exists, cache_expired, \
create_cache_file, delete_cache_file, get_bibrecord, \
get_cache_file_contents, get_cache_mtime, get_record_templates, \
get_record_template, latest_record_revision, record_locked_by_other_user, \
record_locked_by_queue, save_xml_record, touch_cache_file, \
update_cache_file_contents, get_field_templates, get_marcxml_of_revision, \
revision_to_timestamp, timestamp_to_revision, \
get_record_revision_timestamps, record_revision_exists, \
can_record_have_physical_copies
from invenio.bibrecord import create_record, print_rec, record_add_field, \
record_add_subfield_into, record_delete_field, \
record_delete_subfield_from, \
record_modify_subfield, record_move_subfield, \
create_field, record_replace_field, record_move_fields, \
record_modify_controlfield, record_get_field_values
from invenio.config import CFG_BIBEDIT_PROTECTED_FIELDS, CFG_CERN_SITE, \
CFG_SITE_URL
from invenio.search_engine import record_exists, search_pattern
from invenio.webuser import session_param_get, session_param_set
from invenio.bibcatalog import bibcatalog_system
from invenio.webpage import page
from invenio.bibknowledge import get_kbd_values_for_bibedit, get_kbr_values, \
get_kbt_items_for_bibedit #autosuggest
from invenio.bibcirculation_dblayer import get_number_copies, has_copies
from invenio.bibcirculation_utils import create_item_details_url
from datetime import datetime
import re
import difflib
import zlib
import sys
if sys.hexversion < 0x2060000:
try:
import simplejson as json
simplejson_available = True
except ImportError:
# Okay, no Ajax app will be possible, but continue anyway,
# since this package is only recommended, not mandatory.
simplejson_available = False
else:
import json
simplejson_available = True
import invenio.template
bibedit_templates = invenio.template.load('bibedit')
re_revdate_split = re.compile('^(\d\d\d\d)(\d\d)(\d\d)(\d\d)(\d\d)(\d\d)')
def get_empty_fields_templates():
    """Return template descriptors for the two kinds of empty field.

    The list holds one empty data field (a single blank subfield) and
    one empty control field.
    """
    data_field = {
        "name": "Empty field",
        "description": ("The data field not containing any "
                        "information filled in"),
        "tag": "",
        "ind1": "",
        "ind2": "",
        "subfields": [("", "")],
        "isControlfield": False,
    }
    control_field = {
        "name": "Empty control field",
        "description": ("The controlfield not containing any "
                        "data or tag description"),
        "isControlfield": True,
        "tag": "",
        "value": "",
    }
    return [data_field, control_field]
def get_available_fields_templates():
    """
    A method returning all the available field templates
    Returns a list of descriptors. Each descriptor has
    the same structure as a full field descriptor inside the
    record
    """
    result = get_empty_fields_templates()
    for template in get_field_templates():
        tag = template[3].keys()[0]
        field = template[3][tag][0]
        descriptor = {
            "name": template[1],
            "description": template[2],
            "tag": tag,
        }
        if field[0] == []:
            # control fields carry a plain value instead of subfields
            descriptor["isControlfield"] = True
            descriptor["value"] = field[3]
        else:
            descriptor["isControlfield"] = False
            descriptor["ind1"] = field[1]
            descriptor["ind2"] = field[2]
            descriptor["subfields"] = field[0]
        result.append(descriptor)
    return result
def perform_request_init(uid, ln, req, lastupdated):
    """Handle the initial request by adding menu and JavaScript to the page.

    Builds the HTML body for the record editor: a <script> block of
    editor configuration globals, the field-template data, the editor
    JavaScript includes and the menu/content containers.
    Returns (body, errors, warnings).
    """
    errors = []
    warnings = []
    body = ''

    # Add script data.
    record_templates = get_record_templates()
    record_templates.sort()
    tag_names = get_name_tags_all()
    protected_fields = ['001']
    protected_fields.extend(CFG_BIBEDIT_PROTECTED_FIELDS.split(','))
    history_url = '"' + CFG_SITE_URL + '/admin/bibedit/bibeditadmin.py/history"'
    cern_site = 'false'

    if not simplejson_available:
        # The Ajax editor cannot work without JSON support; render an
        # explanatory page instead.
        title = 'Record Editor'
        body = '''Sorry, the record editor cannot operate when the
                `simplejson' module is not installed.  Please see the INSTALL
                file.'''
        return page(title       = title,
                    body        = body,
                    errors      = [],
                    warnings    = [],
                    uid         = uid,
                    language    = ln,
                    navtrail    = "",
                    lastupdated = lastupdated,
                    req         = req)

    if CFG_CERN_SITE:
        cern_site = 'true'
    # Configuration globals exported to the client-side editor.  Keys are
    # emitted verbatim as JavaScript variable names below.
    data = {'gRECORD_TEMPLATES': record_templates,
            'gTAG_NAMES': tag_names,
            'gPROTECTED_FIELDS': protected_fields,
            'gSITE_URL': '"' + CFG_SITE_URL + '"',
            'gHISTORY_URL': history_url,
            'gCERN_SITE': cern_site,
            'gHASH_CHECK_INTERVAL': CFG_BIBEDIT_JS_HASH_CHECK_INTERVAL,
            'gCHECK_SCROLL_INTERVAL': CFG_BIBEDIT_JS_CHECK_SCROLL_INTERVAL,
            'gSTATUS_ERROR_TIME': CFG_BIBEDIT_JS_STATUS_ERROR_TIME,
            'gSTATUS_INFO_TIME': CFG_BIBEDIT_JS_STATUS_INFO_TIME,
            'gCLONED_RECORD_COLOR':
                '"' + CFG_BIBEDIT_JS_CLONED_RECORD_COLOR + '"',
            'gCLONED_RECORD_COLOR_FADE_DURATION':
                CFG_BIBEDIT_JS_CLONED_RECORD_COLOR_FADE_DURATION,
            'gNEW_ADD_FIELD_FORM_COLOR':
                '"' + CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR + '"',
            'gNEW_ADD_FIELD_FORM_COLOR_FADE_DURATION':
                CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR_FADE_DURATION,
            'gNEW_CONTENT_COLOR': '"' + CFG_BIBEDIT_JS_NEW_CONTENT_COLOR + '"',
            'gNEW_CONTENT_COLOR_FADE_DURATION':
                CFG_BIBEDIT_JS_NEW_CONTENT_COLOR_FADE_DURATION,
            'gNEW_CONTENT_HIGHLIGHT_DELAY':
                CFG_BIBEDIT_JS_NEW_CONTENT_HIGHLIGHT_DELAY,
            'gTICKET_REFRESH_DELAY': CFG_BIBEDIT_JS_TICKET_REFRESH_DELAY,
            'gRESULT_CODES': CFG_BIBEDIT_AJAX_RESULT_CODES,
            'gAUTOSUGGEST_TAGS' : CFG_BIBEDIT_AUTOSUGGEST_TAGS,
            'gAUTOCOMPLETE_TAGS' : CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS.keys(),
            'gKEYWORD_TAG' : '"' + CFG_BIBEDIT_KEYWORD_TAG + '"'
            }
    body += '<script type="text/javascript">\n'
    for key in data:
        body += '    var %s = %s;\n' % (key, data[key])
    body += '    </script>\n'

    # Adding the information about field templates
    fieldTemplates = get_available_fields_templates()
    body += "<script>\n" + \
            "   var fieldTemplates = %s\n" % (json.dumps(fieldTemplates), ) + \
            "</script>\n"
    # Add scripts (the ordering is NOT irrelevant).
    scripts = ['jquery.min.js', 'jquery.effects.core.min.js',
               'jquery.effects.highlight.min.js', 'jquery.autogrow.js',
               'jquery.jeditable.mini.js', 'jquery.hotkeys.min.js', 'json2.js',
               'bibedit_display.js', 'bibedit_engine.js', 'bibedit_keys.js',
               'bibedit_menu.js', 'bibedit_holdingpen.js', 'marcxml.js',
               'bibedit_clipboard.js']

    for script in scripts:
        body += '    <script type="text/javascript" src="%s/js/%s">' \
            '</script>\n' % (CFG_SITE_URL, script)

    # Build page structure and menu.
    # rec = create_record(format_record(235, "xm"))[0]
    #oaiId = record_extract_oai_id(rec)

    body += bibedit_templates.menu()
    body += '    <div id="bibEditContent"></div>\n'

    return body, errors, warnings
def get_xml_comparison(header1, header2, xml1, xml2):
    """
    Return diffs of two MARCXML records.
    """
    diff_lines = difflib.unified_diff(xml1.splitlines(1), xml2.splitlines(1),
                                      header1, header2)
    return "".join(diff_lines)
def get_marcxml_of_revision_id(recid, revid):
    """
    Return MARCXML string with corresponding to revision REVID
    (=RECID.REVDATE) of a record. Return empty string if revision
    does not exist.
    """
    # revid carries the revision date as YYYYMMDDhhmmss; reformat it to
    # the DB job-date form.
    job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revid).groups()
    rows = get_marcxml_of_record_revision(recid, job_date)
    if not rows:
        return ""
    return "".join(zlib.decompress(row[0]) + "\n" for row in rows)
def perform_request_compare(ln, recid, rev1, rev2):
    """Handle a request for comparing two revisions of a record.

    @param ln: language code for the comparison box template
    @param recid: record identifier
    @param rev1: first revision id (RECID.REVDATE form)
    @param rev2: second revision id (RECID.REVDATE form)
    @return: (body, errors, warnings) triple for page rendering
    """
    errors = []
    warnings = []
    # Both revisions must exist before a diff can be produced.
    if record_revision_exists(recid, rev1) and \
            record_revision_exists(recid, rev2):
        xml1 = get_marcxml_of_revision_id(recid, rev1)
        xml2 = get_marcxml_of_revision_id(recid, rev2)
        fullrevid1 = "%i.%s" % (recid, rev1)
        fullrevid2 = "%i.%s" % (recid, rev2)
        # Escape the raw diff for HTML and keep line breaks visible.
        comparison = bibedit_templates.clean_value(
            get_xml_comparison(fullrevid1, fullrevid2, xml1, xml2),
            'text').replace('\n', '<br />\n           ')
        job_date1 = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(rev1).groups()
        job_date2 = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(rev2).groups()
        body = bibedit_templates.history_comparebox(ln, job_date1,
                                                    job_date2, comparison)
    else:
        body = "The requested record revision does not exist !"
    return body, errors, warnings
def perform_request_newticket(recid, uid):
    """Create a new BibCatalog ticket carrying this record's number.

    @param recid: record id
    @param uid: user id
    @return: (error_msg, url) — error_msg is empty on success
    """
    ticket_id = bibcatalog_system.ticket_submit(uid, "", recid, "")
    if ticket_id:
        # Ticket created: fetch its modification URL for the editor UI.
        errmsg = ""
        ticket_url = bibcatalog_system.ticket_get_attribute(uid, ticket_id,
                                                            'url_modify')
    else:
        errmsg = "ticket_submit failed"
        ticket_url = ""
    return (errmsg, ticket_url)
def perform_request_ajax(req, recid, uid, data, isBulk = False, \
                         ln = CFG_SITE_LANG):
    """Dispatch an Ajax request to the handler matching its request type.

    @param req: the mod_python/WSGI request object
    @param recid: record identifier the request concerns
    @param uid: identifier of the logged-in user
    @param data: request payload; must contain 'requestType'
    @param isBulk: True when this call is one step of a bulk update
    @param ln: interface language
    @return: response dictionary produced by the delegated handler
    """
    response = {}
    request_type = data['requestType']
    # Optional undo/redo descriptor forwarded to record-update handlers.
    undo_redo = None
    if 'undoRedo' in data:
        undo_redo = data['undoRedo']

    if request_type == 'searchForRecord':
        # Search request.
        response.update(perform_request_search(data))
    elif request_type in ['changeTagFormat']:
        # User related requests.
        response.update(perform_request_user(req, request_type, recid, data))
    elif request_type in ('getRecord', 'submit', 'cancel', 'newRecord',
                          'deleteRecord', 'deleteRecordCache',
                          'prepareRecordMerge', 'revert'):
        # 'Major' record related requests.
        response.update(perform_request_record(req, request_type, recid,
                                               uid, data))
    elif request_type in ('addField', 'addSubfields',
                          'addFieldsSubfieldsOnPositions', 'modifyContent',
                          'moveSubfield', 'deleteFields', 'moveField',
                          'modifyField', 'otherUpdateRequest',
                          'disableHpChange', 'deactivateHoldingPenChangeset'):
        # Record updates.
        mtime = data['cacheMTime']
        hp_changes = data.get('hpChanges', {})
        response.update(perform_request_update_record(request_type, recid,
                                                      uid, mtime, data,
                                                      hp_changes, undo_redo,
                                                      isBulk, ln))
    elif request_type in ('autosuggest', 'autocomplete', 'autokeyword'):
        response.update(perform_request_autocomplete(request_type, recid,
                                                     uid, data))
    elif request_type in ('getTickets', ):
        # BibCatalog requests.
        response.update(perform_request_bibcatalog(request_type, recid, uid))
    elif request_type in ('getHoldingPenUpdates', ):
        response.update(perform_request_holdingpen(request_type, recid))
    elif request_type in ('getHoldingPenUpdateDetails',
                          'deleteHoldingPenChangeset'):
        changeset_number = data['changesetNumber']
        response.update(perform_request_holdingpen(request_type, recid,
                                                   changeset_number))
    elif request_type in ('applyBulkUpdates', ):
        # A general version of a bulk request.
        bulk_changes = data['requestsData']
        mtime = data['cacheMTime']
        response.update(perform_bulk_request_ajax(req, recid, uid,
                                                  bulk_changes, undo_redo,
                                                  mtime))
    return response
def perform_bulk_request_ajax(req, recid, uid, reqsData, undoRedo, cacheMTime):
    """AJAX handler used when treating bulk updates.

    Executes the sub-requests in order, threading the cache modification
    time from each response into the next request.
    """
    last_response = {}
    mtime = cacheMTime
    for position, request_data in enumerate(reqsData):
        assert request_data != None
        request_data['cacheMTime'] = mtime
        if position == 0 and undoRedo != None:
            # The undo/redo handler is attached only to the first operation
            # so that it is saved once on the server side.
            request_data['undoRedo'] = undoRedo
        last_response = perform_request_ajax(req, recid, uid, request_data,
                                             True)
        # The next sub-request must see the cache mtime produced by this one.
        try:
            mtime = last_response['cacheMTime']
        except:
            raise Exception(str(last_response))
    return last_response
def perform_request_search(data):
    """Handle search requests.

    Builds a search pattern from the request's searchType/searchPattern
    fields and returns at most CFG_BIBEDIT_MAX_SEARCH_RESULTS record ids.
    """
    search_type = data['searchType']
    search_term = data['searchPattern']
    # 'anywhere' searches the bare term; otherwise restrict to the field.
    if search_type == 'anywhere':
        query = search_term
    else:
        query = search_type + ':' + search_term
    hits = list(search_pattern(p=query))
    return {'resultCode': 1,
            'resultSet': hits[:CFG_BIBEDIT_MAX_SEARCH_RESULTS]}
def perform_request_user(req, request_type, recid, data):
    """Handle user related requests (currently only tag-format changes).

    Stores the per-record tag display format in the user's session.
    """
    response = {}
    if request_type == 'changeTagFormat':
        # Load existing per-record settings, falling back to an empty map
        # when the session holds none yet.
        try:
            settings = session_param_get(req, 'bibedit_tagformat')
        except KeyError:
            settings = {}
        settings[recid] = data['tagFormat']
        session_param_set(req, 'bibedit_tagformat', settings)
        response['resultCode'] = 2
    return response
def perform_request_holdingpen(request_type, recId, changeId=None):
    """
    Perform a Holding Pen ajax request. Supported request types:
        getHoldingPenUpdates - pending holding pen updates for a record
        getHoldingPenUpdateDetails - record content of one changeset
        deleteHoldingPenChangeset - remove a changeset from the holding pen
    """
    response = {}
    if request_type == 'getHoldingPenUpdates':
        # Each entry becomes a (changeset id, changeset date) string pair.
        pending = []
        for entry in get_related_hp_changesets(recId):
            pending.append((str(entry[0]), str(entry[1])))
        response["changes"] = pending
    elif request_type == 'getHoldingPenUpdateDetails':
        # Return the changes related to the holding pen update, in the
        # format produced by the record difference tool.
        assert(changeId != None)
        hp_content = get_hp_update_xml(changeId)
        response['record'] = create_record(hp_content[0], "xm")[0]
        response['changeset_number'] = changeId
    elif request_type == 'deleteHoldingPenChangeset':
        assert(changeId != None)
        delete_hp_change(changeId)
    return response
def perform_request_record(req, request_type, recid, uid, data, ln=CFG_SITE_LANG):
    """Handle 'major' record related requests like fetching, submitting or
    deleting a record, cancel editing or preparing a record for merging.

    @param req: request object (used for session access)
    @param request_type: one of 'newRecord', 'getRecord', 'submit',
        'revert', 'cancel', 'deleteRecord', 'deleteRecordCache',
        'prepareRecordMerge'
    @param recid: record identifier
    @param uid: identifier of the logged-in user
    @param data: request payload; keys depend on request_type
    @param ln: interface language
    @return: response dictionary; 'resultCode' encodes the outcome
    """
    response = {}

    if request_type == 'newRecord':
        # Create a new record.
        new_recid = reserve_record_id()
        new_type = data['newType']
        if new_type == 'empty':
            # Create a new empty record.
            # NOTE(review): this caches under 'recid', not 'new_recid',
            # unlike the 'template' and 'clone' branches below — verify
            # this is intentional.
            create_cache_file(recid, uid)
            response['resultCode'], response['newRecID'] = 6, new_recid

        elif new_type == 'template':
            # Create a new record from an XML record template.
            template_filename = data['templateFilename']
            template = get_record_template(template_filename)
            if not template:
                response['resultCode'] = 108
            else:
                record = create_record(template)[0]
                if not record:
                    response['resultCode'] = 109
                else:
                    # Stamp the reserved id into the 001 controlfield.
                    record_add_field(record, '001',
                                     controlfield_value=str(new_recid))
                    create_cache_file(new_recid, uid, record, True)
                    response['resultCode'], response['newRecID'] = 7, new_recid

        elif new_type == 'clone':
            # Clone an existing record (from the user's cache).
            existing_cache = cache_exists(recid, uid)
            if existing_cache:
                try:
                    record = get_cache_file_contents(recid, uid)[2]
                except:
                    # If, for example, the cache format was wrong (outdated),
                    # fall back to the stored record.
                    record = get_bibrecord(recid)
            else:
                # Cache missing. Fall back to using original version.
                record = get_bibrecord(recid)
            # Replace the source record's id with the newly reserved one.
            record_delete_field(record, '001')
            record_add_field(record, '001', controlfield_value=str(new_recid))
            create_cache_file(new_recid, uid, record, True)
            response['resultCode'], response['newRecID'] = 8, new_recid

    elif request_type == 'getRecord':
        # Fetch the record. Possible error situations:
        # - Non-existing record
        # - Deleted record
        # - Record locked by other user
        # - Record locked by queue
        # A cache file will be created if it does not exist.
        # If the cache is outdated (i.e., not based on the latest DB revision),
        # cacheOutdated will be set to True in the response.
        record_status = record_exists(recid)
        existing_cache = cache_exists(recid, uid)
        read_only_mode = False
        if data.has_key("inReadOnlyMode"):
            read_only_mode = data['inReadOnlyMode']
        if record_status == 0:
            # Record does not exist at all.
            response['resultCode'] = 102
        elif record_status == -1:
            # Record has been deleted.
            response['resultCode'] = 103
        elif not read_only_mode and not existing_cache and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and existing_cache and \
                cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            if data.get('deleteRecordCache'):
                # Caller explicitly asked for a fresh cache.
                delete_cache_file(recid, uid)
                existing_cache = False
                pending_changes = []
                disabled_hp_changes = {}
            if read_only_mode:
                # Read-only view: no cache is created or touched.
                if data.has_key('recordRevision'):
                    # Viewing a specific historical revision.
                    record_revision_ts = data['recordRevision']
                    record_xml = get_marcxml_of_revision(recid, \
                        record_revision_ts)
                    record = create_record(record_xml)[0]
                    record_revision = timestamp_to_revision(record_revision_ts)
                    pending_changes = []
                    disabled_hp_changes = {}
                else:
                    # A normal cacheless retrieval of a record.
                    record = get_bibrecord(recid)
                    record_revision = get_record_last_modification_date(recid)
                    if record_revision == None:
                        record_revision = datetime.now().timetuple()
                    pending_changes = []
                    disabled_hp_changes = {}
                cache_dirty = False
                mtime = 0
                undo_list = []
                redo_list = []
            elif not existing_cache:
                # First access in this session: create the cache.
                record_revision, record = create_cache_file(recid, uid)
                mtime = get_cache_mtime(recid, uid)
                pending_changes = []
                disabled_hp_changes = {}
                undo_list = []
                redo_list = []
                cache_dirty = False
            else:
                # TODO: This try except should be replaced with something
                #       nicer, like an argument indicating if a new cache
                #       file is to be created
                try:
                    cache_dirty, record_revision, record, pending_changes, \
                        disabled_hp_changes, undo_list, redo_list = \
                        get_cache_file_contents(recid, uid)
                    touch_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    if not latest_record_revision(recid, record_revision) and \
                            get_record_revisions(recid) != ():
                        # This should prevent using an old cache in case of
                        # viewing an old version. If there are no revisions,
                        # it means we should skip this step because this
                        # is a new record.
                        response['cacheOutdated'] = True
                except:
                    # Unreadable/outdated cache: recreate it from scratch.
                    record_revision, record = create_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    pending_changes = []
                    disabled_hp_changes = {}
                    cache_dirty = False
                    undo_list = []
                    redo_list = []
            if data['clonedRecord']:
                response['resultCode'] = 9
            else:
                response['resultCode'] = 3
            # Gather revision metadata and physical-copy info for the UI.
            revision_author = get_record_revision_author(recid, record_revision)
            latest_revision = get_record_last_modification_date(recid)
            if latest_revision == None:
                latest_revision = datetime.now().timetuple()
            last_revision_ts = revision_to_timestamp(latest_revision)
            revisions_history = get_record_revision_timestamps(recid)
            number_of_physical_copies = get_number_copies(recid)
            bibcirc_details_URL = create_item_details_url(recid, ln)
            can_have_copies = can_record_have_physical_copies(recid)
            response['cacheDirty'], response['record'], \
                response['cacheMTime'], response['recordRevision'], \
                response['revisionAuthor'], response['lastRevision'], \
                response['revisionsHistory'], response['inReadOnlyMode'], \
                response['pendingHpChanges'], response['disabledHpChanges'], \
                response['undoList'], response['redoList'] = cache_dirty, \
                record, mtime, revision_to_timestamp(record_revision), \
                revision_author, last_revision_ts, revisions_history, \
                read_only_mode, pending_changes, disabled_hp_changes, \
                undo_list, redo_list
            response['numberOfCopies'] = number_of_physical_copies
            response['bibCirculationUrl'] = bibcirc_details_URL
            response['canRecordHavePhysicalCopies'] = can_have_copies
            # Set tag format from user's session settings.
            try:
                tagformat_settings = session_param_get(req, 'bibedit_tagformat')
                tagformat = tagformat_settings[recid]
            except KeyError:
                tagformat = CFG_BIBEDIT_TAG_FORMAT
            response['tagFormat'] = tagformat

    elif request_type == 'submit':
        # Submit the record. Possible error situations:
        # - Missing cache file
        # - Cache file modified in other editor
        # - Record locked by other user
        # - Record locked by queue
        # - Invalid XML characters
        # If the cache is outdated cacheOutdated will be set to True in the
        # response.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif not get_cache_mtime(recid, uid) == data['cacheMTime']:
            response['resultCode'] = 107
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            try:
                tmp_result = get_cache_file_contents(recid, uid)
                record_revision = tmp_result[1]
                record = tmp_result[2]
                pending_changes = tmp_result[3]
                # disabled_changes = tmp_result[4]
                # Round-trip through XML to validate the record content.
                xml_record = print_rec(record)
                record, status_code, list_of_errors = create_record(xml_record)
                if status_code == 0:
                    # Parsing failed: report the validation errors.
                    response['resultCode'], response['errors'] = 110, \
                        list_of_errors
                elif not data['force'] and \
                        not latest_record_revision(recid, record_revision):
                    # Cache based on an old revision and no 'force' flag.
                    response['cacheOutdated'] = True
                    if CFG_DEVEL_SITE:
                        response['record_revision'] = record_revision.__str__()
                        response['newest_record_revision'] = \
                            get_record_last_modification_date(recid).__str__()
                else:
                    save_xml_record(recid, uid)
                    response['resultCode'] = 4
            except Exception, e:
                response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                    'error_wrong_cache_file_format']
                if CFG_DEVEL_SITE: # return debug information in the request
                    response['exception_message'] = e.__str__()

    elif request_type == 'revert':
        # Replace the record with one of its earlier revisions.
        revId = data['revId']
        job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revId).groups()
        revision_xml = get_marcxml_of_revision(recid, job_date)
        save_xml_record(recid, uid, revision_xml)
        if (cache_exists(recid, uid)):
            delete_cache_file(recid, uid)
        response['resultCode'] = 4

    elif request_type == 'cancel':
        # Cancel editing by deleting the cache file. Possible error situations:
        # - Cache file modified in other editor
        if cache_exists(recid, uid):
            if get_cache_mtime(recid, uid) == data['cacheMTime']:
                delete_cache_file(recid, uid)
                response['resultCode'] = 5
            else:
                response['resultCode'] = 107
        else:
            # No cache to delete: cancel is a no-op and still succeeds.
            response['resultCode'] = 5

    elif request_type == 'deleteRecord':
        # Submit the record. Possible error situations:
        # - Record locked by other user
        # - Record locked by queue
        # As the user is requesting deletion we proceed even if the cache file
        # is missing and we don't check if the cache is outdated or has
        # been modified in another editor.
        existing_cache = cache_exists(recid, uid)
        pending_changes = []
        if has_copies(recid):
            # Records with physical copies cannot be deleted.
            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['error_physical_copies_exist']
        elif existing_cache and cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['error_rec_locked_by_user']
        elif record_locked_by_queue(recid):
            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['error_rec_locked_by_queue']
        else:
            if not existing_cache:
                record_revision, record, pending_changes, \
                    deactivated_hp_changes, undo_list, redo_list = \
                    create_cache_file(recid, uid)
            else:
                try:
                    record_revision, record, pending_changes, \
                        deactivated_hp_changes, undo_list, redo_list = \
                        get_cache_file_contents(recid, uid)[1:]
                except:
                    # NOTE(review): this unpacks only 4 values from
                    # create_cache_file while the branch above unpacks 6 —
                    # verify create_cache_file's return arity.
                    record_revision, record, pending_changes, \
                        deactivated_hp_changes = create_cache_file(recid, uid)
            # Mark the record as deleted via the 980__c DELETED field.
            record_add_field(record, '980', ' ', ' ', '', [('c', 'DELETED')])
            undo_list = []
            redo_list = []
            update_cache_file_contents(recid, uid, record_revision, record, \
                                       pending_changes, \
                                       deactivated_hp_changes, undo_list, \
                                       redo_list)
            save_xml_record(recid, uid)
            delete_related_holdingpen_changes(recid) # we don't need any changes
                                                     # related to a deleted record
            response['resultCode'] = 10

    elif request_type == 'deleteRecordCache':
        # Delete the cache file. Ignore the request if the cache has been
        # modified in another editor.
        if cache_exists(recid, uid) and get_cache_mtime(recid, uid) == \
                data['cacheMTime']:
            delete_cache_file(recid, uid)
        response['resultCode'] = 11

    elif request_type == 'prepareRecordMerge':
        # We want to merge the cache with the current DB version of the record,
        # so prepare an XML file from the file cache, to be used by BibMerge.
        # Possible error situations:
        # - Missing cache file
        # - Record locked by other user
        # - Record locked by queue
        # We don't check if cache is outdated (a likely scenario for this
        # request) or if it has been modified in another editor.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            save_xml_record(recid, uid, to_upload=False, to_merge=True)
            response['resultCode'] = 12

    return response
def perform_request_update_record(request_type, recid, uid, cacheMTime, data, \
                                  hpChanges, undoRedoOp, isBulk=False, \
                                  ln=CFG_SITE_LANG):
    """Handle record update requests like adding, modifying, moving or deleting
    of fields or subfields. Possible common error situations:
    - Missing cache file
    - Cache file modified in other editor

    Explanation of some parameters:
       cacheMTime - the cache modification time the client last saw; a
                    mismatch (outside bulk mode) means concurrent editing
       hpChanges - dictionary describing Holding Pen changes to apply to
                   the cache regardless of the request type
       undoRedoOp - Indicates in "undo"/"redo"/undo_descriptor operation is
                    performed by a current request.
       isBulk - True when this call is one step of a bulk request; then
                the mtime check is skipped on purpose
    """
    response = {}

    if not cache_exists(recid, uid):
        response['resultCode'] = 106
    elif not get_cache_mtime(recid, uid) == cacheMTime and isBulk == False:
        # In case of a bulk request, the changes are deliberately performed
        # immediately one after another, so the mtime check is skipped.
        response['resultCode'] = 107
    else:
        try:
            record_revision, record, pending_changes, deactivated_hp_changes, \
                undo_list, redo_list = get_cache_file_contents(recid, uid)[1:]
        except:
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'error_wrong_cache_file_format']
            return response

        # Process all the Holding Pen changes operations ... regardless of
        # the request type.
        if hpChanges.has_key("toDisable"):
            # Mark the listed pending changes as already applied.
            for changeId in hpChanges["toDisable"]:
                pending_changes[changeId]["applied_change"] = True

        if hpChanges.has_key("toEnable"):
            # Re-activate the listed pending changes.
            for changeId in hpChanges["toEnable"]:
                pending_changes[changeId]["applied_change"] = False

        if hpChanges.has_key("toOverride"):
            # Replace the entire pending-changes list.
            pending_changes = hpChanges["toOverride"]

        if hpChanges.has_key("changesetsToDeactivate"):
            for changesetId in hpChanges["changesetsToDeactivate"]:
                deactivated_hp_changes[changesetId] = True

        if hpChanges.has_key("changesetsToActivate"):
            for changesetId in hpChanges["changesetsToActivate"]:
                deactivated_hp_changes[changesetId] = False

        # Processing the undo/redo entries.
        if undoRedoOp == "undo":
            # Move the newest undo descriptor onto the redo stack.
            try:
                redo_list = [undo_list[-1]] + redo_list
                undo_list = undo_list[:-1]
            except:
                raise Exception("An exception occured when undoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        elif undoRedoOp == "redo":
            # Move the oldest redo descriptor back onto the undo stack.
            try:
                undo_list = undo_list + [redo_list[0]]
                redo_list = redo_list[1:]
            except:
                raise Exception("An exception occured when redoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        else:
            # This is a genuine operation - we have to add a new descriptor
            # to the undo list and cancel the redo unless the operation is
            # a bulk operation.
            if undoRedoOp != None:
                undo_list = undo_list + [undoRedoOp]
                redo_list = []
            else:
                assert isBulk == True

        field_position_local = data.get('fieldPosition')
        if field_position_local is not None:
            field_position_local = int(field_position_local)

        if request_type == 'otherUpdateRequest':
            # An empty request. Might be useful if we want to perform
            # operations that require only the actions performed globally,
            # like modifying the holdingPen changes list.
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'editor_modifications_changed']
        elif request_type == 'deactivateHoldingPenChangeset':
            # The changeset has been marked as processed (user applied it in
            # the editor). Marking as used in the cache file.
            # CAUTION: This function has been implemented here because logically
            #          it fits with the modifications made to the cache file.
            #          No changes are made to the Holding Pen physically. The
            #          changesets are related to the cache because we want to
            #          cancel the removal every time the cache disappears for
            #          any reason.
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'disabled_hp_changeset']
        elif request_type == 'addField':
            if data['controlfield']:
                record_add_field(record, data['tag'],
                                 controlfield_value=data['value'])
                response['resultCode'] = 20
            else:
                record_add_field(record, data['tag'], data['ind1'],
                                 data['ind2'], subfields=data['subfields'],
                                 field_position_local=field_position_local)
                response['resultCode'] = 21
        elif request_type == 'addSubfields':
            subfields = data['subfields']
            for subfield in subfields:
                record_add_subfield_into(record, data['tag'], subfield[0],
                                         subfield[1], subfield_position=None,
                                         field_position_local=field_position_local)
            if len(subfields) == 1:
                response['resultCode'] = 22
            else:
                response['resultCode'] = 23
        elif request_type == 'addFieldsSubfieldsOnPositions':
            # 1) Sorting the fields by their identifiers.
            fieldsToAdd = data['fieldsToAdd']
            subfieldsToAdd = data['subfieldsToAdd']
            for tag in fieldsToAdd.keys():
                positions = fieldsToAdd[tag].keys()
                positions.sort()
                for position in positions:
                    # Now adding fields at a position.
                    isControlfield = (len(fieldsToAdd[tag][position][0]) == 0)
                    # If there are no subfields, this is a control field.
                    if isControlfield:
                        controlfieldValue = fieldsToAdd[tag][position][3]
                        record_add_field(record, tag, field_position_local = \
                                             int(position), \
                                             controlfield_value = \
                                             controlfieldValue)
                    else:
                        subfields = fieldsToAdd[tag][position][0]
                        ind1 = fieldsToAdd[tag][position][1]
                        ind2 = fieldsToAdd[tag][position][2]
                        record_add_field(record, tag, ind1, ind2, subfields = \
                                             subfields, field_position_local = \
                                             int(position))
            # Now adding the subfields.
            for tag in subfieldsToAdd.keys():
                for fieldPosition in subfieldsToAdd[tag].keys(): # now the fields
                                                                 # order not important !
                    subfieldsPositions = subfieldsToAdd[tag][fieldPosition]. \
                        keys()
                    subfieldsPositions.sort()
                    for subfieldPosition in subfieldsPositions:
                        subfield = subfieldsToAdd[tag][fieldPosition]\
                            [subfieldPosition]
                        record_add_subfield_into(record, tag, subfield[0], \
                                                 subfield[1], \
                                                 subfield_position = \
                                                 int(subfieldPosition), \
                                                 field_position_local = \
                                                 int(fieldPosition))
            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['added_positioned_subfields']
        elif request_type == 'modifyField': # changing the field structure
            # First remove subfields and then add new... change the indices.
            subfields = data['subFields'] # parse the JSON representation of
                                          # the subfields here
            new_field = create_field(subfields, data['ind1'], data['ind2'])
            record_replace_field(record, data['tag'], new_field, \
                                 field_position_local = data['fieldPosition'])
            response['resultCode'] = 26
        elif request_type == 'modifyContent':
            if data['subfieldIndex'] != None:
                record_modify_subfield(record, data['tag'],
                                       data['subfieldCode'], data['value'],
                                       int(data['subfieldIndex']),
                                       field_position_local=field_position_local)
            else:
                record_modify_controlfield(record, data['tag'], data["value"],
                                           field_position_local=field_position_local)
            response['resultCode'] = 24
        elif request_type == 'moveSubfield':
            record_move_subfield(record, data['tag'],
                                 int(data['subfieldIndex']), int(data['newSubfieldIndex']),
                                 field_position_local=field_position_local)
            response['resultCode'] = 25
        elif request_type == 'moveField':
            if data['direction'] == 'up':
                final_position_local = field_position_local-1
            else: # direction is 'down'
                final_position_local = field_position_local+1
            record_move_fields(record, data['tag'], [field_position_local],
                               final_position_local)
            response['resultCode'] = 32
        elif request_type == 'deleteFields':
            to_delete = data['toDelete']
            deleted_fields = 0
            deleted_subfields = 0
            for tag in to_delete:
                # Sorting the fields in a decreasing order by the local position!
                fieldsOrder = to_delete[tag].keys()
                fieldsOrder.sort(lambda a, b: int(b) - int(a))
                for field_position_local in fieldsOrder:
                    if not to_delete[tag][field_position_local]:
                        # No subfields specified - delete entire field.
                        record_delete_field(record, tag,
                                            field_position_local=int(field_position_local))
                        deleted_fields += 1
                    else:
                        for subfield_position in \
                                to_delete[tag][field_position_local][::-1]:
                            # Delete subfields in reverse order (to keep the
                            # indexing correct).
                            record_delete_subfield_from(record, tag,
                                                        int(subfield_position),
                                                        field_position_local=int(field_position_local))
                            deleted_subfields += 1
            if deleted_fields == 1 and deleted_subfields == 0:
                response['resultCode'] = 26
            elif deleted_fields and deleted_subfields == 0:
                response['resultCode'] = 27
            elif deleted_subfields == 1 and deleted_fields == 0:
                response['resultCode'] = 28
            elif deleted_subfields and deleted_fields == 0:
                response['resultCode'] = 29
            else:
                response['resultCode'] = 30

        # Persist the mutated cache and report the new modification time.
        response['cacheMTime'], response['cacheDirty'] = \
            update_cache_file_contents(recid, uid, record_revision,
                                       record, \
                                       pending_changes, \
                                       deactivated_hp_changes, \
                                       undo_list, redo_list), \
            True

    return response
def perform_request_autocomplete(request_type, recid, uid, data):
    """
    Perform an AJAX request associated with the retrieval of autocomplete
    data.

    @param request_type: type of the currently served request; one of
        'autokeyword', 'autosuggest' or 'autocomplete'
    @param recid: the identifier of the record
    @param uid: the identifier of the user being currently logged in
    @param data: the request data containing possibly important additional
        arguments (tag components, subfield code and the search value)
    @return: response dictionary with the matching suggestions and a
        'resultCode'
    """
    response = {}
    # The value based on which one needs to search.
    searchby = data['value']
    # Build the full tag (e.g. '65017a') only when all parts are provided.
    fulltag = ''
    if 'maintag' in data and 'subtag1' in data and \
            'subtag2' in data and 'subfieldcode' in data:
        maintag = data['maintag']
        subtag1 = data['subtag1']
        subtag2 = data['subtag2']
        # Empty or blank indicators are represented as '_' in the tag key.
        u_subtag1 = subtag1
        u_subtag2 = subtag2
        if (not subtag1) or (subtag1 == ' '):
            u_subtag1 = '_'
        if (not subtag2) or (subtag2 == ' '):
            u_subtag2 = '_'
        subfieldcode = data['subfieldcode']
        fulltag = maintag + u_subtag1 + u_subtag2 + subfieldcode
    if (request_type == 'autokeyword'):
        # Call the keyword-from-ontology function.
        if fulltag and searchby:
            items = get_kbt_items_for_bibedit(CFG_BIBEDIT_KEYWORD_TAXONOMY, \
                                              CFG_BIBEDIT_KEYWORD_RDFLABEL, \
                                              searchby)
            response['autokeyword'] = items
    if (request_type == 'autosuggest'):
        # Call the knowledge base function; only for queries longer than
        # 3 characters to avoid overly broad lookups.
        if fulltag and searchby and len(searchby) > 3:
            suggest_values = get_kbd_values_for_bibedit(fulltag, "", searchby)
            # Keep only the suggestions that actually start with the query.
            new_suggest_vals = []
            for sugg in suggest_values:
                if sugg.startswith(searchby):
                    new_suggest_vals.append(sugg)
            response['autosuggest'] = new_suggest_vals
    if (request_type == 'autocomplete'):
        # Resolve the values against the knowledge base mapped to this tag.
        if fulltag in CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS:
            kbname = CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS[fulltag]
            # The search field may hold several semicolon-separated items.
            items = []
            vals = []
            if searchby:
                # BUGFIX: the previous check was `searchby.rfind(';')`,
                # which is -1 (truthy) when there is NO semicolon and
                # 0 (falsy) when the semicolon is the first character —
                # the opposite of the intended test.
                if ';' in searchby:
                    items = searchby.split(';')
                else:
                    items = [searchby.strip()]
            for item in items:
                item = item.strip()
                # We want an exact match in the knowledge base.
                kbrvals = get_kbr_values(kbname, item, '', 'e')
                if kbrvals and kbrvals[0]: # add the found val into vals
                    vals.append(kbrvals[0])
            # Check that the values are not already contained in other
            # instances of this field.
            record = get_cache_file_contents(recid, uid)[2]
            xml_rec = print_rec(record)
            record, status_code, dummy_errors = create_record(xml_rec)
            existing_values = []
            if (status_code != 0):
                existing_values = record_get_field_values(record,
                                                          maintag,
                                                          subtag1,
                                                          subtag2,
                                                          subfieldcode)
            # BUGFIX: the previous code removed elements from the list it
            # was iterating over (skipping the element after each removal);
            # build a filtered list instead.
            new_vals = [val for val in vals if val not in existing_values]
            response['autocomplete'] = new_vals
    response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['autosuggestion_scanned']
    return response
def perform_request_bibcatalog(request_type, recid, uid):
    """Handle request to BibCatalog (RT).

    @param request_type: currently only 'getTickets' is handled
    @param recid: record id the tickets relate to
    @param uid: id of the user whose open/new tickets are searched
    @return: response dictionary with 'tickets' (an HTML fragment) and
        'resultCode'
    """
    response = {}
    if request_type == 'getTickets':
        # Insert the ticket data in the response, if possible.
        if uid:
            bibcat_resp = bibcatalog_system.check_system(uid)
            if bibcat_resp == "":
                tickets_found = bibcatalog_system.ticket_search(uid, \
                    status=['new', 'open'], recordid=recid)
                t_url_str = '' # put ticket urls here, formatted for HTML display
                for t_id in tickets_found:
                    # Fetch display and close URLs in a single call.
                    ticket_info = bibcatalog_system.ticket_get_info( \
                        uid, t_id, ['url_display', 'url_close'])
                    t_url = ticket_info['url_display']
                    t_close_url = ticket_info['url_close']
                    # Format: "#<id> [read] [close]".
                    t_url_str += "#" + str(t_id) + '<a href="' + t_url + \
                        '">[read]</a> <a href="' + t_close_url + \
                        '">[close]</a><br/>'
                # Put ticket header, ticket links and a "new ticket" link
                # in the box.
                # BUGFIX: the href attribute of the new-ticket link was never
                # closed (it rendered as href="new_ticket?recid=123>...);
                # close the quote before the '>'.
                t_url_str = "<strong>Tickets</strong><br/>" + t_url_str + \
                    "<br/>" + '<a href="new_ticket?recid=' + str(recid) + \
                    '">[new ticket]</a>'
                response['tickets'] = t_url_str
            else:
                # Put the check_system error in the tickets container as an
                # HTML comment, for debugging.
                response['tickets'] = "<!--" + bibcat_resp + "-->"
    response['resultCode'] = 31
    return response
| gpl-2.0 | -4,159,289,337,030,909,400 | 44.312113 | 88 | 0.550812 | false |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.